mirror of
https://github.com/unisonweb/unison.git
synced 2024-09-20 14:57:41 +03:00
stripping and unknotting some stuff
- moved everything that depended on V1 `Reference` into `FileCodebase` namespace, duplicating a lot of code for better or worse. - pulled these into their own modules: - Codebase (types) into `Codebase.Type`; `Codebase` module re-exports them - `Codebase.Branch.Merge`, - `Codebase.Branch.Names` (Branch shouldn't depend on `Names`) - `Codebase.BuiltinAnnotation` - `Codebase.Causal.FoldHistory` - `Codebase.CodeLookup.Util` (remove `CodeLookup` dependency on `UnisonFile`) - `Codebase.DataDeclaration.ConstructorId` trying to use this alias in relevant places - `Codebase.Init` - `CodebasePath`, the `FilePath` alias. - `Lexer.Pos` (because `AnnotatedText`, `Range`, `Parser.Ann` shouldn't depend on the whole lexer) - `Names.ResolutionResult` (`Type` shouldn't depend on `Names`) - `PrettyPrintEnv.FQN` - `PrettyPrintEnv.Names` (pulled out references to `Names` from `PPE`) - `PPE.Util` not 100% sure what's happening here - `PrettyPrintEnvDecl` pull this data type and supporting functions into separate module - `Path.Parse` (`Path` shouldn't depend on the lexer) - `Path.Convert`, `Path.Parse` into `Unison.Util.Convert` - `Parser.Ann` (`Codebase`, etc. etc. shouldn't depend on `Parser`) - `Referent'` (`SyntaxText`/`ColorText` shouldn't depend on `Reference`) - `Referent` module re-exports stuff from `Referent'`, hard-coded to `Reference`. - removed `SyntaxText.SyntaxText` - `TermEdit.Typing` (because `TermEdit` shouldn't depend on the full typechecker) - `UnisonFile` / `TypecheckedUnisonFile` (types into `UnisonFile.Type`) - `UnisonFile.Env` - `UnisonFile.Error` - `UnisonFile.Names` - `Var.WatchKind` - `Var.refNamed` - Deleted unused `Codebase.Classes` typeclasses wip - Deleted unused `Unison.Util.Menu`, ancient modal stuff - Moved `Codebase.makeSelfContained` into `UnisonFile` module, since it deals with `UnisonFile` and not with `Codebase`. 
- split up `GitError` into a more codebase-agnostic hierarchy (see `Codebase.Type.GitError`) - changed `bindNames` to `bindReferences` in some cases; `bindNames` remains in `.Names` compatibility module - move `Unison.Var.refNamed` into `Unison.Term` - tweaked GitError to separate obviously codebase-format specific errors from non-obviously-corbase-format-specific errors. - tweaked `Reflog.Entry` to support anything that's coercible to `Unison.Hash`, but also changed its kind - removed `DebugBranchHistoryI` input because I didn't want to maintain its implementation - removed `ShortBranchHash` dependency on `Hash`, and let it work on anything coercible to `Hash`. P.S./todo This class should be called ShortHash; the existing ShortHash is really a ShortReference! - removed SyntaxText dependency on Reference - cleaned up `Path` somewhat - held off on: - move `DD.updateDependencies` - splitting up `Path` into the billion different components - e.g. `Path` becomes `RelativePath`, `Path'` becomes `Path`, and `Absolute` wraps `RelativePath` - `Split` / `Split'` maybe rename to `Path.NonEmpty`? - rename `Branch.getPatch` / `.getMaybePatch` - split out `Name.Parse`, `Name.Convert`, substitute a lot of specific functions like `Path.hqSplitFromName'` with `Convert.parse`. - todo: - clear unreferenced junk - restore: - `NameEdit`? - `PatternCompat`?
This commit is contained in:
parent
18599ba54c
commit
2f13ea1f84
@ -75,6 +75,7 @@ library:
|
||||
- openapi3
|
||||
- optparse-applicative
|
||||
- pem
|
||||
- prelude-extras
|
||||
- process
|
||||
- primitive
|
||||
- random >= 1.2.0
|
||||
|
@ -33,7 +33,7 @@ import Unison.Codebase.CodeLookup ( CodeLookup(..) )
|
||||
import qualified Unison.Builtin.Decls as DD
|
||||
import qualified Unison.Builtin.Terms as TD
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import Unison.Parser ( Ann(..) )
|
||||
import Unison.Parser.Ann (Ann (..))
|
||||
import qualified Unison.Reference as R
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Symbol ( Symbol )
|
||||
|
@ -18,10 +18,10 @@ import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.Pattern as Pattern
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Referent (Referent)
|
||||
import Unison.Referent (Referent, ConstructorId)
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Symbol (Symbol)
|
||||
import Unison.Term (ConstructorId, Term, Term2)
|
||||
import Unison.Term (Term, Term2)
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
|
@ -1,130 +1,63 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
module Unison.Codebase where
|
||||
|
||||
import Control.Lens ((%=), _1, _2)
|
||||
import Control.Monad.Except (ExceptT (ExceptT), runExceptT)
|
||||
import Control.Monad.State (State, evalState, get)
|
||||
import Data.Bifunctor (bimap)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.ABT as ABT
|
||||
import qualified Unison.Builtin as Builtin
|
||||
import qualified Unison.Builtin.Terms as Builtin
|
||||
import Unison.Codebase.Branch (Branch)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.CodeLookup as CL
|
||||
import Unison.Codebase.Editor.Git (withStatus)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo)
|
||||
import Unison.Codebase.GitError (GitError)
|
||||
import Unison.Codebase.Patch (Patch)
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.Parser as Parser
|
||||
module Unison.Codebase
|
||||
( Codebase (..),
|
||||
CodebasePath,
|
||||
GetRootBranchError (..),
|
||||
getCodebaseDir,
|
||||
SyncToDir,
|
||||
addDefsToCodebase,
|
||||
installUcmDependencies,
|
||||
getTypeOfTerm,
|
||||
getTypeOfReferent,
|
||||
lca,
|
||||
lookupWatchCache,
|
||||
toCodeLookup,
|
||||
typeLookupForDependencies,
|
||||
importRemoteBranch,
|
||||
viewRemoteBranch,
|
||||
termsOfType,
|
||||
termsMentioningType,
|
||||
dependents,
|
||||
isTerm,
|
||||
isType,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Codebase.Type (Codebase (..), GetRootBranchError (..), SyncToDir, GitError (GitCodebaseError))
|
||||
import Unison.CodebasePath (CodebasePath, getCodebaseDir)
|
||||
import Unison.Prelude
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import Unison.Symbol (Symbol)
|
||||
import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.Typechecker.TypeLookup (TypeLookup (TypeLookup))
|
||||
import qualified Unison.Typechecker.TypeLookup as TL
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Util.Relation as Rel
|
||||
import qualified Unison.Util.Set as Set
|
||||
import U.Util.Timing (time)
|
||||
import qualified Data.Map as Map
|
||||
import Unison.Symbol (Symbol)
|
||||
import qualified Unison.Parser.Ann as Parser
|
||||
import qualified Unison.Builtin.Terms as Builtin
|
||||
import qualified Unison.Builtin as Builtin
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.Var as Var
|
||||
import UnliftIO.Directory (getHomeDirectory)
|
||||
import Unison.Reference (Reference)
|
||||
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation (builtinAnnotation))
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.Codebase.CodeLookup as CL
|
||||
import qualified Unison.WatchKind as WK
|
||||
import Unison.Term (Term)
|
||||
import qualified Unison.Typechecker.TypeLookup as TL
|
||||
import Unison.Typechecker.TypeLookup (TypeLookup(TypeLookup))
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Codebase.Branch (Branch)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace)
|
||||
import Control.Monad.Except (runExceptT, ExceptT (ExceptT))
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import qualified Unison.Codebase.GitError as GitError
|
||||
|
||||
type DataDeclaration v a = DD.DataDeclaration v a
|
||||
|
||||
type EffectDeclaration v a = DD.EffectDeclaration v a
|
||||
|
||||
-- | this FileCodebase detail lives here, because the interface depends on it 🙃
|
||||
type CodebasePath = FilePath
|
||||
|
||||
type SyncToDir m =
|
||||
CodebasePath -> -- dest codebase
|
||||
SyncMode ->
|
||||
Branch m -> -- branch to sync to dest codebase
|
||||
m ()
|
||||
|
||||
-- | Abstract interface to a user's codebase.
|
||||
--
|
||||
-- One implementation is 'Unison.Codebase.FileCodebase' which uses the filesystem.
|
||||
data Codebase m v a =
|
||||
Codebase { getTerm :: Reference.Id -> m (Maybe (Term v a))
|
||||
, getTypeOfTermImpl :: Reference.Id -> m (Maybe (Type v a))
|
||||
, getTypeDeclaration :: Reference.Id -> m (Maybe (Decl v a))
|
||||
|
||||
, putTerm :: Reference.Id -> Term v a -> Type v a -> m ()
|
||||
, putTypeDeclaration :: Reference.Id -> Decl v a -> m ()
|
||||
|
||||
, getRootBranch :: m (Either GetRootBranchError (Branch m))
|
||||
, putRootBranch :: Branch m -> m ()
|
||||
, rootBranchUpdates :: m (IO (), IO (Set Branch.Hash))
|
||||
, getBranchForHash :: Branch.Hash -> m (Maybe (Branch m))
|
||||
, putBranch :: Branch m -> m ()
|
||||
, branchExists :: Branch.Hash -> m Bool
|
||||
|
||||
, getPatch :: Branch.EditHash -> m (Maybe Patch)
|
||||
, putPatch :: Branch.EditHash -> Patch -> m ()
|
||||
, patchExists :: Branch.EditHash -> m Bool
|
||||
|
||||
, dependentsImpl :: Reference -> m (Set Reference.Id)
|
||||
-- This copies all the dependencies of `b` from the specified Codebase into this one
|
||||
, syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m ()
|
||||
-- This copies all the dependencies of `b` from this Codebase
|
||||
, syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m ()
|
||||
, viewRemoteBranch' :: ReadRemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath))
|
||||
, pushGitRootBranch :: Branch m -> WriteRepo -> SyncMode -> m (Either GitError ())
|
||||
|
||||
-- Watch expressions are part of the codebase, the `Reference.Id` is
|
||||
-- the hash of the source of the watch expression, and the `Term v a`
|
||||
-- is the evaluated result of the expression, decompiled to a term.
|
||||
, watches :: UF.WatchKind -> m [Reference.Id]
|
||||
, getWatch :: UF.WatchKind -> Reference.Id -> m (Maybe (Term v a))
|
||||
, putWatch :: UF.WatchKind -> Reference.Id -> Term v a -> m ()
|
||||
, clearWatches :: m ()
|
||||
|
||||
, getReflog :: m [Reflog.Entry]
|
||||
, appendReflog :: Text -> Branch m -> Branch m -> m ()
|
||||
|
||||
-- list of terms of the given type
|
||||
, termsOfTypeImpl :: Reference -> m (Set Referent.Id)
|
||||
-- list of terms that mention the given type anywhere in their signature
|
||||
, termsMentioningTypeImpl :: Reference -> m (Set Referent.Id)
|
||||
-- number of base58 characters needed to distinguish any two references in the codebase
|
||||
, hashLength :: m Int
|
||||
, termReferencesByPrefix :: ShortHash -> m (Set Reference.Id)
|
||||
, typeReferencesByPrefix :: ShortHash -> m (Set Reference.Id)
|
||||
, termReferentsByPrefix :: ShortHash -> m (Set Referent.Id)
|
||||
|
||||
, branchHashLength :: m Int
|
||||
, branchHashesByPrefix :: ShortBranchHash -> m (Set Branch.Hash)
|
||||
|
||||
-- returns `Nothing` to not implemented, fallback to in-memory
|
||||
-- also `Nothing` if no LCA
|
||||
-- The result is undefined if the two hashes are not in the codebase.
|
||||
-- Use `Codebase.lca` which wraps this in a nice API.
|
||||
, lcaImpl :: Maybe (Branch.Hash -> Branch.Hash -> m (Maybe Branch.Hash))
|
||||
|
||||
-- `beforeImpl` returns `Nothing` if not implemented by the codebase
|
||||
-- `beforeImpl b1 b2` is undefined if `b2` not in the codebase
|
||||
--
|
||||
-- Use `Codebase.before` which wraps this in a nice API.
|
||||
, beforeImpl :: Maybe (Branch.Hash -> Branch.Hash -> m Bool)
|
||||
}
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Codebase.Editor.Git (withStatus)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Util.Relation as Rel
|
||||
import qualified Unison.Type as Type
|
||||
|
||||
lca :: Monad m => Codebase m v a -> Branch m -> Branch m -> m (Maybe (Branch m))
|
||||
lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = case lcaImpl code of
|
||||
@ -138,35 +71,35 @@ lca code b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) = case lcaImpl co
|
||||
Nothing -> pure Nothing -- no common ancestor
|
||||
else Branch.lca b1 b2
|
||||
|
||||
before :: Monad m => Codebase m v a -> Branch m -> Branch m -> m Bool
|
||||
before code b1 b2 = case beforeImpl code of
|
||||
Nothing -> Branch.before b1 b2
|
||||
Just before -> before' (branchExists code) before b1 b2
|
||||
-- before :: Monad m => Codebase m v a -> Branch m -> Branch m -> m Bool
|
||||
-- before code b1 b2 = case beforeImpl code of
|
||||
-- Nothing -> Branch.before b1 b2
|
||||
-- Just before -> before' (branchExists code) before b1 b2
|
||||
|
||||
before' :: Monad m => (Branch.Hash -> m Bool) -> (Branch.Hash -> Branch.Hash -> m Bool) -> Branch m -> Branch m -> m Bool
|
||||
before' branchExists before b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) =
|
||||
ifM
|
||||
(branchExists h2)
|
||||
(ifM
|
||||
(branchExists h2)
|
||||
(before h1 h2)
|
||||
(pure False))
|
||||
(Branch.before b1 b2)
|
||||
-- before' :: Monad m => (Branch.Hash -> m Bool) -> (Branch.Hash -> Branch.Hash -> m Bool) -> Branch m -> Branch m -> m Bool
|
||||
-- before' branchExists before b1@(Branch.headHash -> h1) b2@(Branch.headHash -> h2) =
|
||||
-- ifM
|
||||
-- (branchExists h2)
|
||||
-- (ifM
|
||||
-- (branchExists h2)
|
||||
-- (before h1 h2)
|
||||
-- (pure False))
|
||||
-- (Branch.before b1 b2)
|
||||
|
||||
|
||||
data GetRootBranchError
|
||||
= NoRootBranch
|
||||
| CouldntParseRootBranch String
|
||||
| CouldntLoadRootBranch Branch.Hash
|
||||
deriving Show
|
||||
-- data GetRootBranchError
|
||||
-- = NoRootBranch
|
||||
-- | CouldntParseRootBranch String
|
||||
-- | CouldntLoadRootBranch Branch.Hash
|
||||
-- deriving Show
|
||||
|
||||
debug :: Bool
|
||||
debug = False
|
||||
|
||||
data SyncFileCodebaseResult = SyncOk | UnknownDestinationRootBranch Branch.Hash | NotFastForward
|
||||
-- data SyncFileCodebaseResult = SyncOk | UnknownDestinationRootBranch Branch.Hash | NotFastForward
|
||||
|
||||
getCodebaseDir :: MonadIO m => Maybe FilePath -> m FilePath
|
||||
getCodebaseDir = maybe getHomeDirectory pure
|
||||
-- getCodebaseDir :: MonadIO m => Maybe FilePath -> m FilePath
|
||||
-- getCodebaseDir = maybe getHomeDirectory pure
|
||||
|
||||
-- | Write all of UCM's dependencies (builtins types and an empty namespace) into the codebase
|
||||
installUcmDependencies :: forall m. Monad m => Codebase m Symbol Parser.Ann -> m ()
|
||||
@ -205,8 +138,8 @@ getTypeOfConstructor _ r cid =
|
||||
|
||||
lookupWatchCache :: (Monad m) => Codebase m v a -> Reference -> m (Maybe (Term v a))
|
||||
lookupWatchCache codebase (Reference.DerivedId h) = do
|
||||
m1 <- getWatch codebase UF.RegularWatch h
|
||||
maybe (getWatch codebase UF.TestWatch h) (pure . Just) m1
|
||||
m1 <- getWatch codebase WK.RegularWatch h
|
||||
maybe (getWatch codebase WK.TestWatch h) (pure . Just) m1
|
||||
lookupWatchCache _ Reference.Builtin{} = pure Nothing
|
||||
|
||||
typeLookupForDependencies
|
||||
@ -227,99 +160,35 @@ typeLookupForDependencies codebase s = do
|
||||
Nothing -> pure mempty
|
||||
go tl Reference.Builtin{} = pure tl -- codebase isn't consulted for builtins
|
||||
|
||||
-- todo: can this be implemented in terms of TransitiveClosure.transitiveClosure?
|
||||
-- todo: add some tests on this guy?
|
||||
transitiveDependencies
|
||||
:: (Monad m, Var v)
|
||||
=> CL.CodeLookup v m a
|
||||
-> Set Reference.Id
|
||||
-> Reference.Id
|
||||
-> m (Set Reference.Id)
|
||||
transitiveDependencies code seen0 rid = if Set.member rid seen0
|
||||
then pure seen0
|
||||
else
|
||||
let seen = Set.insert rid seen0
|
||||
getIds = Set.mapMaybe Reference.toId
|
||||
in CL.getTerm code rid >>= \case
|
||||
Just t ->
|
||||
foldM (transitiveDependencies code) seen (getIds $ Term.dependencies t)
|
||||
Nothing ->
|
||||
CL.getTypeDeclaration code rid >>= \case
|
||||
Nothing -> pure seen
|
||||
Just (Left ed) -> foldM (transitiveDependencies code)
|
||||
seen
|
||||
(getIds $ DD.dependencies (DD.toDataDecl ed))
|
||||
Just (Right dd) -> foldM (transitiveDependencies code)
|
||||
seen
|
||||
(getIds $ DD.dependencies dd)
|
||||
-- -- todo: can this be implemented in terms of TransitiveClosure.transitiveClosure?
|
||||
-- -- todo: add some tests on this guy?
|
||||
-- transitiveDependencies
|
||||
-- :: (Monad m, Var v)
|
||||
-- => CL.CodeLookup v m a
|
||||
-- -> Set Reference.Id
|
||||
-- -> Reference.Id
|
||||
-- -> m (Set Reference.Id)
|
||||
-- transitiveDependencies code seen0 rid = if Set.member rid seen0
|
||||
-- then pure seen0
|
||||
-- else
|
||||
-- let seen = Set.insert rid seen0
|
||||
-- getIds = Set.mapMaybe Reference.toId
|
||||
-- in CL.getTerm code rid >>= \case
|
||||
-- Just t ->
|
||||
-- foldM (transitiveDependencies code) seen (getIds $ Term.dependencies t)
|
||||
-- Nothing ->
|
||||
-- CL.getTypeDeclaration code rid >>= \case
|
||||
-- Nothing -> pure seen
|
||||
-- Just (Left ed) -> foldM (transitiveDependencies code)
|
||||
-- seen
|
||||
-- (getIds $ DD.dependencies (DD.toDataDecl ed))
|
||||
-- Just (Right dd) -> foldM (transitiveDependencies code)
|
||||
-- seen
|
||||
-- (getIds $ DD.dependencies dd)
|
||||
|
||||
toCodeLookup :: Codebase m v a -> CL.CodeLookup v m a
|
||||
toCodeLookup c = CL.CodeLookup (getTerm c) (getTypeDeclaration c)
|
||||
|
||||
-- Like the other `makeSelfContained`, but takes and returns a `UnisonFile`.
|
||||
-- Any watches in the input `UnisonFile` will be watches in the returned
|
||||
-- `UnisonFile`.
|
||||
makeSelfContained'
|
||||
:: forall m v a . (Monad m, Monoid a, Var v)
|
||||
=> CL.CodeLookup v m a
|
||||
-> UF.UnisonFile v a
|
||||
-> m (UF.UnisonFile v a)
|
||||
makeSelfContained' code uf = do
|
||||
let UF.UnisonFileId ds0 es0 bs0 ws0 = uf
|
||||
deps0 = getIds . Term.dependencies . snd <$> (UF.allWatches uf <> bs0)
|
||||
where getIds = Set.mapMaybe Reference.toId
|
||||
-- transitive dependencies (from codebase) of all terms (including watches) in the UF
|
||||
deps <- foldM (transitiveDependencies code) Set.empty (Set.unions deps0)
|
||||
-- load all decls from deps list
|
||||
decls <- fmap catMaybes
|
||||
. forM (toList deps)
|
||||
$ \rid -> fmap (rid, ) <$> CL.getTypeDeclaration code rid
|
||||
-- partition the decls into effects and data
|
||||
let es1 :: [(Reference.Id, DD.EffectDeclaration v a)]
|
||||
ds1 :: [(Reference.Id, DD.DataDeclaration v a)]
|
||||
(es1, ds1) = partitionEithers [ bimap (r,) (r,) d | (r, d) <- decls ]
|
||||
-- load all terms from deps list
|
||||
bs1 <- fmap catMaybes
|
||||
. forM (toList deps)
|
||||
$ \rid -> fmap (rid, ) <$> CL.getTerm code rid
|
||||
let
|
||||
allVars :: Set v
|
||||
allVars = Set.unions
|
||||
[ UF.allVars uf
|
||||
, Set.unions [ DD.allVars dd | (_, dd) <- ds1 ]
|
||||
, Set.unions [ DD.allVars (DD.toDataDecl ed) | (_, ed) <- es1 ]
|
||||
, Set.unions [ Term.allVars tm | (_, tm) <- bs1 ]
|
||||
]
|
||||
refVar :: Reference.Id -> State (Set v, Map Reference.Id v) v
|
||||
refVar r = do
|
||||
m <- snd <$> get
|
||||
case Map.lookup r m of
|
||||
Just v -> pure v
|
||||
Nothing -> do
|
||||
v <- ABT.freshenS' _1 (Var.refNamed (Reference.DerivedId r))
|
||||
_2 %= Map.insert r v
|
||||
pure v
|
||||
assignVars :: [(Reference.Id, b)] -> State (Set v, Map Reference.Id v) [(v, (Reference.Id, b))]
|
||||
assignVars = traverse (\e@(r, _) -> (,e) <$> refVar r)
|
||||
unref :: Term v a -> State (Set v, Map Reference.Id v) (Term v a)
|
||||
unref = ABT.visit go where
|
||||
go t@(Term.Ref' (Reference.DerivedId r)) =
|
||||
Just (Term.var (ABT.annotation t) <$> refVar r)
|
||||
go _ = Nothing
|
||||
unrefb = traverse (\(v, tm) -> (v,) <$> unref tm)
|
||||
pair :: forall f a b. Applicative f => f a -> f b -> f (a,b)
|
||||
pair = liftA2 (,)
|
||||
uf' = flip evalState (allVars, Map.empty) $ do
|
||||
datas' <- Map.union ds0 . Map.fromList <$> assignVars ds1
|
||||
effects' <- Map.union es0 . Map.fromList <$> assignVars es1
|
||||
-- bs0 is terms from the input file
|
||||
bs0' <- unrefb bs0
|
||||
ws0' <- traverse unrefb ws0
|
||||
-- bs1 is dependency terms
|
||||
bs1' <- traverse (\(r, tm) -> refVar r `pair` unref tm) bs1
|
||||
pure $ UF.UnisonFileId datas' effects' (bs1' ++ bs0') ws0'
|
||||
pure uf'
|
||||
|
||||
getTypeOfTerm :: (Applicative m, Var v, BuiltinAnnotation a) =>
|
||||
Codebase m v a -> Reference -> m (Maybe (Type v a))
|
||||
getTypeOfTerm _c r | debug && trace ("Codebase.getTypeOfTerm " ++ show r) False = undefined
|
||||
@ -335,7 +204,7 @@ getTypeOfReferent c (Referent.Ref r) = getTypeOfTerm c r
|
||||
getTypeOfReferent c (Referent.Con r cid _) =
|
||||
getTypeOfConstructor c r cid
|
||||
|
||||
-- The dependents of a builtin type is the set of builtin terms which
|
||||
-- | The dependents of a builtin type includes the set of builtin terms which
|
||||
-- mention that type.
|
||||
dependents :: Functor m => Codebase m v a -> Reference -> m (Set Reference)
|
||||
dependents c r
|
||||
@ -369,13 +238,13 @@ isType c r = case r of
|
||||
Reference.Builtin{} -> pure $ Builtin.isBuiltinType r
|
||||
Reference.DerivedId r -> isJust <$> getTypeDeclaration c r
|
||||
|
||||
class BuiltinAnnotation a where
|
||||
builtinAnnotation :: a
|
||||
-- class BuiltinAnnotation a where
|
||||
-- builtinAnnotation :: a
|
||||
|
||||
instance BuiltinAnnotation Parser.Ann where
|
||||
builtinAnnotation = Parser.Intrinsic
|
||||
-- instance BuiltinAnnotation Parser.Ann where
|
||||
-- builtinAnnotation = Parser.Intrinsic
|
||||
|
||||
-- * Git stuff
|
||||
-- -- * Git stuff
|
||||
|
||||
-- | Sync elements as needed from a remote codebase into the local one.
|
||||
-- If `sbh` is supplied, we try to load the specified branch hash;
|
||||
@ -394,7 +263,7 @@ importRemoteBranch codebase ns mode = runExceptT do
|
||||
lift $ syncFromDirectory codebase cacheDir mode branch
|
||||
ExceptT
|
||||
let h = Branch.headHash branch
|
||||
err = Left $ GitError.CouldntLoadSyncedBranch h
|
||||
err = Left . GitCodebaseError $ GitError.CouldntLoadSyncedBranch ns h
|
||||
in time "load fresh local branch after sync" $
|
||||
(getBranchForHash codebase h <&> maybe err Right) <* cleanup
|
||||
|
||||
@ -407,4 +276,4 @@ viewRemoteBranch ::
|
||||
m (Either GitError (m (), Branch m))
|
||||
viewRemoteBranch codebase ns = runExceptT do
|
||||
(cleanup, branch, _) <- ExceptT $ viewRemoteBranch' codebase ns
|
||||
pure (cleanup, branch)
|
||||
pure (cleanup, branch)
|
@ -7,108 +7,72 @@
|
||||
module Unison.Codebase.Branch
|
||||
( -- * Branch types
|
||||
Branch(..)
|
||||
, BranchDiff(..)
|
||||
, UnwrappedBranch
|
||||
, Branch0(..)
|
||||
, MergeMode(..)
|
||||
, Raw(..)
|
||||
, Star
|
||||
, Hash
|
||||
, EditHash
|
||||
, pattern Hash
|
||||
|
||||
-- * Branch construction
|
||||
, empty
|
||||
, empty0
|
||||
-- * Branch construction
|
||||
, branch0
|
||||
, one
|
||||
, cons
|
||||
, uncons
|
||||
, empty
|
||||
, empty0
|
||||
, discardHistory0
|
||||
, toCausalRaw
|
||||
, transform
|
||||
|
||||
-- * Branch history
|
||||
-- ** History queries
|
||||
-- * Branch tests
|
||||
, isEmpty
|
||||
, isEmpty0
|
||||
, isOne
|
||||
, before
|
||||
, lca
|
||||
-- * diff
|
||||
, diff0
|
||||
-- * properties
|
||||
, head
|
||||
, headHash
|
||||
, before
|
||||
, before'
|
||||
, findHistoricalHQs
|
||||
, findHistoricalRefs
|
||||
, findHistoricalRefs'
|
||||
, namesDiff
|
||||
-- ** History updates
|
||||
, step
|
||||
, stepEverywhere
|
||||
, uncons
|
||||
, merge
|
||||
, merge'
|
||||
, merge''
|
||||
|
||||
-- * Branch children
|
||||
-- ** Children lenses
|
||||
, children
|
||||
-- ** Children queries
|
||||
, deepEdits'
|
||||
, toList0
|
||||
-- * step
|
||||
, stepManyAt
|
||||
, stepManyAtM
|
||||
, stepManyAt0
|
||||
, stepEverywhere
|
||||
-- *
|
||||
, addTermName
|
||||
, addTypeName
|
||||
, deleteTermName
|
||||
, deleteTypeName
|
||||
, setChildBranch
|
||||
, replacePatch
|
||||
, deletePatch
|
||||
, getMaybePatch
|
||||
, getPatch
|
||||
, modifyPatches
|
||||
-- ** Children queries
|
||||
, getAt
|
||||
, getAt'
|
||||
, getAt0
|
||||
-- ** Children updates
|
||||
, setChildBranch
|
||||
, stepManyAt
|
||||
, stepManyAt0
|
||||
, stepManyAtM
|
||||
, modifyAtM
|
||||
, modifyAt
|
||||
|
||||
-- * Branch terms/types/edits
|
||||
-- ** Term/type/edits lenses
|
||||
, modifyAtM
|
||||
-- * Branch terms/types/edits
|
||||
-- ** Term/type/edits lenses
|
||||
, terms
|
||||
, types
|
||||
, edits
|
||||
-- ** Term/type queries
|
||||
, deepReferents
|
||||
, deepTypeReferences
|
||||
, toNames0
|
||||
-- ** Term/type updates
|
||||
, addTermName
|
||||
, addTypeName
|
||||
, deleteTermName
|
||||
, deleteTypeName
|
||||
|
||||
|
||||
-- * Branch patches
|
||||
-- ** Patch queries
|
||||
, deepEdits'
|
||||
, getPatch
|
||||
, getMaybePatch
|
||||
-- ** Patch updates
|
||||
, replacePatch
|
||||
, deletePatch
|
||||
, modifyPatches
|
||||
|
||||
-- * Branch serialization
|
||||
-- * Branch serialization
|
||||
, cachedRead
|
||||
, boundedCache
|
||||
, Cache
|
||||
, sync
|
||||
|
||||
-- * Unused
|
||||
, childrenR
|
||||
, debugPaths
|
||||
, editedPatchRemoved
|
||||
, editsR
|
||||
, findHistoricalSHs
|
||||
, fork
|
||||
, lca
|
||||
, move
|
||||
, numHashChars
|
||||
, printDebugPaths
|
||||
, removedPatchEdited
|
||||
, stepAt
|
||||
, stepAtM
|
||||
, termsR
|
||||
, typesR
|
||||
) where
|
||||
|
||||
import Unison.Prelude hiding (empty)
|
||||
@ -140,27 +104,16 @@ import Unison.Hashable ( Hashable )
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Name ( Name(..) )
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names2 as Names
|
||||
import qualified Unison.Names3 as Names
|
||||
import Unison.Names2 ( Names'(Names), Names0 )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Reference as Reference
|
||||
|
||||
import qualified U.Util.Cache as Cache
|
||||
import qualified Unison.Util.Relation as R
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import qualified Unison.Util.Relation4 as R4
|
||||
import qualified Unison.Util.List as List
|
||||
import Unison.Util.Map ( unionWithM )
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import Unison.HashQualified (HashQualified)
|
||||
import qualified Unison.LabeledDependency as LD
|
||||
import Unison.LabeledDependency (LabeledDependency)
|
||||
import qualified Unison.Util.List as List
|
||||
|
||||
-- | A node in the Unison namespace hierarchy
|
||||
-- along with its history.
|
||||
@ -171,7 +124,6 @@ type UnwrappedBranch m = Causal m Raw (Branch0 m)
|
||||
type Hash = Causal.RawHash Raw
|
||||
type EditHash = Hash.Hash
|
||||
|
||||
-- Star3 r n Metadata.Type (Metadata.Type, Metadata.Value)
|
||||
type Star r n = Metadata.Star r n
|
||||
|
||||
-- | A node in the Unison namespace hierarchy.
|
||||
@ -233,70 +185,6 @@ data Raw = Raw
|
||||
|
||||
makeLenses ''Branch
|
||||
makeLensesFor [("_edits", "edits")] ''Branch0
|
||||
makeLenses ''Raw
|
||||
|
||||
toNames0 :: Branch0 m -> Names0
|
||||
toNames0 b = Names (R.swap . deepTerms $ b)
|
||||
(R.swap . deepTypes $ b)
|
||||
|
||||
-- This stops searching for a given ShortHash once it encounters
|
||||
-- any term or type in any Branch0 that satisfies that ShortHash.
|
||||
findHistoricalSHs
|
||||
:: Monad m => Set ShortHash -> Branch m -> m (Set ShortHash, Names0)
|
||||
findHistoricalSHs = findInHistory
|
||||
(\sh r _n -> sh `SH.isPrefixOf` Referent.toShortHash r)
|
||||
(\sh r _n -> sh `SH.isPrefixOf` Reference.toShortHash r)
|
||||
|
||||
-- This stops searching for a given HashQualified once it encounters
|
||||
-- any term or type in any Branch0 that satisfies that HashQualified.
|
||||
findHistoricalHQs :: Monad m
|
||||
=> Set (HashQualified Name)
|
||||
-> Branch m
|
||||
-> m (Set (HashQualified Name), Names0)
|
||||
findHistoricalHQs = findInHistory
|
||||
(\hq r n -> HQ.matchesNamedReferent n r hq)
|
||||
(\hq r n -> HQ.matchesNamedReference n r hq)
|
||||
|
||||
findHistoricalRefs :: Monad m => Set LabeledDependency -> Branch m
|
||||
-> m (Set LabeledDependency, Names0)
|
||||
findHistoricalRefs = findInHistory
|
||||
(\query r _n -> LD.fold (const False) (==r) query)
|
||||
(\query r _n -> LD.fold (==r) (const False) query)
|
||||
|
||||
findHistoricalRefs' :: Monad m => Set Reference -> Branch m
|
||||
-> m (Set Reference, Names0)
|
||||
findHistoricalRefs' = findInHistory
|
||||
(\queryRef r _n -> r == Referent.Ref queryRef)
|
||||
(\queryRef r _n -> r == queryRef)
|
||||
|
||||
findInHistory :: forall m q. (Monad m, Ord q)
|
||||
=> (q -> Referent -> Name -> Bool)
|
||||
-> (q -> Reference -> Name -> Bool)
|
||||
-> Set q -> Branch m -> m (Set q, Names0)
|
||||
findInHistory termMatches typeMatches queries b =
|
||||
(Causal.foldHistoryUntil f (queries, mempty) . _history) b <&> \case
|
||||
-- could do something more sophisticated here later to report that some SH
|
||||
-- couldn't be found anywhere in the history. but for now, I assume that
|
||||
-- the normal thing will happen when it doesn't show up in the namespace.
|
||||
Causal.Satisfied (_, names) -> (mempty, names)
|
||||
Causal.Unsatisfied (missing, names) -> (missing, names)
|
||||
where
|
||||
-- in order to not favor terms over types, we iterate through the ShortHashes,
|
||||
-- for each `remainingQueries`, if we find a matching Referent or Reference,
|
||||
-- we remove `q` from the accumulated `remainingQueries`, and add the Ref* to
|
||||
-- the accumulated `names0`.
|
||||
f acc@(remainingQueries, _) b0 = (acc', null remainingQueries')
|
||||
where
|
||||
acc'@(remainingQueries', _) = foldl' findQ acc remainingQueries
|
||||
findQ :: (Set q, Names0) -> q -> (Set q, Names0)
|
||||
findQ acc sh =
|
||||
foldl' (doType sh) (foldl' (doTerm sh) acc
|
||||
(R.toList $ deepTerms b0))
|
||||
(R.toList $ deepTypes b0)
|
||||
doTerm q acc@(remainingSHs, names0) (r, n) = if termMatches q r n
|
||||
then (Set.delete q remainingSHs, Names.addTerm n r names0) else acc
|
||||
doType q acc@(remainingSHs, names0) (r, n) = if typeMatches q r n
|
||||
then (Set.delete q remainingSHs, Names.addType n r names0) else acc
|
||||
|
||||
deepReferents :: Branch0 m -> Set Referent
|
||||
deepReferents = R.dom . deepTerms
|
||||
@ -361,6 +249,7 @@ head (Branch c) = Causal.head c
|
||||
headHash :: Branch m -> Hash
|
||||
headHash (Branch c) = Causal.currentHash c
|
||||
|
||||
-- | a version of `deepEdits` that returns the `m Patch` as well.
|
||||
deepEdits' :: Branch0 m -> Map Name (EditHash, m Patch)
|
||||
deepEdits' b = go id b where
|
||||
-- can change this to an actual prefix once Name is a [NameSegment]
|
||||
@ -372,127 +261,42 @@ deepEdits' b = go id b where
|
||||
f :: (NameSegment, Branch m) -> Map Name (EditHash, m Patch)
|
||||
f (c, b) = go (addPrefix . Name.joinDot (Name.fromSegment c)) (head b)
|
||||
|
||||
data MergeMode = RegularMerge | SquashMerge deriving (Eq,Ord,Show)
|
||||
|
||||
merge :: forall m . Monad m => Branch m -> Branch m -> m (Branch m)
|
||||
merge = merge' RegularMerge
|
||||
|
||||
-- Discards the history of a Branch0's children, recursively
|
||||
discardHistory0 :: Applicative m => Branch0 m -> Branch0 m
|
||||
discardHistory0 = over children (fmap tweak) where
|
||||
tweak b = cons (discardHistory0 (head b)) empty
|
||||
|
||||
merge' :: forall m . Monad m => MergeMode -> Branch m -> Branch m -> m (Branch m)
|
||||
merge' = merge'' lca
|
||||
|
||||
merge'' :: forall m . Monad m
|
||||
=> (Branch m -> Branch m -> m (Maybe (Branch m))) -- lca calculator
|
||||
-> MergeMode
|
||||
-> Branch m
|
||||
-> Branch m
|
||||
-> m (Branch m)
|
||||
merge'' _ _ b1 b2 | isEmpty b1 = pure b2
|
||||
merge'' _ mode b1 b2 | isEmpty b2 = case mode of
|
||||
RegularMerge -> pure b1
|
||||
SquashMerge -> pure $ cons (discardHistory0 (head b1)) b2
|
||||
merge'' lca mode (Branch x) (Branch y) =
|
||||
Branch <$> case mode of
|
||||
RegularMerge -> Causal.threeWayMerge' lca' combine x y
|
||||
SquashMerge -> Causal.squashMerge' lca' (pure . discardHistory0) combine x y
|
||||
where
|
||||
lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
combine :: Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
combine Nothing l r = merge0 lca mode l r
|
||||
combine (Just ca) l r = do
|
||||
dl <- diff0 ca l
|
||||
dr <- diff0 ca r
|
||||
head0 <- apply ca (dl <> dr)
|
||||
children <- Map.mergeA
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.zipWithAMatched $ const (merge'' lca mode))
|
||||
(_children l) (_children r)
|
||||
pure $ branch0 (_terms head0) (_types head0) children (_edits head0)
|
||||
|
||||
combineMissing ca k cur =
|
||||
case Map.lookup k (_children ca) of
|
||||
Nothing -> pure $ Just cur
|
||||
Just old -> do
|
||||
nw <- merge'' lca mode (cons empty0 old) cur
|
||||
if isEmpty0 $ head nw
|
||||
then pure Nothing
|
||||
else pure $ Just nw
|
||||
|
||||
apply :: Branch0 m -> BranchDiff -> m (Branch0 m)
|
||||
apply b0 BranchDiff {..} = do
|
||||
patches <- sequenceA
|
||||
$ Map.differenceWith patchMerge (pure @m <$> _edits b0) changedPatches
|
||||
let newPatches = makePatch <$> Map.difference changedPatches (_edits b0)
|
||||
makePatch Patch.PatchDiff {..} =
|
||||
let p = Patch.Patch _addedTermEdits _addedTypeEdits
|
||||
in (H.accumulate' p, pure p)
|
||||
pure $ branch0 (Star3.difference (_terms b0) removedTerms <> addedTerms)
|
||||
(Star3.difference (_types b0) removedTypes <> addedTypes)
|
||||
(_children b0)
|
||||
(patches <> newPatches)
|
||||
patchMerge mhp Patch.PatchDiff {..} = Just $ do
|
||||
(_, mp) <- mhp
|
||||
p <- mp
|
||||
let np = Patch.Patch
|
||||
{ _termEdits = R.difference (Patch._termEdits p) _removedTermEdits
|
||||
<> _addedTermEdits
|
||||
, _typeEdits = R.difference (Patch._typeEdits p) _removedTypeEdits
|
||||
<> _addedTypeEdits
|
||||
}
|
||||
pure (H.accumulate' np, pure np)
|
||||
|
||||
-- `before' lca b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- It's defined as: lca b1 b2 == Just b1
|
||||
before' :: Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-> Branch m -> Branch m -> m Bool
|
||||
before' lca (Branch x) (Branch y) = Causal.before' lca' x y
|
||||
where
|
||||
lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
-- -- `before' lca b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- -- It's defined as: lca b1 b2 == Just b1
|
||||
-- before' :: Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-- -> Branch m -> Branch m -> m Bool
|
||||
-- before' lca (Branch x) (Branch y) = Causal.before' lca' x y
|
||||
-- where
|
||||
-- lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
|
||||
-- `before b1 b2` is true if `b2` incorporates all of `b1`
|
||||
before :: Monad m => Branch m -> Branch m -> m Bool
|
||||
before (Branch b1) (Branch b2) = Causal.before b1 b2
|
||||
|
||||
merge0 :: forall m. Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-> MergeMode -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
merge0 lca mode b1 b2 = do
|
||||
c3 <- unionWithM (merge'' lca mode) (_children b1) (_children b2)
|
||||
e3 <- unionWithM g (_edits b1) (_edits b2)
|
||||
pure $ branch0 (_terms b1 <> _terms b2)
|
||||
(_types b1 <> _types b2)
|
||||
c3
|
||||
e3
|
||||
where
|
||||
g :: (EditHash, m Patch) -> (EditHash, m Patch) -> m (EditHash, m Patch)
|
||||
g (h1, m1) (h2, _) | h1 == h2 = pure (h1, m1)
|
||||
g (_, m1) (_, m2) = do
|
||||
e1 <- m1
|
||||
e2 <- m2
|
||||
let e3 = e1 <> e2
|
||||
pure (H.accumulate' e3, pure e3)
|
||||
|
||||
pattern Hash h = Causal.RawHash h
|
||||
|
||||
-- | what does this do? —AI
|
||||
toList0 :: Branch0 m -> [(Path, Branch0 m)]
|
||||
toList0 = go Path.empty where
|
||||
go p b = (p, b) : (Map.toList (_children b) >>= (\(seg, cb) ->
|
||||
go (Path.snoc p seg) (head cb) ))
|
||||
|
||||
printDebugPaths :: Branch m -> String
|
||||
printDebugPaths = unlines . map show . Set.toList . debugPaths
|
||||
-- printDebugPaths :: Branch m -> String
|
||||
-- printDebugPaths = unlines . map show . Set.toList . debugPaths
|
||||
|
||||
debugPaths :: Branch m -> Set (Path, Hash)
|
||||
debugPaths = go Path.empty where
|
||||
go p b = Set.insert (p, headHash b) . Set.unions $
|
||||
[ go (Path.snoc p seg) b | (seg, b) <- Map.toList $ _children (head b) ]
|
||||
-- debugPaths :: Branch m -> Set (Path, Hash)
|
||||
-- debugPaths = go Path.empty where
|
||||
-- go p b = Set.insert (p, headHash b) . Set.unions $
|
||||
-- [ go (Path.snoc p seg) b | (seg, b) <- Map.toList $ _children (head b) ]
|
||||
|
||||
data Target = TargetType | TargetTerm | TargetBranch
|
||||
deriving (Eq, Ord, Show)
|
||||
-- data Target = TargetType | TargetTerm | TargetBranch
|
||||
-- deriving (Eq, Ord, Show)
|
||||
|
||||
instance Eq (Branch0 m) where
|
||||
a == b = view terms a == view terms b
|
||||
@ -500,21 +304,21 @@ instance Eq (Branch0 m) where
|
||||
&& view children a == view children b
|
||||
&& (fmap fst . view edits) a == (fmap fst . view edits) b
|
||||
|
||||
data ForkFailure = SrcNotFound | DestExists
|
||||
-- data ForkFailure = SrcNotFound | DestExists
|
||||
|
||||
-- consider delegating to Names.numHashChars when ready to implement?
|
||||
-- are those enough?
|
||||
-- could move this to a read-only field in Branch0
|
||||
-- could move a Names0 to a read-only field in Branch0 until it gets too big
|
||||
numHashChars :: Branch m -> Int
|
||||
numHashChars _b = 3
|
||||
-- -- consider delegating to Names.numHashChars when ready to implement?
|
||||
-- -- are those enough?
|
||||
-- -- could move this to a read-only field in Branch0
|
||||
-- -- could move a Names0 to a read-only field in Branch0 until it gets too big
|
||||
-- numHashChars :: Branch m -> Int
|
||||
-- numHashChars _b = 3
|
||||
|
||||
-- This type is a little ugly, so we wrap it up with a nice type alias for
|
||||
-- use outside this module.
|
||||
type Cache m = Cache.Cache (Causal.RawHash Raw) (UnwrappedBranch m)
|
||||
|
||||
boundedCache :: MonadIO m => Word -> m (Cache m2)
|
||||
boundedCache = Cache.semispaceCache
|
||||
-- boundedCache :: MonadIO m => Word -> m (Cache m2)
|
||||
-- boundedCache = Cache.semispaceCache
|
||||
|
||||
-- Can use `Cache.nullCache` to disable caching if needed
|
||||
cachedRead :: forall m . MonadIO m
|
||||
@ -596,51 +400,51 @@ toCausalRaw = \case
|
||||
Branch (Causal.Cons _h e (ht, _m)) -> RawCons (toRaw e) ht
|
||||
Branch (Causal.Merge _h e tls) -> RawMerge (toRaw e) (Map.keysSet tls)
|
||||
|
||||
-- copy a path to another path
|
||||
fork
|
||||
:: Applicative m
|
||||
=> Path
|
||||
-> Path
|
||||
-> Branch m
|
||||
-> Either ForkFailure (Branch m)
|
||||
fork src dest root = case getAt src root of
|
||||
Nothing -> Left SrcNotFound
|
||||
Just src' -> case setIfNotExists dest src' root of
|
||||
Nothing -> Left DestExists
|
||||
Just root' -> Right root'
|
||||
-- -- copy a path to another path
|
||||
-- fork
|
||||
-- :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- fork src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' -> case setIfNotExists dest src' root of
|
||||
-- Nothing -> Left DestExists
|
||||
-- Just root' -> Right root'
|
||||
|
||||
-- Move the node at src to dest.
|
||||
-- It's okay if `dest` is inside `src`, just create empty levels.
|
||||
-- Try not to `step` more than once at each node.
|
||||
move :: Applicative m
|
||||
=> Path
|
||||
-> Path
|
||||
-> Branch m
|
||||
-> Either ForkFailure (Branch m)
|
||||
move src dest root = case getAt src root of
|
||||
Nothing -> Left SrcNotFound
|
||||
Just src' ->
|
||||
-- make sure dest doesn't already exist
|
||||
case getAt dest root of
|
||||
Just _destExists -> Left DestExists
|
||||
Nothing ->
|
||||
-- find and update common ancestor of `src` and `dest`:
|
||||
Right $ modifyAt ancestor go root
|
||||
where
|
||||
(ancestor, relSrc, relDest) = Path.relativeToAncestor src dest
|
||||
go = deleteAt relSrc . setAt relDest src'
|
||||
-- -- Move the node at src to dest.
|
||||
-- -- It's okay if `dest` is inside `src`, just create empty levels.
|
||||
-- -- Try not to `step` more than once at each node.
|
||||
-- move :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- move src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' ->
|
||||
-- -- make sure dest doesn't already exist
|
||||
-- case getAt dest root of
|
||||
-- Just _destExists -> Left DestExists
|
||||
-- Nothing ->
|
||||
-- -- find and update common ancestor of `src` and `dest`:
|
||||
-- Right $ modifyAt ancestor go root
|
||||
-- where
|
||||
-- (ancestor, relSrc, relDest) = Path.relativeToAncestor src dest
|
||||
-- go = deleteAt relSrc . setAt relDest src'
|
||||
|
||||
setIfNotExists
|
||||
:: Applicative m => Path -> Branch m -> Branch m -> Maybe (Branch m)
|
||||
setIfNotExists dest b root = case getAt dest root of
|
||||
Just _destExists -> Nothing
|
||||
Nothing -> Just $ setAt dest b root
|
||||
-- setIfNotExists
|
||||
-- :: Applicative m => Path -> Branch m -> Branch m -> Maybe (Branch m)
|
||||
-- setIfNotExists dest b root = case getAt dest root of
|
||||
-- Just _destExists -> Nothing
|
||||
-- Nothing -> Just $ setAt dest b root
|
||||
|
||||
setAt :: Applicative m => Path -> Branch m -> Branch m -> Branch m
|
||||
setAt path b = modifyAt path (const b)
|
||||
-- setAt :: Applicative m => Path -> Branch m -> Branch m -> Branch m
|
||||
-- setAt path b = modifyAt path (const b)
|
||||
|
||||
deleteAt :: Applicative m => Path -> Branch m -> Branch m
|
||||
deleteAt path = setAt path empty
|
||||
-- deleteAt :: Applicative m => Path -> Branch m -> Branch m
|
||||
-- deleteAt path = setAt path empty
|
||||
|
||||
-- returns `Nothing` if no Branch at `path` or if Branch is empty at `path`
|
||||
getAt :: Path
|
||||
@ -874,8 +678,8 @@ deleteTypeName r n b | Star3.memberD1 (r,n) (view types b)
|
||||
= over types (Star3.deletePrimaryD1 (r,n)) b
|
||||
deleteTypeName _ _ b = b
|
||||
|
||||
namesDiff :: Branch m -> Branch m -> Names.Diff
|
||||
namesDiff b1 b2 = Names.diff0 (toNames0 (head b1)) (toNames0 (head b2))
|
||||
-- namesDiff :: Branch m -> Branch m -> Names.Diff
|
||||
-- namesDiff b1 b2 = Names.diff0 (toNames0 (head b1)) (toNames0 (head b2))
|
||||
|
||||
lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m))
|
||||
lca (Branch a) (Branch b) = fmap Branch <$> Causal.lca a b
|
||||
@ -912,29 +716,29 @@ transform f b = case _history b of
|
||||
-> Causal m Raw (Branch0 n)
|
||||
transformB0s f = Causal.unsafeMapHashPreserving (transformB0 f)
|
||||
|
||||
data BranchAttentions = BranchAttentions
|
||||
{ -- Patches that were edited on the right but entirely removed on the left.
|
||||
removedPatchEdited :: [Name]
|
||||
-- Patches that were edited on the left but entirely removed on the right.
|
||||
, editedPatchRemoved :: [Name]
|
||||
}
|
||||
-- data BranchAttentions = BranchAttentions
|
||||
-- { -- Patches that were edited on the right but entirely removed on the left.
|
||||
-- removedPatchEdited :: [Name]
|
||||
-- -- Patches that were edited on the left but entirely removed on the right.
|
||||
-- , editedPatchRemoved :: [Name]
|
||||
-- }
|
||||
|
||||
instance Semigroup BranchAttentions where
|
||||
BranchAttentions edited1 removed1 <> BranchAttentions edited2 removed2
|
||||
= BranchAttentions (edited1 <> edited2) (removed1 <> removed2)
|
||||
-- instance Semigroup BranchAttentions where
|
||||
-- BranchAttentions edited1 removed1 <> BranchAttentions edited2 removed2
|
||||
-- = BranchAttentions (edited1 <> edited2) (removed1 <> removed2)
|
||||
|
||||
instance Monoid BranchAttentions where
|
||||
mempty = BranchAttentions [] []
|
||||
mappend = (<>)
|
||||
-- instance Monoid BranchAttentions where
|
||||
-- mempty = BranchAttentions [] []
|
||||
-- mappend = (<>)
|
||||
|
||||
data RefCollisions =
|
||||
RefCollisions { termCollisions :: Relation Name Name
|
||||
, typeCollisions :: Relation Name Name
|
||||
} deriving (Eq, Show)
|
||||
-- data RefCollisions =
|
||||
-- RefCollisions { termCollisions :: Relation Name Name
|
||||
-- , typeCollisions :: Relation Name Name
|
||||
-- } deriving (Eq, Show)
|
||||
|
||||
instance Semigroup RefCollisions where
|
||||
(<>) = mappend
|
||||
instance Monoid RefCollisions where
|
||||
mempty = RefCollisions mempty mempty
|
||||
mappend r1 r2 = RefCollisions (termCollisions r1 <> termCollisions r2)
|
||||
(typeCollisions r1 <> typeCollisions r2)
|
||||
-- instance Semigroup RefCollisions where
|
||||
-- (<>) = mappend
|
||||
-- instance Monoid RefCollisions where
|
||||
-- mempty = RefCollisions mempty mempty
|
||||
-- mappend r1 r2 = RefCollisions (termCollisions r1 <> termCollisions r2)
|
||||
-- (typeCollisions r1 <> typeCollisions r2)
|
||||
|
756
parser-typechecker/src/Unison/Codebase/Branch/Merge.hs
Normal file
756
parser-typechecker/src/Unison/Codebase/Branch/Merge.hs
Normal file
@ -0,0 +1,756 @@
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
|
||||
module Unison.Codebase.Branch.Merge
|
||||
( MergeMode(..)
|
||||
, merge''
|
||||
) where
|
||||
|
||||
import Unison.Prelude hiding (empty)
|
||||
import Unison.Codebase.Branch
|
||||
|
||||
import Prelude hiding (head,read,subtract)
|
||||
|
||||
import Control.Lens hiding ( children, cons, transform, uncons )
|
||||
import qualified Control.Monad.State as State
|
||||
import Control.Monad.State ( StateT )
|
||||
import Data.Bifunctor ( second )
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Map.Merge.Lazy as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Codebase.Patch as Patch
|
||||
import Unison.Codebase.Patch ( Patch )
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.Causal ( Causal
|
||||
, pattern RawOne
|
||||
, pattern RawCons
|
||||
, pattern RawMerge
|
||||
)
|
||||
import Unison.Codebase.Path ( Path(..) )
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import Unison.NameSegment ( NameSegment )
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
import qualified Unison.Codebase.Metadata as Metadata
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Hashable ( Hashable )
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Name ( Name(..) )
|
||||
import qualified Unison.Name as Name
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
|
||||
import qualified U.Util.Cache as Cache
|
||||
import qualified Unison.Util.Relation as R
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import qualified Unison.Util.Relation4 as R4
|
||||
import Unison.Util.Map ( unionWithM )
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
|
||||
-- -- | A node in the Unison namespace hierarchy
|
||||
-- -- along with its history.
|
||||
-- newtype Branch m = Branch { _history :: UnwrappedBranch m }
|
||||
-- deriving (Eq, Ord)
|
||||
-- type UnwrappedBranch m = Causal m Raw (Branch0 m)
|
||||
|
||||
-- type Hash = Causal.RawHash Raw
|
||||
-- type EditHash = Hash.Hash
|
||||
|
||||
-- type Star r n = Metadata.Star r n
|
||||
|
||||
-- -- | A node in the Unison namespace hierarchy.
|
||||
-- --
|
||||
-- -- '_terms' and '_types' are the declarations at this level.
|
||||
-- -- '_children' are the nodes one level below us.
|
||||
-- -- '_edits' are the 'Patch's stored at this node in the code.
|
||||
-- --
|
||||
-- -- The @deep*@ fields are derived from the four above.
|
||||
-- data Branch0 m = Branch0
|
||||
-- { _terms :: Star Referent NameSegment
|
||||
-- , _types :: Star Reference NameSegment
|
||||
-- , _children :: Map NameSegment (Branch m)
|
||||
-- -- ^ Note the 'Branch' here, not 'Branch0'.
|
||||
-- -- Every level in the tree has a history.
|
||||
-- , _edits :: Map NameSegment (EditHash, m Patch)
|
||||
-- -- names and metadata for this branch and its children
|
||||
-- -- (ref, (name, value)) iff ref has metadata `value` at name `name`
|
||||
-- , deepTerms :: Relation Referent Name
|
||||
-- , deepTypes :: Relation Reference Name
|
||||
-- , deepTermMetadata :: Metadata.R4 Referent Name
|
||||
-- , deepTypeMetadata :: Metadata.R4 Reference Name
|
||||
-- , deepPaths :: Set Path
|
||||
-- , deepEdits :: Map Name EditHash
|
||||
-- }
|
||||
|
||||
-- -- Represents a shallow diff of a Branch0.
|
||||
-- -- Each of these `Star`s contain metadata as well, so an entry in
|
||||
-- -- `added` or `removed` could be an update to the metadata.
|
||||
-- data BranchDiff = BranchDiff
|
||||
-- { addedTerms :: Star Referent NameSegment
|
||||
-- , removedTerms :: Star Referent NameSegment
|
||||
-- , addedTypes :: Star Reference NameSegment
|
||||
-- , removedTypes :: Star Reference NameSegment
|
||||
-- , changedPatches :: Map NameSegment Patch.PatchDiff
|
||||
-- } deriving (Eq, Ord, Show)
|
||||
|
||||
-- instance Semigroup BranchDiff where
|
||||
-- left <> right = BranchDiff
|
||||
-- { addedTerms = addedTerms left <> addedTerms right
|
||||
-- , removedTerms = removedTerms left <> removedTerms right
|
||||
-- , addedTypes = addedTypes left <> addedTypes right
|
||||
-- , removedTypes = removedTypes left <> removedTypes right
|
||||
-- , changedPatches =
|
||||
-- Map.unionWith (<>) (changedPatches left) (changedPatches right)
|
||||
-- }
|
||||
|
||||
-- instance Monoid BranchDiff where
|
||||
-- mappend = (<>)
|
||||
-- mempty = BranchDiff mempty mempty mempty mempty mempty
|
||||
|
||||
-- -- The raw Branch
|
||||
-- data Raw = Raw
|
||||
-- { _termsR :: Star Referent NameSegment
|
||||
-- , _typesR :: Star Reference NameSegment
|
||||
-- , _childrenR :: Map NameSegment Hash
|
||||
-- , _editsR :: Map NameSegment EditHash
|
||||
-- }
|
||||
|
||||
-- makeLenses ''Branch
|
||||
-- makeLensesFor [("_edits", "edits")] ''Branch0
|
||||
|
||||
-- terms :: Lens' (Branch0 m) (Star Referent NameSegment)
|
||||
-- terms = lens _terms (\Branch0{..} x -> branch0 x _types _children _edits)
|
||||
|
||||
-- types :: Lens' (Branch0 m) (Star Reference NameSegment)
|
||||
-- types = lens _types (\Branch0{..} x -> branch0 _terms x _children _edits)
|
||||
|
||||
-- children :: Lens' (Branch0 m) (Map NameSegment (Branch m))
|
||||
-- children = lens _children (\Branch0{..} x -> branch0 _terms _types x _edits)
|
||||
|
||||
-- -- creates a Branch0 from the primary fields and derives the others.
|
||||
-- branch0 :: Metadata.Star Referent NameSegment
|
||||
-- -> Metadata.Star Reference NameSegment
|
||||
-- -> Map NameSegment (Branch m)
|
||||
-- -> Map NameSegment (EditHash, m Patch)
|
||||
-- -> Branch0 m
|
||||
-- branch0 terms types children edits =
|
||||
-- Branch0 terms types children edits
|
||||
-- deepTerms' deepTypes'
|
||||
-- deepTermMetadata' deepTypeMetadata'
|
||||
-- deepPaths' deepEdits'
|
||||
-- where
|
||||
-- nameSegToName = Name.unsafeFromText . NameSegment.toText
|
||||
-- deepTerms' = (R.mapRan nameSegToName . Star3.d1) terms
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where
|
||||
-- go (nameSegToName -> n, b) =
|
||||
-- R.mapRan (Name.joinDot n) (deepTerms $ head b) -- could use mapKeysMonotonic
|
||||
-- deepTypes' = (R.mapRan nameSegToName . Star3.d1) types
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where
|
||||
-- go (nameSegToName -> n, b) =
|
||||
-- R.mapRan (Name.joinDot n) (deepTypes $ head b) -- could use mapKeysMonotonic
|
||||
-- deepTermMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 terms)
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where
|
||||
-- go (nameSegToName -> n, b) =
|
||||
-- R4.mapD2 (Name.joinDot n) (deepTermMetadata $ head b)
|
||||
-- deepTypeMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 types)
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where
|
||||
-- go (nameSegToName -> n, b) =
|
||||
-- R4.mapD2 (Name.joinDot n) (deepTypeMetadata $ head b)
|
||||
-- deepPaths' = Set.map Path.singleton (Map.keysSet children)
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where go (nameSeg, b) = Set.map (Path.cons nameSeg) (deepPaths $ head b)
|
||||
-- deepEdits' = Map.mapKeys nameSegToName (Map.map fst edits)
|
||||
-- <> foldMap go (Map.toList children)
|
||||
-- where
|
||||
-- go (nameSeg, b) =
|
||||
-- Map.mapKeys (nameSegToName nameSeg `Name.joinDot`) . deepEdits $ head b
|
||||
|
||||
-- head :: Branch m -> Branch0 m
|
||||
-- head (Branch c) = Causal.head c
|
||||
|
||||
-- headHash :: Branch m -> Hash
|
||||
-- headHash (Branch c) = Causal.currentHash c
|
||||
|
||||
-- -- deepEdits' :: Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- -- deepEdits' b = go id b where
|
||||
-- -- -- can change this to an actual prefix once Name is a [NameSegment]
|
||||
-- -- go :: (Name -> Name) -> Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- -- go addPrefix Branch0{..} =
|
||||
-- -- Map.mapKeysMonotonic (addPrefix . Name.fromSegment) _edits
|
||||
-- -- <> foldMap f (Map.toList _children)
|
||||
-- -- where
|
||||
-- -- f :: (NameSegment, Branch m) -> Map Name (EditHash, m Patch)
|
||||
-- -- f (c, b) = go (addPrefix . Name.joinDot (Name.fromSegment c)) (head b)
|
||||
|
||||
data MergeMode = RegularMerge | SquashMerge deriving (Eq,Ord,Show)
|
||||
|
||||
merge :: forall m . Monad m => Branch m -> Branch m -> m (Branch m)
|
||||
merge = merge' RegularMerge
|
||||
|
||||
-- -- Discards the history of a Branch0's children, recursively
|
||||
-- discardHistory0 :: Applicative m => Branch0 m -> Branch0 m
|
||||
-- discardHistory0 = over children (fmap tweak) where
|
||||
-- tweak b = cons (discardHistory0 (head b)) empty
|
||||
|
||||
merge' :: forall m . Monad m => MergeMode -> Branch m -> Branch m -> m (Branch m)
|
||||
merge' = merge'' lca
|
||||
|
||||
merge'' :: forall m . Monad m
|
||||
=> (Branch m -> Branch m -> m (Maybe (Branch m))) -- lca calculator
|
||||
-> MergeMode
|
||||
-> Branch m
|
||||
-> Branch m
|
||||
-> m (Branch m)
|
||||
merge'' _ _ b1 b2 | isEmpty b1 = pure b2
|
||||
merge'' _ mode b1 b2 | isEmpty b2 = case mode of
|
||||
RegularMerge -> pure b1
|
||||
SquashMerge -> pure $ cons (discardHistory0 (head b1)) b2
|
||||
merge'' lca mode (Branch x) (Branch y) =
|
||||
Branch <$> case mode of
|
||||
RegularMerge -> Causal.threeWayMerge' lca' combine x y
|
||||
SquashMerge -> Causal.squashMerge' lca' (pure . discardHistory0) combine x y
|
||||
where
|
||||
lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
combine :: Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
combine Nothing l r = merge0 lca mode l r
|
||||
combine (Just ca) l r = do
|
||||
dl <- diff0 ca l
|
||||
dr <- diff0 ca r
|
||||
head0 <- apply ca (dl <> dr)
|
||||
children <- Map.mergeA
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.zipWithAMatched $ const (merge'' lca mode))
|
||||
(_children l) (_children r)
|
||||
pure $ branch0 (_terms head0) (_types head0) children (_edits head0)
|
||||
|
||||
combineMissing ca k cur =
|
||||
case Map.lookup k (_children ca) of
|
||||
Nothing -> pure $ Just cur
|
||||
Just old -> do
|
||||
nw <- merge'' lca mode (cons empty0 old) cur
|
||||
if isEmpty0 $ head nw
|
||||
then pure Nothing
|
||||
else pure $ Just nw
|
||||
|
||||
apply :: Branch0 m -> BranchDiff -> m (Branch0 m)
|
||||
apply b0 BranchDiff {..} = do
|
||||
patches <- sequenceA
|
||||
$ Map.differenceWith patchMerge (pure @m <$> _edits b0) changedPatches
|
||||
let newPatches = makePatch <$> Map.difference changedPatches (_edits b0)
|
||||
makePatch Patch.PatchDiff {..} =
|
||||
let p = Patch.Patch _addedTermEdits _addedTypeEdits
|
||||
in (H.accumulate' p, pure p)
|
||||
pure $ branch0 (Star3.difference (_terms b0) removedTerms <> addedTerms)
|
||||
(Star3.difference (_types b0) removedTypes <> addedTypes)
|
||||
(_children b0)
|
||||
(patches <> newPatches)
|
||||
patchMerge mhp Patch.PatchDiff {..} = Just $ do
|
||||
(_, mp) <- mhp
|
||||
p <- mp
|
||||
let np = Patch.Patch
|
||||
{ _termEdits = R.difference (Patch._termEdits p) _removedTermEdits
|
||||
<> _addedTermEdits
|
||||
, _typeEdits = R.difference (Patch._typeEdits p) _removedTypeEdits
|
||||
<> _addedTypeEdits
|
||||
}
|
||||
pure (H.accumulate' np, pure np)
|
||||
|
||||
-- -- `before' lca b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- -- It's defined as: lca b1 b2 == Just b1
|
||||
-- before' :: Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-- -> Branch m -> Branch m -> m Bool
|
||||
-- before' lca (Branch x) (Branch y) = Causal.before' lca' x y
|
||||
-- where
|
||||
-- lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
|
||||
-- `before b1 b2` is true if `b2` incorporates all of `b1`
|
||||
before :: Monad m => Branch m -> Branch m -> m Bool
|
||||
before (Branch b1) (Branch b2) = Causal.before b1 b2
|
||||
|
||||
-- | Merge two branch heads pointwise: terms and types are unioned, child
-- branches are merged recursively via @merge''@ (using the supplied @lca@
-- calculator and merge mode), and patches are merged edit-wise.
merge0 :: forall m. Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
       -> MergeMode -> Branch0 m -> Branch0 m -> m (Branch0 m)
merge0 lca mode b1 b2 = do
  mergedChildren <- unionWithM (merge'' lca mode) (_children b1) (_children b2)
  mergedEdits    <- unionWithM mergePatch (_edits b1) (_edits b2)
  pure $ branch0 (_terms b1 <> _terms b2)
                 (_types b1 <> _types b2)
                 mergedChildren
                 mergedEdits
  where
    -- Combine two (hash, patch) pairs. Equal hashes mean equal patches, so
    -- the patches are only loaded and merged when the hashes differ.
    mergePatch :: (EditHash, m Patch) -> (EditHash, m Patch) -> m (EditHash, m Patch)
    mergePatch (h1, mp1) (h2, _) | h1 == h2 = pure (h1, mp1)
    mergePatch (_, mp1) (_, mp2) = do
      p1 <- mp1
      p2 <- mp2
      let merged = p1 <> p2
      pure (H.accumulate' merged, pure merged)
|
||||
|
||||
-- pattern Hash h = Causal.RawHash h
|
||||
|
||||
-- -- toList0 :: Branch0 m -> [(Path, Branch0 m)]
|
||||
-- -- toList0 = go Path.empty where
|
||||
-- -- go p b = (p, b) : (Map.toList (_children b) >>= (\(seg, cb) ->
|
||||
-- -- go (Path.snoc p seg) (head cb) ))
|
||||
|
||||
-- -- printDebugPaths :: Branch m -> String
|
||||
-- -- printDebugPaths = unlines . map show . Set.toList . debugPaths
|
||||
|
||||
-- -- debugPaths :: Branch m -> Set (Path, Hash)
|
||||
-- -- debugPaths = go Path.empty where
|
||||
-- -- go p b = Set.insert (p, headHash b) . Set.unions $
|
||||
-- -- [ go (Path.snoc p seg) b | (seg, b) <- Map.toList $ _children (head b) ]
|
||||
|
||||
-- -- data Target = TargetType | TargetTerm | TargetBranch
|
||||
-- -- deriving (Eq, Ord, Show)
|
||||
|
||||
-- instance Eq (Branch0 m) where
|
||||
-- a == b = view terms a == view terms b
|
||||
-- && view types a == view types b
|
||||
-- && view children a == view children b
|
||||
-- && (fmap fst . view edits) a == (fmap fst . view edits) b
|
||||
|
||||
-- -- data ForkFailure = SrcNotFound | DestExists
|
||||
|
||||
-- -- -- consider delegating to Names.numHashChars when ready to implement?
|
||||
-- -- -- are those enough?
|
||||
-- -- -- could move this to a read-only field in Branch0
|
||||
-- -- -- could move a Names0 to a read-only field in Branch0 until it gets too big
|
||||
-- -- numHashChars :: Branch m -> Int
|
||||
-- -- numHashChars _b = 3
|
||||
|
||||
-- -- This type is a little ugly, so we wrap it up with a nice type alias for
|
||||
-- -- use outside this module.
|
||||
-- type Cache m = Cache.Cache (Causal.RawHash Raw) (UnwrappedBranch m)
|
||||
|
||||
-- -- boundedCache :: MonadIO m => Word -> m (Cache m2)
|
||||
-- -- boundedCache = Cache.semispaceCache
|
||||
|
||||
-- -- Can use `Cache.nullCache` to disable caching if needed
|
||||
-- cachedRead :: forall m . MonadIO m
|
||||
-- => Cache m
|
||||
-- -> Causal.Deserialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch)
|
||||
-- -> Hash
|
||||
-- -> m (Branch m)
|
||||
-- cachedRead cache deserializeRaw deserializeEdits h =
|
||||
-- Branch <$> Causal.cachedRead cache d h
|
||||
-- where
|
||||
-- fromRaw :: Raw -> m (Branch0 m)
|
||||
-- fromRaw Raw {..} = do
|
||||
-- children <- traverse go _childrenR
|
||||
-- edits <- for _editsR $ \hash -> (hash,) . pure <$> deserializeEdits hash
|
||||
-- pure $ branch0 _termsR _typesR children edits
|
||||
-- go = cachedRead cache deserializeRaw deserializeEdits
|
||||
-- d :: Causal.Deserialize m Raw (Branch0 m)
|
||||
-- d h = deserializeRaw h >>= \case
|
||||
-- RawOne raw -> RawOne <$> fromRaw raw
|
||||
-- RawCons raw h -> flip RawCons h <$> fromRaw raw
|
||||
-- RawMerge raw hs -> flip RawMerge hs <$> fromRaw raw
|
||||
|
||||
-- sync
|
||||
-- :: Monad m
|
||||
-- => (Hash -> m Bool)
|
||||
-- -> Causal.Serialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch -> m ())
|
||||
-- -> Branch m
|
||||
-- -> m ()
|
||||
-- sync exists serializeRaw serializeEdits b = do
|
||||
-- _written <- State.execStateT (sync' exists serializeRaw serializeEdits b) mempty
|
||||
-- -- traceM $ "Branch.sync wrote " <> show (Set.size written) <> " namespace files."
|
||||
-- pure ()
|
||||
|
||||
-- -- serialize a `Branch m` indexed by the hash of its corresponding Raw
|
||||
-- sync'
|
||||
-- :: forall m
|
||||
-- . Monad m
|
||||
-- => (Hash -> m Bool)
|
||||
-- -> Causal.Serialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch -> m ())
|
||||
-- -> Branch m
|
||||
-- -> StateT (Set Hash) m ()
|
||||
-- sync' exists serializeRaw serializeEdits b = Causal.sync exists
|
||||
-- serialize0
|
||||
-- (view history b)
|
||||
-- where
|
||||
-- serialize0 :: Causal.Serialize (StateT (Set Hash) m) Raw (Branch0 m)
|
||||
-- serialize0 h b0 = case b0 of
|
||||
-- RawOne b0 -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawOne (toRaw b0)
|
||||
-- RawCons b0 ht -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawCons (toRaw b0) ht
|
||||
-- RawMerge b0 hs -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawMerge (toRaw b0) hs
|
||||
-- where
|
||||
-- writeB0 :: Branch0 m -> StateT (Set Hash) m ()
|
||||
-- writeB0 b0 = do
|
||||
-- for_ (view children b0) $ \c -> do
|
||||
-- queued <- State.get
|
||||
-- when (Set.notMember (headHash c) queued) $
|
||||
-- sync' exists serializeRaw serializeEdits c
|
||||
-- for_ (view edits b0) (lift . uncurry serializeEdits)
|
||||
|
||||
-- -- this has to serialize the branch0 and its descendants in the tree,
|
||||
-- -- and then serialize the rest of the history of the branch as well
|
||||
|
||||
-- toRaw :: Branch0 m -> Raw
|
||||
-- toRaw Branch0 {..} =
|
||||
-- Raw _terms _types (headHash <$> _children) (fst <$> _edits)
|
||||
|
||||
-- toCausalRaw :: Branch m -> Causal.Raw Raw Raw
|
||||
-- toCausalRaw = \case
|
||||
-- Branch (Causal.One _h e) -> RawOne (toRaw e)
|
||||
-- Branch (Causal.Cons _h e (ht, _m)) -> RawCons (toRaw e) ht
|
||||
-- Branch (Causal.Merge _h e tls) -> RawMerge (toRaw e) (Map.keysSet tls)
|
||||
|
||||
-- -- -- copy a path to another path
|
||||
-- -- fork
|
||||
-- -- :: Applicative m
|
||||
-- -- => Path
|
||||
-- -- -> Path
|
||||
-- -- -> Branch m
|
||||
-- -- -> Either ForkFailure (Branch m)
|
||||
-- -- fork src dest root = case getAt src root of
|
||||
-- -- Nothing -> Left SrcNotFound
|
||||
-- -- Just src' -> case setIfNotExists dest src' root of
|
||||
-- -- Nothing -> Left DestExists
|
||||
-- -- Just root' -> Right root'
|
||||
|
||||
-- -- -- Move the node at src to dest.
|
||||
-- -- -- It's okay if `dest` is inside `src`, just create empty levels.
|
||||
-- -- -- Try not to `step` more than once at each node.
|
||||
-- -- move :: Applicative m
|
||||
-- -- => Path
|
||||
-- -- -> Path
|
||||
-- -- -> Branch m
|
||||
-- -- -> Either ForkFailure (Branch m)
|
||||
-- -- move src dest root = case getAt src root of
|
||||
-- -- Nothing -> Left SrcNotFound
|
||||
-- -- Just src' ->
|
||||
-- -- -- make sure dest doesn't already exist
|
||||
-- -- case getAt dest root of
|
||||
-- -- Just _destExists -> Left DestExists
|
||||
-- -- Nothing ->
|
||||
-- -- -- find and update common ancestor of `src` and `dest`:
|
||||
-- -- Right $ modifyAt ancestor go root
|
||||
-- -- where
|
||||
-- -- (ancestor, relSrc, relDest) = Path.relativeToAncestor src dest
|
||||
-- -- go = deleteAt relSrc . setAt relDest src'
|
||||
|
||||
-- -- setIfNotExists
|
||||
-- -- :: Applicative m => Path -> Branch m -> Branch m -> Maybe (Branch m)
|
||||
-- -- setIfNotExists dest b root = case getAt dest root of
|
||||
-- -- Just _destExists -> Nothing
|
||||
-- -- Nothing -> Just $ setAt dest b root
|
||||
|
||||
-- -- setAt :: Applicative m => Path -> Branch m -> Branch m -> Branch m
|
||||
-- -- setAt path b = modifyAt path (const b)
|
||||
|
||||
-- -- deleteAt :: Applicative m => Path -> Branch m -> Branch m
|
||||
-- -- deleteAt path = setAt path empty
|
||||
|
||||
-- -- returns `Nothing` if no Branch at `path` or if Branch is empty at `path`
|
||||
-- getAt :: Path
|
||||
-- -> Branch m
|
||||
-- -> Maybe (Branch m)
|
||||
-- getAt path root = case Path.uncons path of
|
||||
-- Nothing -> if isEmpty root then Nothing else Just root
|
||||
-- Just (seg, path) -> case Map.lookup seg (_children $ head root) of
|
||||
-- Just b -> getAt path b
|
||||
-- Nothing -> Nothing
|
||||
|
||||
-- getAt' :: Path -> Branch m -> Branch m
|
||||
-- getAt' p b = fromMaybe empty $ getAt p b
|
||||
|
||||
-- -- getAt0 :: Path -> Branch0 m -> Branch0 m
|
||||
-- -- getAt0 p b = case Path.uncons p of
|
||||
-- -- Nothing -> b
|
||||
-- -- Just (seg, path) -> case Map.lookup seg (_children b) of
|
||||
-- -- Just c -> getAt0 path (head c)
|
||||
-- -- Nothing -> empty0
|
||||
|
||||
-- empty :: Branch m
|
||||
-- empty = Branch $ Causal.one empty0
|
||||
|
||||
-- -- one :: Branch0 m -> Branch m
|
||||
-- -- one = Branch . Causal.one
|
||||
|
||||
-- empty0 :: Branch0 m
|
||||
-- empty0 =
|
||||
-- Branch0 mempty mempty mempty mempty mempty mempty mempty mempty mempty mempty
|
||||
|
||||
-- isEmpty0 :: Branch0 m -> Bool
|
||||
-- isEmpty0 = (== empty0)
|
||||
|
||||
-- isEmpty :: Branch m -> Bool
|
||||
-- isEmpty = (== empty)
|
||||
|
||||
-- step :: Applicative m => (Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
-- step f = \case
|
||||
-- Branch (Causal.One _h e) | e == empty0 -> Branch (Causal.one (f empty0))
|
||||
-- b -> over history (Causal.stepDistinct f) b
|
||||
|
||||
-- -- stepM :: (Monad m, Monad n) => (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- -- stepM f = \case
|
||||
-- -- Branch (Causal.One _h e) | e == empty0 -> Branch . Causal.one <$> f empty0
|
||||
-- -- b -> mapMOf history (Causal.stepDistinctM f) b
|
||||
|
||||
-- cons :: Applicative m => Branch0 m -> Branch m -> Branch m
|
||||
-- cons = step . const
|
||||
|
||||
-- -- isOne :: Branch m -> Bool
|
||||
-- -- isOne (Branch Causal.One{}) = True
|
||||
-- -- isOne _ = False
|
||||
|
||||
-- -- uncons :: Applicative m => Branch m -> m (Maybe (Branch0 m, Branch m))
|
||||
-- -- uncons (Branch b) = go <$> Causal.uncons b where
|
||||
-- -- go = over (_Just . _2) Branch
|
||||
|
||||
-- -- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- -- after creating it if necessary. Preserves history.
|
||||
-- -- stepAt :: forall m. Applicative m
|
||||
-- -- => Path
|
||||
-- -- -> (Branch0 m -> Branch0 m)
|
||||
-- -- -> Branch m -> Branch m
|
||||
-- -- stepAt p f = modifyAt p g where
|
||||
-- -- g :: Branch m -> Branch m
|
||||
-- -- g (Branch b) = Branch . Causal.consDistinct (f (Causal.head b)) $ b
|
||||
|
||||
-- -- stepManyAt :: (Monad m, Foldable f)
|
||||
-- -- => f (Path, Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
-- -- stepManyAt actions = step (stepManyAt0 actions)
|
||||
|
||||
-- -- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- -- after creating it if necessary. Preserves history.
|
||||
-- -- stepAtM :: forall n m. (Functor n, Applicative m)
|
||||
-- -- => Path -> (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- -- stepAtM p f = modifyAtM p g where
|
||||
-- -- g :: Branch m -> n (Branch m)
|
||||
-- -- g (Branch b) = do
|
||||
-- -- b0' <- f (Causal.head b)
|
||||
-- -- pure $ Branch . Causal.consDistinct b0' $ b
|
||||
|
||||
-- -- stepManyAtM :: (Monad m, Monad n, Foldable f)
|
||||
-- -- => f (Path, Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- -- stepManyAtM actions = stepM (stepManyAt0M actions)
|
||||
|
||||
-- -- -- starting at the leaves, apply `f` to every level of the branch.
|
||||
-- -- stepEverywhere
|
||||
-- -- :: Applicative m => (Branch0 m -> Branch0 m) -> (Branch0 m -> Branch0 m)
|
||||
-- -- stepEverywhere f Branch0 {..} = f (branch0 _terms _types children _edits)
|
||||
-- -- where children = fmap (step $ stepEverywhere f) _children
|
||||
|
||||
-- -- -- Creates a function to fix up the children field._1
|
||||
-- -- -- If the action emptied a child, then remove the mapping,
|
||||
-- -- -- otherwise update it.
|
||||
-- -- -- Todo: Fix this in hashing & serialization instead of here?
|
||||
-- -- getChildBranch :: NameSegment -> Branch0 m -> Branch m
|
||||
-- -- getChildBranch seg b = fromMaybe empty $ Map.lookup seg (_children b)
|
||||
|
||||
-- -- setChildBranch :: NameSegment -> Branch m -> Branch0 m -> Branch0 m
|
||||
-- -- setChildBranch seg b = over children (updateChildren seg b)
|
||||
|
||||
-- -- getPatch :: Applicative m => NameSegment -> Branch0 m -> m Patch
|
||||
-- -- getPatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- -- Nothing -> pure Patch.empty
|
||||
-- -- Just (_, p) -> p
|
||||
|
||||
-- -- getMaybePatch :: Applicative m => NameSegment -> Branch0 m -> m (Maybe Patch)
|
||||
-- -- getMaybePatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- -- Nothing -> pure Nothing
|
||||
-- -- Just (_, p) -> Just <$> p
|
||||
|
||||
-- -- modifyPatches
|
||||
-- -- :: Monad m => NameSegment -> (Patch -> Patch) -> Branch0 m -> m (Branch0 m)
|
||||
-- -- modifyPatches seg f = mapMOf edits update
|
||||
-- -- where
|
||||
-- -- update m = do
|
||||
-- -- p' <- case Map.lookup seg m of
|
||||
-- -- Nothing -> pure $ f Patch.empty
|
||||
-- -- Just (_, p) -> f <$> p
|
||||
-- -- let h = H.accumulate' p'
|
||||
-- -- pure $ Map.insert seg (h, pure p') m
|
||||
|
||||
-- -- replacePatch :: Applicative m => NameSegment -> Patch -> Branch0 m -> Branch0 m
|
||||
-- -- replacePatch n p = over edits (Map.insert n (H.accumulate' p, pure p))
|
||||
|
||||
-- -- deletePatch :: NameSegment -> Branch0 m -> Branch0 m
|
||||
-- -- deletePatch n = over edits (Map.delete n)
|
||||
|
||||
-- -- updateChildren ::NameSegment
|
||||
-- -- -> Branch m
|
||||
-- -- -> Map NameSegment (Branch m)
|
||||
-- -- -> Map NameSegment (Branch m)
|
||||
-- -- updateChildren seg updatedChild =
|
||||
-- -- if isEmpty updatedChild
|
||||
-- -- then Map.delete seg
|
||||
-- -- else Map.insert seg updatedChild
|
||||
|
||||
-- -- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- -- modifyAt :: Applicative m
|
||||
-- -- => Path -> (Branch m -> Branch m) -> Branch m -> Branch m
|
||||
-- -- modifyAt path f = runIdentity . modifyAtM path (pure . f)
|
||||
|
||||
-- -- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- -- modifyAtM
|
||||
-- -- :: forall n m
|
||||
-- -- . Functor n
|
||||
-- -- => Applicative m -- because `Causal.cons` uses `pure`
|
||||
-- -- => Path
|
||||
-- -- -> (Branch m -> n (Branch m))
|
||||
-- -- -> Branch m
|
||||
-- -- -> n (Branch m)
|
||||
-- -- modifyAtM path f b = case Path.uncons path of
|
||||
-- -- Nothing -> f b
|
||||
-- -- Just (seg, path) -> do -- Functor
|
||||
-- -- let child = getChildBranch seg (head b)
|
||||
-- -- child' <- modifyAtM path f child
|
||||
-- -- -- step the branch by updating its children according to fixup
|
||||
-- -- pure $ step (setChildBranch seg child') b
|
||||
|
||||
-- -- -- stepManyAt0 consolidates several changes into a single step
|
||||
-- -- stepManyAt0 :: forall f m . (Monad m, Foldable f)
|
||||
-- -- => f (Path, Branch0 m -> Branch0 m)
|
||||
-- -- -> Branch0 m -> Branch0 m
|
||||
-- -- stepManyAt0 actions =
|
||||
-- -- runIdentity . stepManyAt0M [ (p, pure . f) | (p,f) <- toList actions ]
|
||||
|
||||
-- -- stepManyAt0M :: forall m n f . (Monad m, Monad n, Foldable f)
|
||||
-- -- => f (Path, Branch0 m -> n (Branch0 m))
|
||||
-- -- -> Branch0 m -> n (Branch0 m)
|
||||
-- -- stepManyAt0M actions b = go (toList actions) b where
|
||||
-- -- go :: [(Path, Branch0 m -> n (Branch0 m))] -> Branch0 m -> n (Branch0 m)
|
||||
-- -- go actions b = let
|
||||
-- -- -- combines the functions that apply to this level of the tree
|
||||
-- -- currentAction b = foldM (\b f -> f b) b [ f | (Path.Empty, f) <- actions ]
|
||||
|
||||
-- -- -- groups the actions based on the child they apply to
|
||||
-- -- childActions :: Map NameSegment [(Path, Branch0 m -> n (Branch0 m))]
|
||||
-- -- childActions =
|
||||
-- -- List.multimap [ (seg, (rest,f)) | (seg :< rest, f) <- actions ]
|
||||
|
||||
-- -- -- alters the children of `b` based on the `childActions` map
|
||||
-- -- stepChildren :: Map NameSegment (Branch m) -> n (Map NameSegment (Branch m))
|
||||
-- -- stepChildren children0 = foldM g children0 $ Map.toList childActions
|
||||
-- -- where
|
||||
-- -- g children (seg, actions) = do
|
||||
-- -- -- Recursively applies the relevant actions to the child branch
|
||||
-- -- -- The `findWithDefault` is important - it allows the stepManyAt
|
||||
-- -- -- to create new children at paths that don't previously exist.
|
||||
-- -- child <- stepM (go actions) (Map.findWithDefault empty seg children0)
|
||||
-- -- pure $ updateChildren seg child children
|
||||
-- -- in do
|
||||
-- -- c2 <- stepChildren (view children b)
|
||||
-- -- currentAction (set children c2 b)
|
||||
|
||||
-- instance Hashable (Branch0 m) where
|
||||
-- tokens b =
|
||||
-- [ H.accumulateToken (_terms b)
|
||||
-- , H.accumulateToken (_types b)
|
||||
-- , H.accumulateToken (headHash <$> _children b)
|
||||
-- , H.accumulateToken (fst <$> _edits b)
|
||||
-- ]
|
||||
|
||||
-- -- -- getLocalBranch :: Hash -> IO Branch
|
||||
-- -- -- getGithubBranch :: RemotePath -> IO Branch
|
||||
-- -- -- getLocalEdit :: GUID -> IO Patch
|
||||
|
||||
-- -- -- todo: consider inlining these into Actions2
|
||||
-- -- addTermName
|
||||
-- -- :: Referent -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- -- addTermName r new md =
|
||||
-- -- over terms (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- -- addTypeName
|
||||
-- -- :: Reference -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- -- addTypeName r new md =
|
||||
-- -- over types (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- -- -- addTermNameAt :: Path.Split -> Referent -> Branch0 m -> Branch0 m
|
||||
-- -- -- addTypeNameAt :: Path.Split -> Reference -> Branch0 m -> Branch0 m
|
||||
|
||||
-- -- deleteTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- -- deleteTermName r n b | Star3.memberD1 (r,n) (view terms b)
|
||||
-- -- = over terms (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- -- deleteTermName _ _ b = b
|
||||
|
||||
-- -- deleteTypeName :: Reference -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- -- deleteTypeName r n b | Star3.memberD1 (r,n) (view types b)
|
||||
-- -- = over types (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- -- deleteTypeName _ _ b = b
|
||||
|
||||
-- -- namesDiff :: Branch m -> Branch m -> Names.Diff
|
||||
-- -- namesDiff b1 b2 = Names.diff0 (toNames0 (head b1)) (toNames0 (head b2))
|
||||
|
||||
-- lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m))
|
||||
-- lca (Branch a) (Branch b) = fmap Branch <$> Causal.lca a b
|
||||
|
||||
-- diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff
|
||||
-- diff0 old new = do
|
||||
-- newEdits <- sequenceA $ snd <$> _edits new
|
||||
-- oldEdits <- sequenceA $ snd <$> _edits old
|
||||
-- let diffEdits = Map.merge (Map.mapMissing $ \_ p -> Patch.diff p mempty)
|
||||
-- (Map.mapMissing $ \_ p -> Patch.diff mempty p)
|
||||
-- (Map.zipWithMatched (const Patch.diff))
|
||||
-- newEdits
|
||||
-- oldEdits
|
||||
-- pure $ BranchDiff
|
||||
-- { addedTerms = Star3.difference (_terms new) (_terms old)
|
||||
-- , removedTerms = Star3.difference (_terms old) (_terms new)
|
||||
-- , addedTypes = Star3.difference (_types new) (_types old)
|
||||
-- , removedTypes = Star3.difference (_types old) (_types new)
|
||||
-- , changedPatches = diffEdits
|
||||
-- }
|
||||
|
||||
-- transform :: Functor m => (forall a . m a -> n a) -> Branch m -> Branch n
|
||||
-- transform f b = case _history b of
|
||||
-- causal -> Branch . Causal.transform f $ transformB0s f causal
|
||||
-- where
|
||||
-- transformB0 :: Functor m => (forall a . m a -> n a) -> Branch0 m -> Branch0 n
|
||||
-- transformB0 f b =
|
||||
-- b { _children = transform f <$> _children b
|
||||
-- , _edits = second f <$> _edits b
|
||||
-- }
|
||||
|
||||
-- transformB0s :: Functor m => (forall a . m a -> n a)
|
||||
-- -> Causal m Raw (Branch0 m)
|
||||
-- -> Causal m Raw (Branch0 n)
|
||||
-- transformB0s f = Causal.unsafeMapHashPreserving (transformB0 f)
|
||||
|
||||
-- -- data BranchAttentions = BranchAttentions
|
||||
-- -- { -- Patches that were edited on the right but entirely removed on the left.
|
||||
-- -- removedPatchEdited :: [Name]
|
||||
-- -- -- Patches that were edited on the left but entirely removed on the right.
|
||||
-- -- , editedPatchRemoved :: [Name]
|
||||
-- -- }
|
||||
|
||||
-- -- instance Semigroup BranchAttentions where
|
||||
-- -- BranchAttentions edited1 removed1 <> BranchAttentions edited2 removed2
|
||||
-- -- = BranchAttentions (edited1 <> edited2) (removed1 <> removed2)
|
||||
|
||||
-- -- instance Monoid BranchAttentions where
|
||||
-- -- mempty = BranchAttentions [] []
|
||||
-- -- mappend = (<>)
|
||||
|
||||
-- -- data RefCollisions =
|
||||
-- -- RefCollisions { termCollisions :: Relation Name Name
|
||||
-- -- , typeCollisions :: Relation Name Name
|
||||
-- -- } deriving (Eq, Show)
|
||||
|
||||
-- -- instance Semigroup RefCollisions where
|
||||
-- -- (<>) = mappend
|
||||
-- -- instance Monoid RefCollisions where
|
||||
-- -- mempty = RefCollisions mempty mempty
|
||||
-- -- mappend r1 r2 = RefCollisions (termCollisions r1 <> termCollisions r2)
|
||||
-- -- (typeCollisions r1 <> typeCollisions r2)
|
771
parser-typechecker/src/Unison/Codebase/Branch/Names.hs
Normal file
771
parser-typechecker/src/Unison/Codebase/Branch/Names.hs
Normal file
@ -0,0 +1,771 @@
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Branch.Names
|
||||
( findHistoricalHQs,
|
||||
findHistoricalRefs,
|
||||
findHistoricalRefs',
|
||||
namesDiff,
|
||||
toNames0,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Prelude hiding (empty)
|
||||
|
||||
import Prelude hiding (head,read,subtract)
|
||||
|
||||
import Control.Lens hiding ( children, cons, transform, uncons )
|
||||
import qualified Control.Monad.State as State
|
||||
import Control.Monad.State ( StateT )
|
||||
import Data.Bifunctor ( second )
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Map.Merge.Lazy as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Codebase.Patch as Patch
|
||||
import Unison.Codebase.Patch ( Patch )
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Causal.FoldHistory as Causal
|
||||
import Unison.Codebase.Causal ( Causal
|
||||
, pattern RawOne
|
||||
, pattern RawCons
|
||||
, pattern RawMerge
|
||||
)
|
||||
import Unison.Codebase.Path ( Path(..) )
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import Unison.NameSegment ( NameSegment )
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
import qualified Unison.Codebase.Metadata as Metadata
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Hashable ( Hashable )
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Name ( Name(..) )
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names2 as Names
|
||||
import qualified Unison.Names3 as Names
|
||||
import Unison.Names2 ( Names'(Names), Names0 )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Reference as Reference
|
||||
|
||||
import qualified U.Util.Cache as Cache
|
||||
import qualified Unison.Util.Relation as R
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import qualified Unison.Util.Relation4 as R4
|
||||
import qualified Unison.Util.List as List
|
||||
import Unison.Util.Map ( unionWithM )
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import Unison.HashQualified (HashQualified)
|
||||
import qualified Unison.LabeledDependency as LD
|
||||
import Unison.LabeledDependency (LabeledDependency)
|
||||
import Unison.Codebase.Branch
|
||||
|
||||
-- | Flatten a branch head into a 'Names0', swapping the deep term/type
-- relations from (ref, name) orientation into (name, ref) orientation.
toNames0 :: Branch0 m -> Names0
toNames0 b =
  let termNames = R.swap (deepTerms b)
      typeNames = R.swap (deepTypes b)
  in Names termNames typeNames
|
||||
|
||||
-- | Resolve 'ShortHash' prefix queries against the branch's history.
-- Searching for a given ShortHash stops once any term or type in any
-- Branch0 satisfies it.
findHistoricalSHs
  :: Monad m => Set ShortHash -> Branch m -> m (Set ShortHash, Names0)
findHistoricalSHs = findInHistory termMatch typeMatch
  where
    termMatch sh r _n = SH.isPrefixOf sh (Referent.toShortHash r)
    typeMatch sh r _n = SH.isPrefixOf sh (Reference.toShortHash r)
|
||||
|
||||
-- | Resolve 'HashQualified' name queries against the branch's history.
-- Searching for a given HashQualified stops once any term or type in any
-- Branch0 satisfies it.
findHistoricalHQs :: Monad m
                  => Set (HashQualified Name)
                  -> Branch m
                  -> m (Set (HashQualified Name), Names0)
findHistoricalHQs = findInHistory termMatch typeMatch
  where
    termMatch hq r n = HQ.matchesNamedReferent n r hq
    typeMatch hq r n = HQ.matchesNamedReference n r hq
|
||||
|
||||
-- | Resolve 'LabeledDependency' queries against the branch's history.
findHistoricalRefs :: Monad m => Set LabeledDependency -> Branch m
                   -> m (Set LabeledDependency, Names0)
findHistoricalRefs = findInHistory termMatch typeMatch
  where
    -- A term occurrence can only match the referent side of the query.
    termMatch query r _n = LD.fold (const False) (== r) query
    -- A type occurrence can only match the reference side of the query.
    typeMatch query r _n = LD.fold (== r) (const False) query
|
||||
|
||||
-- | Like 'findHistoricalRefs', but the queries are plain 'Reference's;
-- a term occurrence matches only when it is a direct @Referent.Ref@.
findHistoricalRefs' :: Monad m => Set Reference -> Branch m
                    -> m (Set Reference, Names0)
findHistoricalRefs' = findInHistory termMatch typeMatch
  where
    termMatch queryRef r _n = Referent.Ref queryRef == r
    typeMatch queryRef r _n = queryRef == r
|
||||
|
||||
-- | Walk the branch's history, checking each still-unresolved query against
-- the deep terms and types of every Branch0 encountered, until every query
-- has been satisfied or the history is exhausted. Returns the queries that
-- were never resolved, along with the accumulated 'Names0'.
findInHistory :: forall m q. (Monad m, Ord q)
  => (q -> Referent -> Name -> Bool)
  -> (q -> Reference -> Name -> Bool)
  -> Set q -> Branch m -> m (Set q, Names0)
findInHistory termMatches typeMatches queries b =
  Causal.foldHistoryUntil step (queries, mempty) (_history b) <&> \case
    -- Every query was satisfied somewhere in the history.
    Causal.Satisfied (_, names) -> (mempty, names)
    -- History ran out first: report the queries that never matched.
    -- Could do something more sophisticated here later to report that some
    -- query couldn't be found anywhere in the history; for now, the normal
    -- thing happens when it doesn't show up in the namespace.
    Causal.Unsatisfied (missing, names) -> (missing, names)
  where
    -- Process one Branch0: thread the (pending queries, names) state through
    -- each pending query; signal the fold to stop once nothing is pending.
    step st@(pending, _) b0 = (st', null pending')
      where
        st'@(pending', _) = foldl' resolve st pending
        -- In order not to favor terms over types, each query is checked
        -- against both the terms and the types of this Branch0; a match
        -- removes the query from the pending set and records the name.
        resolve :: (Set q, Names0) -> q -> (Set q, Names0)
        resolve s q =
          foldl' (checkType q)
                 (foldl' (checkTerm q) s (R.toList (deepTerms b0)))
                 (R.toList (deepTypes b0))
        checkTerm q s@(pend, ns) (r, n)
          | termMatches q r n = (Set.delete q pend, Names.addTerm n r ns)
          | otherwise         = s
        checkType q s@(pend, ns) (r, n)
          | typeMatches q r n = (Set.delete q pend, Names.addType n r ns)
          | otherwise         = s
|
||||
|
||||
-- deepReferents :: Branch0 m -> Set Referent
|
||||
-- deepReferents = R.dom . deepTerms
|
||||
|
||||
-- deepTypeReferences :: Branch0 m -> Set Reference
|
||||
-- deepTypeReferences = R.dom . deepTypes
|
||||
|
||||
-- terms :: Lens' (Branch0 m) (Star Referent NameSegment)
|
||||
-- terms = lens _terms (\Branch0{..} x -> branch0 x _types _children _edits)
|
||||
|
||||
-- types :: Lens' (Branch0 m) (Star Reference NameSegment)
|
||||
-- types = lens _types (\Branch0{..} x -> branch0 _terms x _children _edits)
|
||||
|
||||
-- children :: Lens' (Branch0 m) (Map NameSegment (Branch m))
|
||||
-- children = lens _children (\Branch0{..} x -> branch0 _terms _types x _edits)
|
||||
|
||||
-- -- -- creates a Branch0 from the primary fields and derives the others.
|
||||
-- -- branch0 :: Metadata.Star Referent NameSegment
|
||||
-- -- -> Metadata.Star Reference NameSegment
|
||||
-- -- -> Map NameSegment (Branch m)
|
||||
-- -- -> Map NameSegment (EditHash, m Patch)
|
||||
-- -- -> Branch0 m
|
||||
-- -- branch0 terms types children edits =
|
||||
-- -- Branch0 terms types children edits
|
||||
-- -- deepTerms' deepTypes'
|
||||
-- -- deepTermMetadata' deepTypeMetadata'
|
||||
-- -- deepPaths' deepEdits'
|
||||
-- -- where
|
||||
-- -- nameSegToName = Name.unsafeFromText . NameSegment.toText
|
||||
-- -- deepTerms' = (R.mapRan nameSegToName . Star3.d1) terms
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where
|
||||
-- -- go (nameSegToName -> n, b) =
|
||||
-- -- R.mapRan (Name.joinDot n) (deepTerms $ head b) -- could use mapKeysMonotonic
|
||||
-- -- deepTypes' = (R.mapRan nameSegToName . Star3.d1) types
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where
|
||||
-- -- go (nameSegToName -> n, b) =
|
||||
-- -- R.mapRan (Name.joinDot n) (deepTypes $ head b) -- could use mapKeysMonotonic
|
||||
-- -- deepTermMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 terms)
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where
|
||||
-- -- go (nameSegToName -> n, b) =
|
||||
-- -- R4.mapD2 (Name.joinDot n) (deepTermMetadata $ head b)
|
||||
-- -- deepTypeMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 types)
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where
|
||||
-- -- go (nameSegToName -> n, b) =
|
||||
-- -- R4.mapD2 (Name.joinDot n) (deepTypeMetadata $ head b)
|
||||
-- -- deepPaths' = Set.map Path.singleton (Map.keysSet children)
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where go (nameSeg, b) = Set.map (Path.cons nameSeg) (deepPaths $ head b)
|
||||
-- -- deepEdits' = Map.mapKeys nameSegToName (Map.map fst edits)
|
||||
-- -- <> foldMap go (Map.toList children)
|
||||
-- -- where
|
||||
-- -- go (nameSeg, b) =
|
||||
-- -- Map.mapKeys (nameSegToName nameSeg `Name.joinDot`) . deepEdits $ head b
|
||||
|
||||
-- -- head :: Branch m -> Branch0 m
|
||||
-- -- head (Branch c) = Causal.head c
|
||||
|
||||
-- headHash :: Branch m -> Hash
|
||||
-- headHash (Branch c) = Causal.currentHash c
|
||||
|
||||
-- deepEdits' :: Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- deepEdits' b = go id b where
|
||||
-- -- can change this to an actual prefix once Name is a [NameSegment]
|
||||
-- go :: (Name -> Name) -> Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- go addPrefix Branch0{..} =
|
||||
-- Map.mapKeysMonotonic (addPrefix . Name.fromSegment) _edits
|
||||
-- <> foldMap f (Map.toList _children)
|
||||
-- where
|
||||
-- f :: (NameSegment, Branch m) -> Map Name (EditHash, m Patch)
|
||||
-- f (c, b) = go (addPrefix . Name.joinDot (Name.fromSegment c)) (head b)
|
||||
|
||||
-- data MergeMode = RegularMerge | SquashMerge deriving (Eq,Ord,Show)
|
||||
|
||||
-- merge :: forall m . Monad m => Branch m -> Branch m -> m (Branch m)
|
||||
-- merge = merge' RegularMerge
|
||||
|
||||
-- -- Discards the history of a Branch0's children, recursively
|
||||
-- discardHistory0 :: Applicative m => Branch0 m -> Branch0 m
|
||||
-- discardHistory0 = over children (fmap tweak) where
|
||||
-- tweak b = cons (discardHistory0 (head b)) empty
|
||||
|
||||
-- merge' :: forall m . Monad m => MergeMode -> Branch m -> Branch m -> m (Branch m)
|
||||
-- merge' = merge'' lca
|
||||
|
||||
-- merge'' :: forall m . Monad m
|
||||
-- => (Branch m -> Branch m -> m (Maybe (Branch m))) -- lca calculator
|
||||
-- -> MergeMode
|
||||
-- -> Branch m
|
||||
-- -> Branch m
|
||||
-- -> m (Branch m)
|
||||
-- merge'' _ _ b1 b2 | isEmpty b1 = pure b2
|
||||
-- merge'' _ mode b1 b2 | isEmpty b2 = case mode of
|
||||
-- RegularMerge -> pure b1
|
||||
-- SquashMerge -> pure $ cons (discardHistory0 (head b1)) b2
|
||||
-- merge'' lca mode (Branch x) (Branch y) =
|
||||
-- Branch <$> case mode of
|
||||
-- RegularMerge -> Causal.threeWayMerge' lca' combine x y
|
||||
-- SquashMerge -> Causal.squashMerge' lca' (pure . discardHistory0) combine x y
|
||||
-- where
|
||||
-- lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
-- combine :: Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
-- combine Nothing l r = merge0 lca mode l r
|
||||
-- combine (Just ca) l r = do
|
||||
-- dl <- diff0 ca l
|
||||
-- dr <- diff0 ca r
|
||||
-- head0 <- apply ca (dl <> dr)
|
||||
-- children <- Map.mergeA
|
||||
-- (Map.traverseMaybeMissing $ combineMissing ca)
|
||||
-- (Map.traverseMaybeMissing $ combineMissing ca)
|
||||
-- (Map.zipWithAMatched $ const (merge'' lca mode))
|
||||
-- (_children l) (_children r)
|
||||
-- pure $ branch0 (_terms head0) (_types head0) children (_edits head0)
|
||||
|
||||
-- combineMissing ca k cur =
|
||||
-- case Map.lookup k (_children ca) of
|
||||
-- Nothing -> pure $ Just cur
|
||||
-- Just old -> do
|
||||
-- nw <- merge'' lca mode (cons empty0 old) cur
|
||||
-- if isEmpty0 $ head nw
|
||||
-- then pure Nothing
|
||||
-- else pure $ Just nw
|
||||
|
||||
-- apply :: Branch0 m -> BranchDiff -> m (Branch0 m)
|
||||
-- apply b0 BranchDiff {..} = do
|
||||
-- patches <- sequenceA
|
||||
-- $ Map.differenceWith patchMerge (pure @m <$> _edits b0) changedPatches
|
||||
-- let newPatches = makePatch <$> Map.difference changedPatches (_edits b0)
|
||||
-- makePatch Patch.PatchDiff {..} =
|
||||
-- let p = Patch.Patch _addedTermEdits _addedTypeEdits
|
||||
-- in (H.accumulate' p, pure p)
|
||||
-- pure $ branch0 (Star3.difference (_terms b0) removedTerms <> addedTerms)
|
||||
-- (Star3.difference (_types b0) removedTypes <> addedTypes)
|
||||
-- (_children b0)
|
||||
-- (patches <> newPatches)
|
||||
-- patchMerge mhp Patch.PatchDiff {..} = Just $ do
|
||||
-- (_, mp) <- mhp
|
||||
-- p <- mp
|
||||
-- let np = Patch.Patch
|
||||
-- { _termEdits = R.difference (Patch._termEdits p) _removedTermEdits
|
||||
-- <> _addedTermEdits
|
||||
-- , _typeEdits = R.difference (Patch._typeEdits p) _removedTypeEdits
|
||||
-- <> _addedTypeEdits
|
||||
-- }
|
||||
-- pure (H.accumulate' np, pure np)
|
||||
|
||||
-- -- `before' lca b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- -- It's defined as: lca b1 b2 == Just b1
|
||||
-- before' :: Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-- -> Branch m -> Branch m -> m Bool
|
||||
-- before' lca (Branch x) (Branch y) = Causal.before' lca' x y
|
||||
-- where
|
||||
-- lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
|
||||
-- -- `before b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- before :: Monad m => Branch m -> Branch m -> m Bool
|
||||
-- before (Branch b1) (Branch b2) = Causal.before b1 b2
|
||||
|
||||
-- merge0 :: forall m. Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-- -> MergeMode -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
-- merge0 lca mode b1 b2 = do
|
||||
-- c3 <- unionWithM (merge'' lca mode) (_children b1) (_children b2)
|
||||
-- e3 <- unionWithM g (_edits b1) (_edits b2)
|
||||
-- pure $ branch0 (_terms b1 <> _terms b2)
|
||||
-- (_types b1 <> _types b2)
|
||||
-- c3
|
||||
-- e3
|
||||
-- where
|
||||
-- g :: (EditHash, m Patch) -> (EditHash, m Patch) -> m (EditHash, m Patch)
|
||||
-- g (h1, m1) (h2, _) | h1 == h2 = pure (h1, m1)
|
||||
-- g (_, m1) (_, m2) = do
|
||||
-- e1 <- m1
|
||||
-- e2 <- m2
|
||||
-- let e3 = e1 <> e2
|
||||
-- pure (H.accumulate' e3, pure e3)
|
||||
|
||||
-- pattern Hash h = Causal.RawHash h
|
||||
|
||||
-- toList0 :: Branch0 m -> [(Path, Branch0 m)]
|
||||
-- toList0 = go Path.empty where
|
||||
-- go p b = (p, b) : (Map.toList (_children b) >>= (\(seg, cb) ->
|
||||
-- go (Path.snoc p seg) (head cb) ))
|
||||
|
||||
-- printDebugPaths :: Branch m -> String
|
||||
-- printDebugPaths = unlines . map show . Set.toList . debugPaths
|
||||
|
||||
-- debugPaths :: Branch m -> Set (Path, Hash)
|
||||
-- debugPaths = go Path.empty where
|
||||
-- go p b = Set.insert (p, headHash b) . Set.unions $
|
||||
-- [ go (Path.snoc p seg) b | (seg, b) <- Map.toList $ _children (head b) ]
|
||||
|
||||
-- data Target = TargetType | TargetTerm | TargetBranch
|
||||
-- deriving (Eq, Ord, Show)
|
||||
|
||||
-- instance Eq (Branch0 m) where
|
||||
-- a == b = view terms a == view terms b
|
||||
-- && view types a == view types b
|
||||
-- && view children a == view children b
|
||||
-- && (fmap fst . view edits) a == (fmap fst . view edits) b
|
||||
|
||||
-- data ForkFailure = SrcNotFound | DestExists
|
||||
|
||||
-- -- consider delegating to Names.numHashChars when ready to implement?
|
||||
-- -- are those enough?
|
||||
-- -- could move this to a read-only field in Branch0
|
||||
-- -- could move a Names0 to a read-only field in Branch0 until it gets too big
|
||||
-- numHashChars :: Branch m -> Int
|
||||
-- numHashChars _b = 3
|
||||
|
||||
-- -- This type is a little ugly, so we wrap it up with a nice type alias for
|
||||
-- -- use outside this module.
|
||||
-- type Cache m = Cache.Cache (Causal.RawHash Raw) (UnwrappedBranch m)
|
||||
|
||||
-- boundedCache :: MonadIO m => Word -> m (Cache m2)
|
||||
-- boundedCache = Cache.semispaceCache
|
||||
|
||||
-- -- Can use `Cache.nullCache` to disable caching if needed
|
||||
-- cachedRead :: forall m . MonadIO m
|
||||
-- => Cache m
|
||||
-- -> Causal.Deserialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch)
|
||||
-- -> Hash
|
||||
-- -> m (Branch m)
|
||||
-- cachedRead cache deserializeRaw deserializeEdits h =
|
||||
-- Branch <$> Causal.cachedRead cache d h
|
||||
-- where
|
||||
-- fromRaw :: Raw -> m (Branch0 m)
|
||||
-- fromRaw Raw {..} = do
|
||||
-- children <- traverse go _childrenR
|
||||
-- edits <- for _editsR $ \hash -> (hash,) . pure <$> deserializeEdits hash
|
||||
-- pure $ branch0 _termsR _typesR children edits
|
||||
-- go = cachedRead cache deserializeRaw deserializeEdits
|
||||
-- d :: Causal.Deserialize m Raw (Branch0 m)
|
||||
-- d h = deserializeRaw h >>= \case
|
||||
-- RawOne raw -> RawOne <$> fromRaw raw
|
||||
-- RawCons raw h -> flip RawCons h <$> fromRaw raw
|
||||
-- RawMerge raw hs -> flip RawMerge hs <$> fromRaw raw
|
||||
|
||||
-- sync
|
||||
-- :: Monad m
|
||||
-- => (Hash -> m Bool)
|
||||
-- -> Causal.Serialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch -> m ())
|
||||
-- -> Branch m
|
||||
-- -> m ()
|
||||
-- sync exists serializeRaw serializeEdits b = do
|
||||
-- _written <- State.execStateT (sync' exists serializeRaw serializeEdits b) mempty
|
||||
-- -- traceM $ "Branch.sync wrote " <> show (Set.size written) <> " namespace files."
|
||||
-- pure ()
|
||||
|
||||
-- -- serialize a `Branch m` indexed by the hash of its corresponding Raw
|
||||
-- sync'
|
||||
-- :: forall m
|
||||
-- . Monad m
|
||||
-- => (Hash -> m Bool)
|
||||
-- -> Causal.Serialize m Raw Raw
|
||||
-- -> (EditHash -> m Patch -> m ())
|
||||
-- -> Branch m
|
||||
-- -> StateT (Set Hash) m ()
|
||||
-- sync' exists serializeRaw serializeEdits b = Causal.sync exists
|
||||
-- serialize0
|
||||
-- (view history b)
|
||||
-- where
|
||||
-- serialize0 :: Causal.Serialize (StateT (Set Hash) m) Raw (Branch0 m)
|
||||
-- serialize0 h b0 = case b0 of
|
||||
-- RawOne b0 -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawOne (toRaw b0)
|
||||
-- RawCons b0 ht -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawCons (toRaw b0) ht
|
||||
-- RawMerge b0 hs -> do
|
||||
-- writeB0 b0
|
||||
-- lift $ serializeRaw h $ RawMerge (toRaw b0) hs
|
||||
-- where
|
||||
-- writeB0 :: Branch0 m -> StateT (Set Hash) m ()
|
||||
-- writeB0 b0 = do
|
||||
-- for_ (view children b0) $ \c -> do
|
||||
-- queued <- State.get
|
||||
-- when (Set.notMember (headHash c) queued) $
|
||||
-- sync' exists serializeRaw serializeEdits c
|
||||
-- for_ (view edits b0) (lift . uncurry serializeEdits)
|
||||
|
||||
-- -- this has to serialize the branch0 and its descendants in the tree,
|
||||
-- -- and then serialize the rest of the history of the branch as well
|
||||
|
||||
-- toRaw :: Branch0 m -> Raw
|
||||
-- toRaw Branch0 {..} =
|
||||
-- Raw _terms _types (headHash <$> _children) (fst <$> _edits)
|
||||
|
||||
-- toCausalRaw :: Branch m -> Causal.Raw Raw Raw
|
||||
-- toCausalRaw = \case
|
||||
-- Branch (Causal.One _h e) -> RawOne (toRaw e)
|
||||
-- Branch (Causal.Cons _h e (ht, _m)) -> RawCons (toRaw e) ht
|
||||
-- Branch (Causal.Merge _h e tls) -> RawMerge (toRaw e) (Map.keysSet tls)
|
||||
|
||||
-- -- copy a path to another path
|
||||
-- fork
|
||||
-- :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- fork src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' -> case setIfNotExists dest src' root of
|
||||
-- Nothing -> Left DestExists
|
||||
-- Just root' -> Right root'
|
||||
|
||||
-- -- Move the node at src to dest.
|
||||
-- -- It's okay if `dest` is inside `src`, just create empty levels.
|
||||
-- -- Try not to `step` more than once at each node.
|
||||
-- move :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- move src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' ->
|
||||
-- -- make sure dest doesn't already exist
|
||||
-- case getAt dest root of
|
||||
-- Just _destExists -> Left DestExists
|
||||
-- Nothing ->
|
||||
-- -- find and update common ancestor of `src` and `dest`:
|
||||
-- Right $ modifyAt ancestor go root
|
||||
-- where
|
||||
-- (ancestor, relSrc, relDest) = Path.relativeToAncestor src dest
|
||||
-- go = deleteAt relSrc . setAt relDest src'
|
||||
|
||||
-- setIfNotExists
|
||||
-- :: Applicative m => Path -> Branch m -> Branch m -> Maybe (Branch m)
|
||||
-- setIfNotExists dest b root = case getAt dest root of
|
||||
-- Just _destExists -> Nothing
|
||||
-- Nothing -> Just $ setAt dest b root
|
||||
|
||||
-- setAt :: Applicative m => Path -> Branch m -> Branch m -> Branch m
|
||||
-- setAt path b = modifyAt path (const b)
|
||||
|
||||
-- deleteAt :: Applicative m => Path -> Branch m -> Branch m
|
||||
-- deleteAt path = setAt path empty
|
||||
|
||||
-- -- returns `Nothing` if no Branch at `path` or if Branch is empty at `path`
|
||||
-- getAt :: Path
|
||||
-- -> Branch m
|
||||
-- -> Maybe (Branch m)
|
||||
-- getAt path root = case Path.uncons path of
|
||||
-- Nothing -> if isEmpty root then Nothing else Just root
|
||||
-- Just (seg, path) -> case Map.lookup seg (_children $ head root) of
|
||||
-- Just b -> getAt path b
|
||||
-- Nothing -> Nothing
|
||||
|
||||
-- getAt' :: Path -> Branch m -> Branch m
|
||||
-- getAt' p b = fromMaybe empty $ getAt p b
|
||||
|
||||
-- getAt0 :: Path -> Branch0 m -> Branch0 m
|
||||
-- getAt0 p b = case Path.uncons p of
|
||||
-- Nothing -> b
|
||||
-- Just (seg, path) -> case Map.lookup seg (_children b) of
|
||||
-- Just c -> getAt0 path (head c)
|
||||
-- Nothing -> empty0
|
||||
|
||||
-- empty :: Branch m
|
||||
-- empty = Branch $ Causal.one empty0
|
||||
|
||||
-- one :: Branch0 m -> Branch m
|
||||
-- one = Branch . Causal.one
|
||||
|
||||
-- empty0 :: Branch0 m
|
||||
-- empty0 =
|
||||
-- Branch0 mempty mempty mempty mempty mempty mempty mempty mempty mempty mempty
|
||||
|
||||
-- isEmpty0 :: Branch0 m -> Bool
|
||||
-- isEmpty0 = (== empty0)
|
||||
|
||||
-- isEmpty :: Branch m -> Bool
|
||||
-- isEmpty = (== empty)
|
||||
|
||||
-- step :: Applicative m => (Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
-- step f = \case
|
||||
-- Branch (Causal.One _h e) | e == empty0 -> Branch (Causal.one (f empty0))
|
||||
-- b -> over history (Causal.stepDistinct f) b
|
||||
|
||||
-- stepM :: (Monad m, Monad n) => (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepM f = \case
|
||||
-- Branch (Causal.One _h e) | e == empty0 -> Branch . Causal.one <$> f empty0
|
||||
-- b -> mapMOf history (Causal.stepDistinctM f) b
|
||||
|
||||
-- cons :: Applicative m => Branch0 m -> Branch m -> Branch m
|
||||
-- cons = step . const
|
||||
|
||||
-- isOne :: Branch m -> Bool
|
||||
-- isOne (Branch Causal.One{}) = True
|
||||
-- isOne _ = False
|
||||
|
||||
-- uncons :: Applicative m => Branch m -> m (Maybe (Branch0 m, Branch m))
|
||||
-- uncons (Branch b) = go <$> Causal.uncons b where
|
||||
-- go = over (_Just . _2) Branch
|
||||
|
||||
-- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- after creating it if necessary. Preserves history.
|
||||
-- stepAt :: forall m. Applicative m
|
||||
-- => Path
|
||||
-- -> (Branch0 m -> Branch0 m)
|
||||
-- -> Branch m -> Branch m
|
||||
-- stepAt p f = modifyAt p g where
|
||||
-- g :: Branch m -> Branch m
|
||||
-- g (Branch b) = Branch . Causal.consDistinct (f (Causal.head b)) $ b
|
||||
|
||||
-- stepManyAt :: (Monad m, Foldable f)
|
||||
-- => f (Path, Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
-- stepManyAt actions = step (stepManyAt0 actions)
|
||||
|
||||
-- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- after creating it if necessary. Preserves history.
|
||||
-- stepAtM :: forall n m. (Functor n, Applicative m)
|
||||
-- => Path -> (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepAtM p f = modifyAtM p g where
|
||||
-- g :: Branch m -> n (Branch m)
|
||||
-- g (Branch b) = do
|
||||
-- b0' <- f (Causal.head b)
|
||||
-- pure $ Branch . Causal.consDistinct b0' $ b
|
||||
|
||||
-- stepManyAtM :: (Monad m, Monad n, Foldable f)
|
||||
-- => f (Path, Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepManyAtM actions = stepM (stepManyAt0M actions)
|
||||
|
||||
-- -- starting at the leaves, apply `f` to every level of the branch.
|
||||
-- stepEverywhere
|
||||
-- :: Applicative m => (Branch0 m -> Branch0 m) -> (Branch0 m -> Branch0 m)
|
||||
-- stepEverywhere f Branch0 {..} = f (branch0 _terms _types children _edits)
|
||||
-- where children = fmap (step $ stepEverywhere f) _children
|
||||
|
||||
-- -- Creates a function to fix up the children field._1
|
||||
-- -- If the action emptied a child, then remove the mapping,
|
||||
-- -- otherwise update it.
|
||||
-- -- Todo: Fix this in hashing & serialization instead of here?
|
||||
-- getChildBranch :: NameSegment -> Branch0 m -> Branch m
|
||||
-- getChildBranch seg b = fromMaybe empty $ Map.lookup seg (_children b)
|
||||
|
||||
-- setChildBranch :: NameSegment -> Branch m -> Branch0 m -> Branch0 m
|
||||
-- setChildBranch seg b = over children (updateChildren seg b)
|
||||
|
||||
-- getPatch :: Applicative m => NameSegment -> Branch0 m -> m Patch
|
||||
-- getPatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- Nothing -> pure Patch.empty
|
||||
-- Just (_, p) -> p
|
||||
|
||||
-- getMaybePatch :: Applicative m => NameSegment -> Branch0 m -> m (Maybe Patch)
|
||||
-- getMaybePatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- Nothing -> pure Nothing
|
||||
-- Just (_, p) -> Just <$> p
|
||||
|
||||
-- modifyPatches
|
||||
-- :: Monad m => NameSegment -> (Patch -> Patch) -> Branch0 m -> m (Branch0 m)
|
||||
-- modifyPatches seg f = mapMOf edits update
|
||||
-- where
|
||||
-- update m = do
|
||||
-- p' <- case Map.lookup seg m of
|
||||
-- Nothing -> pure $ f Patch.empty
|
||||
-- Just (_, p) -> f <$> p
|
||||
-- let h = H.accumulate' p'
|
||||
-- pure $ Map.insert seg (h, pure p') m
|
||||
|
||||
-- replacePatch :: Applicative m => NameSegment -> Patch -> Branch0 m -> Branch0 m
|
||||
-- replacePatch n p = over edits (Map.insert n (H.accumulate' p, pure p))
|
||||
|
||||
-- deletePatch :: NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deletePatch n = over edits (Map.delete n)
|
||||
|
||||
-- updateChildren ::NameSegment
|
||||
-- -> Branch m
|
||||
-- -> Map NameSegment (Branch m)
|
||||
-- -> Map NameSegment (Branch m)
|
||||
-- updateChildren seg updatedChild =
|
||||
-- if isEmpty updatedChild
|
||||
-- then Map.delete seg
|
||||
-- else Map.insert seg updatedChild
|
||||
|
||||
-- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- modifyAt :: Applicative m
|
||||
-- => Path -> (Branch m -> Branch m) -> Branch m -> Branch m
|
||||
-- modifyAt path f = runIdentity . modifyAtM path (pure . f)
|
||||
|
||||
-- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- modifyAtM
|
||||
-- :: forall n m
|
||||
-- . Functor n
|
||||
-- => Applicative m -- because `Causal.cons` uses `pure`
|
||||
-- => Path
|
||||
-- -> (Branch m -> n (Branch m))
|
||||
-- -> Branch m
|
||||
-- -> n (Branch m)
|
||||
-- modifyAtM path f b = case Path.uncons path of
|
||||
-- Nothing -> f b
|
||||
-- Just (seg, path) -> do -- Functor
|
||||
-- let child = getChildBranch seg (head b)
|
||||
-- child' <- modifyAtM path f child
|
||||
-- -- step the branch by updating its children according to fixup
|
||||
-- pure $ step (setChildBranch seg child') b
|
||||
|
||||
-- -- stepManyAt0 consolidates several changes into a single step
|
||||
-- stepManyAt0 :: forall f m . (Monad m, Foldable f)
|
||||
-- => f (Path, Branch0 m -> Branch0 m)
|
||||
-- -> Branch0 m -> Branch0 m
|
||||
-- stepManyAt0 actions =
|
||||
-- runIdentity . stepManyAt0M [ (p, pure . f) | (p,f) <- toList actions ]
|
||||
|
||||
-- stepManyAt0M :: forall m n f . (Monad m, Monad n, Foldable f)
|
||||
-- => f (Path, Branch0 m -> n (Branch0 m))
|
||||
-- -> Branch0 m -> n (Branch0 m)
|
||||
-- stepManyAt0M actions b = go (toList actions) b where
|
||||
-- go :: [(Path, Branch0 m -> n (Branch0 m))] -> Branch0 m -> n (Branch0 m)
|
||||
-- go actions b = let
|
||||
-- -- combines the functions that apply to this level of the tree
|
||||
-- currentAction b = foldM (\b f -> f b) b [ f | (Path.Empty, f) <- actions ]
|
||||
|
||||
-- -- groups the actions based on the child they apply to
|
||||
-- childActions :: Map NameSegment [(Path, Branch0 m -> n (Branch0 m))]
|
||||
-- childActions =
|
||||
-- List.multimap [ (seg, (rest,f)) | (seg :< rest, f) <- actions ]
|
||||
|
||||
-- -- alters the children of `b` based on the `childActions` map
|
||||
-- stepChildren :: Map NameSegment (Branch m) -> n (Map NameSegment (Branch m))
|
||||
-- stepChildren children0 = foldM g children0 $ Map.toList childActions
|
||||
-- where
|
||||
-- g children (seg, actions) = do
|
||||
-- -- Recursively applies the relevant actions to the child branch
|
||||
-- -- The `findWithDefault` is important - it allows the stepManyAt
|
||||
-- -- to create new children at paths that don't previously exist.
|
||||
-- child <- stepM (go actions) (Map.findWithDefault empty seg children0)
|
||||
-- pure $ updateChildren seg child children
|
||||
-- in do
|
||||
-- c2 <- stepChildren (view children b)
|
||||
-- currentAction (set children c2 b)
|
||||
|
||||
-- instance Hashable (Branch0 m) where
|
||||
-- tokens b =
|
||||
-- [ H.accumulateToken (_terms b)
|
||||
-- , H.accumulateToken (_types b)
|
||||
-- , H.accumulateToken (headHash <$> _children b)
|
||||
-- , H.accumulateToken (fst <$> _edits b)
|
||||
-- ]
|
||||
|
||||
-- -- getLocalBranch :: Hash -> IO Branch
|
||||
-- -- getGithubBranch :: RemotePath -> IO Branch
|
||||
-- -- getLocalEdit :: GUID -> IO Patch
|
||||
|
||||
-- -- todo: consider inlining these into Actions2
|
||||
-- addTermName
|
||||
-- :: Referent -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- addTermName r new md =
|
||||
-- over terms (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- addTypeName
|
||||
-- :: Reference -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- addTypeName r new md =
|
||||
-- over types (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- -- addTermNameAt :: Path.Split -> Referent -> Branch0 m -> Branch0 m
|
||||
-- -- addTypeNameAt :: Path.Split -> Reference -> Branch0 m -> Branch0 m
|
||||
|
||||
-- deleteTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deleteTermName r n b | Star3.memberD1 (r,n) (view terms b)
|
||||
-- = over terms (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- deleteTermName _ _ b = b
|
||||
|
||||
-- deleteTypeName :: Reference -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deleteTypeName r n b | Star3.memberD1 (r,n) (view types b)
|
||||
-- = over types (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- deleteTypeName _ _ b = b
|
||||
|
||||
namesDiff :: Branch m -> Branch m -> Names.Diff
|
||||
namesDiff b1 b2 = Names.diff0 (toNames0 (head b1)) (toNames0 (head b2))
|
||||
|
||||
-- lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m))
|
||||
-- lca (Branch a) (Branch b) = fmap Branch <$> Causal.lca a b
|
||||
|
||||
-- diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff
|
||||
-- diff0 old new = do
|
||||
-- newEdits <- sequenceA $ snd <$> _edits new
|
||||
-- oldEdits <- sequenceA $ snd <$> _edits old
|
||||
-- let diffEdits = Map.merge (Map.mapMissing $ \_ p -> Patch.diff p mempty)
|
||||
-- (Map.mapMissing $ \_ p -> Patch.diff mempty p)
|
||||
-- (Map.zipWithMatched (const Patch.diff))
|
||||
-- newEdits
|
||||
-- oldEdits
|
||||
-- pure $ BranchDiff
|
||||
-- { addedTerms = Star3.difference (_terms new) (_terms old)
|
||||
-- , removedTerms = Star3.difference (_terms old) (_terms new)
|
||||
-- , addedTypes = Star3.difference (_types new) (_types old)
|
||||
-- , removedTypes = Star3.difference (_types old) (_types new)
|
||||
-- , changedPatches = diffEdits
|
||||
-- }
|
||||
|
||||
-- transform :: Functor m => (forall a . m a -> n a) -> Branch m -> Branch n
|
||||
-- transform f b = case _history b of
|
||||
-- causal -> Branch . Causal.transform f $ transformB0s f causal
|
||||
-- where
|
||||
-- transformB0 :: Functor m => (forall a . m a -> n a) -> Branch0 m -> Branch0 n
|
||||
-- transformB0 f b =
|
||||
-- b { _children = transform f <$> _children b
|
||||
-- , _edits = second f <$> _edits b
|
||||
-- }
|
||||
|
||||
-- transformB0s :: Functor m => (forall a . m a -> n a)
|
||||
-- -> Causal m Raw (Branch0 m)
|
||||
-- -> Causal m Raw (Branch0 n)
|
||||
-- transformB0s f = Causal.unsafeMapHashPreserving (transformB0 f)
|
||||
|
||||
-- data BranchAttentions = BranchAttentions
|
||||
-- { -- Patches that were edited on the right but entirely removed on the left.
|
||||
-- removedPatchEdited :: [Name]
|
||||
-- -- Patches that were edited on the left but entirely removed on the right.
|
||||
-- , editedPatchRemoved :: [Name]
|
||||
-- }
|
||||
|
||||
-- instance Semigroup BranchAttentions where
|
||||
-- BranchAttentions edited1 removed1 <> BranchAttentions edited2 removed2
|
||||
-- = BranchAttentions (edited1 <> edited2) (removed1 <> removed2)
|
||||
|
||||
-- instance Monoid BranchAttentions where
|
||||
-- mempty = BranchAttentions [] []
|
||||
-- mappend = (<>)
|
||||
|
||||
-- data RefCollisions =
|
||||
-- RefCollisions { termCollisions :: Relation Name Name
|
||||
-- , typeCollisions :: Relation Name Name
|
||||
-- } deriving (Eq, Show)
|
||||
|
||||
-- instance Semigroup RefCollisions where
|
||||
-- (<>) = mappend
|
||||
-- instance Monoid RefCollisions where
|
||||
-- mempty = RefCollisions mempty mempty
|
||||
-- mappend r1 r2 = RefCollisions (termCollisions r1 <> termCollisions r2)
|
||||
-- (typeCollisions r1 <> typeCollisions r2)
|
13
parser-typechecker/src/Unison/Codebase/BuiltinAnnotation.hs
Normal file
13
parser-typechecker/src/Unison/Codebase/BuiltinAnnotation.hs
Normal file
@ -0,0 +1,13 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation (..)) where
|
||||
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parser.Ann as Ann
|
||||
|
||||
class BuiltinAnnotation a where
|
||||
builtinAnnotation :: a
|
||||
|
||||
instance BuiltinAnnotation Ann where
|
||||
builtinAnnotation = Ann.Intrinsic
|
@ -1,7 +1,32 @@
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
|
||||
module Unison.Codebase.Causal where
|
||||
module Unison.Codebase.Causal
|
||||
( Causal (..),
|
||||
Raw (..),
|
||||
RawHash (..),
|
||||
one,
|
||||
cons,
|
||||
cons',
|
||||
consDistinct,
|
||||
uncons,
|
||||
hash,
|
||||
children,
|
||||
Deserialize,
|
||||
Serialize,
|
||||
cachedRead,
|
||||
threeWayMerge,
|
||||
threeWayMerge',
|
||||
squashMerge',
|
||||
lca,
|
||||
stepDistinct,
|
||||
stepDistinctM,
|
||||
sync,
|
||||
transform,
|
||||
unsafeMapHashPreserving,
|
||||
before,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
@ -65,27 +90,6 @@ data Causal m h e
|
||||
, tails :: Map (RawHash h) (m (Causal m h e))
|
||||
}
|
||||
|
||||
-- Convert the Causal to an adjacency matrix for debugging purposes.
|
||||
toGraph
|
||||
:: Monad m
|
||||
=> Set (RawHash h)
|
||||
-> Causal m h e
|
||||
-> m (Seq (RawHash h, RawHash h))
|
||||
toGraph seen c = case c of
|
||||
One _ _ -> pure Seq.empty
|
||||
Cons h1 _ (h2, m) -> if Set.notMember h1 seen
|
||||
then do
|
||||
tail <- m
|
||||
g <- toGraph (Set.insert h1 seen) tail
|
||||
pure $ (h1, h2) Seq.<| g
|
||||
else pure Seq.empty
|
||||
Merge h _ ts -> if Set.notMember h seen
|
||||
then do
|
||||
tails <- sequence $ Map.elems ts
|
||||
gs <- Seq.fromList <$> traverse (toGraph (Set.insert h seen)) tails
|
||||
pure $ Seq.fromList ((h, ) <$> Set.toList (Map.keysSet ts)) <> join gs
|
||||
else pure Seq.empty
|
||||
|
||||
-- A serializer `Causal m h e`. Nonrecursive -- only responsible for
|
||||
-- writing a single node of the causal structure.
|
||||
data Raw h e
|
||||
@ -93,17 +97,6 @@ data Raw h e
|
||||
| RawCons e (RawHash h)
|
||||
| RawMerge e (Set (RawHash h))
|
||||
|
||||
rawHead :: Raw h e -> e
|
||||
rawHead (RawOne e ) = e
|
||||
rawHead (RawCons e _) = e
|
||||
rawHead (RawMerge e _) = e
|
||||
|
||||
-- Don't need to deserialize the `e` to calculate `before`.
|
||||
data Tails h
|
||||
= TailsOne
|
||||
| TailsCons (RawHash h)
|
||||
| TailsMerge (Set (RawHash h))
|
||||
|
||||
type Deserialize m h e = RawHash h -> m (Raw h e)
|
||||
|
||||
cachedRead :: MonadIO m
|
||||
@ -263,37 +256,15 @@ threeWayMerge' lca combine c1 c2 = do
|
||||
done newHead =
|
||||
Merge (RawHash (hash (newHead, Map.keys children))) newHead children
|
||||
|
||||
before' :: Monad m
|
||||
=> (Causal m h e -> Causal m h e -> m (Maybe (Causal m h e)))
|
||||
-> Causal m h e
|
||||
-> Causal m h e
|
||||
-> m Bool
|
||||
before' lca a b = (== Just a) <$> lca a b
|
||||
|
||||
before :: Monad m => Causal m h e -> Causal m h e -> m Bool
|
||||
before a b = (== Just a) <$> lca a b
|
||||
|
||||
hash :: Hashable e => e -> Hash
|
||||
hash = Hashable.accumulate'
|
||||
|
||||
step :: (Applicative m, Hashable e) => (e -> e) -> Causal m h e -> Causal m h e
|
||||
step f c = f (head c) `cons` c
|
||||
|
||||
stepDistinct :: (Applicative m, Eq e, Hashable e) => (e -> e) -> Causal m h e -> Causal m h e
|
||||
stepDistinct f c = f (head c) `consDistinct` c
|
||||
|
||||
stepIf
|
||||
:: (Applicative m, Hashable e)
|
||||
=> (e -> Bool)
|
||||
-> (e -> e)
|
||||
-> Causal m h e
|
||||
-> Causal m h e
|
||||
stepIf cond f c = if cond (head c) then step f c else c
|
||||
|
||||
stepM
|
||||
:: (Applicative m, Hashable e) => (e -> m e) -> Causal m h e -> m (Causal m h e)
|
||||
stepM f c = (`cons` c) <$> f (head c)
|
||||
|
||||
stepDistinctM
|
||||
:: (Applicative m, Functor n, Eq e, Hashable e)
|
||||
=> (e -> n e) -> Causal m h e -> n (Causal m h e)
|
||||
@ -331,55 +302,3 @@ unsafeMapHashPreserving f c = case c of
|
||||
Merge h e tls -> Merge h (f e) $ Map.map (fmap $ unsafeMapHashPreserving f) tls
|
||||
|
||||
data FoldHistoryResult a = Satisfied a | Unsatisfied a deriving (Eq,Ord,Show)
|
||||
|
||||
-- foldHistoryUntil some condition on the accumulator is met,
|
||||
-- attempting to work backwards fairly through merge nodes
|
||||
-- (rather than following one back all the way to its root before working
|
||||
-- through others). Returns Unsatisfied if the condition was never satisfied,
|
||||
-- otherwise Satisfied.
|
||||
--
|
||||
-- NOTE by RÓB: this short-circuits immediately and only looks at the first
|
||||
-- entry in the history, since this operation is far too slow to be practical.
|
||||
foldHistoryUntil
|
||||
:: forall m h e a
|
||||
. (Monad m)
|
||||
=> (a -> e -> (a, Bool))
|
||||
-> a
|
||||
-> Causal m h e
|
||||
-> m (FoldHistoryResult a)
|
||||
foldHistoryUntil f a c = step a mempty (pure c) where
|
||||
step :: a -> Set (RawHash h) -> Seq (Causal m h e) -> m (FoldHistoryResult a)
|
||||
step a _seen Seq.Empty = pure (Unsatisfied a)
|
||||
step a seen (c Seq.:<| rest) | currentHash c `Set.member` seen =
|
||||
step a seen rest
|
||||
step a seen (c Seq.:<| rest) = case f a (head c) of
|
||||
(a, True ) -> pure (Satisfied a)
|
||||
(a, False) -> do
|
||||
tails <- case c of
|
||||
One{} -> pure mempty
|
||||
Cons{} ->
|
||||
let (_, t) = tail c
|
||||
in --if h `Set.member` seen
|
||||
if not (Set.null seen) then pure mempty else Seq.singleton <$> t
|
||||
Merge{} ->
|
||||
fmap Seq.fromList
|
||||
. traverse snd
|
||||
. filter (\(_, _) -> not (Set.null seen))
|
||||
. Map.toList
|
||||
$ tails c
|
||||
step a (Set.insert (currentHash c) seen) (rest <> tails)
|
||||
|
||||
hashToRaw ::
|
||||
forall m h e. Monad m => Causal m h e -> m (Map (RawHash h) [RawHash h])
|
||||
hashToRaw c = go mempty [c] where
|
||||
go :: Map (RawHash h) [RawHash h] -> [Causal m h e]
|
||||
-> m (Map (RawHash h) [RawHash h])
|
||||
go output [] = pure output
|
||||
go output (c : queue) = case c of
|
||||
One h _ -> go (Map.insert h [] output) queue
|
||||
Cons h _ (htail, mctail) -> do
|
||||
ctail <- mctail
|
||||
go (Map.insert h [htail] output) (ctail : queue)
|
||||
Merge h _ mtails -> do
|
||||
tails <- sequence mtails
|
||||
go (Map.insert h (Map.keys tails) output) (toList tails ++ queue)
|
||||
|
51
parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs
Normal file
51
parser-typechecker/src/Unison/Codebase/Causal/FoldHistory.hs
Normal file
@ -0,0 +1,51 @@
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
|
||||
module Unison.Codebase.Causal.FoldHistory (FoldHistoryResult (..), foldHistoryUntil) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.Codebase.Causal ( Causal(..), RawHash )
|
||||
import Prelude hiding (tail, head)
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Map as Map
|
||||
|
||||
data FoldHistoryResult a = Satisfied a | Unsatisfied a deriving (Eq,Ord,Show)
|
||||
|
||||
-- foldHistoryUntil some condition on the accumulator is met,
|
||||
-- attempting to work backwards fairly through merge nodes
|
||||
-- (rather than following one back all the way to its root before working
|
||||
-- through others). Returns Unsatisfied if the condition was never satisfied,
|
||||
-- otherwise Satisfied.
|
||||
--
|
||||
-- NOTE by RÓB: this short-circuits immediately and only looks at the first
|
||||
-- entry in the history, since this operation is far too slow to be practical.
|
||||
foldHistoryUntil
|
||||
:: forall m h e a
|
||||
. (Monad m)
|
||||
=> (a -> e -> (a, Bool))
|
||||
-> a
|
||||
-> Causal m h e
|
||||
-> m (FoldHistoryResult a)
|
||||
foldHistoryUntil f a c = step a mempty (pure c) where
|
||||
step :: a -> Set (RawHash h) -> Seq (Causal m h e) -> m (FoldHistoryResult a)
|
||||
step a _seen Seq.Empty = pure (Unsatisfied a)
|
||||
step a seen (c Seq.:<| rest) | currentHash c `Set.member` seen =
|
||||
step a seen rest
|
||||
step a seen (c Seq.:<| rest) = case f a (head c) of
|
||||
(a, True ) -> pure (Satisfied a)
|
||||
(a, False) -> do
|
||||
tails <- case c of
|
||||
One{} -> pure mempty
|
||||
Cons{} ->
|
||||
let (_, t) = tail c
|
||||
in --if h `Set.member` seen
|
||||
if not (Set.null seen) then pure mempty else Seq.singleton <$> t
|
||||
Merge{} ->
|
||||
fmap Seq.fromList
|
||||
. traverse snd
|
||||
. filter (\(_, _) -> not (Set.null seen))
|
||||
. Map.toList
|
||||
$ tails c
|
||||
step a (Set.insert (currentHash c) seen) (rest <> tails)
|
@ -1,40 +0,0 @@
|
||||
|
||||
module Unison.Codebase.Classes where
|
||||
-- ( GetDecls(..)
|
||||
-- , PutDecls(..)
|
||||
-- , GetBranch(..)
|
||||
-- , PutBranch(..)
|
||||
-- , GetDependents(..)
|
||||
-- ) where
|
||||
--
|
||||
--import Data.Set ( Set )
|
||||
--import Unison.Codebase.Branch ( Branch )
|
||||
--import Unison.DataDeclaration ( Decl )
|
||||
--import qualified Unison.Reference as Reference
|
||||
--import Unison.Reference ( Reference )
|
||||
--import qualified Unison.Term as Term
|
||||
--import qualified Unison.Type as Type
|
||||
--import qualified Unison.Typechecker.TypeLookup as TL
|
||||
--
|
||||
--type Term v a = Term.AnnotatedTerm v a
|
||||
--type Type v a = Type.AnnotatedType v a
|
||||
--
|
||||
--class GetDecls d m v a | d -> m v a where
|
||||
-- getTerm :: d -> Reference.Id -> m (Maybe (Term v a))
|
||||
-- getTypeOfTerm :: d -> Reference -> m (Maybe (Type v a))
|
||||
-- getTypeDeclaration :: d -> Reference.Id -> m (Maybe (Decl v a))
|
||||
-- hasTerm :: d -> Reference.Id -> m Bool
|
||||
-- hasType :: d -> Reference.Id -> m Bool
|
||||
--
|
||||
--class PutDecls d m v a | d -> m v a where
|
||||
-- putTerm :: d -> Reference.Id -> Term v a -> Type v a -> m ()
|
||||
-- putTypeDeclarationImpl :: d -> Reference.Id -> Decl v a -> m ()
|
||||
--
|
||||
--class GetBranch b m | b -> m where
|
||||
-- getRootBranch :: b -> m (Branch m)
|
||||
--
|
||||
--class PutBranch b m | b -> m where
|
||||
-- putRootBranch :: b -> Branch m -> m ()
|
||||
--
|
||||
--class GetDependents d m | d -> m where
|
||||
-- dependentsImpl :: d -> Reference -> m (Set Reference.Id)
|
@ -4,28 +4,31 @@ import Unison.Prelude
|
||||
|
||||
import Control.Monad.Morph
|
||||
import qualified Data.Map as Map
|
||||
import Unison.UnisonFile ( UnisonFile )
|
||||
import qualified Unison.UnisonFile as UF
|
||||
-- import Unison.UnisonFile ( UnisonFile )
|
||||
-- import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Term ( Term )
|
||||
import Unison.Var ( Var )
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Util.Set as Set
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
|
||||
fromUnisonFile :: (Var v, Monad m) => UnisonFile v a -> CodeLookup v m a
|
||||
fromUnisonFile uf = CodeLookup tm ty where
|
||||
tm id = pure $ Map.lookup id termMap
|
||||
ty id = pure $ Map.lookup id typeMap1 <|> Map.lookup id typeMap2
|
||||
typeMap1 = Map.fromList [ (id, Right dd) |
|
||||
(_, (Reference.DerivedId id, dd)) <-
|
||||
Map.toList (UF.dataDeclarations uf) ]
|
||||
typeMap2 = Map.fromList [ (id, Left ad) |
|
||||
(_, (Reference.DerivedId id, ad)) <-
|
||||
Map.toList (UF.effectDeclarations uf) ]
|
||||
tmm = Map.fromList (UF.terms uf)
|
||||
termMap = Map.fromList [ (id, e) |
|
||||
(_, (id, e)) <-
|
||||
Map.toList (Term.hashComponents tmm) ]
|
||||
-- fromUnisonFile :: (Var v, Monad m) => UnisonFile v a -> CodeLookup v m a
|
||||
-- fromUnisonFile uf = CodeLookup tm ty where
|
||||
-- tm id = pure $ Map.lookup id termMap
|
||||
-- ty id = pure $ Map.lookup id typeMap1 <|> Map.lookup id typeMap2
|
||||
-- typeMap1 = Map.fromList [ (id, Right dd) |
|
||||
-- (_, (Reference.DerivedId id, dd)) <-
|
||||
-- Map.toList (UF.dataDeclarations uf) ]
|
||||
-- typeMap2 = Map.fromList [ (id, Left ad) |
|
||||
-- (_, (Reference.DerivedId id, ad)) <-
|
||||
-- Map.toList (UF.effectDeclarations uf) ]
|
||||
-- tmm = Map.fromList (UF.terms uf)
|
||||
-- termMap = Map.fromList [ (id, e) |
|
||||
-- (_, (id, e)) <-
|
||||
-- Map.toList (Term.hashComponents tmm) ]
|
||||
|
||||
data CodeLookup v m a
|
||||
= CodeLookup {
|
||||
@ -55,3 +58,29 @@ instance Monad m => Monoid (CodeLookup v m a) where
|
||||
ty id = do
|
||||
o <- getTypeDeclaration c1 id
|
||||
case o of Nothing -> getTypeDeclaration c2 id; Just _ -> pure o
|
||||
|
||||
-- todo: can this be implemented in terms of TransitiveClosure.transitiveClosure?
|
||||
-- todo: add some tests on this guy?
|
||||
transitiveDependencies
|
||||
:: (Monad m, Var v)
|
||||
=> CodeLookup v m a
|
||||
-> Set Reference.Id
|
||||
-> Reference.Id
|
||||
-> m (Set Reference.Id)
|
||||
transitiveDependencies code seen0 rid = if Set.member rid seen0
|
||||
then pure seen0
|
||||
else
|
||||
let seen = Set.insert rid seen0
|
||||
getIds = Set.mapMaybe Reference.toId
|
||||
in getTerm code rid >>= \case
|
||||
Just t ->
|
||||
foldM (transitiveDependencies code) seen (getIds $ Term.dependencies t)
|
||||
Nothing ->
|
||||
getTypeDeclaration code rid >>= \case
|
||||
Nothing -> pure seen
|
||||
Just (Left ed) -> foldM (transitiveDependencies code)
|
||||
seen
|
||||
(getIds $ DD.dependencies (DD.toDataDecl ed))
|
||||
Just (Right dd) -> foldM (transitiveDependencies code)
|
||||
seen
|
||||
(getIds $ DD.dependencies dd)
|
30
parser-typechecker/src/Unison/Codebase/CodeLookup/Util.hs
Normal file
30
parser-typechecker/src/Unison/Codebase/CodeLookup/Util.hs
Normal file
@ -0,0 +1,30 @@
|
||||
module Unison.Codebase.CodeLookup.Util where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Control.Monad.Morph
|
||||
import qualified Data.Map as Map
|
||||
import Unison.Codebase.CodeLookup
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import Unison.UnisonFile.Type (UnisonFile)
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.UnisonFile.Type as UF
|
||||
|
||||
fromUnisonFile :: (Var v, Monad m) => UnisonFile v a -> CodeLookup v m a
|
||||
fromUnisonFile uf = CodeLookup tm ty where
|
||||
tm id = pure $ Map.lookup id termMap
|
||||
ty id = pure $ Map.lookup id typeMap1 <|> Map.lookup id typeMap2
|
||||
typeMap1 = Map.fromList [ (id, Right dd) |
|
||||
(_, (Reference.DerivedId id, dd)) <-
|
||||
Map.toList (UF.dataDeclarations uf) ]
|
||||
typeMap2 = Map.fromList [ (id, Left ad) |
|
||||
(_, (Reference.DerivedId id, ad)) <-
|
||||
Map.toList (UF.effectDeclarations uf) ]
|
||||
tmm = Map.fromList (UF.terms uf)
|
||||
termMap = Map.fromList [ (id, e) |
|
||||
(_, (id, e)) <-
|
||||
Map.toList (Term.hashComponents tmm) ]
|
@ -64,10 +64,10 @@ import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.UnisonFile (WatchKind)
|
||||
import Unison.Util.Relation (Relation)
|
||||
import qualified Unison.Util.Relation as Relation
|
||||
import Unison.Util.Star3 (Star3 (Star3))
|
||||
import Unison.WatchKind (WatchKind)
|
||||
|
||||
debug :: Bool
|
||||
debug = False
|
||||
|
@ -1,73 +0,0 @@
|
||||
{-# LANGUAGE PartialTypeSignatures #-}
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
{-# LANGUAGE TypeApplications #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Conversion.Upgrade12 where
|
||||
|
||||
import Control.Exception.Safe (MonadCatch)
|
||||
import Control.Lens (Lens', (&), (.~), (^.))
|
||||
import qualified Control.Lens as Lens
|
||||
import Control.Monad.Except (ExceptT (ExceptT), runExceptT)
|
||||
import qualified Control.Monad.Reader as Reader
|
||||
import Control.Monad.State (StateT (StateT, runStateT))
|
||||
import qualified Control.Monad.State as State
|
||||
import Control.Monad.Trans (lift)
|
||||
import qualified Data.Map as Map
|
||||
import qualified U.Codebase.Sync as Sync
|
||||
import Unison.Codebase (CodebasePath)
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Codebase.Branch (Branch (Branch))
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Conversion.Sync12 as Sync12
|
||||
import qualified Unison.Codebase.FileCodebase as FC
|
||||
import qualified Unison.Codebase.Init as Codebase
|
||||
import qualified Unison.Codebase.SqliteCodebase as SC
|
||||
import qualified Unison.PrettyTerminal as CT
|
||||
import Unison.UnisonFile (WatchKind)
|
||||
import qualified Unison.UnisonFile as WK
|
||||
import UnliftIO (MonadIO, liftIO)
|
||||
|
||||
syncWatchKinds :: [WatchKind]
|
||||
syncWatchKinds = [WK.TestWatch]
|
||||
|
||||
upgradeCodebase :: forall m. (MonadIO m, MonadCatch m) => CodebasePath -> m ()
|
||||
upgradeCodebase root = do
|
||||
either (liftIO . CT.putPrettyLn) pure =<< runExceptT do
|
||||
(cleanupSrc, srcCB) <- ExceptT $ Codebase.openCodebase FC.init "upgradeCodebase srcCB" root
|
||||
(cleanupDest, destCB) <- ExceptT $ Codebase.createCodebase SC.init "upgradeCodebase destCB" root
|
||||
destDB <- SC.unsafeGetConnection "upgradeCodebase destDB" root
|
||||
let env = Sync12.Env srcCB destCB destDB
|
||||
let initialState = (Sync12.emptyDoneCount, Sync12.emptyErrorCount, Sync12.emptyStatus)
|
||||
rootEntity <-
|
||||
lift (Codebase.getRootBranch srcCB) >>= \case
|
||||
Left e -> error $ "Error loading source codebase root branch: " ++ show e
|
||||
Right (Branch c) -> pure $ Sync12.C (Causal.currentHash c) (pure c)
|
||||
watchResults <-
|
||||
lift $
|
||||
concat
|
||||
<$> traverse
|
||||
(\k -> fmap (Sync12.W k) <$> Codebase.watches srcCB k)
|
||||
syncWatchKinds
|
||||
(_, _, s) <- flip Reader.runReaderT env . flip State.execStateT initialState $ do
|
||||
sync <- Sync12.sync12 (lift . lift . lift)
|
||||
Sync.sync @_ @(Sync12.Entity _)
|
||||
(Sync.transformSync (lensStateT Lens._3) sync)
|
||||
Sync12.simpleProgress
|
||||
(rootEntity : watchResults)
|
||||
lift $
|
||||
Codebase.putRootBranch destCB =<< fmap Branch case rootEntity of
|
||||
Sync12.C h mc -> case Map.lookup h (Sync12._branchStatus s) of
|
||||
Just Sync12.BranchOk -> mc
|
||||
Just (Sync12.BranchReplaced _h' c') -> pure c'
|
||||
Nothing -> error "We didn't sync the root?"
|
||||
_ -> error "The root wasn't a causal?"
|
||||
SC.shutdownConnection destDB
|
||||
lift cleanupSrc
|
||||
lift cleanupDest
|
||||
pure ()
|
||||
where
|
||||
lensStateT :: forall m s1 s2 a. Monad m => Lens' s2 s1 -> StateT s1 m a -> StateT s2 m a
|
||||
lensStateT l m = StateT \s2 -> do
|
||||
(a, s1') <- runStateT m (s2 ^. l)
|
||||
pure (a, s2 & l .~ s1')
|
@ -31,11 +31,12 @@ import Unison.Codebase.Editor.RemoteRepo
|
||||
|
||||
import Unison.Codebase.Branch ( Branch )
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Merge as Branch
|
||||
import Unison.Codebase.GitError
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Codebase.SyncMode ( SyncMode )
|
||||
import Unison.Names3 ( Names, Names0 )
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Referent ( Referent )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Result ( Note
|
||||
@ -60,6 +61,8 @@ import Unison.Name (Name)
|
||||
import Unison.Server.QueryResult (QueryResult)
|
||||
import qualified Unison.Server.SearchResult as SR
|
||||
import qualified Unison.Server.SearchResult' as SR'
|
||||
import qualified Unison.WatchKind as WK
|
||||
import Unison.Codebase.Type (GitError)
|
||||
|
||||
type AmbientAbilities v = [Type v Ann]
|
||||
type SourceName = Text
|
||||
@ -160,10 +163,10 @@ data Command m i v a where
|
||||
Evaluate1 :: PPE.PrettyPrintEnv -> UseCache -> Term v Ann -> Command m i v (Either Runtime.Error (Term v Ann))
|
||||
|
||||
-- Add a cached watch to the codebase
|
||||
PutWatch :: UF.WatchKind -> Reference.Id -> Term v Ann -> Command m i v ()
|
||||
PutWatch :: WK.WatchKind -> Reference.Id -> Term v Ann -> Command m i v ()
|
||||
|
||||
-- Loads any cached watches of the given kind
|
||||
LoadWatches :: UF.WatchKind -> Set Reference -> Command m i v [(Reference, Term v Ann)]
|
||||
LoadWatches :: WK.WatchKind -> Set Reference -> Command m i v [(Reference, Term v Ann)]
|
||||
|
||||
-- Loads a root branch from some codebase, returning `Nothing` if not found.
|
||||
-- Any definitions in the head of the requested root that aren't in the local
|
||||
@ -196,7 +199,7 @@ data Command m i v a where
|
||||
AppendToReflog :: Text -> Branch m -> Branch m -> Command m i v ()
|
||||
|
||||
-- load the reflog in file (chronological) order
|
||||
LoadReflog :: Command m i v [Reflog.Entry]
|
||||
LoadReflog :: Command m i v [Reflog.Entry Branch.Hash]
|
||||
|
||||
LoadTerm :: Reference.Id -> Command m i v (Maybe (Term v Ann))
|
||||
|
||||
@ -237,7 +240,7 @@ type UseCache = Bool
|
||||
|
||||
type EvalResult v =
|
||||
( [(v, Term v ())]
|
||||
, Map v (Ann, UF.WatchKind, Reference, Term v (), Term v (), Runtime.IsCacheHit)
|
||||
, Map v (Ann, WK.WatchKind, Reference, Term v (), Term v (), Runtime.IsCacheHit)
|
||||
)
|
||||
|
||||
lookupEvalResult :: Ord v => v -> EvalResult v -> Maybe (Term v ())
|
||||
|
@ -1,7 +1,7 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Editor.Git where
|
||||
module Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch, withIOError, withStatus) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
@ -11,16 +11,16 @@ import qualified Data.Text as Text
|
||||
import Shellmet (($?), ($^), ($|))
|
||||
import System.FilePath ((</>))
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRepo (ReadGitRepo))
|
||||
import Unison.Codebase.GitError (GitError)
|
||||
import qualified Unison.Codebase.GitError as GitError
|
||||
import Unison.CodebasePath (CodebasePath)
|
||||
import qualified Unison.Util.Exception as Ex
|
||||
import UnliftIO.Directory (XdgDirectory (XdgCache), doesDirectoryExist, findExecutable, getXdgDirectory, removeDirectoryRecursive)
|
||||
import UnliftIO.IO (hFlush, stdout)
|
||||
import qualified Data.ByteString.Base16 as ByteString
|
||||
import qualified Data.Char as Char
|
||||
import Control.Exception.Safe (catchIO, MonadCatch)
|
||||
import Unison.Codebase.GitError (GitProtocolError)
|
||||
|
||||
type CodebasePath = FilePath
|
||||
|
||||
-- https://superuser.com/questions/358855/what-characters-are-safe-in-cross-platform-file-names-for-linux-windows-and-os
|
||||
encodeFileName :: String -> FilePath
|
||||
@ -56,7 +56,7 @@ withStatus str ma = do
|
||||
|
||||
-- | Given a remote git repo url, and branch/commit hash (currently
|
||||
-- not allowed): checks for git, clones or updates a cached copy of the repo
|
||||
pullBranch :: (MonadIO m, MonadCatch m, MonadError GitError m) => ReadRepo -> m CodebasePath
|
||||
pullBranch :: (MonadIO m, MonadCatch m, MonadError GitProtocolError m) => ReadRepo -> m CodebasePath
|
||||
pullBranch repo@(ReadGitRepo uri) = do
|
||||
checkForGit
|
||||
localPath <- tempGitDir uri
|
||||
@ -64,14 +64,14 @@ pullBranch repo@(ReadGitRepo uri) = do
|
||||
-- try to update existing directory
|
||||
(ifM (isGitRepo localPath)
|
||||
(checkoutExisting localPath)
|
||||
(throwError (GitError.UnrecognizableCacheDir uri localPath)))
|
||||
(throwError (GitError.UnrecognizableCacheDir repo localPath)))
|
||||
-- directory doesn't exist, so clone anew
|
||||
(checkOutNew localPath Nothing)
|
||||
pure localPath
|
||||
|
||||
where
|
||||
-- | Do a `git clone` (for a not-previously-cached repo).
|
||||
checkOutNew :: (MonadIO m, MonadError GitError m) => CodebasePath -> Maybe Text -> m ()
|
||||
checkOutNew :: (MonadIO m, MonadError GitProtocolError m) => CodebasePath -> Maybe Text -> m ()
|
||||
checkOutNew localPath branch = do
|
||||
withStatus ("Downloading from " ++ Text.unpack uri ++ " ...") $
|
||||
(liftIO $
|
||||
@ -80,10 +80,10 @@ pullBranch repo@(ReadGitRepo uri) = do
|
||||
++ [uri, Text.pack localPath]))
|
||||
`withIOError` (throwError . GitError.CloneException repo . show)
|
||||
isGitDir <- liftIO $ isGitRepo localPath
|
||||
unless isGitDir . throwError $ GitError.UnrecognizableCheckoutDir uri localPath
|
||||
unless isGitDir . throwError $ GitError.UnrecognizableCheckoutDir repo localPath
|
||||
|
||||
-- | Do a `git pull` on a cached repo.
|
||||
checkoutExisting :: (MonadIO m, MonadCatch m, MonadError GitError m) => FilePath -> m ()
|
||||
checkoutExisting :: (MonadIO m, MonadCatch m, MonadError GitProtocolError m) => FilePath -> m ()
|
||||
checkoutExisting localPath =
|
||||
ifM (isEmptyGitRepo localPath)
|
||||
-- I don't know how to properly update from an empty remote repo.
|
||||
@ -99,7 +99,7 @@ pullBranch repo@(ReadGitRepo uri) = do
|
||||
(const $ goFromScratch))
|
||||
|
||||
where
|
||||
goFromScratch :: (MonadIO m, MonadError GitError m) => m ()
|
||||
goFromScratch :: (MonadIO m, MonadError GitProtocolError m) => m ()
|
||||
goFromScratch = do wipeDir localPath; checkOutNew localPath Nothing
|
||||
|
||||
isEmptyGitRepo :: MonadIO m => FilePath -> m Bool
|
||||
@ -113,11 +113,11 @@ pullBranch repo@(ReadGitRepo uri) = do
|
||||
e <- Ex.tryAny . whenM (doesDirectoryExist localPath) $
|
||||
removeDirectoryRecursive localPath
|
||||
case e of
|
||||
Left e -> throwError (GitError.SomeOtherError (show e))
|
||||
Left e -> throwError (GitError.CleanupError e)
|
||||
Right _ -> pure ()
|
||||
|
||||
-- | See if `git` is on the system path.
|
||||
checkForGit :: MonadIO m => MonadError GitError m => m ()
|
||||
checkForGit :: MonadIO m => MonadError GitProtocolError m => m ()
|
||||
checkForGit = do
|
||||
gitPath <- liftIO $ findExecutable "git"
|
||||
when (isNothing gitPath) $ throwError GitError.NoGit
|
||||
|
@ -26,7 +26,8 @@ import Unison.Codebase ( Codebase )
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Codebase.Branch ( Branch )
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Parser ( Ann )
|
||||
import qualified Unison.Codebase.Branch.Merge as Branch
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parser as Parser
|
||||
import qualified Unison.Parsers as Parsers
|
||||
import qualified Unison.Reference as Reference
|
||||
@ -45,6 +46,8 @@ import qualified Unison.PrettyPrintEnv as PPE
|
||||
import Unison.Term (Term)
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Codebase.Editor.AuthorInfo as AuthorInfo
|
||||
import qualified Unison.Parser.Ann as Ann
|
||||
import qualified Unison.WatchKind as WK
|
||||
|
||||
typecheck
|
||||
:: (Monad m, Var v)
|
||||
@ -170,7 +173,7 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour
|
||||
lift $ evalUnisonFile ppe uf
|
||||
AppendToReflog reason old new -> lift $ Codebase.appendReflog codebase reason old new
|
||||
LoadReflog -> lift $ Codebase.getReflog codebase
|
||||
CreateAuthorInfo t -> AuthorInfo.createAuthorInfo Parser.External t
|
||||
CreateAuthorInfo t -> AuthorInfo.createAuthorInfo Ann.External t
|
||||
HQNameQuery mayPath branch query ->
|
||||
lift $ Backend.hqNameQuery mayPath branch codebase query
|
||||
LoadSearchResults srs -> lift $ Backend.loadSearchResults codebase srs
|
||||
@ -180,8 +183,8 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour
|
||||
ClearWatchCache -> lift $ Codebase.clearWatches codebase
|
||||
|
||||
watchCache (Reference.DerivedId h) = do
|
||||
m1 <- Codebase.getWatch codebase UF.RegularWatch h
|
||||
m2 <- maybe (Codebase.getWatch codebase UF.TestWatch h) (pure . Just) m1
|
||||
m1 <- Codebase.getWatch codebase WK.RegularWatch h
|
||||
m2 <- maybe (Codebase.getWatch codebase WK.TestWatch h) (pure . Just) m1
|
||||
pure $ Term.amap (const ()) <$> m2
|
||||
watchCache Reference.Builtin{} = pure Nothing
|
||||
|
||||
@ -191,19 +194,15 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour
|
||||
cache = if useCache then watchCache else Runtime.noCache
|
||||
r <- Runtime.evaluateTerm' codeLookup cache ppe rt tm
|
||||
when useCache $ case r of
|
||||
Right tmr -> Codebase.putWatch codebase UF.RegularWatch (Term.hashClosedTerm tm)
|
||||
(Term.amap (const Parser.External) tmr)
|
||||
Right tmr -> Codebase.putWatch codebase WK.RegularWatch (Term.hashClosedTerm tm)
|
||||
(Term.amap (const Ann.External) tmr)
|
||||
Left _ -> pure ()
|
||||
pure $ r <&> Term.amap (const Parser.External)
|
||||
pure $ r <&> Term.amap (const Ann.External)
|
||||
|
||||
evalUnisonFile :: PPE.PrettyPrintEnv -> UF.TypecheckedUnisonFile v Ann -> _
|
||||
evalUnisonFile ppe (UF.discardTypes -> unisonFile) = do
|
||||
let codeLookup = Codebase.toCodeLookup codebase
|
||||
evalFile <-
|
||||
if Runtime.needsContainment rt
|
||||
then Codebase.makeSelfContained' codeLookup unisonFile
|
||||
else pure unisonFile
|
||||
r <- Runtime.evaluateWatches codeLookup ppe watchCache rt evalFile
|
||||
r <- Runtime.evaluateWatches codeLookup ppe watchCache rt unisonFile
|
||||
case r of
|
||||
Left e -> pure (Left e)
|
||||
Right rs@(_,map) -> do
|
||||
@ -211,7 +210,7 @@ commandLine config awaitInput setBranchRef rt notifyUser notifyNumbered loadSour
|
||||
if isHit then pure ()
|
||||
else case hash of
|
||||
Reference.DerivedId h -> do
|
||||
let value' = Term.amap (const Parser.External) value
|
||||
let value' = Term.amap (const Ann.External) value
|
||||
Codebase.putWatch codebase kind h value'
|
||||
Reference.Builtin{} -> pure ()
|
||||
pure $ Right rs
|
||||
|
@ -60,6 +60,8 @@ import Unison.Codebase.Branch ( Branch(..)
|
||||
, Branch0(..)
|
||||
)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Merge as Branch
|
||||
import qualified Unison.Codebase.Branch.Names as Branch
|
||||
import qualified Unison.Codebase.BranchUtil as BranchUtil
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Editor.Output.DumpNamespace as Output.DN
|
||||
@ -69,6 +71,7 @@ import qualified Unison.Codebase.Patch as Patch
|
||||
import Unison.Codebase.Path ( Path
|
||||
, Path'(..) )
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Server.SearchResult ( SearchResult )
|
||||
import qualified Unison.Server.SearchResult as SR
|
||||
@ -86,7 +89,7 @@ import Unison.Names3 ( Names(..), Names0
|
||||
, pattern Names0 )
|
||||
import qualified Unison.Names2 as Names
|
||||
import qualified Unison.Names3 as Names3
|
||||
import Unison.Parser ( Ann(..) )
|
||||
import Unison.Parser.Ann (Ann(..))
|
||||
import Unison.Reference ( Reference(..) )
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Referent ( Referent )
|
||||
@ -96,8 +99,10 @@ import qualified Unison.ShortHash as SH
|
||||
import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.Type as Type
|
||||
import qualified Unison.Type.Names as Type
|
||||
import qualified Unison.Result as Result
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Names as UF
|
||||
import qualified Unison.Util.Find as Find
|
||||
import Unison.Util.Free ( Free )
|
||||
import qualified Unison.Util.Free as Free
|
||||
@ -111,8 +116,13 @@ import qualified Unison.Var as Var
|
||||
import qualified Unison.Codebase.TypeEdit as TypeEdit
|
||||
import Unison.Codebase.TermEdit (TermEdit(..))
|
||||
import qualified Unison.Codebase.TermEdit as TermEdit
|
||||
import qualified Unison.Codebase.TermEdit.Typing as TermEdit
|
||||
import qualified Unison.Typechecker as Typechecker
|
||||
import qualified Unison.WatchKind as WK
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnv.Names as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl.Names as PPE
|
||||
import Unison.Runtime.IOSource ( isTest )
|
||||
import qualified Unison.Runtime.IOSource as IOSource
|
||||
import qualified Unison.Util.Monoid as Monoid
|
||||
@ -672,7 +682,7 @@ loop = do
|
||||
-- discontinuity in the reflog.
|
||||
convertEntries :: Maybe Branch.Hash
|
||||
-> [Output.ReflogEntry]
|
||||
-> [Reflog.Entry]
|
||||
-> [Reflog.Entry Branch.Hash]
|
||||
-> [Output.ReflogEntry]
|
||||
convertEntries _ acc [] = acc
|
||||
convertEntries Nothing acc entries@(Reflog.Entry old _ _ : _) =
|
||||
@ -1539,7 +1549,7 @@ loop = do
|
||||
| (r, Term.List' ts) <- Map.toList results
|
||||
, Term.App' (Term.Constructor' ref cid) (Term.Text' msg) <- toList ts
|
||||
, cid == DD.failConstructorId && ref == DD.testResultRef ]
|
||||
cachedTests <- fmap Map.fromList . eval $ LoadWatches UF.TestWatch testRefs
|
||||
cachedTests <- fmap Map.fromList . eval $ LoadWatches WK.TestWatch testRefs
|
||||
let stats = Output.CachedTests (Set.size testRefs) (Map.size cachedTests)
|
||||
names <- makePrintNamesFromLabeled' $
|
||||
LD.referents testTerms <>
|
||||
@ -1564,7 +1574,7 @@ loop = do
|
||||
Left e -> respond (EvaluationFailure e) $> []
|
||||
Right tm' -> do
|
||||
-- After evaluation, cache the result of the test
|
||||
eval $ PutWatch UF.TestWatch rid tm'
|
||||
eval $ PutWatch WK.TestWatch rid tm'
|
||||
respond $ TestIncrementalOutputEnd ppe (n,total) r tm'
|
||||
pure [(r, tm')]
|
||||
r -> error $ "unpossible, tests can't be builtins: " <> show r
|
||||
@ -1763,9 +1773,6 @@ loop = do
|
||||
numberedArgs .= fmap (Text.unpack . Reference.toText) ((fmap snd names) <> toList missing)
|
||||
respond $ ListDependencies hqLength ld names missing
|
||||
DebugNumberedArgsI -> use numberedArgs >>= respond . DumpNumberedArgs
|
||||
DebugBranchHistoryI ->
|
||||
eval . Notify . DumpBitBooster (Branch.headHash currentBranch') =<<
|
||||
(eval . Eval $ Causal.hashToRaw (Branch._history currentBranch'))
|
||||
DebugTypecheckedUnisonFileI -> case uf of
|
||||
Nothing -> respond NoUnisonFile
|
||||
Just uf -> let
|
||||
@ -2591,7 +2598,7 @@ doSlurpAdds slurp uf = Branch.stepManyAt0 (typeActions <> termActions)
|
||||
termActions = map doTerm . toList $
|
||||
SC.terms slurp <> Slurp.constructorsFor (SC.types slurp) uf
|
||||
names = UF.typecheckedToNames0 uf
|
||||
tests = Set.fromList $ fst <$> UF.watchesOfKind UF.TestWatch (UF.discardTypes uf)
|
||||
tests = Set.fromList $ fst <$> UF.watchesOfKind WK.TestWatch (UF.discardTypes uf)
|
||||
(isTestType, isTestValue) = isTest
|
||||
md v =
|
||||
if Set.member v tests then Metadata.singleton isTestType isTestValue
|
||||
@ -2840,7 +2847,7 @@ addWatch watchName (Just uf) = do
|
||||
(UF.dataDeclarationsId' uf)
|
||||
(UF.effectDeclarationsId' uf)
|
||||
(UF.topLevelComponents' uf)
|
||||
(UF.watchComponents uf <> [(UF.RegularWatch, [(v2, Term.var a v, ty)])]))
|
||||
(UF.watchComponents uf <> [(WK.RegularWatch, [(v2, Term.var a v, ty)])]))
|
||||
_ -> addWatch watchName Nothing
|
||||
|
||||
-- Given a typechecked file with a main function called `mainName`
|
||||
|
@ -9,11 +9,13 @@ module Unison.Codebase.Editor.Input
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Merge as Branch
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import Unison.Codebase.Path ( Path' )
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import Unison.Codebase.Editor.RemoteRepo
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
|
@ -21,12 +21,12 @@ import Unison.Server.Backend (ShallowListEntry(..))
|
||||
import Unison.Codebase.Editor.Input
|
||||
import Unison.Codebase (GetRootBranchError)
|
||||
import Unison.Codebase.Editor.SlurpResult (SlurpResult(..))
|
||||
import Unison.Codebase.GitError
|
||||
import Unison.Codebase.Path (Path')
|
||||
import Unison.Codebase.Patch (Patch)
|
||||
import Unison.Codebase.Type (GitError)
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Names2 ( Names )
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
@ -41,6 +41,7 @@ import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import qualified Unison.Parser as Parser
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.Typechecker.Context as Context
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Util.Pretty as P
|
||||
@ -49,6 +50,7 @@ import qualified Unison.Codebase.Editor.TodoOutput as TO
|
||||
import Unison.Server.SearchResult' (SearchResult')
|
||||
import Unison.Term (Term)
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Data.Set as Set
|
||||
import Unison.NameSegment (NameSegment)
|
||||
@ -57,6 +59,7 @@ import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import Unison.Codebase.Editor.RemoteRepo
|
||||
import Unison.Codebase.Editor.Output.BranchDiff (BranchDiffOutput)
|
||||
import Unison.LabeledDependency (LabeledDependency)
|
||||
import qualified Unison.WatchKind as WK
|
||||
|
||||
type ListDetailed = Bool
|
||||
type SourceName = Text
|
||||
@ -152,7 +155,7 @@ data Output v
|
||||
| Evaluated SourceFileContents
|
||||
PPE.PrettyPrintEnv
|
||||
[(v, Term v ())]
|
||||
(Map v (Ann, UF.WatchKind, Term v (), Runtime.IsCacheHit))
|
||||
(Map v (Ann, WK.WatchKind, Term v (), Runtime.IsCacheHit))
|
||||
| Typechecked SourceName PPE.PrettyPrintEnv (SlurpResult v) (UF.TypecheckedUnisonFile v Ann)
|
||||
| DisplayRendered (Maybe FilePath) (P.Pretty P.ColorText)
|
||||
-- "display" definitions, possibly to a FilePath on disk (e.g. editing)
|
||||
|
@ -14,6 +14,7 @@ import qualified Data.Set as Set
|
||||
import Unison.Codebase.Branch ( Branch0(..) )
|
||||
import Unison.Prelude
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Names as Branch
|
||||
import Unison.Codebase.Editor.Command
|
||||
import Unison.Codebase.Editor.Output
|
||||
import Unison.Codebase.Patch ( Patch(..) )
|
||||
@ -22,7 +23,7 @@ import Unison.DataDeclaration ( Decl )
|
||||
import qualified Unison.DataDeclaration as Decl
|
||||
import Unison.Names3 ( Names0 )
|
||||
import qualified Unison.Names2 as Names
|
||||
import Unison.Parser ( Ann(..) )
|
||||
import Unison.Parser.Ann (Ann(..))
|
||||
import Unison.Reference ( Reference(..) )
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
@ -39,6 +40,7 @@ import qualified Unison.Codebase.Metadata as Metadata
|
||||
import qualified Unison.Codebase.TypeEdit as TypeEdit
|
||||
import Unison.Codebase.TermEdit ( TermEdit(..) )
|
||||
import qualified Unison.Codebase.TermEdit as TermEdit
|
||||
import qualified Unison.Codebase.TermEdit.Typing as TermEdit
|
||||
import Unison.Codebase.TypeEdit ( TypeEdit(..) )
|
||||
import Unison.UnisonFile ( UnisonFile(..) )
|
||||
import qualified Unison.UnisonFile as UF
|
||||
|
@ -9,7 +9,7 @@ import Unison.Prelude
|
||||
|
||||
import Unison.Codebase.Editor.SlurpComponent (SlurpComponent(..))
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann ( Ann )
|
||||
import Unison.Var (Var)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
@ -23,6 +23,7 @@ import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.TypePrinter as TP
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Names as UF
|
||||
import qualified Unison.Util.Monoid as Monoid
|
||||
import qualified Unison.Util.Pretty as P
|
||||
import qualified Unison.Util.Relation as R
|
||||
|
@ -15,13 +15,14 @@ import Unison.Prelude
|
||||
import Unison.Codebase.MainTerm ( getMainTerm )
|
||||
import qualified Unison.Codebase.MainTerm as MainTerm
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Codebase.Runtime as Runtime
|
||||
import Unison.Codebase.Runtime ( Runtime )
|
||||
import Unison.Var ( Var )
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.Names3 as Names3
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Names as Branch
|
||||
import System.Exit (die)
|
||||
import Control.Exception (finally)
|
||||
|
||||
|
@ -6,38 +6,39 @@
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase
|
||||
(
|
||||
codebase1', -- used by Test/Git
|
||||
( codebase1', -- used by Test/Git
|
||||
Unison.Codebase.FileCodebase.init,
|
||||
openCodebase -- since init requires a bunch of irrelevant args now
|
||||
openCodebase, -- since init requires a bunch of irrelevant args now
|
||||
)
|
||||
where
|
||||
|
||||
import Control.Concurrent (forkIO, killThread)
|
||||
import Control.Exception.Safe (MonadCatch, catchIO)
|
||||
import Control.Monad.Except (ExceptT, runExceptT, throwError)
|
||||
import Control.Monad.Extra ((||^))
|
||||
import Control.Monad.Trans.Except (withExceptT)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Text as Text
|
||||
import qualified Data.Text.IO as TextIO
|
||||
import System.Directory (canonicalizePath)
|
||||
import System.FilePath (dropExtension)
|
||||
import Unison.Codebase (BuiltinAnnotation, Codebase (Codebase), CodebasePath)
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Codebase.Branch (Branch)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Control.Monad.Except (ExceptT, runExceptT, throwError)
|
||||
import Control.Monad.Extra ((||^))
|
||||
import System.FilePath ((</>))
|
||||
import System.FilePath (dropExtension, (</>))
|
||||
import qualified U.Util.Cache as Cache
|
||||
import qualified Unison.Codebase.Init as Codebase
|
||||
import Unison.Codebase.Branch (headHash)
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Codebase.BuiltinAnnotation (BuiltinAnnotation)
|
||||
import Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch, withIOError, withStatus)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), writeToRead, printWriteRepo)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), printWriteRepo, writeToRead)
|
||||
import Unison.Codebase.FileCodebase.Branch (Branch, headHash)
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import Unison.Codebase.FileCodebase.Codebase (Codebase (Codebase), CodebasePath, GitError (GitCodebaseError, GitFileCodebaseError, GitProtocolError))
|
||||
import qualified Unison.Codebase.FileCodebase.Codebase as Codebase
|
||||
import Unison.Codebase.Causal (RawHash(RawHash))
|
||||
import Unison.Codebase.FileCodebase.Common
|
||||
( Err (CantParseBranchHead),
|
||||
branchFromFiles,
|
||||
branchHashesByPrefix,
|
||||
branchHeadDir,
|
||||
codebaseExists,
|
||||
codebasePath,
|
||||
componentIdFromString,
|
||||
decodeFileName,
|
||||
dependentsDir,
|
||||
@ -67,32 +68,30 @@ import Unison.Codebase.FileCodebase.Common
|
||||
typeMentionsIndexDir,
|
||||
typeReferencesByPrefix,
|
||||
updateCausalHead,
|
||||
watchesDir, codebasePath
|
||||
watchesDir,
|
||||
)
|
||||
import qualified Unison.Codebase.FileCodebase.Common as Common
|
||||
import qualified Unison.Codebase.FileCodebase.Init as Codebase (CreateCodebaseError (..), Init (Init), Pretty)
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import qualified Unison.Codebase.FileCodebase.Serialization.V1 as V1 (formatSymbol)
|
||||
import qualified Unison.Codebase.FileCodebase.SlimCopyRegenerateIndex as Sync
|
||||
import Unison.Codebase.GitError (GitError)
|
||||
import qualified Unison.Codebase.GitError as GitError
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import qualified Unison.Codebase.Serialization as S
|
||||
import qualified Unison.Codebase.Serialization.V1 as V1
|
||||
import qualified Unison.Codebase.Reflog as Reflog (Entry (..), fromText, toText)
|
||||
import qualified Unison.Codebase.Serialization as S (Format (..))
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import qualified Unison.Codebase.Watch as Watch
|
||||
import Unison.Parser (Ann ())
|
||||
import qualified Unison.Codebase.Watch as Watch (collectUntilPause, watchDirectory')
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Symbol (Symbol)
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Util.Pretty as P
|
||||
import qualified Unison.Util.TQueue as TQueue
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Var (Var)
|
||||
import Unison.WatchKind (WatchKind)
|
||||
import UnliftIO.Directory (createDirectoryIfMissing, doesDirectoryExist, removeDirectoryRecursive)
|
||||
import UnliftIO.STM (atomically)
|
||||
|
||||
import qualified Unison.Codebase.GitError as GitError
|
||||
init :: (MonadIO m, MonadCatch m) => Codebase.Init m Symbol Ann
|
||||
init = Codebase.Init
|
||||
(const $ (fmap . fmap) (pure (),) . openCodebase)
|
||||
@ -209,14 +208,14 @@ codebase1' syncToDirectory branchCache fmtV@(S.Format getV putV) fmtA@(S.Format
|
||||
ls <- fmap decodeFileName <$> listDirectory d
|
||||
pure . Set.fromList $ ls >>= (toList . referentIdFromString)
|
||||
else pure Set.empty
|
||||
watches :: UF.WatchKind -> m [Reference.Id]
|
||||
watches :: WatchKind -> m [Reference.Id]
|
||||
watches k =
|
||||
liftIO $ do
|
||||
let wp = watchesDir path (Text.pack k)
|
||||
createDirectoryIfMissing True wp
|
||||
ls <- listDirectory wp
|
||||
pure $ ls >>= (toList . componentIdFromString . dropExtension)
|
||||
getReflog :: m [Reflog.Entry]
|
||||
getReflog :: m [Reflog.Entry Branch.Hash]
|
||||
getReflog =
|
||||
liftIO
|
||||
(do contents <- TextIO.readFile (reflogPath path)
|
||||
@ -266,26 +265,26 @@ viewRemoteBranch' :: forall m. (MonadIO m, MonadCatch m)
|
||||
=> Branch.Cache m -> ReadRemoteNamespace -> ExceptT GitError m (Branch m, CodebasePath)
|
||||
viewRemoteBranch' cache (repo, sbh, path) = do
|
||||
-- set up the cache dir
|
||||
remotePath <- time "Git fetch" $ pullBranch repo
|
||||
remotePath <- time "Git fetch" . withExceptT GitProtocolError $ pullBranch repo
|
||||
-- try to load the requested branch from it
|
||||
branch <- time "Git fetch (sbh)" $ case sbh of
|
||||
-- load the root branch
|
||||
Nothing -> lift (getRootBranch cache remotePath) >>= \case
|
||||
Left Codebase.NoRootBranch -> pure Branch.empty
|
||||
Left (Codebase.CouldntLoadRootBranch h) ->
|
||||
throwError $ GitError.CouldntLoadRootBranch repo h
|
||||
throwError . GitCodebaseError $ GitError.CouldntLoadRootBranch repo h
|
||||
Left (Codebase.CouldntParseRootBranch s) ->
|
||||
throwError $ GitError.CouldntParseRootBranch repo s
|
||||
throwError . GitFileCodebaseError $ Codebase.GitCouldntParseRootBranchHash repo s
|
||||
Right b -> pure b
|
||||
-- load from a specific `ShortBranchHash`
|
||||
Just sbh -> do
|
||||
branchCompletions <- lift $ branchHashesByPrefix remotePath sbh
|
||||
case toList branchCompletions of
|
||||
[] -> throwError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
[] -> throwError . GitCodebaseError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
[h] -> (lift $ branchFromFiles cache remotePath h) >>= \case
|
||||
Just b -> pure b
|
||||
Nothing -> throwError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
_ -> throwError $ GitError.RemoteNamespaceHashAmbiguous repo sbh branchCompletions
|
||||
Nothing -> throwError . GitCodebaseError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
_ -> throwError . GitCodebaseError $ GitError.RemoteNamespaceHashAmbiguous repo sbh branchCompletions
|
||||
pure (Branch.getAt' path branch, remotePath)
|
||||
|
||||
-- Given a branch that is "after" the existing root of a given git repo,
|
||||
@ -301,7 +300,7 @@ pushGitRootBranch
|
||||
pushGitRootBranch syncToDirectory cache branch repo syncMode = do
|
||||
-- Pull the remote repo into a staging directory
|
||||
(remoteRoot, remotePath) <- viewRemoteBranch' cache (writeToRead repo, Nothing, Path.empty)
|
||||
ifM (pure (remoteRoot == Branch.empty)
|
||||
withExceptT GitProtocolError $ ifM (pure (remoteRoot == Branch.empty)
|
||||
||^ lift (remoteRoot `Branch.before` branch))
|
||||
-- ours is newer 👍, meaning this is a fast-forward push,
|
||||
-- so sync branch to staging area
|
||||
|
783
parser-typechecker/src/Unison/Codebase/FileCodebase/Branch.hs
Normal file
783
parser-typechecker/src/Unison/Codebase/FileCodebase/Branch.hs
Normal file
@ -0,0 +1,783 @@
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Branch
|
||||
( -- * Branch types
|
||||
Branch(..)
|
||||
, UnwrappedBranch
|
||||
, Branch0(..)
|
||||
, Raw(..)
|
||||
, Star
|
||||
, Hash
|
||||
, EditHash
|
||||
, pattern Hash
|
||||
-- * Branch construction
|
||||
, empty
|
||||
, branch0
|
||||
, toCausalRaw
|
||||
, transform
|
||||
, headHash
|
||||
, before
|
||||
, merge
|
||||
-- ** Children lenses
|
||||
, children
|
||||
-- ** Children queries
|
||||
, getAt'
|
||||
-- * Branch terms/types/edits
|
||||
-- ** Term/type/edits lenses
|
||||
, terms
|
||||
, types
|
||||
, edits
|
||||
-- * Branch serialization
|
||||
, cachedRead
|
||||
, Cache
|
||||
, sync
|
||||
) where
|
||||
|
||||
import Unison.Prelude hiding (empty)
|
||||
|
||||
import Prelude hiding (head,read,subtract)
|
||||
|
||||
import Control.Lens hiding ( children, cons, transform, uncons )
|
||||
import qualified Control.Monad.State as State
|
||||
import Control.Monad.State ( StateT )
|
||||
import Data.Bifunctor ( second )
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Map.Merge.Lazy as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Codebase.FileCodebase.Patch as Patch
|
||||
import Unison.Codebase.FileCodebase.Patch (Patch)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.Causal ( Causal
|
||||
, pattern RawOne
|
||||
, pattern RawCons
|
||||
, pattern RawMerge
|
||||
)
|
||||
import Unison.Codebase.Path ( Path(..) )
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import Unison.NameSegment ( NameSegment )
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
import qualified Unison.Codebase.FileCodebase.Metadata as Metadata
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Hashable ( Hashable )
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Name ( Name(..) )
|
||||
import qualified Unison.Name as Name
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import Unison.Codebase.FileCodebase.Referent (Referent)
|
||||
|
||||
import qualified U.Util.Cache as Cache
|
||||
import qualified Unison.Util.Relation as R
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import qualified Unison.Util.Relation4 as R4
|
||||
import Unison.Util.Map ( unionWithM )
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
|
||||
-- | A node in the Unison namespace hierarchy
|
||||
-- along with its history.
|
||||
newtype Branch m = Branch { _history :: UnwrappedBranch m }
|
||||
deriving (Eq, Ord)
|
||||
type UnwrappedBranch m = Causal m Raw (Branch0 m)
|
||||
|
||||
type Hash = Causal.RawHash Raw
|
||||
type EditHash = Hash.Hash
|
||||
|
||||
type Star r n = Metadata.Star r n
|
||||
|
||||
-- | A node in the Unison namespace hierarchy.
|
||||
--
|
||||
-- '_terms' and '_types' are the declarations at this level.
|
||||
-- '_children' are the nodes one level below us.
|
||||
-- '_edits' are the 'Patch's stored at this node in the code.
|
||||
--
|
||||
-- The @deep*@ fields are derived from the four above.
|
||||
data Branch0 m = Branch0
|
||||
{ _terms :: Star Referent NameSegment
|
||||
, _types :: Star Reference NameSegment
|
||||
, _children :: Map NameSegment (Branch m)
|
||||
-- ^ Note the 'Branch' here, not 'Branch0'.
|
||||
-- Every level in the tree has a history.
|
||||
, _edits :: Map NameSegment (EditHash, m Patch)
|
||||
-- names and metadata for this branch and its children
|
||||
-- (ref, (name, value)) iff ref has metadata `value` at name `name`
|
||||
, deepTerms :: Relation Referent Name
|
||||
, deepTypes :: Relation Reference Name
|
||||
, deepTermMetadata :: Metadata.R4 Referent Name
|
||||
, deepTypeMetadata :: Metadata.R4 Reference Name
|
||||
, deepPaths :: Set Path
|
||||
, deepEdits :: Map Name EditHash
|
||||
}
|
||||
|
||||
-- Represents a shallow diff of a Branch0.
|
||||
-- Each of these `Star`s contain metadata as well, so an entry in
|
||||
-- `added` or `removed` could be an update to the metadata.
|
||||
data BranchDiff = BranchDiff
|
||||
{ addedTerms :: Star Referent NameSegment
|
||||
, removedTerms :: Star Referent NameSegment
|
||||
, addedTypes :: Star Reference NameSegment
|
||||
, removedTypes :: Star Reference NameSegment
|
||||
, changedPatches :: Map NameSegment Patch.PatchDiff
|
||||
} deriving (Eq, Ord, Show)
|
||||
|
||||
instance Semigroup BranchDiff where
|
||||
left <> right = BranchDiff
|
||||
{ addedTerms = addedTerms left <> addedTerms right
|
||||
, removedTerms = removedTerms left <> removedTerms right
|
||||
, addedTypes = addedTypes left <> addedTypes right
|
||||
, removedTypes = removedTypes left <> removedTypes right
|
||||
, changedPatches =
|
||||
Map.unionWith (<>) (changedPatches left) (changedPatches right)
|
||||
}
|
||||
|
||||
instance Monoid BranchDiff where
|
||||
mappend = (<>)
|
||||
mempty = BranchDiff mempty mempty mempty mempty mempty
|
||||
|
||||
-- The raw Branch
|
||||
data Raw = Raw
|
||||
{ _termsR :: Star Referent NameSegment
|
||||
, _typesR :: Star Reference NameSegment
|
||||
, _childrenR :: Map NameSegment Hash
|
||||
, _editsR :: Map NameSegment EditHash
|
||||
}
|
||||
|
||||
makeLenses ''Branch
|
||||
makeLensesFor [("_edits", "edits")] ''Branch0
|
||||
|
||||
terms :: Lens' (Branch0 m) (Star Referent NameSegment)
|
||||
terms = lens _terms (\Branch0{..} x -> branch0 x _types _children _edits)
|
||||
|
||||
types :: Lens' (Branch0 m) (Star Reference NameSegment)
|
||||
types = lens _types (\Branch0{..} x -> branch0 _terms x _children _edits)
|
||||
|
||||
children :: Lens' (Branch0 m) (Map NameSegment (Branch m))
|
||||
children = lens _children (\Branch0{..} x -> branch0 _terms _types x _edits)
|
||||
|
||||
-- creates a Branch0 from the primary fields and derives the others.
|
||||
branch0 :: Metadata.Star Referent NameSegment
|
||||
-> Metadata.Star Reference NameSegment
|
||||
-> Map NameSegment (Branch m)
|
||||
-> Map NameSegment (EditHash, m Patch)
|
||||
-> Branch0 m
|
||||
branch0 terms types children edits =
|
||||
Branch0 terms types children edits
|
||||
deepTerms' deepTypes'
|
||||
deepTermMetadata' deepTypeMetadata'
|
||||
deepPaths' deepEdits'
|
||||
where
|
||||
nameSegToName = Name.unsafeFromText . NameSegment.toText
|
||||
deepTerms' = (R.mapRan nameSegToName . Star3.d1) terms
|
||||
<> foldMap go (Map.toList children)
|
||||
where
|
||||
go (nameSegToName -> n, b) =
|
||||
R.mapRan (Name.joinDot n) (deepTerms $ head b) -- could use mapKeysMonotonic
|
||||
deepTypes' = (R.mapRan nameSegToName . Star3.d1) types
|
||||
<> foldMap go (Map.toList children)
|
||||
where
|
||||
go (nameSegToName -> n, b) =
|
||||
R.mapRan (Name.joinDot n) (deepTypes $ head b) -- could use mapKeysMonotonic
|
||||
deepTermMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 terms)
|
||||
<> foldMap go (Map.toList children)
|
||||
where
|
||||
go (nameSegToName -> n, b) =
|
||||
R4.mapD2 (Name.joinDot n) (deepTermMetadata $ head b)
|
||||
deepTypeMetadata' = R4.mapD2 nameSegToName (Metadata.starToR4 types)
|
||||
<> foldMap go (Map.toList children)
|
||||
where
|
||||
go (nameSegToName -> n, b) =
|
||||
R4.mapD2 (Name.joinDot n) (deepTypeMetadata $ head b)
|
||||
deepPaths' = Set.map Path.singleton (Map.keysSet children)
|
||||
<> foldMap go (Map.toList children)
|
||||
where go (nameSeg, b) = Set.map (Path.cons nameSeg) (deepPaths $ head b)
|
||||
deepEdits' = Map.mapKeys nameSegToName (Map.map fst edits)
|
||||
<> foldMap go (Map.toList children)
|
||||
where
|
||||
go (nameSeg, b) =
|
||||
Map.mapKeys (nameSegToName nameSeg `Name.joinDot`) . deepEdits $ head b
|
||||
|
||||
head :: Branch m -> Branch0 m
|
||||
head (Branch c) = Causal.head c
|
||||
|
||||
headHash :: Branch m -> Hash
|
||||
headHash (Branch c) = Causal.currentHash c
|
||||
|
||||
-- deepEdits' :: Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- deepEdits' b = go id b where
|
||||
-- -- can change this to an actual prefix once Name is a [NameSegment]
|
||||
-- go :: (Name -> Name) -> Branch0 m -> Map Name (EditHash, m Patch)
|
||||
-- go addPrefix Branch0{..} =
|
||||
-- Map.mapKeysMonotonic (addPrefix . Name.fromSegment) _edits
|
||||
-- <> foldMap f (Map.toList _children)
|
||||
-- where
|
||||
-- f :: (NameSegment, Branch m) -> Map Name (EditHash, m Patch)
|
||||
-- f (c, b) = go (addPrefix . Name.joinDot (Name.fromSegment c)) (head b)
|
||||
|
||||
data MergeMode = RegularMerge | SquashMerge deriving (Eq,Ord,Show)
|
||||
|
||||
merge :: forall m . Monad m => Branch m -> Branch m -> m (Branch m)
|
||||
merge = merge' RegularMerge
|
||||
|
||||
-- Discards the history of a Branch0's children, recursively
|
||||
discardHistory0 :: Applicative m => Branch0 m -> Branch0 m
|
||||
discardHistory0 = over children (fmap tweak) where
|
||||
tweak b = cons (discardHistory0 (head b)) empty
|
||||
|
||||
merge' :: forall m . Monad m => MergeMode -> Branch m -> Branch m -> m (Branch m)
|
||||
merge' = merge'' lca
|
||||
|
||||
merge'' :: forall m . Monad m
|
||||
=> (Branch m -> Branch m -> m (Maybe (Branch m))) -- lca calculator
|
||||
-> MergeMode
|
||||
-> Branch m
|
||||
-> Branch m
|
||||
-> m (Branch m)
|
||||
merge'' _ _ b1 b2 | isEmpty b1 = pure b2
|
||||
merge'' _ mode b1 b2 | isEmpty b2 = case mode of
|
||||
RegularMerge -> pure b1
|
||||
SquashMerge -> pure $ cons (discardHistory0 (head b1)) b2
|
||||
merge'' lca mode (Branch x) (Branch y) =
|
||||
Branch <$> case mode of
|
||||
RegularMerge -> Causal.threeWayMerge' lca' combine x y
|
||||
SquashMerge -> Causal.squashMerge' lca' (pure . discardHistory0) combine x y
|
||||
where
|
||||
lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
combine :: Maybe (Branch0 m) -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
combine Nothing l r = merge0 lca mode l r
|
||||
combine (Just ca) l r = do
|
||||
dl <- diff0 ca l
|
||||
dr <- diff0 ca r
|
||||
head0 <- apply ca (dl <> dr)
|
||||
children <- Map.mergeA
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.traverseMaybeMissing $ combineMissing ca)
|
||||
(Map.zipWithAMatched $ const (merge'' lca mode))
|
||||
(_children l) (_children r)
|
||||
pure $ branch0 (_terms head0) (_types head0) children (_edits head0)
|
||||
|
||||
combineMissing ca k cur =
|
||||
case Map.lookup k (_children ca) of
|
||||
Nothing -> pure $ Just cur
|
||||
Just old -> do
|
||||
nw <- merge'' lca mode (cons empty0 old) cur
|
||||
if isEmpty0 $ head nw
|
||||
then pure Nothing
|
||||
else pure $ Just nw
|
||||
|
||||
apply :: Branch0 m -> BranchDiff -> m (Branch0 m)
|
||||
apply b0 BranchDiff {..} = do
|
||||
patches <- sequenceA
|
||||
$ Map.differenceWith patchMerge (pure @m <$> _edits b0) changedPatches
|
||||
let newPatches = makePatch <$> Map.difference changedPatches (_edits b0)
|
||||
makePatch Patch.PatchDiff {..} =
|
||||
let p = Patch.Patch _addedTermEdits _addedTypeEdits
|
||||
in (H.accumulate' p, pure p)
|
||||
pure $ branch0 (Star3.difference (_terms b0) removedTerms <> addedTerms)
|
||||
(Star3.difference (_types b0) removedTypes <> addedTypes)
|
||||
(_children b0)
|
||||
(patches <> newPatches)
|
||||
patchMerge mhp Patch.PatchDiff {..} = Just $ do
|
||||
(_, mp) <- mhp
|
||||
p <- mp
|
||||
let np = Patch.Patch
|
||||
{ _termEdits = R.difference (Patch._termEdits p) _removedTermEdits
|
||||
<> _addedTermEdits
|
||||
, _typeEdits = R.difference (Patch._typeEdits p) _removedTypeEdits
|
||||
<> _addedTypeEdits
|
||||
}
|
||||
pure (H.accumulate' np, pure np)
|
||||
|
||||
-- -- `before' lca b1 b2` is true if `b2` incorporates all of `b1`
|
||||
-- -- It's defined as: lca b1 b2 == Just b1
|
||||
-- before' :: Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-- -> Branch m -> Branch m -> m Bool
|
||||
-- before' lca (Branch x) (Branch y) = Causal.before' lca' x y
|
||||
-- where
|
||||
-- lca' c1 c2 = fmap _history <$> lca (Branch c1) (Branch c2)
|
||||
|
||||
-- `before b1 b2` is true if `b2` incorporates all of `b1`
|
||||
before :: Monad m => Branch m -> Branch m -> m Bool
|
||||
before (Branch b1) (Branch b2) = Causal.before b1 b2
|
||||
|
||||
merge0 :: forall m. Monad m => (Branch m -> Branch m -> m (Maybe (Branch m)))
|
||||
-> MergeMode -> Branch0 m -> Branch0 m -> m (Branch0 m)
|
||||
merge0 lca mode b1 b2 = do
|
||||
c3 <- unionWithM (merge'' lca mode) (_children b1) (_children b2)
|
||||
e3 <- unionWithM g (_edits b1) (_edits b2)
|
||||
pure $ branch0 (_terms b1 <> _terms b2)
|
||||
(_types b1 <> _types b2)
|
||||
c3
|
||||
e3
|
||||
where
|
||||
g :: (EditHash, m Patch) -> (EditHash, m Patch) -> m (EditHash, m Patch)
|
||||
g (h1, m1) (h2, _) | h1 == h2 = pure (h1, m1)
|
||||
g (_, m1) (_, m2) = do
|
||||
e1 <- m1
|
||||
e2 <- m2
|
||||
let e3 = e1 <> e2
|
||||
pure (H.accumulate' e3, pure e3)
|
||||
|
||||
pattern Hash h = Causal.RawHash h
|
||||
|
||||
-- toList0 :: Branch0 m -> [(Path, Branch0 m)]
|
||||
-- toList0 = go Path.empty where
|
||||
-- go p b = (p, b) : (Map.toList (_children b) >>= (\(seg, cb) ->
|
||||
-- go (Path.snoc p seg) (head cb) ))
|
||||
|
||||
-- printDebugPaths :: Branch m -> String
|
||||
-- printDebugPaths = unlines . map show . Set.toList . debugPaths
|
||||
|
||||
-- debugPaths :: Branch m -> Set (Path, Hash)
|
||||
-- debugPaths = go Path.empty where
|
||||
-- go p b = Set.insert (p, headHash b) . Set.unions $
|
||||
-- [ go (Path.snoc p seg) b | (seg, b) <- Map.toList $ _children (head b) ]
|
||||
|
||||
-- data Target = TargetType | TargetTerm | TargetBranch
|
||||
-- deriving (Eq, Ord, Show)
|
||||
|
||||
instance Eq (Branch0 m) where
|
||||
a == b = view terms a == view terms b
|
||||
&& view types a == view types b
|
||||
&& view children a == view children b
|
||||
&& (fmap fst . view edits) a == (fmap fst . view edits) b
|
||||
|
||||
-- data ForkFailure = SrcNotFound | DestExists
|
||||
|
||||
-- -- consider delegating to Names.numHashChars when ready to implement?
|
||||
-- -- are those enough?
|
||||
-- -- could move this to a read-only field in Branch0
|
||||
-- -- could move a Names0 to a read-only field in Branch0 until it gets too big
|
||||
-- numHashChars :: Branch m -> Int
|
||||
-- numHashChars _b = 3
|
||||
|
||||
-- This type is a little ugly, so we wrap it up with a nice type alias for
|
||||
-- use outside this module.
|
||||
type Cache m = Cache.Cache (Causal.RawHash Raw) (UnwrappedBranch m)
|
||||
|
||||
-- boundedCache :: MonadIO m => Word -> m (Cache m2)
|
||||
-- boundedCache = Cache.semispaceCache
|
||||
|
||||
-- Can use `Cache.nullCache` to disable caching if needed
|
||||
cachedRead :: forall m . MonadIO m
|
||||
=> Cache m
|
||||
-> Causal.Deserialize m Raw Raw
|
||||
-> (EditHash -> m Patch)
|
||||
-> Hash
|
||||
-> m (Branch m)
|
||||
cachedRead cache deserializeRaw deserializeEdits h =
|
||||
Branch <$> Causal.cachedRead cache d h
|
||||
where
|
||||
fromRaw :: Raw -> m (Branch0 m)
|
||||
fromRaw Raw {..} = do
|
||||
children <- traverse go _childrenR
|
||||
edits <- for _editsR $ \hash -> (hash,) . pure <$> deserializeEdits hash
|
||||
pure $ branch0 _termsR _typesR children edits
|
||||
go = cachedRead cache deserializeRaw deserializeEdits
|
||||
d :: Causal.Deserialize m Raw (Branch0 m)
|
||||
d h = deserializeRaw h >>= \case
|
||||
RawOne raw -> RawOne <$> fromRaw raw
|
||||
RawCons raw h -> flip RawCons h <$> fromRaw raw
|
||||
RawMerge raw hs -> flip RawMerge hs <$> fromRaw raw
|
||||
|
||||
sync
|
||||
:: Monad m
|
||||
=> (Hash -> m Bool)
|
||||
-> Causal.Serialize m Raw Raw
|
||||
-> (EditHash -> m Patch -> m ())
|
||||
-> Branch m
|
||||
-> m ()
|
||||
sync exists serializeRaw serializeEdits b = do
|
||||
_written <- State.execStateT (sync' exists serializeRaw serializeEdits b) mempty
|
||||
-- traceM $ "Branch.sync wrote " <> show (Set.size written) <> " namespace files."
|
||||
pure ()
|
||||
|
||||
-- serialize a `Branch m` indexed by the hash of its corresponding Raw
|
||||
sync'
|
||||
:: forall m
|
||||
. Monad m
|
||||
=> (Hash -> m Bool)
|
||||
-> Causal.Serialize m Raw Raw
|
||||
-> (EditHash -> m Patch -> m ())
|
||||
-> Branch m
|
||||
-> StateT (Set Hash) m ()
|
||||
sync' exists serializeRaw serializeEdits b = Causal.sync exists
|
||||
serialize0
|
||||
(view history b)
|
||||
where
|
||||
serialize0 :: Causal.Serialize (StateT (Set Hash) m) Raw (Branch0 m)
|
||||
serialize0 h b0 = case b0 of
|
||||
RawOne b0 -> do
|
||||
writeB0 b0
|
||||
lift $ serializeRaw h $ RawOne (toRaw b0)
|
||||
RawCons b0 ht -> do
|
||||
writeB0 b0
|
||||
lift $ serializeRaw h $ RawCons (toRaw b0) ht
|
||||
RawMerge b0 hs -> do
|
||||
writeB0 b0
|
||||
lift $ serializeRaw h $ RawMerge (toRaw b0) hs
|
||||
where
|
||||
writeB0 :: Branch0 m -> StateT (Set Hash) m ()
|
||||
writeB0 b0 = do
|
||||
for_ (view children b0) $ \c -> do
|
||||
queued <- State.get
|
||||
when (Set.notMember (headHash c) queued) $
|
||||
sync' exists serializeRaw serializeEdits c
|
||||
for_ (view edits b0) (lift . uncurry serializeEdits)
|
||||
|
||||
-- this has to serialize the branch0 and its descendants in the tree,
|
||||
-- and then serialize the rest of the history of the branch as well
|
||||
|
||||
toRaw :: Branch0 m -> Raw
|
||||
toRaw Branch0 {..} =
|
||||
Raw _terms _types (headHash <$> _children) (fst <$> _edits)
|
||||
|
||||
toCausalRaw :: Branch m -> Causal.Raw Raw Raw
|
||||
toCausalRaw = \case
|
||||
Branch (Causal.One _h e) -> RawOne (toRaw e)
|
||||
Branch (Causal.Cons _h e (ht, _m)) -> RawCons (toRaw e) ht
|
||||
Branch (Causal.Merge _h e tls) -> RawMerge (toRaw e) (Map.keysSet tls)
|
||||
|
||||
-- -- copy a path to another path
|
||||
-- fork
|
||||
-- :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- fork src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' -> case setIfNotExists dest src' root of
|
||||
-- Nothing -> Left DestExists
|
||||
-- Just root' -> Right root'
|
||||
|
||||
-- -- Move the node at src to dest.
|
||||
-- -- It's okay if `dest` is inside `src`, just create empty levels.
|
||||
-- -- Try not to `step` more than once at each node.
|
||||
-- move :: Applicative m
|
||||
-- => Path
|
||||
-- -> Path
|
||||
-- -> Branch m
|
||||
-- -> Either ForkFailure (Branch m)
|
||||
-- move src dest root = case getAt src root of
|
||||
-- Nothing -> Left SrcNotFound
|
||||
-- Just src' ->
|
||||
-- -- make sure dest doesn't already exist
|
||||
-- case getAt dest root of
|
||||
-- Just _destExists -> Left DestExists
|
||||
-- Nothing ->
|
||||
-- -- find and update common ancestor of `src` and `dest`:
|
||||
-- Right $ modifyAt ancestor go root
|
||||
-- where
|
||||
-- (ancestor, relSrc, relDest) = Path.relativeToAncestor src dest
|
||||
-- go = deleteAt relSrc . setAt relDest src'
|
||||
|
||||
-- setIfNotExists
|
||||
-- :: Applicative m => Path -> Branch m -> Branch m -> Maybe (Branch m)
|
||||
-- setIfNotExists dest b root = case getAt dest root of
|
||||
-- Just _destExists -> Nothing
|
||||
-- Nothing -> Just $ setAt dest b root
|
||||
|
||||
-- setAt :: Applicative m => Path -> Branch m -> Branch m -> Branch m
|
||||
-- setAt path b = modifyAt path (const b)
|
||||
|
||||
-- deleteAt :: Applicative m => Path -> Branch m -> Branch m
|
||||
-- deleteAt path = setAt path empty
|
||||
|
||||
-- returns `Nothing` if no Branch at `path` or if Branch is empty at `path`
|
||||
getAt :: Path
|
||||
-> Branch m
|
||||
-> Maybe (Branch m)
|
||||
getAt path root = case Path.uncons path of
|
||||
Nothing -> if isEmpty root then Nothing else Just root
|
||||
Just (seg, path) -> case Map.lookup seg (_children $ head root) of
|
||||
Just b -> getAt path b
|
||||
Nothing -> Nothing
|
||||
|
||||
getAt' :: Path -> Branch m -> Branch m
|
||||
getAt' p b = fromMaybe empty $ getAt p b
|
||||
|
||||
-- getAt0 :: Path -> Branch0 m -> Branch0 m
|
||||
-- getAt0 p b = case Path.uncons p of
|
||||
-- Nothing -> b
|
||||
-- Just (seg, path) -> case Map.lookup seg (_children b) of
|
||||
-- Just c -> getAt0 path (head c)
|
||||
-- Nothing -> empty0
|
||||
|
||||
empty :: Branch m
|
||||
empty = Branch $ Causal.one empty0
|
||||
|
||||
-- one :: Branch0 m -> Branch m
|
||||
-- one = Branch . Causal.one
|
||||
|
||||
empty0 :: Branch0 m
|
||||
empty0 =
|
||||
Branch0 mempty mempty mempty mempty mempty mempty mempty mempty mempty mempty
|
||||
|
||||
isEmpty0 :: Branch0 m -> Bool
|
||||
isEmpty0 = (== empty0)
|
||||
|
||||
isEmpty :: Branch m -> Bool
|
||||
isEmpty = (== empty)
|
||||
|
||||
step :: Applicative m => (Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
step f = \case
|
||||
Branch (Causal.One _h e) | e == empty0 -> Branch (Causal.one (f empty0))
|
||||
b -> over history (Causal.stepDistinct f) b
|
||||
|
||||
-- stepM :: (Monad m, Monad n) => (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepM f = \case
|
||||
-- Branch (Causal.One _h e) | e == empty0 -> Branch . Causal.one <$> f empty0
|
||||
-- b -> mapMOf history (Causal.stepDistinctM f) b
|
||||
|
||||
cons :: Applicative m => Branch0 m -> Branch m -> Branch m
|
||||
cons = step . const
|
||||
|
||||
-- isOne :: Branch m -> Bool
|
||||
-- isOne (Branch Causal.One{}) = True
|
||||
-- isOne _ = False
|
||||
|
||||
-- uncons :: Applicative m => Branch m -> m (Maybe (Branch0 m, Branch m))
|
||||
-- uncons (Branch b) = go <$> Causal.uncons b where
|
||||
-- go = over (_Just . _2) Branch
|
||||
|
||||
-- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- after creating it if necessary. Preserves history.
|
||||
-- stepAt :: forall m. Applicative m
|
||||
-- => Path
|
||||
-- -> (Branch0 m -> Branch0 m)
|
||||
-- -> Branch m -> Branch m
|
||||
-- stepAt p f = modifyAt p g where
|
||||
-- g :: Branch m -> Branch m
|
||||
-- g (Branch b) = Branch . Causal.consDistinct (f (Causal.head b)) $ b
|
||||
|
||||
-- stepManyAt :: (Monad m, Foldable f)
|
||||
-- => f (Path, Branch0 m -> Branch0 m) -> Branch m -> Branch m
|
||||
-- stepManyAt actions = step (stepManyAt0 actions)
|
||||
|
||||
-- -- Modify the branch0 at the head of at `path` with `f`,
|
||||
-- -- after creating it if necessary. Preserves history.
|
||||
-- stepAtM :: forall n m. (Functor n, Applicative m)
|
||||
-- => Path -> (Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepAtM p f = modifyAtM p g where
|
||||
-- g :: Branch m -> n (Branch m)
|
||||
-- g (Branch b) = do
|
||||
-- b0' <- f (Causal.head b)
|
||||
-- pure $ Branch . Causal.consDistinct b0' $ b
|
||||
|
||||
-- stepManyAtM :: (Monad m, Monad n, Foldable f)
|
||||
-- => f (Path, Branch0 m -> n (Branch0 m)) -> Branch m -> n (Branch m)
|
||||
-- stepManyAtM actions = stepM (stepManyAt0M actions)
|
||||
|
||||
-- -- starting at the leaves, apply `f` to every level of the branch.
|
||||
-- stepEverywhere
|
||||
-- :: Applicative m => (Branch0 m -> Branch0 m) -> (Branch0 m -> Branch0 m)
|
||||
-- stepEverywhere f Branch0 {..} = f (branch0 _terms _types children _edits)
|
||||
-- where children = fmap (step $ stepEverywhere f) _children
|
||||
|
||||
-- -- Creates a function to fix up the children field.
|
||||
-- -- If the action emptied a child, then remove the mapping,
|
||||
-- -- otherwise update it.
|
||||
-- -- Todo: Fix this in hashing & serialization instead of here?
|
||||
-- getChildBranch :: NameSegment -> Branch0 m -> Branch m
|
||||
-- getChildBranch seg b = fromMaybe empty $ Map.lookup seg (_children b)
|
||||
|
||||
-- setChildBranch :: NameSegment -> Branch m -> Branch0 m -> Branch0 m
|
||||
-- setChildBranch seg b = over children (updateChildren seg b)
|
||||
|
||||
-- getPatch :: Applicative m => NameSegment -> Branch0 m -> m Patch
|
||||
-- getPatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- Nothing -> pure Patch.empty
|
||||
-- Just (_, p) -> p
|
||||
|
||||
-- getMaybePatch :: Applicative m => NameSegment -> Branch0 m -> m (Maybe Patch)
|
||||
-- getMaybePatch seg b = case Map.lookup seg (_edits b) of
|
||||
-- Nothing -> pure Nothing
|
||||
-- Just (_, p) -> Just <$> p
|
||||
|
||||
-- modifyPatches
|
||||
-- :: Monad m => NameSegment -> (Patch -> Patch) -> Branch0 m -> m (Branch0 m)
|
||||
-- modifyPatches seg f = mapMOf edits update
|
||||
-- where
|
||||
-- update m = do
|
||||
-- p' <- case Map.lookup seg m of
|
||||
-- Nothing -> pure $ f Patch.empty
|
||||
-- Just (_, p) -> f <$> p
|
||||
-- let h = H.accumulate' p'
|
||||
-- pure $ Map.insert seg (h, pure p') m
|
||||
|
||||
-- replacePatch :: Applicative m => NameSegment -> Patch -> Branch0 m -> Branch0 m
|
||||
-- replacePatch n p = over edits (Map.insert n (H.accumulate' p, pure p))
|
||||
|
||||
-- deletePatch :: NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deletePatch n = over edits (Map.delete n)
|
||||
|
||||
-- updateChildren :: NameSegment
|
||||
-- -> Branch m
|
||||
-- -> Map NameSegment (Branch m)
|
||||
-- -> Map NameSegment (Branch m)
|
||||
-- updateChildren seg updatedChild =
|
||||
-- if isEmpty updatedChild
|
||||
-- then Map.delete seg
|
||||
-- else Map.insert seg updatedChild
|
||||
|
||||
-- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- modifyAt :: Applicative m
|
||||
-- => Path -> (Branch m -> Branch m) -> Branch m -> Branch m
|
||||
-- modifyAt path f = runIdentity . modifyAtM path (pure . f)
|
||||
|
||||
-- -- Modify the Branch at `path` with `f`, after creating it if necessary.
|
||||
-- -- Because it's a `Branch`, it overwrites the history at `path`.
|
||||
-- modifyAtM
|
||||
-- :: forall n m
|
||||
-- . Functor n
|
||||
-- => Applicative m -- because `Causal.cons` uses `pure`
|
||||
-- => Path
|
||||
-- -> (Branch m -> n (Branch m))
|
||||
-- -> Branch m
|
||||
-- -> n (Branch m)
|
||||
-- modifyAtM path f b = case Path.uncons path of
|
||||
-- Nothing -> f b
|
||||
-- Just (seg, path) -> do -- Functor
|
||||
-- let child = getChildBranch seg (head b)
|
||||
-- child' <- modifyAtM path f child
|
||||
-- -- step the branch by updating its children according to fixup
|
||||
-- pure $ step (setChildBranch seg child') b
|
||||
|
||||
-- -- stepManyAt0 consolidates several changes into a single step
|
||||
-- stepManyAt0 :: forall f m . (Monad m, Foldable f)
|
||||
-- => f (Path, Branch0 m -> Branch0 m)
|
||||
-- -> Branch0 m -> Branch0 m
|
||||
-- stepManyAt0 actions =
|
||||
-- runIdentity . stepManyAt0M [ (p, pure . f) | (p,f) <- toList actions ]
|
||||
|
||||
-- stepManyAt0M :: forall m n f . (Monad m, Monad n, Foldable f)
|
||||
-- => f (Path, Branch0 m -> n (Branch0 m))
|
||||
-- -> Branch0 m -> n (Branch0 m)
|
||||
-- stepManyAt0M actions b = go (toList actions) b where
|
||||
-- go :: [(Path, Branch0 m -> n (Branch0 m))] -> Branch0 m -> n (Branch0 m)
|
||||
-- go actions b = let
|
||||
-- -- combines the functions that apply to this level of the tree
|
||||
-- currentAction b = foldM (\b f -> f b) b [ f | (Path.Empty, f) <- actions ]
|
||||
|
||||
-- -- groups the actions based on the child they apply to
|
||||
-- childActions :: Map NameSegment [(Path, Branch0 m -> n (Branch0 m))]
|
||||
-- childActions =
|
||||
-- List.multimap [ (seg, (rest,f)) | (seg :< rest, f) <- actions ]
|
||||
|
||||
-- -- alters the children of `b` based on the `childActions` map
|
||||
-- stepChildren :: Map NameSegment (Branch m) -> n (Map NameSegment (Branch m))
|
||||
-- stepChildren children0 = foldM g children0 $ Map.toList childActions
|
||||
-- where
|
||||
-- g children (seg, actions) = do
|
||||
-- -- Recursively applies the relevant actions to the child branch
|
||||
-- -- The `findWithDefault` is important - it allows the stepManyAt
|
||||
-- -- to create new children at paths that don't previously exist.
|
||||
-- child <- stepM (go actions) (Map.findWithDefault empty seg children0)
|
||||
-- pure $ updateChildren seg child children
|
||||
-- in do
|
||||
-- c2 <- stepChildren (view children b)
|
||||
-- currentAction (set children c2 b)
|
||||
|
||||
-- | A 'Branch0' is hashed from its terms, its types, the head hashes of its
-- children, and the (already-computed) hashes of its patches.  Token order
-- matters for the resulting hash and must not be changed.
instance Hashable (Branch0 m) where
  tokens b0 =
    [ H.accumulateToken (_terms b0),
      H.accumulateToken (_types b0),
      H.accumulateToken (fmap headHash (_children b0)),
      H.accumulateToken (fmap fst (_edits b0))
    ]
|
||||
|
||||
-- -- getLocalBranch :: Hash -> IO Branch
|
||||
-- -- getGithubBranch :: RemotePath -> IO Branch
|
||||
-- -- getLocalEdit :: GUID -> IO Patch
|
||||
|
||||
-- -- todo: consider inlining these into Actions2
|
||||
-- addTermName
|
||||
-- :: Referent -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- addTermName r new md =
|
||||
-- over terms (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- addTypeName
|
||||
-- :: Reference -> NameSegment -> Metadata.Metadata -> Branch0 m -> Branch0 m
|
||||
-- addTypeName r new md =
|
||||
-- over types (Metadata.insertWithMetadata (r, md) . Star3.insertD1 (r, new))
|
||||
|
||||
-- -- addTermNameAt :: Path.Split -> Referent -> Branch0 m -> Branch0 m
|
||||
-- -- addTypeNameAt :: Path.Split -> Reference -> Branch0 m -> Branch0 m
|
||||
|
||||
-- deleteTermName :: Referent -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deleteTermName r n b | Star3.memberD1 (r,n) (view terms b)
|
||||
-- = over terms (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- deleteTermName _ _ b = b
|
||||
|
||||
-- deleteTypeName :: Reference -> NameSegment -> Branch0 m -> Branch0 m
|
||||
-- deleteTypeName r n b | Star3.memberD1 (r,n) (view types b)
|
||||
-- = over types (Star3.deletePrimaryD1 (r,n)) b
|
||||
-- deleteTypeName _ _ b = b
|
||||
|
||||
-- namesDiff :: Branch m -> Branch m -> Names.Diff
|
||||
-- namesDiff b1 b2 = Names.diff0 (toNames0 (head b1)) (toNames0 (head b2))
|
||||
|
||||
-- | The lowest common ancestor of two branches, if one exists, computed over
-- their causal histories.
lca :: Monad m => Branch m -> Branch m -> m (Maybe (Branch m))
lca (Branch x) (Branch y) = (fmap . fmap) Branch (Causal.lca x y)
|
||||
|
||||
-- | Compute the difference between two branch heads: which term and type
-- name mappings were added or removed, and how each patch changed.
diff0 :: Monad m => Branch0 m -> Branch0 m -> m BranchDiff
diff0 old new = do
  -- Force the deferred patch loads on both sides so they can be merged.
  editsNew <- traverse snd (_edits new)
  editsOld <- traverse snd (_edits old)
  let patchDiffs =
        Map.merge
          (Map.mapMissing (\_ p -> Patch.diff p mempty)) -- patch only in `new`
          (Map.mapMissing (\_ p -> Patch.diff mempty p)) -- patch only in `old`
          (Map.zipWithMatched (const Patch.diff))        -- patch in both
          editsNew
          editsOld
  pure
    BranchDiff
      { addedTerms = Star3.difference (_terms new) (_terms old),
        removedTerms = Star3.difference (_terms old) (_terms new),
        addedTypes = Star3.difference (_types new) (_types old),
        removedTypes = Star3.difference (_types old) (_types new),
        changedPatches = patchDiffs
      }
|
||||
|
||||
-- | Hoist a branch from one monad to another via a natural transformation.
-- The causal structure is preserved and nothing is rehashed.
transform :: Functor m => (forall a . m a -> n a) -> Branch m -> Branch n
transform f branch =
  Branch (Causal.transform f (transformB0s f (_history branch)))
  where
    -- Hoist a single branch head, recursing into its children and mapping
    -- over the deferred patch loads.
    transformB0 :: Functor m => (forall a . m a -> n a) -> Branch0 m -> Branch0 n
    transformB0 g b0 =
      b0
        { _children = fmap (transform g) (_children b0),
          _edits = fmap (second g) (_edits b0)
        }

    -- Hoist every head in the causal history, keeping existing hashes.
    transformB0s ::
      Functor m =>
      (forall a . m a -> n a) ->
      Causal m Raw (Branch0 m) ->
      Causal m Raw (Branch0 n)
    transformB0s g = Causal.unsafeMapHashPreserving (transformB0 g)
|
||||
|
||||
-- data BranchAttentions = BranchAttentions
|
||||
-- { -- Patches that were edited on the right but entirely removed on the left.
|
||||
-- removedPatchEdited :: [Name]
|
||||
-- -- Patches that were edited on the left but entirely removed on the right.
|
||||
-- , editedPatchRemoved :: [Name]
|
||||
-- }
|
||||
|
||||
-- instance Semigroup BranchAttentions where
|
||||
-- BranchAttentions edited1 removed1 <> BranchAttentions edited2 removed2
|
||||
-- = BranchAttentions (edited1 <> edited2) (removed1 <> removed2)
|
||||
|
||||
-- instance Monoid BranchAttentions where
|
||||
-- mempty = BranchAttentions [] []
|
||||
-- mappend = (<>)
|
||||
|
||||
-- data RefCollisions =
|
||||
-- RefCollisions { termCollisions :: Relation Name Name
|
||||
-- , typeCollisions :: Relation Name Name
|
||||
-- } deriving (Eq, Show)
|
||||
|
||||
-- instance Semigroup RefCollisions where
|
||||
-- (<>) = mappend
|
||||
-- instance Monoid RefCollisions where
|
||||
-- mempty = RefCollisions mempty mempty
|
||||
-- mappend r1 r2 = RefCollisions (termCollisions r1 <> termCollisions r2)
|
||||
-- (typeCollisions r1 <> typeCollisions r2)
|
@ -2,27 +2,33 @@
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Branch.Dependencies where
|
||||
|
||||
import Data.Set (Set)
|
||||
module Unison.Codebase.FileCodebase.Branch.Dependencies
|
||||
( Branches,
|
||||
Dependencies (..),
|
||||
Dependencies' (..),
|
||||
to',
|
||||
fromRawCausal,
|
||||
fromBranch,
|
||||
)
|
||||
where
|
||||
import Data.Foldable (toList)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Map as Map
|
||||
import Unison.Codebase.Branch (Branch(Branch), Branch0, EditHash)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import GHC.Generics (Generic)
|
||||
import Data.Monoid.Generic
|
||||
import Data.Map (Map)
|
||||
import qualified Data.Map as Map
|
||||
import Data.Monoid.Generic
|
||||
import Data.Set (Set)
|
||||
import qualified Data.Set as Set
|
||||
import GHC.Generics (Generic)
|
||||
import Unison.Codebase.FileCodebase.Branch (Branch (Branch), Branch0, EditHash)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import Unison.Codebase.FileCodebase.Patch (Patch)
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference (DerivedId))
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import Unison.Codebase.FileCodebase.Referent (Referent)
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import Unison.NameSegment (NameSegment)
|
||||
import Unison.Referent (Referent)
|
||||
import Unison.Codebase.Patch (Patch)
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
import qualified Unison.Util.Relation as R
|
||||
import Unison.Reference (Reference(DerivedId))
|
||||
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
-- | Branch hashes, each paired with a (possibly absent) deferred load of the
-- branch itself.
type Branches m = [(Branch.Hash, Maybe (m (Branch m)))]
|
||||
|
||||
data Dependencies = Dependencies
|
||||
@ -45,7 +51,6 @@ data Dependencies' = Dependencies'
|
||||
deriving Semigroup via GenericSemigroup Dependencies'
|
||||
deriving Monoid via GenericMonoid Dependencies'
|
||||
|
||||
|
||||
-- | Flatten the collected dependency sets into lists.
to' :: Dependencies -> Dependencies'
to' deps =
  Dependencies'
    (toList (patches deps))
    (toList (terms deps))
    (toList (decls deps))
|
||||
|
||||
|
109
parser-typechecker/src/Unison/Codebase/FileCodebase/Codebase.hs
Normal file
109
parser-typechecker/src/Unison/Codebase/FileCodebase/Codebase.hs
Normal file
@ -0,0 +1,109 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Codebase
|
||||
( Codebase (..),
|
||||
CodebasePath,
|
||||
GetRootBranchError (..),
|
||||
GitError (..),
|
||||
GitFileCodebaseError (..),
|
||||
SyncToDir,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, ReadRepo, WriteRepo)
|
||||
import Unison.Codebase.FileCodebase.Branch (Branch)
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import Unison.Codebase.FileCodebase.DataDeclaration (Decl)
|
||||
import Unison.Codebase.FileCodebase.Patch (Patch)
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import Unison.Codebase.FileCodebase.Term (Term)
|
||||
import Unison.Codebase.FileCodebase.Type (Type)
|
||||
import Unison.Codebase.GitError (GitCodebaseError, GitProtocolError)
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import Unison.CodebasePath (CodebasePath)
|
||||
import Unison.Prelude
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.WatchKind as WK
|
||||
|
||||
-- | The shape of an operation that syncs a branch (with its dependencies)
-- into another codebase directory.
type SyncToDir m =
  CodebasePath -> -- destination codebase root
  SyncMode ->
  Branch m -> -- branch to sync into the destination codebase
  m ()
|
||||
|
||||
-- | Abstract interface to a user's codebase.
--
-- One implementation is 'Unison.Codebase.FileCodebase', which uses the
-- filesystem.
data Codebase m v a = Codebase
  { getTerm :: Reference.Id -> m (Maybe (Term v a))
  , getTypeOfTermImpl :: Reference.Id -> m (Maybe (Type v a))
  , getTypeDeclaration :: Reference.Id -> m (Maybe (Decl v a))
  , putTerm :: Reference.Id -> Term v a -> Type v a -> m ()
  , putTypeDeclaration :: Reference.Id -> Decl v a -> m ()
  , getRootBranch :: m (Either GetRootBranchError (Branch m))
  , putRootBranch :: Branch m -> m ()
  , rootBranchUpdates :: m (IO (), IO (Set Branch.Hash))
  , getBranchForHash :: Branch.Hash -> m (Maybe (Branch m))
  , putBranch :: Branch m -> m ()
  , branchExists :: Branch.Hash -> m Bool
  , getPatch :: Branch.EditHash -> m (Maybe Patch)
  , putPatch :: Branch.EditHash -> Patch -> m ()
  , patchExists :: Branch.EditHash -> m Bool
  , dependentsImpl :: Reference -> m (Set Reference.Id)
  , -- | Copies all the dependencies of the branch from the specified codebase
    -- into this one.
    syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m ()
  , -- | Copies all the dependencies of the branch from this codebase into the
    -- specified one.
    syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m ()
  , viewRemoteBranch' :: ReadRemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath))
  , pushGitRootBranch :: Branch m -> WriteRepo -> SyncMode -> m (Either GitError ())
  , -- | Watch expressions are part of the codebase: the 'Reference.Id' is the
    -- hash of the watch expression's source, and the @Term v a@ is the
    -- evaluated result of the expression, decompiled to a term.
    watches :: WK.WatchKind -> m [Reference.Id]
  , getWatch :: WK.WatchKind -> Reference.Id -> m (Maybe (Term v a))
  , putWatch :: WK.WatchKind -> Reference.Id -> Term v a -> m ()
  , clearWatches :: m ()
  , getReflog :: m [Reflog.Entry Branch.Hash]
  , appendReflog :: Text -> Branch m -> Branch m -> m ()
  , -- | The terms of the given type.
    termsOfTypeImpl :: Reference -> m (Set Referent.Id)
  , -- | The terms that mention the given type anywhere in their signature.
    termsMentioningTypeImpl :: Reference -> m (Set Referent.Id)
  , -- | The number of base58 characters needed to distinguish any two
    -- references in the codebase.
    hashLength :: m Int
  , termReferencesByPrefix :: ShortHash -> m (Set Reference.Id)
  , typeReferencesByPrefix :: ShortHash -> m (Set Reference.Id)
  , termReferentsByPrefix :: ShortHash -> m (Set Referent.Id)
  , branchHashLength :: m Int
  , branchHashesByPrefix :: ShortBranchHash -> m (Set Branch.Hash)
  , -- | 'Nothing' if not implemented (callers fall back to the in-memory
    -- algorithm), and also 'Nothing' if there is no LCA.  The result is
    -- undefined if the two hashes are not in the codebase.
    -- Use @Codebase.lca@, which wraps this in a nice API.
    lcaImpl :: Maybe (Branch.Hash -> Branch.Hash -> m (Maybe Branch.Hash))
  , -- | 'Nothing' if not implemented by the codebase.
    -- @beforeImpl b1 b2@ is undefined if @b2@ is not in the codebase.
    -- Use @Codebase.before@, which wraps this in a nice API.
    beforeImpl :: Maybe (Branch.Hash -> Branch.Hash -> m Bool)
  }
|
||||
|
||||
-- | Ways that loading the root branch can fail.
data GetRootBranchError
  = -- | No root branch exists in the codebase.
    NoRootBranch
  | -- | The root branch file at this path couldn't be deserialized.
    CouldntParseRootBranch FilePath
  | -- | The root hash was found, but the branch itself couldn't be loaded.
    CouldntLoadRootBranch Branch.Hash
  deriving (Show)
|
||||
|
||||
-- | Errors arising while syncing with a Git-hosted codebase, separated into
-- protocol-level, codebase-generic, and file-codebase-format-specific cases.
data GitError
  = GitProtocolError GitProtocolError
  | GitCodebaseError (GitCodebaseError Branch.Hash)
  | GitFileCodebaseError GitFileCodebaseError
|
||||
|
||||
-- | Errors specific to the file-codebase format when reading from Git.
data GitFileCodebaseError
  = -- | The remote repo's root-branch-hash file contained unparseable text.
    GitCouldntParseRootBranchHash ReadRepo String
  deriving Show
|
@ -73,59 +73,58 @@ module Unison.Codebase.FileCodebase.Common
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Control.Error (runExceptT, ExceptT(..))
|
||||
import Control.Lens (Lens, use, to, (%=))
|
||||
import Control.Monad.Catch (catch)
|
||||
import Control.Monad.State (MonadState)
|
||||
import Control.Error (ExceptT (..), runExceptT)
|
||||
import Control.Lens (Lens, to, use, (%=))
|
||||
import Control.Monad.Catch (catch)
|
||||
import Control.Monad.State (MonadState)
|
||||
import qualified Data.ByteString.Base16 as ByteString (decodeBase16, encodeBase16)
|
||||
import qualified Data.Char as Char
|
||||
import Data.List ( isPrefixOf )
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Text as Text
|
||||
import UnliftIO.Directory ( createDirectoryIfMissing
|
||||
, doesFileExist
|
||||
, removeFile
|
||||
, doesDirectoryExist, copyFile
|
||||
)
|
||||
import UnliftIO.IO.File (writeBinaryFile)
|
||||
import qualified System.Directory
|
||||
import System.FilePath ( takeBaseName
|
||||
, takeDirectory
|
||||
, (</>)
|
||||
)
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Codebase (CodebasePath)
|
||||
import Unison.Codebase.Causal ( Causal
|
||||
, RawHash(..)
|
||||
)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.Branch ( Branch )
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash(..))
|
||||
import qualified Unison.Codebase.ShortBranchHash as SBH
|
||||
import qualified Unison.Codebase.Serialization as S
|
||||
import qualified Unison.Codebase.Serialization.V1 as V1
|
||||
import Unison.Codebase.SyncMode ( SyncMode )
|
||||
import Unison.Codebase.Patch ( Patch(..) )
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Parser ( Ann(External) )
|
||||
import Unison.Reference ( Reference )
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Referent ( Referent )
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import Unison.Term ( Term )
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type ( Type )
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.Var ( Var )
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import Unison.Util.Monoid (foldMapM)
|
||||
import U.Util.Timing (time)
|
||||
import qualified Data.Char as Char
|
||||
import Data.Either.Extra (maybeToEither)
|
||||
import Data.List (isPrefixOf)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Text as Text
|
||||
import qualified System.Directory
|
||||
import System.FilePath (takeBaseName, takeDirectory, (</>))
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Codebase (CodebasePath)
|
||||
import Unison.Codebase.Causal (Causal, RawHash (..))
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.FileCodebase.Branch (Branch)
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import qualified Unison.Codebase.FileCodebase.Codebase as Codebase
|
||||
import qualified Unison.Codebase.FileCodebase.DataDeclaration as DD
|
||||
import Unison.Codebase.FileCodebase.Patch (Patch (..))
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import Unison.Codebase.FileCodebase.Referent (Referent)
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import qualified Unison.Codebase.FileCodebase.Serialization.V1 as V1
|
||||
import Unison.Codebase.FileCodebase.Term (Term)
|
||||
import qualified Unison.Codebase.FileCodebase.Term as Term
|
||||
import Unison.Codebase.FileCodebase.Type (Type)
|
||||
import qualified Unison.Codebase.FileCodebase.Type as Type
|
||||
import qualified Unison.Codebase.Serialization as S
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash (..))
|
||||
import qualified Unison.Codebase.ShortBranchHash as SBH
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Parser.Ann (Ann (External))
|
||||
import qualified Unison.Referent' as Referent
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import Unison.Util.Monoid (foldMapM)
|
||||
import Unison.Var (Var)
|
||||
import Unison.WatchKind (WatchKind)
|
||||
import qualified Unison.WatchKind as WK
|
||||
import UnliftIO.Directory
|
||||
( copyFile,
|
||||
createDirectoryIfMissing,
|
||||
doesDirectoryExist,
|
||||
doesFileExist,
|
||||
removeFile,
|
||||
)
|
||||
import UnliftIO.IO.File (writeBinaryFile)
|
||||
|
||||
data Err
|
||||
= InvalidBranchFile FilePath String
|
||||
@ -178,11 +177,11 @@ dependentsDir root r = dependentsDir' root </> referenceToDir r
|
||||
dependentsDir' root = root </> codebasePath </> "dependents"
|
||||
|
||||
watchesDir :: CodebasePath -> Text -> FilePath
|
||||
watchesDir root UF.RegularWatch =
|
||||
watchesDir root WK.RegularWatch =
|
||||
root </> codebasePath </> "watches" </> "_cache"
|
||||
watchesDir root kind =
|
||||
root </> codebasePath </> "watches" </> encodeFileName (Text.unpack kind)
|
||||
watchPath :: CodebasePath -> UF.WatchKind -> Reference.Id -> FilePath
|
||||
watchPath :: CodebasePath -> WatchKind -> Reference.Id -> FilePath
|
||||
watchPath root kind id =
|
||||
watchesDir root (Text.pack kind) </> componentIdToString id <> ".ub"
|
||||
|
||||
@ -511,7 +510,7 @@ getWatch :: (MonadIO m, Ord v)
|
||||
=> S.Get v
|
||||
-> S.Get a
|
||||
-> CodebasePath
|
||||
-> UF.WatchKind
|
||||
-> WatchKind
|
||||
-> Reference.Id
|
||||
-> m (Maybe (Term v a))
|
||||
getWatch getV getA path k id = do
|
||||
@ -525,7 +524,7 @@ putWatch
|
||||
=> S.Put v
|
||||
-> S.Put a
|
||||
-> CodebasePath
|
||||
-> UF.WatchKind
|
||||
-> WatchKind
|
||||
-> Reference.Id
|
||||
-> Term v a
|
||||
-> m ()
|
||||
|
@ -0,0 +1,117 @@
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
{-# LANGUAGE DeriveAnyClass #-}
|
||||
{-# Language DeriveFoldable #-}
|
||||
{-# Language DeriveTraversable #-}
|
||||
{-# Language OverloadedStrings #-}
|
||||
{-# Language PatternSynonyms #-}
|
||||
{-# Language ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.DataDeclaration
|
||||
( DataDeclaration (..),
|
||||
EffectDeclaration (..),
|
||||
Decl,
|
||||
Modifier(..),
|
||||
asDataDecl,
|
||||
constructorType,
|
||||
constructorTypes,
|
||||
declConstructorReferents,
|
||||
declDependencies,
|
||||
dependencies,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
|
||||
import qualified Data.Set as Set
|
||||
import Prelude.Extras (Show1)
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import Unison.Codebase.FileCodebase.Type (Type)
|
||||
import qualified Unison.Codebase.FileCodebase.Type as Type
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import Unison.Hashable (Hashable1)
|
||||
import qualified Unison.Hashable as Hashable
|
||||
import qualified Unison.Referent' as Referent'
|
||||
import Prelude hiding (cycle)
|
||||
|
||||
-- | A declaration: either an effect (ability) declaration or a data declaration.
type Decl v a = Either (EffectDeclaration v a) (DataDeclaration v a)
|
||||
|
||||
-- | Either a builtin constructor type or a source declaration.
-- NOTE(review): not in this module's export list — confirm it is still needed.
data DeclOrBuiltin v a
  = Builtin CT.ConstructorType
  | Decl (Decl v a)
  deriving (Eq, Show)
|
||||
|
||||
-- | View any declaration as a data declaration (effect declarations are
-- represented by an underlying data declaration).
asDataDecl :: Decl v a -> DataDeclaration v a
asDataDecl (Left effectDecl) = toDataDecl effectDecl
asDataDecl (Right dataDecl) = dataDecl
|
||||
|
||||
-- | The type references a declaration depends on, regardless of which kind
-- of declaration it is.
declDependencies :: Ord v => Decl v a -> Set Reference
declDependencies = dependencies . asDataDecl
|
||||
|
||||
-- | Effect declarations introduce effect constructors; data declarations
-- introduce data constructors.
constructorType :: Decl v a -> CT.ConstructorType
constructorType decl = case decl of
  Left _ -> CT.Effect
  Right _ -> CT.Data
|
||||
|
||||
-- | Whether a declaration's identity is purely structural, or made unique by
-- a GUID.  (An @Opaque (Set Reference)@ case existed here historically.)
data Modifier = Structural | Unique Text
  deriving (Eq, Ord, Show)
|
||||
|
||||
-- | A data declaration: its uniqueness modifier, source annotation, bound
-- type variables, and constructors (each with annotation, name, and type).
data DataDeclaration v a = DataDeclaration
  { modifier :: Modifier
  , annotation :: a
  , bound :: [v]
  , constructors' :: [(a, v, Type v a)]
  } deriving (Eq, Show, Functor)
|
||||
|
||||
-- | An effect (ability) declaration, represented by an underlying data
-- declaration.
newtype EffectDeclaration v a = EffectDeclaration
  { toDataDecl :: DataDeclaration v a
  } deriving (Eq, Show, Functor)
|
||||
|
||||
-- | The type of each constructor, in declaration order.
constructorTypes :: DataDeclaration v a -> [Type v a]
constructorTypes dd = map snd (constructors dd)
|
||||
|
||||
-- | The constructors, with their annotations dropped.
constructors :: DataDeclaration v a -> [(v, Type v a)]
constructors dd = [(v, t) | (_, v, t) <- constructors' dd]
|
||||
|
||||
-- This function is unsound, since the `rid` and the `decl` have to match.
-- It should probably be hashed directly from the Decl, once we have a
-- reliable way of doing that. —AI
declConstructorReferents :: Reference.Id -> Decl v a -> [Referent.Id]
declConstructorReferents rid decl =
  [ Referent'.Con' rid i (constructorType decl)
  | i <- constructorIds (asDataDecl decl)
  ]
|
||||
|
||||
-- | The constructor indices @[0 .. n-1]@ for a declaration with n constructors.
constructorIds :: DataDeclaration v a -> [Int]
constructorIds dd = zipWith const [0 ..] (constructors dd)
|
||||
|
||||
|
||||
-- | All type references mentioned in the declaration's constructor types.
dependencies :: Ord v => DataDeclaration v a -> Set Reference
dependencies = foldMap Type.dependencies . constructorTypes
|
||||
|
||||
-- | The base functor used to hash a declaration's structure.
data F a
  = Type (Type.F a)
  | LetRec [a] a
  | Constructors [a]
  | Modified Modifier a
  deriving (Functor, Foldable, Show, Show1)
|
||||
|
||||
instance Hashable1 F where
  hash1 hashCycle hash e =
    -- Each layer starts with a leading `2` byte to avoid collisions with
    -- terms, which start each layer with a leading `1`. See `Hashable1 Term.F`.
    Hashable.accumulate $ Hashable.Tag 2 : case e of
      Type t ->
        [Hashable.Tag 0, Hashable.Hashed (Hashable.hash1 hashCycle hash t)]
      LetRec bindings body ->
        let (hashes, hash') = hashCycle bindings
        in [Hashable.Tag 1]
             ++ map Hashable.Hashed hashes
             ++ [Hashable.Hashed (hash' body)]
      Constructors cs ->
        let (hashes, _) = hashCycle cs
        in Hashable.Tag 2 : map Hashable.Hashed hashes
      Modified m t ->
        [Hashable.Tag 3, Hashable.accumulateToken m, Hashable.Hashed (hash t)]
|
||||
|
||||
instance Hashable.Hashable Modifier where
  tokens = \case
    Structural -> [Hashable.Tag 0]
    Unique txt -> [Hashable.Tag 1, Hashable.Text txt]
|
27
parser-typechecker/src/Unison/Codebase/FileCodebase/Init.hs
Normal file
27
parser-typechecker/src/Unison/Codebase/FileCodebase/Init.hs
Normal file
@ -0,0 +1,27 @@
|
||||
{-# LANGUAGE ApplicativeDo #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Init (Init(..), CreateCodebaseError(..), Pretty) where
|
||||
|
||||
import Unison.Codebase.FileCodebase.Codebase (Codebase)
|
||||
import Unison.CodebasePath (CodebasePath)
|
||||
import qualified Unison.Util.Pretty as P
|
||||
|
||||
-- | Pretty-printed (colored) text used for codebase error messages.
type Pretty = P.Pretty P.ColorText

-- | Why creating a new codebase failed.
data CreateCodebaseError
  = CreateCodebaseAlreadyExists
  | CreateCodebaseOther Pretty

-- | A human-readable tag identifying a codebase implementation, for debugging.
type DebugName = String

-- | The operations needed to open or create a codebase of a particular
-- on-disk format.
data Init m v a = Init
  { -- | open an existing codebase; on success, yields a finalizer and the handle
    openCodebase :: DebugName -> CodebasePath -> m (Either Pretty (m (), Codebase m v a))
  , -- | create a new codebase; on success, yields a finalizer and the handle
    createCodebase' :: DebugName -> CodebasePath -> m (Either CreateCodebaseError (m (), Codebase m v a))
  , -- | given a codebase root, and given that the codebase root may have other
    -- junk in it, give the path to the "actual" files; e.g. what a forked
    -- transcript should clone.
    codebasePath :: CodebasePath -> CodebasePath
  }
|
||||
|
@ -0,0 +1,56 @@
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.LabeledDependency
|
||||
( derivedTerm
|
||||
, derivedType
|
||||
, termRef
|
||||
, typeRef
|
||||
, referent
|
||||
, dataConstructor
|
||||
, effectConstructor
|
||||
, fold
|
||||
, referents
|
||||
, toReference
|
||||
, LabeledDependency
|
||||
, partition
|
||||
) where
|
||||
|
||||
import Unison.Prelude hiding (fold)
|
||||
|
||||
import qualified Data.Set as Set
|
||||
import Unison.Codebase.FileCodebase.Reference (Id, Reference (DerivedId))
|
||||
import Unison.Codebase.FileCodebase.Referent (ConstructorId, Referent, pattern Con, pattern Ref)
|
||||
import Unison.ConstructorType (ConstructorType (Data, Effect))
|
||||
|
||||
-- The constructor is deliberately private; build values with the smart
-- constructors below.
newtype LabeledDependency = X (Either Reference Referent) deriving (Eq, Ord, Show)

derivedType, derivedTerm :: Id -> LabeledDependency
typeRef, termRef :: Reference -> LabeledDependency
referent :: Referent -> LabeledDependency
dataConstructor :: Reference -> ConstructorId -> LabeledDependency
effectConstructor :: Reference -> ConstructorId -> LabeledDependency

derivedType id = typeRef (DerivedId id)
derivedTerm id = termRef (DerivedId id)
typeRef r = X (Left r)
termRef r = X (Right (Ref r))
referent r = X (Right r)
dataConstructor r cid = referent (Con r cid Data)
effectConstructor r cid = referent (Con r cid Effect)
|
||||
|
||||
-- | The labeled dependencies for a collection of referents.
referents :: Foldable f => f Referent -> Set LabeledDependency
referents = Set.fromList . fmap referent . toList
|
||||
|
||||
-- | Case analysis: apply the first function to a type reference, the second
-- to a term referent.
fold :: (Reference -> a) -> (Referent -> a) -> LabeledDependency -> a
fold onType onTerm (X e) = either onType onTerm e
|
||||
|
||||
-- | Split a collection into its type references and its term referents.
partition :: Foldable t => t LabeledDependency -> ([Reference], [Referent])
partition lds = partitionEithers [e | X e <- toList lds]
|
||||
|
||||
-- | Project to the underlying reference: 'Left' for a type reference, 'Right'
-- for a term reference.  A constructor's dependency is its type ('Left').
toReference :: LabeledDependency -> Either Reference Reference
toReference ld = case ld of
  X (Left r) -> Left r
  X (Right (Ref r)) -> Right r
  X (Right (Con r _ _)) -> Left r
|
@ -0,0 +1,80 @@
|
||||
module Unison.Codebase.FileCodebase.Metadata where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Util.List as List
|
||||
import Unison.Util.Relation (Relation)
|
||||
import qualified Unison.Util.Relation as R
|
||||
import qualified Unison.Util.Relation3 as R3
|
||||
import Unison.Util.Relation4 (Relation4)
|
||||
import qualified Unison.Util.Relation4 as R4
|
||||
import Unison.Util.Star3 (Star3)
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
|
||||
-- | The 'Reference' of a metadata value's *type*.
type Type = Reference

-- | The 'Reference' of a metadata value itself.
type Value = Reference

-- keys can be terms or types
-- | Metadata values grouped by the reference of their type.
type Metadata = Map Type (Set Value)

-- `a` is generally the type of references or hashes
-- `n` is generally the type of name associated with the references
-- `Type` is the type of metadata. Duplicate info to speed up certain queries.
-- `(Type, Value)` is the metadata value itself along with its type.
type Star a n = Star3 a n Type (Type, Value)

-- | Flat four-way relation form of 'Star' (no duplicated type index).
type R4 a n = R4.Relation4 a n Type Value
|
||||
|
||||
-- | Flatten a 'Star' into a 'Relation4', dropping the duplicated
-- type-only index and keeping one (subject, name, type, value) row.
starToR4 :: (Ord r, Ord n) => Star r n -> Relation4 r n Type Value
starToR4 star = R4.fromList rows
  where rows = [ (r, n, t, v) | (r, n, _, (t, v)) <- Star3.toList star ]

-- | Does @a@ carry the metadata value @v@ at type @t@?
hasMetadata :: Ord a => a -> Type -> Value -> Star a n -> Bool
hasMetadata a t v star = (t, v) `Set.member` R.lookupDom a (Star3.d3 star)

-- | Does @a@ carry any metadata of type @t@, per the 'R4' representation?
hasMetadataWithType' :: Ord a => a -> Type -> R4 a n -> Bool
hasMetadataWithType' a t r = case Map.lookup a (R4.d1 r) of
  Nothing  -> False
  Just rel -> t `Set.member` R3.d2s rel

-- | Does @a@ carry any metadata of type @t@? Answered from the d2 index.
hasMetadataWithType :: Ord a => a -> Type -> Star a n -> Bool
hasMetadataWithType a t star = t `Set.member` R.lookupDom a (Star3.d2 star)

-- | Insert a batch of (subject, type, value) metadata triples.
inserts :: (Ord a, Ord n) => [(a, Type, Value)] -> Star a n -> Star a n
inserts triples star = foldl' (\acc triple -> insert triple acc) star triples

-- | Insert every value of a 'Metadata' map as metadata of @a@.
insertWithMetadata
  :: (Ord a, Ord n) => (a, Metadata) -> Star a n -> Star a n
insertWithMetadata (a, md) star =
  inserts [ (a, ty, v) | (ty, vs) <- Map.toList md, v <- toList vs ] star

-- | Insert one triple, updating both the type (d2) and typed-value (d3)
-- indices in a single 'Star3.insertD23' call.
insert :: (Ord a, Ord n) => (a, Type, Value) -> Star a n -> Star a n
insert (a, ty, v) star = Star3.insertD23 (a, ty, (ty, v)) star
|
||||
|
||||
-- | Remove one metadata triple. Besides deleting from the typed-value
-- (d3) index, this also clears the (a, ty) entry from the type-only (d2)
-- index when the deleted pair was the last metadata of type @ty@ on @a@.
delete :: (Ord a, Ord n) => (a, Type, Value) -> Star a n -> Star a n
delete (a, ty, v) s = let
  s' = Star3.deleteD3 (a, (ty,v)) s
  -- if (ty,v) is the last metadata of type ty
  -- we also delete (a, ty) from the d2 index
  -- note: deliberately looks up the PRE-deletion star @s@, so @v@ itself
  -- is still present; hence the "all (== v)" check below rather than "null".
  metadataByType = List.multimap (toList (R.lookupDom a (Star3.d3 s)))
  in
  case Map.lookup ty metadataByType of
    Just vs | all (== v) vs -> Star3.deleteD2 (a, ty) s'
    _ -> s'
|
||||
|
||||
-- | Parallel composition: values at matching types are unioned.
-- Commutative and associative.
merge :: Metadata -> Metadata -> Metadata
merge a b = Map.unionWith (<>) a b

-- | Sequential composition: where both maps define a type, the
-- right-hand side wins.
append :: Metadata -> Metadata -> Metadata
append a b = Map.unionWith (\_older newer -> newer) a b

-- | No metadata at all.
empty :: Metadata
empty = Map.empty

-- | A single metadata value at the given type.
singleton :: Type -> Value -> Metadata
singleton ty v = Map.singleton ty (Set.singleton v)

-- | Project out just the (subject, name) relation, ignoring metadata.
toRelation :: Star3 a n x y -> Relation a n
toRelation = Star3.d1
|
136
parser-typechecker/src/Unison/Codebase/FileCodebase/Patch.hs
Normal file
136
parser-typechecker/src/Unison/Codebase/FileCodebase/Patch.hs
Normal file
@ -0,0 +1,136 @@
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE RecordWildCards #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Patch where
|
||||
|
||||
import Unison.Prelude hiding (empty)
|
||||
|
||||
import Prelude hiding (head,read,subtract)
|
||||
|
||||
import Control.Lens hiding (children, cons, transform)
|
||||
import qualified Data.Set as Set
|
||||
import Unison.Codebase.FileCodebase.LabeledDependency (LabeledDependency)
|
||||
import qualified Unison.Codebase.FileCodebase.LabeledDependency as LD
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import Unison.Codebase.FileCodebase.TermEdit (TermEdit, Typing (Same))
|
||||
import qualified Unison.Codebase.FileCodebase.TermEdit as TermEdit
|
||||
import Unison.Codebase.FileCodebase.TypeEdit (TypeEdit)
|
||||
import qualified Unison.Codebase.FileCodebase.TypeEdit as TypeEdit
|
||||
import Unison.Hashable (Hashable)
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Util.Relation (Relation)
|
||||
import qualified Unison.Util.Relation as R
|
||||
|
||||
-- | A collection of term and type edits (replacements/deprecations),
-- keyed by the reference being edited.
data Patch = Patch
  { _termEdits :: Relation Reference TermEdit
  , _typeEdits :: Relation Reference TypeEdit
  } deriving (Eq, Ord, Show)

-- | The difference of two 'Patch'es: edits added by the newer patch and
-- edits removed relative to the older one. See 'diff'.
data PatchDiff = PatchDiff
  { _addedTermEdits :: Relation Reference TermEdit
  , _addedTypeEdits :: Relation Reference TypeEdit
  , _removedTermEdits :: Relation Reference TermEdit
  , _removedTypeEdits :: Relation Reference TypeEdit
  } deriving (Eq, Ord, Show)

-- Template Haskell lens generation for the records above
-- (provides termEdits/typeEdits etc. used by 'diff').
makeLenses ''Patch
makeLenses ''PatchDiff
|
||||
|
||||
-- | Compute the edits present in @new@ but not @old@ (added) and those
-- present in @old@ but not @new@ (removed).
diff :: Patch -> Patch -> PatchDiff
diff new old = PatchDiff
  { _addedTermEdits   = view termEdits new `R.difference` view termEdits old
  , _addedTypeEdits   = view typeEdits new `R.difference` view typeEdits old
  , _removedTypeEdits = view typeEdits old `R.difference` view typeEdits new
  , _removedTermEdits = view termEdits old `R.difference` view termEdits new
  }

-- | Every reference mentioned by the patch, labeled as term or type:
-- the edited references (relation domains) plus the replacement targets
-- reachable through each edit's 'references'.
labeledDependencies :: Patch -> Set LabeledDependency
labeledDependencies Patch {..} =
  termSources <> termTargets <> typeSources <> typeTargets
  where
    termSources = Set.map LD.termRef (R.dom _termEdits)
    termTargets = Set.fromList
      (fmap LD.termRef $ TermEdit.references =<< toList (R.ran _termEdits))
    typeSources = Set.map LD.typeRef (R.dom _typeEdits)
    typeTargets = Set.fromList
      (fmap LD.typeRef $ TypeEdit.references =<< toList (R.ran _typeEdits))

-- | The patch with no edits.
empty :: Patch
empty = Patch mempty mempty

-- | True when the patch contains no term or type edits.
isEmpty :: Patch -> Bool
isEmpty p = p == empty

-- | Every reference appearing on either side of a Replace edit.
allReferences :: Patch -> Set Reference
allReferences p = typeReferences p <> termReferences p
  where
    typeReferences q = Set.fromList $ do
      (old, TypeEdit.Replace new) <- R.toList (_typeEdits q)
      [old, new]
    termReferences q = Set.fromList $ do
      (old, TermEdit.Replace new _) <- R.toList (_termEdits q)
      [old, new]

-- | Returns the set of references which are the target of an arrow in the patch
allReferenceTargets :: Patch -> Set Reference
allReferenceTargets p = typeReferences p <> termReferences p
  where
    typeReferences q = Set.fromList
      [ new | (_, TypeEdit.Replace new) <- R.toList (_typeEdits q) ]
    termReferences q = Set.fromList
      [ new | (_, TermEdit.Replace new _) <- R.toList (_termEdits q) ]
|
||||
|
||||
-- | Point @r@ at the target of @edit@, repointing any existing edits that
-- targeted @r@ so the patch stays collapsed (no chains through @r@).
-- @typing@ recomputes the typing relationship for each repointed edit.
updateTerm :: (Reference -> Reference -> Typing)
           -> Reference -> TermEdit -> Patch -> Patch
updateTerm typing r edit p =
  -- get D ~= lookupRan r
  -- for each d ∈ D, remove (d, r) and add (d, r')
  -- add (r, r') and remove (r', r')
  let
    -- drop a self-edit (r', r' -> r') that repointing may have created
    -- NOTE(review): deletes the exact pair with typing Same — assumes any
    -- such self-edit carries Same; confirm against R.map f below
    deleteCycle = case edit of
      TermEdit.Deprecate -> id
      TermEdit.Replace r' _ -> R.delete r' (TermEdit.Replace r' Same)
    edits' :: Relation Reference TermEdit
    edits' = deleteCycle . R.insert r edit . R.map f $ _termEdits p
    -- repoint edits that targeted r to the new target (or deprecate them)
    f (x, TermEdit.Replace y _) | y == r = case edit of
      TermEdit.Replace r' _ -> (x, TermEdit.Replace r' (typing x r'))
      TermEdit.Deprecate -> (x, TermEdit.Deprecate)
    f p = p  -- all other edits pass through unchanged
  in p { _termEdits = edits' }

-- | Type analogue of 'updateTerm'; no typing callback since 'TypeEdit'
-- carries no typing information.
updateType :: Reference -> TypeEdit -> Patch -> Patch
updateType r edit p =
  let
    -- drop a self-edit that repointing may have created
    deleteCycle = case edit of
      TypeEdit.Deprecate -> id
      TypeEdit.Replace r' -> R.delete r' (TypeEdit.Replace r')
    edits' :: Relation Reference TypeEdit
    edits' = deleteCycle . R.insert r edit . R.map f $ _typeEdits p
    -- repoint edits that targeted r to the new target (or deprecate them)
    f (x, TypeEdit.Replace y) | y == r = case edit of
      TypeEdit.Replace r' -> (x, TypeEdit.Replace r')
      TypeEdit.Deprecate -> (x, TypeEdit.Deprecate)
    f p = p  -- all other edits pass through unchanged
  in p { _typeEdits = edits' }

-- | Restrict the patch to references that have more than one outgoing edit.
conflicts :: Patch -> Patch
conflicts Patch{..} =
  Patch (R.filterManyDom _termEdits) (R.filterManyDom _typeEdits)

instance Semigroup Patch where
  a <> b = Patch (_termEdits a <> _termEdits b)
                 (_typeEdits a <> _typeEdits b)

instance Monoid Patch where
  mappend = (<>)
  mempty = Patch mempty mempty

-- Hash format: the two relations are hashed independently and combined;
-- do not reorder or merge these tokens (it would change stored hashes).
instance Hashable Patch where
  tokens e = [ H.Hashed (H.accumulate (H.tokens (_termEdits e))),
               H.Hashed (H.accumulate (H.tokens (_typeEdits e))) ]

instance Semigroup PatchDiff where
  a <> b = PatchDiff
    { _addedTermEdits = _addedTermEdits a <> _addedTermEdits b
    , _addedTypeEdits = _addedTypeEdits a <> _addedTypeEdits b
    , _removedTermEdits = _removedTermEdits a <> _removedTermEdits b
    , _removedTypeEdits = _removedTypeEdits a <> _removedTypeEdits b
    }

instance Monoid PatchDiff where
  mappend = (<>)
  mempty = PatchDiff mempty mempty mempty mempty
|
165
parser-typechecker/src/Unison/Codebase/FileCodebase/Pattern.hs
Normal file
165
parser-typechecker/src/Unison/Codebase/FileCodebase/Pattern.hs
Normal file
@ -0,0 +1,165 @@
|
||||
{-# Language DeriveTraversable, DeriveGeneric, PatternSynonyms, OverloadedStrings #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Pattern where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Data.Foldable as Foldable hiding (foldMap')
|
||||
import Data.List (intercalate)
|
||||
import qualified Data.Set as Set
|
||||
import Unison.Codebase.FileCodebase.LabeledDependency (LabeledDependency)
|
||||
import qualified Unison.Codebase.FileCodebase.LabeledDependency as LD
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Type as Type
|
||||
import qualified Unison.Hashable as H
|
||||
|
||||
-- | Index of a constructor within its data/effect declaration.
type ConstructorId = Int

-- | A match-case pattern, annotated at every node with a location @loc@.
-- Every constructor stores its @loc@ as the FIRST field — 'loc' below
-- relies on this ordering via the derived 'Foldable' instance.
data Pattern loc
  = Unbound loc
  | Var loc
  | Boolean loc !Bool
  | Int loc !Int64
  | Nat loc !Word64
  | Float loc !Double
  | Text loc !Text
  | Char loc !Char
  | Constructor loc !Reference !Int [Pattern loc]
  | As loc (Pattern loc)
  | EffectPure loc (Pattern loc)
  | EffectBind loc !Reference !Int [Pattern loc] (Pattern loc)
  | SequenceLiteral loc [Pattern loc]
  | SequenceOp loc (Pattern loc) !SeqOp (Pattern loc)
    deriving (Ord,Generic,Functor,Foldable,Traversable)

-- | Sequence pattern operators: cons (x +: xs), snoc (xs :+ x), concat.
data SeqOp = Cons
           | Snoc
           | Concat
           deriving (Eq, Show, Ord, Generic)

-- Hash format: one tag per operator; do not renumber.
instance H.Hashable SeqOp where
  tokens Cons = [H.Tag 0]
  tokens Snoc = [H.Tag 1]
  tokens Concat = [H.Tag 2]

-- Debug-style Show that omits locations.
-- NOTE(review): SequenceLiteral and SequenceOp both render as "Sequence ",
-- so their Show output is not distinguishable by the leading word.
instance Show (Pattern loc) where
  show (Unbound _ ) = "Unbound"
  show (Var _ ) = "Var"
  show (Boolean _ x) = "Boolean " <> show x
  show (Int _ x) = "Int " <> show x
  show (Nat _ x) = "Nat " <> show x
  show (Float _ x) = "Float " <> show x
  show (Text _ t) = "Text " <> show t
  show (Char _ c) = "Char " <> show c
  show (Constructor _ r i ps) =
    "Constructor " <> unwords [show r, show i, show ps]
  show (As _ p) = "As " <> show p
  show (EffectPure _ k) = "EffectPure " <> show k
  show (EffectBind _ r i ps k) =
    "EffectBind " <> unwords [show r, show i, show ps, show k]
  show (SequenceLiteral _ ps) = "Sequence " <> intercalate ", " (fmap show ps)
  show (SequenceOp _ ph op pt) = "Sequence " <> show ph <> " " <> show op <> " " <> show pt

-- | True for a constructor pattern applied to at least one argument.
application :: Pattern loc -> Bool
application (Constructor _ _ _ (_ : _)) = True
application _ = False

-- | The location annotation of a pattern.
-- Safe use of 'head': every constructor's first field is its @loc@, so
-- the derived Foldable's toList is never empty.
loc :: Pattern loc -> loc
loc p = head $ Foldable.toList p

-- | Replace only the TOP-LEVEL location annotation of a pattern; the
-- explicit cases preserve subpattern locations (a plain fmap would
-- overwrite them all, hence the per-constructor handling).
setLoc :: Pattern loc -> loc -> Pattern loc
setLoc p loc = case p of
  EffectBind _ a b c d -> EffectBind loc a b c d
  EffectPure _ a -> EffectPure loc a
  As _ a -> As loc a
  Constructor _ a b c -> Constructor loc a b c
  SequenceLiteral _ ps -> SequenceLiteral loc ps
  SequenceOp _ ph op pt -> SequenceOp loc ph op pt
  x -> fmap (const loc) x  -- leaf patterns: only the loc field to replace

-- Hash format: locations are excluded; tags/token order must not change.
instance H.Hashable (Pattern p) where
  tokens (Unbound _) = [H.Tag 0]
  tokens (Var _) = [H.Tag 1]
  tokens (Boolean _ b) = H.Tag 2 : [H.Tag $ if b then 1 else 0]
  tokens (Int _ n) = H.Tag 3 : [H.Int n]
  tokens (Nat _ n) = H.Tag 4 : [H.Nat n]
  tokens (Float _ f) = H.Tag 5 : H.tokens f
  tokens (Constructor _ r n args) =
    [H.Tag 6, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args]
  tokens (EffectPure _ p) = H.Tag 7 : H.tokens p
  tokens (EffectBind _ r n args k) =
    [H.Tag 8, H.accumulateToken r, H.Nat $ fromIntegral n, H.accumulateToken args, H.accumulateToken k]
  tokens (As _ p) = H.Tag 9 : H.tokens p
  tokens (Text _ t) = H.Tag 10 : H.tokens t
  tokens (SequenceLiteral _ ps) = H.Tag 11 : concatMap H.tokens ps
  tokens (SequenceOp _ l op r) = H.Tag 12 : H.tokens op ++ H.tokens l ++ H.tokens r
  tokens (Char _ c) = H.Tag 13 : H.tokens c

-- Structural equality that IGNORES location annotations (unlike the
-- derived Ord, which compares locations too — beware the mismatch).
instance Eq (Pattern loc) where
  Unbound _ == Unbound _ = True
  Var _ == Var _ = True
  Boolean _ b == Boolean _ b2 = b == b2
  Int _ n == Int _ m = n == m
  Nat _ n == Nat _ m = n == m
  Float _ f == Float _ g = f == g
  Constructor _ r n args == Constructor _ s m brgs = r == s && n == m && args == brgs
  EffectPure _ p == EffectPure _ q = p == q
  EffectBind _ r ctor ps k == EffectBind _ r2 ctor2 ps2 k2 = r == r2 && ctor == ctor2 && ps == ps2 && k == k2
  As _ p == As _ q = p == q
  Text _ t == Text _ t2 = t == t2
  SequenceLiteral _ ps == SequenceLiteral _ ps2 = ps == ps2
  SequenceOp _ ph op pt == SequenceOp _ ph2 op2 pt2 = ph == ph2 && op == op2 && pt == pt2
  _ == _ = False

-- | Fold over a pattern and ALL its subpatterns (pre-order), unlike the
-- derived Foldable which folds over the @loc@ annotations.
foldMap' :: Monoid m => (Pattern loc -> m) -> Pattern loc -> m
foldMap' f p = case p of
  Unbound _ -> f p
  Var _ -> f p
  Boolean _ _ -> f p
  Int _ _ -> f p
  Nat _ _ -> f p
  Float _ _ -> f p
  Text _ _ -> f p
  Char _ _ -> f p
  Constructor _ _ _ ps -> f p <> foldMap (foldMap' f) ps
  As _ p' -> f p <> foldMap' f p'
  EffectPure _ p' -> f p <> foldMap' f p'
  EffectBind _ _ _ ps p' -> f p <> foldMap (foldMap' f) ps <> foldMap' f p'
  SequenceLiteral _ ps -> f p <> foldMap (foldMap' f) ps
  SequenceOp _ p1 _ p2 -> f p <> foldMap' f p1 <> foldMap' f p2

-- | Collect the dependencies of a pattern, with one injection per kind
-- of dependency: literal types, data constructors/types, and effect
-- constructors/types. Recursion into subpatterns comes from 'foldMap'';
-- the As case yields mempty here because its subpattern is visited anyway.
generalizedDependencies
  :: Ord r
  => (Reference -> r)
  -> (Reference -> ConstructorId -> r)
  -> (Reference -> r)
  -> (Reference -> ConstructorId -> r)
  -> (Reference -> r)
  -> Pattern loc
  -> Set r
generalizedDependencies literalType dataConstructor dataType effectConstructor effectType
  = Set.fromList . foldMap'
    (\case
      Unbound _ -> mempty
      Var _ -> mempty
      As _ _ -> mempty
      Constructor _ r cid _ -> [dataType r, dataConstructor r cid]
      EffectPure _ _ -> [effectType Type.effectRef]
      EffectBind _ r cid _ _ ->
        [effectType Type.effectRef, effectType r, effectConstructor r cid]
      SequenceLiteral _ _ -> [literalType Type.listRef]
      SequenceOp {} -> [literalType Type.listRef]
      Boolean _ _ -> [literalType Type.booleanRef]
      Int _ _ -> [literalType Type.intRef]
      Nat _ _ -> [literalType Type.natRef]
      Float _ _ -> [literalType Type.floatRef]
      Text _ _ -> [literalType Type.textRef]
      Char _ _ -> [literalType Type.charRef]
    )

-- | 'generalizedDependencies' specialized to 'LabeledDependency':
-- constructors map to constructor labels, everything else to type refs.
labeledDependencies :: Pattern loc -> Set LabeledDependency
labeledDependencies = generalizedDependencies LD.typeRef
                                              LD.dataConstructor
                                              LD.typeRef
                                              LD.effectConstructor
                                              LD.typeRef
|
192
parser-typechecker/src/Unison/Codebase/FileCodebase/Reference.hs
Normal file
192
parser-typechecker/src/Unison/Codebase/FileCodebase/Reference.hs
Normal file
@ -0,0 +1,192 @@
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Reference
|
||||
(Reference,
|
||||
pattern Builtin,
|
||||
pattern Derived,
|
||||
pattern DerivedId,
|
||||
Id(..),
|
||||
Pos,
|
||||
Size,
|
||||
derivedBase32Hex,
|
||||
Component, members,
|
||||
components,
|
||||
groupByComponent,
|
||||
componentFor,
|
||||
unsafeFromText,
|
||||
idFromText,
|
||||
isPrefixOf,
|
||||
fromShortHash,
|
||||
fromText,
|
||||
readSuffix,
|
||||
showShort,
|
||||
showSuffix,
|
||||
toId,
|
||||
toText,
|
||||
unsafeId,
|
||||
toShortHash,
|
||||
idToShortHash) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Text as Text
|
||||
import qualified Unison.Hash as H
|
||||
import Unison.Hashable as Hashable
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import Data.Char (isDigit)
|
||||
|
||||
-- | Either a builtin or a user defined (hashed) top-level declaration.
|
||||
--
|
||||
-- Used for both terms and types. Doesn't distinguish between them.
|
||||
--
|
||||
-- Other used defined things like local variables don't get @Reference@s.
|
||||
data Reference
  = Builtin Text.Text
  -- `Derived` can be part of a strongly connected component.
  -- The `Pos` refers to a particular element of the component
  -- and the `Size` is the number of elements in the component.
  -- Using an ugly name so no one is tempted to use this directly;
  -- match with the 'Derived' pattern instead.
  | DerivedId Id deriving (Eq,Ord,Generic)

-- | Convenience pattern exposing a derived reference's hash, position,
-- and component size without going through 'Id'.
pattern Derived :: H.Hash -> Pos -> Size -> Reference
pattern Derived h i n = DerivedId (Id h i n)

{-# COMPLETE Builtin, Derived #-}

-- | @Pos@ is a position into a cycle of size @Size@, as cycles are hashed together.
data Id = Id H.Hash Pos Size deriving (Generic)

-- | Partial: errors on a 'Builtin'. Callers must know the reference is derived.
unsafeId :: Reference -> Id
unsafeId (Builtin b) =
  error $ "Tried to get the hash of builtin " <> Text.unpack b <> "."
unsafeId (DerivedId x) = x

idToShortHash :: Id -> ShortHash
idToShortHash = toShortHash . DerivedId

-- todo: move these to ShortHash module?
-- but Show Reference currently depends on SH
-- | Render as a 'ShortHash'. A singleton component (size 1) omits the
-- cycle suffix entirely.
toShortHash :: Reference -> ShortHash
toShortHash (Builtin b) = SH.Builtin b
toShortHash (Derived h _ 1) = SH.ShortHash (H.base32Hex h) Nothing Nothing
toShortHash (Derived h i n) = SH.ShortHash (H.base32Hex h) index Nothing
  where
    -- todo: remove `n` parameter; must also update readSuffix
    index = Just $ showSuffix i n

-- toShortHash . fromJust . fromShortHash == id and
-- fromJust . fromShortHash . toShortHash == id
-- but for arbitrary ShortHashes which may be broken at the wrong boundary, it
-- may not be possible to base32Hex decode them. These will return Nothing.
-- Also, ShortHashes that include constructor ids will return Nothing;
-- try Referent.fromShortHash
fromShortHash :: ShortHash -> Maybe Reference
fromShortHash (SH.Builtin b) = Just (Builtin b)
fromShortHash (SH.ShortHash prefix cycle Nothing) = do
  h <- H.fromBase32Hex prefix
  case cycle of
    -- no cycle suffix means position 0 of a singleton component
    Nothing -> Just (Derived h 0 1)
    Just t -> case Text.splitOn "c" t of
      [i,n] -> Derived h <$> readMay (Text.unpack i) <*> readMay (Text.unpack n)
      _ -> Nothing
fromShortHash (SH.ShortHash _prefix _cycle (Just _cid)) = Nothing
|
||||
|
||||
-- (3,10) encoded as "3c10"
-- (0,93) encoded as "0c93"
-- | Render a component position and size as a cycle suffix.
showSuffix :: Pos -> Size -> Text
showSuffix i n = Text.pack $ show i <> "c" <> show n

-- todo: don't read or return size; must also update showSuffix and fromText
-- | Inverse of 'showSuffix'. Rejects (Left) any input where either side
-- of the "c" separator is empty or non-numeric. The previous guard,
-- @Text.all isDigit@, is vacuously True on the empty string, so inputs
-- like "c10" or "345" (no "c") reached @read ""@ and crashed; the
-- added non-null checks make this total.
readSuffix :: Text -> Either String (Pos, Size)
readSuffix t = case Text.breakOn "c" t of
  (pos, Text.drop 1 -> size)
    | not (Text.null pos) && not (Text.null size)
    , Text.all isDigit pos && Text.all isDigit size ->
      -- safe: both strings are non-empty and all digits, so `read` succeeds
      Right (read (Text.unpack pos), read (Text.unpack size))
  _ -> Left "suffix decoding error"
|
||||
|
||||
-- | Does the short hash prefix-match this reference's full short hash?
isPrefixOf :: ShortHash -> Reference -> Bool
isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r)

-- | Full textual form, via the ShortHash rendering.
toText :: Reference -> Text
toText = SH.toText . toShortHash

-- | Textual form truncated to @numHashChars@ hash characters.
showShort :: Int -> Reference -> Text
showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash

type Pos = Word64
type Size = Word64

-- | The set of references making up one dependency cycle.
newtype Component = Component { members :: Set Reference }

-- Gives the component (dependency cycle) that the reference is a part of
componentFor :: Reference -> Component
componentFor b@Builtin {} = Component (Set.singleton b)
componentFor (Derived h _ n) =
  Component $ Set.fromList [Derived h i n | i <- take (fromIntegral n) [0 ..]]

-- | Partial: errors (lazily, inside the Id) if the text is not valid
-- base32hex. Use with trusted input only.
derivedBase32Hex :: Text -> Pos -> Size -> Reference
derivedBase32Hex b32Hex i n = DerivedId (Id (fromMaybe msg h) i n)
  where
    msg = error $ "Reference.derivedBase32Hex " <> show h
    h = H.fromBase32Hex b32Hex

-- | Partial: errors on any input 'fromText' rejects.
unsafeFromText :: Text -> Reference
unsafeFromText = either error id . fromText

-- | Like 'fromText', but only succeeds for derived (non-builtin) references.
idFromText :: Text -> Maybe Id
idFromText s = case fromText s of
  Left _ -> Nothing
  Right (Builtin _) -> Nothing
  Right (DerivedId id) -> pure id

toId :: Reference -> Maybe Id
toId (DerivedId id) = Just id
toId Builtin{} = Nothing

-- examples:
-- `##Text.take` — builtins don’t have cycles
-- `#2tWjVAuc7` — derived, no cycle
-- `#y9ycWkiC1.y9` — derived, part of cycle
-- todo: take a (Reference -> CycleSize) so that `readSuffix` doesn't have to parse the size from the text.
fromText :: Text -> Either String Reference
fromText t = case Text.split (=='#') t of
  [_, "", b] -> Right (Builtin b)   -- "##name": two leading hashes
  [_, h] -> case Text.split (=='.') h of
    [hash] -> Right (derivedBase32Hex hash 0 1)
    [hash, suffix] -> uncurry (derivedBase32Hex hash) <$> readSuffix suffix
    _ -> bail
  _ -> bail
  where bail = Left $ "couldn't parse a Reference from " <> Text.unpack t

-- | Assign each key a position within a single component of the given hash.
component :: H.Hash -> [k] -> [(k, Id)]
component h ks = let
  size = fromIntegral (length ks)
  in [ (k, (Id h i size)) | (k, i) <- ks `zip` [0..]]

-- | 'component' applied across many (hash, keys) components.
components :: [(H.Hash, [k])] -> [(k, Id)]
components sccs = uncurry component =<< sccs

-- | Bucket keyed references by component: derived references group by
-- their hash, builtins each form their own singleton group.
groupByComponent :: [(k, Reference)] -> [[(k, Reference)]]
groupByComponent refs = done $ foldl' insert Map.empty refs
  where
    insert m (k, r@(Derived h _ _)) =
      Map.unionWith (<>) m (Map.fromList [(Right h, [(k,r)])])
    insert m (k, r) =
      Map.unionWith (<>) m (Map.fromList [(Left r, [(k,r)])])
    -- each group sorted by reference (position within the component)
    done m = sortOn snd <$> toList m

instance Show Id where show = SH.toString . SH.take 5 . toShortHash . DerivedId
instance Show Reference where show = SH.toString . SH.take 5 . toShortHash

-- Hash format: tags and token order define stored hashes; do not change.
instance Hashable.Hashable Reference where
  tokens (Builtin txt) = [Hashable.Tag 0, Hashable.Text txt]
  tokens (DerivedId (Id h i n)) = [Hashable.Tag 1, Hashable.Bytes (H.toBytes h), Hashable.Nat i, Hashable.Nat n]

-- | Two references mustn't differ in cycle length only:
-- Eq/Ord deliberately ignore the Size field.
instance Eq Id where x == y = compare x y == EQ
instance Ord Id where Id h i _ `compare` Id h2 i2 _ = compare h h2 <> compare i i2
|
@ -0,0 +1,21 @@
|
||||
module Unison.Codebase.FileCodebase.Reference.Util where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import Unison.Hashable (Hashable1)
|
||||
import Unison.ABT (Var)
|
||||
import qualified Unison.ABT as ABT
|
||||
import qualified Data.Map as Map
|
||||
|
||||
-- | Hash a strongly-connected component of terms together, returning for
-- each binding its assigned 'Reference.Id' alongside the original term.
-- @embedRef@ embeds a self-reference back into the term language so that
-- recursive occurrences can be replaced during hashing.
hashComponents ::
     (Functor f, Hashable1 f, Foldable f, Eq v, Show v, Var v)
  => (Reference.Id -> ABT.Term f v ())
  -> Map v (ABT.Term f v a)
  -> Map v (Reference.Id, ABT.Term f v a)
hashComponents embedRef tms =
  Map.fromList [ (v, (r,e)) | ((v,e), r) <- cs ]
  -- ABT.hashComponents yields (hash, bindings) per component;
  -- Reference.components assigns each binding its (Pos, Size) Id.
  where cs = Reference.components $ ABT.hashComponents ref tms
        ref h i n = embedRef (Reference.Id h i n)
|
||||
|
||||
|
124
parser-typechecker/src/Unison/Codebase/FileCodebase/Referent.hs
Normal file
124
parser-typechecker/src/Unison/Codebase/FileCodebase/Referent.hs
Normal file
@ -0,0 +1,124 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Referent where
|
||||
|
||||
import Unison.Prelude
|
||||
import Unison.Referent' ( Referent'(..), toReference' )
|
||||
|
||||
import qualified Data.Char as Char
|
||||
import qualified Data.Text as Text
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as R
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
|
||||
import Unison.ConstructorType (ConstructorType)
|
||||
import qualified Unison.ConstructorType as CT
|
||||
|
||||
-- | Specifies a term.
|
||||
--
|
||||
-- Either a term 'Reference', a data constructor, or an effect constructor.
|
||||
--
|
||||
-- Slightly odd naming. This is the "referent of term name in the codebase",
|
||||
-- rather than the target of a Reference.
|
||||
-- | 'Referent'' specialized to this codebase format's 'Reference'.
type Referent = Referent' Reference

-- | Index of a constructor within its declaration.
type ConstructorId = Int

-- Pattern synonyms hiding the underlying Referent' constructors.
pattern Ref :: Reference -> Referent
pattern Ref r = Ref' r
pattern Con :: Reference -> ConstructorId -> ConstructorType -> Referent
pattern Con r i t = Con' r i t
{-# COMPLETE Ref, Con #-}

-- | Cannot be a builtin.
type Id = Referent' R.Id

-- referentToTerm moved to Term.fromReferent
-- termToReferent moved to Term.toReferent

-- todo: move these to ShortHash module
-- | Render as a short hash; constructors get a cid suffix.
toShortHash :: Referent -> ShortHash
toShortHash = \case
  Ref r -> R.toShortHash r
  Con r i _ -> patternShortHash r i

toShortHashId :: Id -> ShortHash
toShortHashId = toShortHash . fromId

-- also used by HashQualified.fromPattern
-- | The type reference's short hash with the constructor id attached.
patternShortHash :: Reference -> ConstructorId -> ShortHash
patternShortHash r i = (R.toShortHash r) { SH.cid = Just . Text.pack $ show i }

-- | Textual form truncated to @numHashChars@ hash characters.
showShort :: Int -> Referent -> Text
showShort numHashChars = SH.toText . SH.take numHashChars . toShortHash

-- | Full textual form: constructors render as @ref#<T><cid>@ where <T>
-- is "a" (effect/ability) or "d" (data) — see 'ctorTypeText'.
toText :: Referent -> Text
toText = \case
  Ref r -> R.toText r
  Con r cid ct -> R.toText r <> "#" <> ctorTypeText ct <> Text.pack (show cid)

-- | One-character tag distinguishing effect from data constructors.
ctorTypeText :: CT.ConstructorType -> Text
ctorTypeText CT.Effect = EffectCtor
ctorTypeText CT.Data = DataCtor

-- Text pattern synonyms (OverloadedStrings) used both for rendering in
-- 'ctorTypeText' and for matching in 'fromText'.
pattern EffectCtor = "a"
pattern DataCtor = "d"

toString :: Referent -> String
toString = Text.unpack . toText

isConstructor :: Referent -> Bool
isConstructor Con{} = True
isConstructor _ = False

-- | The term reference, if this is a plain term (not a constructor).
toTermReference :: Referent -> Maybe Reference
toTermReference = \case
  Ref r -> Just r
  _ -> Nothing

toReference :: Referent -> Reference
toReference = toReference'

fromId :: Id -> Referent
fromId = fmap R.DerivedId

-- | The TYPE reference, if this is a constructor (a Con's reference is
-- the reference of its declaring type).
toTypeReference :: Referent -> Maybe Reference
toTypeReference = \case
  Con r _i _t -> Just r
  _ -> Nothing

-- | Does the short hash prefix-match this referent's full short hash?
isPrefixOf :: ShortHash -> Referent -> Bool
isPrefixOf sh r = SH.isPrefixOf sh (toShortHash r)
|
||||
|
||||
-- | Partial: errors on any input 'fromText' rejects.
unsafeFromText :: Text -> Referent
unsafeFromText = fromMaybe (error "invalid referent") . fromText

-- #abc[.xy][#<T>cid]
-- | Parse a 'Referent': a bare reference, or a reference followed by
-- @#<T>cid@ where <T> is 'EffectCtor'/'DataCtor' and cid is a decimal
-- constructor id.
fromText :: Text -> Maybe Referent
fromText t = either (const Nothing) Just $
  -- if the string has just one hash at the start, it's just a reference
  if Text.length refPart == 1 then
    Ref <$> R.fromText t
  -- require a NON-EMPTY digit string: `Text.all Char.isDigit` alone is
  -- vacuously True on "", which previously let inputs like "#abc#d"
  -- reach `read ""` and crash instead of failing cleanly
  else if not (Text.null cidPart) && Text.all Char.isDigit cidPart then do
    r <- R.fromText (Text.dropEnd 1 refPart)
    ctorType <- ctorType
    -- safe: cidPart is non-empty and all digits, so `read` succeeds
    let cid = read (Text.unpack cidPart)
    pure $ Con r cid ctorType
  else
    Left ("invalid constructor id: " <> Text.unpack cidPart)
  where
  -- the first character after the last '#' tags the constructor type
  ctorType = case Text.take 1 cidPart' of
    EffectCtor -> Right CT.Effect
    DataCtor -> Right CT.Data
    _otherwise ->
      Left ("invalid constructor type (expected '"
        <> EffectCtor <> "' or '" <> DataCtor <> "'): " <> Text.unpack cidPart')
  refPart = Text.dropWhileEnd (/= '#') t
  cidPart' = Text.takeWhileEnd (/= '#') t
  cidPart = Text.drop 1 cidPart'

-- | Eliminate a 'Referent'' with one handler per constructor.
fold :: (r -> a) -> (r -> ConstructorId -> ConstructorType -> a) -> Referent' r -> a
fold fr fc = \case
  Ref' r -> fr r
  Con' r i ct -> fc r i ct
||||
|
@ -1,77 +1,81 @@
|
||||
{-# LANGUAGE Strict #-}
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
|
||||
module Unison.Codebase.Serialization.V1 where
|
||||
module Unison.Codebase.FileCodebase.Serialization.V1
|
||||
( formatSymbol,
|
||||
getBranchDependencies,
|
||||
getCausal0,
|
||||
getRawBranch,
|
||||
getEdits,
|
||||
putRawCausal,
|
||||
putRawBranch,
|
||||
putEdits,
|
||||
getTerm,
|
||||
getType,
|
||||
putTerm,
|
||||
putType,
|
||||
getEither,
|
||||
getEffectDeclaration,
|
||||
getDataDeclaration,
|
||||
putEither,
|
||||
putEffectDeclaration,
|
||||
putDataDeclaration,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Prelude hiding (getChar, putChar)
|
||||
|
||||
import Basement.Block (Block)
|
||||
|
||||
-- import qualified Data.Text as Text
|
||||
import qualified Unison.Pattern as Pattern
|
||||
import Unison.Pattern ( Pattern
|
||||
, SeqOp
|
||||
)
|
||||
import Data.Bits ( Bits )
|
||||
import Data.Bytes.Get as Ser
|
||||
import Data.Bytes.Put as Ser
|
||||
import Data.Bytes.Serial ( serialize
|
||||
, deserialize
|
||||
, serializeBE
|
||||
, deserializeBE
|
||||
)
|
||||
import qualified Data.ByteArray as BA
|
||||
import Data.Bytes.Signed ( Unsigned )
|
||||
import Data.Bytes.VarInt ( VarInt(..) )
|
||||
import qualified Data.Map as Map
|
||||
import Data.List ( elemIndex
|
||||
)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Data.Bits (Bits)
|
||||
import qualified Data.ByteString as B
|
||||
import Data.Bytes.Get as Ser
|
||||
import Data.Bytes.Put as Ser
|
||||
import Data.Bytes.Serial (deserialize, deserializeBE, serialize, serializeBE)
|
||||
import Data.Bytes.Signed (Unsigned)
|
||||
import Data.Bytes.VarInt (VarInt (..))
|
||||
import Data.List (elemIndex)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Sequence as Sequence
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.ABT as ABT
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import Unison.Codebase.Causal (Raw (..), RawHash (..), unRawHash)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.FileCodebase.Branch.Dependencies as BD
|
||||
import Unison.Codebase.Causal ( Raw(..)
|
||||
, RawHash(..)
|
||||
, unRawHash
|
||||
)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Metadata as Metadata
|
||||
import Unison.NameSegment as NameSegment
|
||||
import Unison.Codebase.Patch ( Patch(..) )
|
||||
import qualified Unison.Codebase.Patch as Patch
|
||||
import Unison.Codebase.TermEdit ( TermEdit )
|
||||
import Unison.Codebase.TypeEdit ( TypeEdit )
|
||||
import Unison.Hash ( Hash )
|
||||
import Unison.Kind ( Kind )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Symbol ( Symbol(..) )
|
||||
import Unison.Term ( Term )
|
||||
import qualified Data.ByteString as B
|
||||
import qualified Data.Sequence as Sequence
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.ABT as ABT
|
||||
import qualified Unison.Codebase.TermEdit as TermEdit
|
||||
import qualified Unison.Codebase.TypeEdit as TypeEdit
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import Unison.Codebase.FileCodebase.Referent (Referent)
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import Unison.Codebase.FileCodebase.Term (Term)
|
||||
import qualified Unison.Codebase.FileCodebase.Term as Term
|
||||
import Unison.Codebase.FileCodebase.Type (Type)
|
||||
import qualified Unison.Codebase.FileCodebase.Type as Type
|
||||
import qualified Unison.Codebase.FileCodebase.Metadata as Metadata
|
||||
import Unison.Codebase.FileCodebase.Patch (Patch (..))
|
||||
import qualified Unison.Codebase.FileCodebase.Patch as Patch
|
||||
import qualified Unison.Codebase.Serialization as S
|
||||
import qualified Unison.Hash as Hash
|
||||
import qualified Unison.Kind as Kind
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Referent (Referent)
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.Type as Type
|
||||
import qualified Unison.Util.Bytes as Bytes
|
||||
import Unison.Util.Star3 ( Star3 )
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import qualified Unison.Util.Relation as Relation
|
||||
import qualified Unison.DataDeclaration as DataDeclaration
|
||||
import Unison.DataDeclaration ( DataDeclaration
|
||||
, EffectDeclaration
|
||||
)
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import Unison.Type (Type)
|
||||
import Unison.Codebase.FileCodebase.TermEdit (TermEdit)
|
||||
import qualified Unison.Codebase.FileCodebase.TermEdit as TermEdit
|
||||
import Unison.Codebase.FileCodebase.TypeEdit (TypeEdit)
|
||||
import qualified Unison.Codebase.FileCodebase.TypeEdit as TypeEdit
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import Unison.Codebase.FileCodebase.DataDeclaration (DataDeclaration, EffectDeclaration)
|
||||
import qualified Unison.Codebase.FileCodebase.DataDeclaration as DataDeclaration
|
||||
import Unison.Hash (Hash)
|
||||
import qualified Unison.Hash as Hash
|
||||
import Unison.Kind (Kind)
|
||||
import qualified Unison.Kind as Kind
|
||||
import Unison.NameSegment (NameSegment (NameSegment))
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
import Unison.Codebase.FileCodebase.Pattern (Pattern, SeqOp)
|
||||
import qualified Unison.Codebase.FileCodebase.Pattern as Pattern
|
||||
import Unison.Symbol (Symbol (..))
|
||||
import Unison.Util.Relation (Relation)
|
||||
import qualified Unison.Util.Relation as Relation
|
||||
import Unison.Util.Star3 (Star3)
|
||||
import qualified Unison.Util.Star3 as Star3
|
||||
import qualified Unison.Var as Var
|
||||
|
||||
-- ABOUT THIS FORMAT:
|
||||
--
|
||||
@ -294,20 +298,6 @@ putFoldable putA as = do
|
||||
putLength (length as)
|
||||
traverse_ putA as
|
||||
|
||||
|
||||
-- putFoldableN
|
||||
-- :: forall f m n a
|
||||
-- . (Traversable f, MonadPut m, Applicative n)
|
||||
-- => f a
|
||||
-- -> (a -> n (m ()))
|
||||
-- -> n (m ())
|
||||
-- putFoldableN as putAn =
|
||||
-- pure (putLength @m (length as)) *> (fmap sequence_ $ traverse putAn as)
|
||||
|
||||
getFolded :: MonadGet m => (b -> a -> b) -> b -> m a -> m b
|
||||
getFolded f z a =
|
||||
foldl' f z <$> getList a
|
||||
|
||||
getList :: MonadGet m => m a -> m [a]
|
||||
getList a = getLength >>= (`replicateM` a)
|
||||
|
||||
@ -570,14 +560,6 @@ getTerm getVar getA = getABT getVar getA go where
|
||||
putPair :: MonadPut m => (a -> m ()) -> (b -> m ()) -> (a,b) -> m ()
|
||||
putPair putA putB (a,b) = putA a *> putB b
|
||||
|
||||
putPair''
|
||||
:: (MonadPut m, Monad n)
|
||||
=> (a -> m ())
|
||||
-> (b -> n (m ()))
|
||||
-> (a, b)
|
||||
-> n (m ())
|
||||
putPair'' putA putBn (a, b) = pure (putA a) *> putBn b
|
||||
|
||||
getPair :: MonadGet m => m a -> m b -> m (a,b)
|
||||
getPair = liftA2 (,)
|
||||
|
||||
@ -669,12 +651,6 @@ putBranchStar putA putN =
|
||||
getBranchStar :: (Ord a, Ord n, MonadGet m) => m a -> m n -> m (Branch.Star a n)
|
||||
getBranchStar getA getN = getStar3 getA getN getMetadataType (getPair getMetadataType getMetadataValue)
|
||||
|
||||
putLink :: MonadPut m => (Hash, mb) -> m ()
|
||||
putLink (h, _) = do
|
||||
-- 0 means local; later we may have remote links with other ids
|
||||
putWord8 0
|
||||
putHash h
|
||||
|
||||
putChar :: MonadPut m => Char -> m ()
|
||||
putChar = serialize . VarInt . fromEnum
|
||||
|
||||
@ -812,15 +788,3 @@ putEdits edits =
|
||||
getEdits :: MonadGet m => m Patch
|
||||
getEdits = Patch <$> getRelation getReference getTermEdit
|
||||
<*> getRelation getReference getTypeEdit
|
||||
|
||||
putBytes :: MonadPut m => Bytes.Bytes -> m ()
|
||||
putBytes = putFoldable putBlock . Bytes.chunks
|
||||
|
||||
putBlock :: MonadPut m => Bytes.View (Block Word8) -> m ()
|
||||
putBlock b = putLength (BA.length b) *> putByteString (BA.convert b)
|
||||
|
||||
getBytes :: MonadGet m => m Bytes.Bytes
|
||||
getBytes = Bytes.fromChunks <$> getList getBlock
|
||||
|
||||
getBlock :: MonadGet m => m (Bytes.View (Block Word8))
|
||||
getBlock = getLength >>= fmap (Bytes.view . BA.convert) . getByteString
|
@ -11,38 +11,39 @@ module Unison.Codebase.FileCodebase.SlimCopyRegenerateIndex (syncToDirectory) wh
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Data.Set as Set
|
||||
import Control.Lens
|
||||
import Control.Monad.State.Strict ( MonadState, evalStateT )
|
||||
import Control.Monad.Writer.Strict ( MonadWriter, execWriterT )
|
||||
import qualified Control.Monad.Writer.Strict as Writer
|
||||
import UnliftIO.Directory ( doesFileExist )
|
||||
import Unison.Codebase ( CodebasePath )
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.Branch ( Branch(..) )
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Control.Lens
|
||||
import Control.Monad.State.Strict (MonadState, evalStateT)
|
||||
import Control.Monad.Writer.Strict (MonadWriter, execWriterT)
|
||||
import qualified Control.Monad.Writer.Strict as Writer
|
||||
import qualified Data.Set as Set
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Codebase (CodebasePath)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.FileCodebase.Branch (Branch (..))
|
||||
import qualified Unison.Codebase.FileCodebase.Branch as Branch
|
||||
import qualified Unison.Codebase.FileCodebase.Branch.Dependencies as BD
|
||||
import qualified Unison.Codebase.Patch as Patch
|
||||
import qualified Unison.Codebase.FileCodebase.DataDeclaration as DD
|
||||
import qualified Unison.Codebase.FileCodebase.LabeledDependency as LD
|
||||
import qualified Unison.Codebase.FileCodebase.Patch as Patch
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import qualified Unison.Codebase.FileCodebase.Referent as Referent
|
||||
import qualified Unison.Codebase.FileCodebase.Serialization.V1 as V1
|
||||
import qualified Unison.Codebase.FileCodebase.Term as Term
|
||||
import qualified Unison.Codebase.FileCodebase.TermEdit as TermEdit
|
||||
import Unison.Codebase.FileCodebase.Type (Type)
|
||||
import qualified Unison.Codebase.FileCodebase.Type as Type
|
||||
import qualified Unison.Codebase.FileCodebase.TypeEdit as TypeEdit
|
||||
import qualified Unison.Codebase.Serialization as S
|
||||
import qualified Unison.Codebase.Serialization.V1 as V1
|
||||
import Unison.Codebase.SyncMode ( SyncMode )
|
||||
import qualified Unison.Codebase.SyncMode as SyncMode
|
||||
import qualified Unison.Codebase.TermEdit as TermEdit
|
||||
import qualified Unison.Codebase.TypeEdit as TypeEdit
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.LabeledDependency as LD
|
||||
import Unison.Reference ( Reference )
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type ( Type )
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.Var ( Var )
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Util.Relation as Relation
|
||||
import Unison.Util.Relation ( Relation )
|
||||
import Unison.Util.Monoid (foldMapM)
|
||||
import U.Util.Timing (time)
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import qualified Unison.Codebase.SyncMode as SyncMode
|
||||
import qualified Unison.Referent' as Referent
|
||||
import Unison.Util.Monoid (foldMapM)
|
||||
import Unison.Util.Relation (Relation)
|
||||
import qualified Unison.Util.Relation as Relation
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.WatchKind as WK
|
||||
import UnliftIO.Directory (doesFileExist)
|
||||
|
||||
import Data.Monoid.Generic
|
||||
import Unison.Codebase.FileCodebase.Common
|
||||
@ -273,9 +274,9 @@ syncToDirectory' getV getA srcPath destPath mode newRoot =
|
||||
Just typ -> do
|
||||
copyFileWithParents (termPath srcPath h) (termPath destPath h)
|
||||
copyFileWithParents (typePath srcPath h) (typePath destPath h)
|
||||
whenM (doesFileExist $ watchPath srcPath UF.TestWatch h) $
|
||||
copyFileWithParents (watchPath srcPath UF.TestWatch h)
|
||||
(watchPath destPath UF.TestWatch h)
|
||||
whenM (doesFileExist $ watchPath srcPath WK.TestWatch h) $
|
||||
copyFileWithParents (watchPath srcPath WK.TestWatch h)
|
||||
(watchPath destPath WK.TestWatch h)
|
||||
let typeDeps' = toList (Type.dependencies typ)
|
||||
let typeForIndexing = Type.removeAllEffectVars typ
|
||||
let typeReference = Type.toReference typeForIndexing
|
||||
|
1120
parser-typechecker/src/Unison/Codebase/FileCodebase/Term.hs
Normal file
1120
parser-typechecker/src/Unison/Codebase/FileCodebase/Term.hs
Normal file
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,42 @@
|
||||
module Unison.Codebase.FileCodebase.TermEdit where
|
||||
|
||||
import Unison.Hashable (Hashable)
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
|
||||
data TermEdit = Replace Reference Typing | Deprecate
|
||||
deriving (Eq, Ord, Show)
|
||||
|
||||
references :: TermEdit -> [Reference]
|
||||
references (Replace r _) = [r]
|
||||
references Deprecate = []
|
||||
|
||||
-- Replacements with the Same type can be automatically propagated.
|
||||
-- Replacements with a Subtype can be automatically propagated but may result in dependents getting more general types, so requires re-inference.
|
||||
-- Replacements of a Different type need to be manually propagated by the programmer.
|
||||
data Typing = Same | Subtype | Different
|
||||
deriving (Eq, Ord, Show)
|
||||
|
||||
instance Hashable Typing where
|
||||
tokens Same = [H.Tag 0]
|
||||
tokens Subtype = [H.Tag 1]
|
||||
tokens Different = [H.Tag 2]
|
||||
|
||||
instance Hashable TermEdit where
|
||||
tokens (Replace r t) = [H.Tag 0] ++ H.tokens r ++ H.tokens t
|
||||
tokens Deprecate = [H.Tag 1]
|
||||
|
||||
toReference :: TermEdit -> Maybe Reference
|
||||
toReference (Replace r _) = Just r
|
||||
toReference Deprecate = Nothing
|
||||
|
||||
isTypePreserving :: TermEdit -> Bool
|
||||
isTypePreserving e = case e of
|
||||
Replace _ Same -> True
|
||||
Replace _ Subtype -> True
|
||||
_ -> False
|
||||
|
||||
isSame :: TermEdit -> Bool
|
||||
isSame e = case e of
|
||||
Replace _ Same -> True
|
||||
_ -> False
|
709
parser-typechecker/src/Unison/Codebase/FileCodebase/Type.hs
Normal file
709
parser-typechecker/src/Unison/Codebase/FileCodebase/Type.hs
Normal file
@ -0,0 +1,709 @@
|
||||
{-# LANGUAGE DeriveGeneric #-}
|
||||
{-# LANGUAGE DeriveTraversable #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE Rank2Types #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.FileCodebase.Type where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Control.Monad.Writer.Strict as Writer
|
||||
import Data.Functor.Identity (runIdentity)
|
||||
import Data.Monoid (Any(..))
|
||||
import Data.List.Extra (nubOrd)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import Prelude.Extras (Eq1(..),Show1(..),Ord1(..))
|
||||
import qualified Unison.ABT as ABT
|
||||
import Unison.Hashable (Hashable1)
|
||||
import qualified Unison.Hashable as Hashable
|
||||
import qualified Unison.Kind as K
|
||||
import Unison.Codebase.FileCodebase.Reference (Reference)
|
||||
import qualified Unison.Codebase.FileCodebase.Reference as Reference
|
||||
import qualified Unison.Codebase.FileCodebase.Reference.Util as ReferenceUtil
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.Settings as Settings
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Util.List as List
|
||||
|
||||
-- | Base functor for types in the Unison language
|
||||
data F a
|
||||
= Ref Reference
|
||||
| Arrow a a
|
||||
| Ann a K.Kind
|
||||
| App a a
|
||||
| Effect a a
|
||||
| Effects [a]
|
||||
| Forall a
|
||||
| IntroOuter a -- binder like ∀, used to introduce variables that are
|
||||
-- bound by outer type signatures, to support scoped type
|
||||
-- variables
|
||||
deriving (Foldable,Functor,Generic,Generic1,Eq,Ord,Traversable)
|
||||
|
||||
instance Eq1 F where (==#) = (==)
|
||||
instance Ord1 F where compare1 = compare
|
||||
instance Show1 F where showsPrec1 = showsPrec
|
||||
|
||||
-- | Types are represented as ABTs over the base functor F, with variables in `v`
|
||||
type Type v a = ABT.Term F v a
|
||||
|
||||
wrapV :: Ord v => Type v a -> Type (ABT.V v) a
|
||||
wrapV = ABT.vmap ABT.Bound
|
||||
|
||||
freeVars :: Type v a -> Set v
|
||||
freeVars = ABT.freeVars
|
||||
|
||||
bindExternal
|
||||
:: ABT.Var v => [(v, Reference)] -> Type v a -> Type v a
|
||||
bindExternal bs = ABT.substsInheritAnnotation [ (v, ref () r) | (v, r) <- bs ]
|
||||
|
||||
bindNames
|
||||
:: Var v
|
||||
=> Set v
|
||||
-> Map Name.Name Reference
|
||||
-> Type v a
|
||||
-> Names.ResolutionResult v a (Type v a)
|
||||
bindNames keepFree ns t = let
|
||||
fvs = ABT.freeVarOccurrences keepFree t
|
||||
rs = [(v, a, Map.lookup (Name.fromVar v) ns) | (v, a) <- fvs]
|
||||
ok (v, _a, Just r) = pure (v, r)
|
||||
ok (v, a, Nothing) = Left (pure (Names.TypeResolutionFailure v a mempty))
|
||||
in List.validate ok rs <&> \es -> bindExternal es t
|
||||
|
||||
newtype Monotype v a = Monotype { getPolytype :: Type v a } deriving Eq
|
||||
|
||||
instance (Show v) => Show (Monotype v a) where
|
||||
show = show . getPolytype
|
||||
|
||||
-- Smart constructor which checks if a `Type` has no `Forall` quantifiers.
|
||||
monotype :: ABT.Var v => Type v a -> Maybe (Monotype v a)
|
||||
monotype t = Monotype <$> ABT.visit isMono t where
|
||||
isMono (Forall' _) = Just Nothing
|
||||
isMono _ = Nothing
|
||||
|
||||
arity :: Type v a -> Int
|
||||
arity (ForallNamed' _ body) = arity body
|
||||
arity (Arrow' _ o) = 1 + arity o
|
||||
arity (Ann' a _) = arity a
|
||||
arity _ = 0
|
||||
|
||||
-- some smart patterns
|
||||
pattern Ref' r <- ABT.Tm' (Ref r)
|
||||
pattern Arrow' i o <- ABT.Tm' (Arrow i o)
|
||||
pattern Arrow'' i es o <- Arrow' i (Effect'' es o)
|
||||
pattern Arrows' spine <- (unArrows -> Just spine)
|
||||
pattern EffectfulArrows' fst rest <- (unEffectfulArrows -> Just (fst, rest))
|
||||
pattern Ann' t k <- ABT.Tm' (Ann t k)
|
||||
pattern App' f x <- ABT.Tm' (App f x)
|
||||
pattern Apps' f args <- (unApps -> Just (f, args))
|
||||
pattern Pure' t <- (unPure -> Just t)
|
||||
pattern Effects' es <- ABT.Tm' (Effects es)
|
||||
-- Effect1' must match at least one effect
|
||||
pattern Effect1' e t <- ABT.Tm' (Effect e t)
|
||||
pattern Effect' es t <- (unEffects1 -> Just (es, t))
|
||||
pattern Effect'' es t <- (unEffect0 -> (es, t))
|
||||
-- Effect0' may match zero effects
|
||||
pattern Effect0' es t <- (unEffect0 -> (es, t))
|
||||
pattern Forall' subst <- ABT.Tm' (Forall (ABT.Abs' subst))
|
||||
pattern IntroOuter' subst <- ABT.Tm' (IntroOuter (ABT.Abs' subst))
|
||||
pattern IntroOuterNamed' v body <- ABT.Tm' (IntroOuter (ABT.out -> ABT.Abs v body))
|
||||
pattern ForallsNamed' vs body <- (unForalls -> Just (vs, body))
|
||||
pattern ForallNamed' v body <- ABT.Tm' (Forall (ABT.out -> ABT.Abs v body))
|
||||
pattern Var' v <- ABT.Var' v
|
||||
pattern Cycle' xs t <- ABT.Cycle' xs t
|
||||
pattern Abs' subst <- ABT.Abs' subst
|
||||
|
||||
unPure :: Ord v => Type v a -> Maybe (Type v a)
|
||||
unPure (Effect'' [] t) = Just t
|
||||
unPure (Effect'' _ _) = Nothing
|
||||
unPure t = Just t
|
||||
|
||||
unArrows :: Type v a -> Maybe [Type v a]
|
||||
unArrows t =
|
||||
case go t of [_] -> Nothing; l -> Just l
|
||||
where go (Arrow' i o) = i : go o
|
||||
go o = [o]
|
||||
|
||||
unEffectfulArrows
|
||||
:: Type v a -> Maybe (Type v a, [(Maybe [Type v a], Type v a)])
|
||||
unEffectfulArrows t = case t of
|
||||
Arrow' i o -> Just (i, go o)
|
||||
_ -> Nothing
|
||||
where
|
||||
go (Effect1' (Effects' es) (Arrow' i o)) =
|
||||
(Just $ es >>= flattenEffects, i) : go o
|
||||
go (Effect1' (Effects' es) t) = [(Just $ es >>= flattenEffects, t)]
|
||||
go (Arrow' i o) = (Nothing, i) : go o
|
||||
go t = [(Nothing, t)]
|
||||
|
||||
unApps :: Type v a -> Maybe (Type v a, [Type v a])
|
||||
unApps t = case go t [] of
|
||||
[] -> Nothing
|
||||
[ _ ] -> Nothing
|
||||
f : args -> Just (f, args)
|
||||
where
|
||||
go (App' i o) acc = go i (o : acc)
|
||||
go fn args = fn : args
|
||||
|
||||
unIntroOuters :: Type v a -> Maybe ([v], Type v a)
|
||||
unIntroOuters t = go t []
|
||||
where go (IntroOuterNamed' v body) vs = go body (v:vs)
|
||||
go _body [] = Nothing
|
||||
go body vs = Just (reverse vs, body)
|
||||
|
||||
-- Most code doesn't care about `introOuter` binders and is fine dealing with the
|
||||
-- these outer variable references as free variables. This function strips out
|
||||
-- one or more `introOuter` binders, so `outer a b . (a, b)` becomes `(a, b)`.
|
||||
stripIntroOuters :: Type v a -> Type v a
|
||||
stripIntroOuters t = case unIntroOuters t of
|
||||
Just (_, t) -> t
|
||||
Nothing -> t
|
||||
|
||||
unForalls :: Type v a -> Maybe ([v], Type v a)
|
||||
unForalls t = go t []
|
||||
where go (ForallNamed' v body) vs = go body (v:vs)
|
||||
go _body [] = Nothing
|
||||
go body vs = Just(reverse vs, body)
|
||||
|
||||
unEffect0 :: Ord v => Type v a -> ([Type v a], Type v a)
|
||||
unEffect0 (Effect1' e a) = (flattenEffects e, a)
|
||||
unEffect0 t = ([], t)
|
||||
|
||||
unEffects1 :: Ord v => Type v a -> Maybe ([Type v a], Type v a)
|
||||
unEffects1 (Effect1' (Effects' es) a) = Just (es, a)
|
||||
unEffects1 _ = Nothing
|
||||
|
||||
-- | True if the given type is a function, possibly quantified
|
||||
isArrow :: ABT.Var v => Type v a -> Bool
|
||||
isArrow (ForallNamed' _ t) = isArrow t
|
||||
isArrow (Arrow' _ _) = True
|
||||
isArrow _ = False
|
||||
|
||||
-- some smart constructors
|
||||
|
||||
ref :: Ord v => a -> Reference -> Type v a
|
||||
ref a = ABT.tm' a . Ref
|
||||
|
||||
refId :: Ord v => a -> Reference.Id -> Type v a
|
||||
refId a = ref a . Reference.DerivedId
|
||||
|
||||
termLink :: Ord v => a -> Type v a
|
||||
termLink a = ABT.tm' a . Ref $ termLinkRef
|
||||
|
||||
typeLink :: Ord v => a -> Type v a
|
||||
typeLink a = ABT.tm' a . Ref $ typeLinkRef
|
||||
|
||||
derivedBase32Hex :: Ord v => Reference -> a -> Type v a
|
||||
derivedBase32Hex r a = ref a r
|
||||
|
||||
intRef, natRef, floatRef, booleanRef, textRef, charRef, listRef, bytesRef, effectRef, termLinkRef, typeLinkRef :: Reference
|
||||
intRef = Reference.Builtin "Int"
|
||||
natRef = Reference.Builtin "Nat"
|
||||
floatRef = Reference.Builtin "Float"
|
||||
booleanRef = Reference.Builtin "Boolean"
|
||||
textRef = Reference.Builtin "Text"
|
||||
charRef = Reference.Builtin "Char"
|
||||
listRef = Reference.Builtin "Sequence"
|
||||
bytesRef = Reference.Builtin "Bytes"
|
||||
effectRef = Reference.Builtin "Effect"
|
||||
termLinkRef = Reference.Builtin "Link.Term"
|
||||
typeLinkRef = Reference.Builtin "Link.Type"
|
||||
|
||||
builtinIORef, fileHandleRef, filePathRef, threadIdRef, socketRef :: Reference
|
||||
builtinIORef = Reference.Builtin "IO"
|
||||
fileHandleRef = Reference.Builtin "Handle"
|
||||
filePathRef = Reference.Builtin "FilePath"
|
||||
threadIdRef = Reference.Builtin "ThreadId"
|
||||
socketRef = Reference.Builtin "Socket"
|
||||
|
||||
mvarRef, tvarRef :: Reference
|
||||
mvarRef = Reference.Builtin "MVar"
|
||||
tvarRef = Reference.Builtin "TVar"
|
||||
|
||||
tlsRef :: Reference
|
||||
tlsRef = Reference.Builtin "Tls"
|
||||
|
||||
stmRef :: Reference
|
||||
stmRef = Reference.Builtin "STM"
|
||||
|
||||
tlsClientConfigRef :: Reference
|
||||
tlsClientConfigRef = Reference.Builtin "Tls.ClientConfig"
|
||||
|
||||
tlsServerConfigRef :: Reference
|
||||
tlsServerConfigRef = Reference.Builtin "Tls.ServerConfig"
|
||||
|
||||
tlsSignedCertRef :: Reference
|
||||
tlsSignedCertRef = Reference.Builtin "Tls.SignedCert"
|
||||
|
||||
tlsPrivateKeyRef :: Reference
|
||||
tlsPrivateKeyRef = Reference.Builtin "Tls.PrivateKey"
|
||||
|
||||
tlsCipherRef :: Reference
|
||||
tlsCipherRef = Reference.Builtin "Tls.Cipher"
|
||||
|
||||
tlsVersionRef :: Reference
|
||||
tlsVersionRef = Reference.Builtin "Tls.Version"
|
||||
|
||||
hashAlgorithmRef :: Reference
|
||||
hashAlgorithmRef = Reference.Builtin "crypto.HashAlgorithm"
|
||||
|
||||
codeRef, valueRef :: Reference
|
||||
codeRef = Reference.Builtin "Code"
|
||||
valueRef = Reference.Builtin "Value"
|
||||
|
||||
anyRef :: Reference
|
||||
anyRef = Reference.Builtin "Any"
|
||||
|
||||
any :: Ord v => a -> Type v a
|
||||
any a = ref a anyRef
|
||||
|
||||
builtin :: Ord v => a -> Text -> Type v a
|
||||
builtin a = ref a . Reference.Builtin
|
||||
|
||||
int :: Ord v => a -> Type v a
|
||||
int a = ref a intRef
|
||||
|
||||
nat :: Ord v => a -> Type v a
|
||||
nat a = ref a natRef
|
||||
|
||||
float :: Ord v => a -> Type v a
|
||||
float a = ref a floatRef
|
||||
|
||||
boolean :: Ord v => a -> Type v a
|
||||
boolean a = ref a booleanRef
|
||||
|
||||
text :: Ord v => a -> Type v a
|
||||
text a = ref a textRef
|
||||
|
||||
char :: Ord v => a -> Type v a
|
||||
char a = ref a charRef
|
||||
|
||||
fileHandle :: Ord v => a -> Type v a
|
||||
fileHandle a = ref a fileHandleRef
|
||||
|
||||
threadId :: Ord v => a -> Type v a
|
||||
threadId a = ref a threadIdRef
|
||||
|
||||
builtinIO :: Ord v => a -> Type v a
|
||||
builtinIO a = ref a builtinIORef
|
||||
|
||||
socket :: Ord v => a -> Type v a
|
||||
socket a = ref a socketRef
|
||||
|
||||
list :: Ord v => a -> Type v a
|
||||
list a = ref a listRef
|
||||
|
||||
bytes :: Ord v => a -> Type v a
|
||||
bytes a = ref a bytesRef
|
||||
|
||||
effectType :: Ord v => a -> Type v a
|
||||
effectType a = ref a $ effectRef
|
||||
|
||||
code, value :: Ord v => a -> Type v a
|
||||
code a = ref a codeRef
|
||||
value a = ref a valueRef
|
||||
|
||||
app :: Ord v => a -> Type v a -> Type v a -> Type v a
|
||||
app a f arg = ABT.tm' a (App f arg)
|
||||
|
||||
-- `f x y z` means `((f x) y) z` and the annotation paired with `y` is the one
|
||||
-- meant for `app (f x) y`
|
||||
apps :: Ord v => Type v a -> [(a, Type v a)] -> Type v a
|
||||
apps = foldl' go where go f (a, t) = app a f t
|
||||
|
||||
app' :: (Ord v, Semigroup a) => Type v a -> Type v a -> Type v a
|
||||
app' f arg = app (ABT.annotation f <> ABT.annotation arg) f arg
|
||||
|
||||
apps' :: (Semigroup a, Ord v) => Type v a -> [Type v a] -> Type v a
|
||||
apps' = foldl app'
|
||||
|
||||
arrow :: Ord v => a -> Type v a -> Type v a -> Type v a
|
||||
arrow a i o = ABT.tm' a (Arrow i o)
|
||||
|
||||
arrow' :: (Semigroup a, Ord v) => Type v a -> Type v a -> Type v a
|
||||
arrow' i o = arrow (ABT.annotation i <> ABT.annotation o) i o
|
||||
|
||||
ann :: Ord v => a -> Type v a -> K.Kind -> Type v a
|
||||
ann a e t = ABT.tm' a (Ann e t)
|
||||
|
||||
forall :: Ord v => a -> v -> Type v a -> Type v a
|
||||
forall a v body = ABT.tm' a (Forall (ABT.abs' a v body))
|
||||
|
||||
introOuter :: Ord v => a -> v -> Type v a -> Type v a
|
||||
introOuter a v body = ABT.tm' a (IntroOuter (ABT.abs' a v body))
|
||||
|
||||
iff :: Var v => Type v ()
|
||||
iff = forall () aa $ arrows (f <$> [boolean(), a, a]) a
|
||||
where aa = Var.named "a"
|
||||
a = var () aa
|
||||
f x = ((), x)
|
||||
|
||||
iff' :: Var v => a -> Type v a
|
||||
iff' loc = forall loc aa $ arrows (f <$> [boolean loc, a, a]) a
|
||||
where aa = Var.named "a"
|
||||
a = var loc aa
|
||||
f x = (loc, x)
|
||||
|
||||
iff2 :: Var v => a -> Type v a
|
||||
iff2 loc = forall loc aa $ arrows (f <$> [a, a]) a
|
||||
where aa = Var.named "a"
|
||||
a = var loc aa
|
||||
f x = (loc, x)
|
||||
|
||||
andor :: Ord v => Type v ()
|
||||
andor = arrows (f <$> [boolean(), boolean()]) $ boolean()
|
||||
where f x = ((), x)
|
||||
|
||||
andor' :: Ord v => a -> Type v a
|
||||
andor' a = arrows (f <$> [boolean a, boolean a]) $ boolean a
|
||||
where f x = (a, x)
|
||||
|
||||
var :: Ord v => a -> v -> Type v a
|
||||
var = ABT.annotatedVar
|
||||
|
||||
v' :: Var v => Text -> Type v ()
|
||||
v' s = ABT.var (Var.named s)
|
||||
|
||||
-- Like `v'`, but creates an annotated variable given an annotation
|
||||
av' :: Var v => a -> Text -> Type v a
|
||||
av' a s = ABT.annotatedVar a (Var.named s)
|
||||
|
||||
forall' :: Var v => a -> [Text] -> Type v a -> Type v a
|
||||
forall' a vs body = foldr (forall a) body (Var.named <$> vs)
|
||||
|
||||
foralls :: Ord v => a -> [v] -> Type v a -> Type v a
|
||||
foralls a vs body = foldr (forall a) body vs
|
||||
|
||||
-- Note: `a -> b -> c` parses as `a -> (b -> c)`
|
||||
-- the annotation associated with `b` will be the annotation for the `b -> c`
|
||||
-- node
|
||||
arrows :: Ord v => [(a, Type v a)] -> Type v a -> Type v a
|
||||
arrows ts result = foldr go result ts where
|
||||
go = uncurry arrow
|
||||
|
||||
-- The types of effectful computations
|
||||
effect :: Ord v => a -> [Type v a] -> Type v a -> Type v a
|
||||
effect a es (Effect1' fs t) =
|
||||
let es' = (es >>= flattenEffects) ++ flattenEffects fs
|
||||
in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t)
|
||||
effect a es t = ABT.tm' a (Effect (ABT.tm' a (Effects es)) t)
|
||||
|
||||
effects :: Ord v => a -> [Type v a] -> Type v a
|
||||
effects a es = ABT.tm' a (Effects $ es >>= flattenEffects)
|
||||
|
||||
effect1 :: Ord v => a -> Type v a -> Type v a -> Type v a
|
||||
effect1 a es (Effect1' fs t) =
|
||||
let es' = flattenEffects es ++ flattenEffects fs
|
||||
in ABT.tm' a (Effect (ABT.tm' a (Effects es')) t)
|
||||
effect1 a es t = ABT.tm' a (Effect es t)
|
||||
|
||||
flattenEffects :: Type v a -> [Type v a]
|
||||
flattenEffects (Effects' es) = es >>= flattenEffects
|
||||
flattenEffects es = [es]
|
||||
|
||||
-- The types of first-class effect values
|
||||
-- which get deconstructed in effect handlers.
|
||||
effectV :: Ord v => a -> (a, Type v a) -> (a, Type v a) -> Type v a
|
||||
effectV builtinA e t = apps (builtin builtinA "Effect") [e, t]
|
||||
|
||||
-- Strips effects from a type. E.g. `{e} a` becomes `a`.
|
||||
stripEffect :: Ord v => Type v a -> ([Type v a], Type v a)
|
||||
stripEffect (Effect' e t) = case stripEffect t of (ei, t) -> (e ++ ei, t)
|
||||
stripEffect t = ([], t)
|
||||
|
||||
-- The type of the flipped function application operator:
|
||||
-- `(a -> (a -> b) -> b)`
|
||||
flipApply :: Var v => Type v () -> Type v ()
|
||||
flipApply t = forall() b $ arrow() (arrow() t (var() b)) (var() b)
|
||||
where b = ABT.fresh t (Var.named "b")
|
||||
|
||||
generalize' :: Var v => Var.Type -> Type v a -> Type v a
|
||||
generalize' k t = generalize vsk t where
|
||||
vsk = [ v | v <- Set.toList (freeVars t), Var.typeOf v == k ]
|
||||
|
||||
-- | Bind the given variables with an outer `forall`, if they are used in `t`.
|
||||
generalize :: Ord v => [v] -> Type v a -> Type v a
|
||||
generalize vs t = foldr f t vs
|
||||
where
|
||||
f v t =
|
||||
if Set.member v (ABT.freeVars t) then forall (ABT.annotation t) v t else t
|
||||
|
||||
unforall :: Type v a -> Type v a
|
||||
unforall (ForallsNamed' _ t) = t
|
||||
unforall t = t
|
||||
|
||||
unforall' :: Type v a -> ([v], Type v a)
|
||||
unforall' (ForallsNamed' vs t) = (vs, t)
|
||||
unforall' t = ([], t)
|
||||
|
||||
dependencies :: Ord v => Type v a -> Set Reference
|
||||
dependencies t = Set.fromList . Writer.execWriter $ ABT.visit' f t
|
||||
where f t@(Ref r) = Writer.tell [r] $> t
|
||||
f t = pure t
|
||||
|
||||
updateDependencies :: Ord v => Map Reference Reference -> Type v a -> Type v a
|
||||
updateDependencies typeUpdates = ABT.rebuildUp go
|
||||
where
|
||||
go (Ref r) = Ref (Map.findWithDefault r r typeUpdates)
|
||||
go f = f
|
||||
|
||||
usesEffects :: Ord v => Type v a -> Bool
|
||||
usesEffects t = getAny . getConst $ ABT.visit go t where
|
||||
go (Effect1' _ _) = Just (Const (Any True))
|
||||
go _ = Nothing
|
||||
|
||||
-- Returns free effect variables in the given type, for instance, in:
|
||||
--
|
||||
-- ∀ e3 . a ->{e,e2} b ->{e3} c
|
||||
--
|
||||
-- This function would return the set {e, e2}, but not `e3` since `e3`
|
||||
-- is bound by the enclosing forall.
|
||||
freeEffectVars :: Ord v => Type v a -> Set v
|
||||
freeEffectVars t =
|
||||
Set.fromList . join . runIdentity $
|
||||
ABT.foreachSubterm go (snd <$> ABT.annotateBound t)
|
||||
where
|
||||
go t@(Effects' es) =
|
||||
let frees = Set.fromList [ v | Var' v <- es >>= flattenEffects ]
|
||||
in pure . Set.toList $ frees `Set.difference` ABT.annotation t
|
||||
go t@(Effect1' e _) =
|
||||
let frees = Set.fromList [ v | Var' v <- flattenEffects e ]
|
||||
in pure . Set.toList $ frees `Set.difference` ABT.annotation t
|
||||
go _ = pure []
|
||||
|
||||
-- Converts all unadorned arrows in a type to have fresh
|
||||
-- existential ability requirements. For example:
|
||||
--
|
||||
-- (a -> b) -> [a] -> [b]
|
||||
--
|
||||
-- Becomes
|
||||
--
|
||||
-- (a ->{e1} b) ->{e2} [a] ->{e3} [b]
|
||||
existentializeArrows :: (Ord v, Monad m) => m v -> Type v a -> m (Type v a)
|
||||
existentializeArrows newVar t = ABT.visit go t
|
||||
where
|
||||
go t@(Arrow' a b) = case b of
|
||||
-- If an arrow already has attached abilities,
|
||||
-- leave it alone. Ex: `a ->{e} b` is kept as is.
|
||||
Effect1' _ _ -> Just $ do
|
||||
a <- existentializeArrows newVar a
|
||||
b <- existentializeArrows newVar b
|
||||
pure $ arrow (ABT.annotation t) a b
|
||||
-- For unadorned arrows, make up a fresh variable.
|
||||
-- So `a -> b` becomes `a ->{e} b`, using the
|
||||
-- `newVar` variable generator.
|
||||
_ -> Just $ do
|
||||
e <- newVar
|
||||
a <- existentializeArrows newVar a
|
||||
b <- existentializeArrows newVar b
|
||||
let ann = ABT.annotation t
|
||||
pure $ arrow ann a (effect ann [var ann e] b)
|
||||
go _ = Nothing
|
||||
|
||||
purifyArrows :: (Ord v) => Type v a -> Type v a
|
||||
purifyArrows = ABT.visitPure go
|
||||
where
|
||||
go t@(Arrow' a b) = case b of
|
||||
Effect1' _ _ -> Nothing
|
||||
_ -> Just $ arrow ann a (effect ann [] b)
|
||||
where ann = ABT.annotation t
|
||||
go _ = Nothing
|
||||
|
||||
-- | Replace each of the given free effect variables with the empty ability
-- set, then flatten away the resulting empty effect annotations. An
-- explicitly written empty `{}` is deliberately preserved.
removeEffectVars :: ABT.Var v => Set v -> Type v a -> Type v a
removeEffectVars removals t = ABT.visitPure dropEmpty substituted
  where
    emptyEffects = effects () []
    substituted =
      ABT.substsInheritAnnotation [(v, emptyEffects) | v <- Set.toList removals] t
    -- leave explicitly empty `{}` alone
    dropEmpty (Effect1' (Effects' []) body) = Just (ABT.visitPure dropEmpty body)
    dropEmpty t'@(Effect1' e body) = case flattenEffects e of
      [] -> Just (ABT.visitPure dropEmpty body)
      es -> Just (effect (ABT.annotation t') es (ABT.visitPure dropEmpty body))
    dropEmpty t'@(Effects' es) =
      Just (effects (ABT.annotation t') (es >>= flattenEffects))
    dropEmpty _ = Nothing
|
||||
|
||||
-- | Remove every effect variable from the type.
-- Used for type-based search: this transformation is applied to both the
-- indexed type and the query type, so the user can supply `a -> b` that will
-- match `a ->{e} b` (but not `a ->{IO} b`).
removeAllEffectVars :: ABT.Var v => Type v a -> Type v a
removeAllEffectVars t = generalize bound (removeEffectVars effectVars body)
  where
    (bound, body) = unforall' t
    effectVars = foldMap varsOf (ABT.subterms t)
    varsOf (Effects' vs) = Set.fromList [v | Var' v <- vs]
    varsOf (Effect1' (Var' v) _) = Set.singleton v
    varsOf _ = mempty
|
||||
|
||||
-- | Drop effect variables that cannot actually contribute any abilities.
removePureEffects :: ABT.Var v => Type v a -> Type v a
removePureEffects t
  | not Settings.removePureEffects = t
  | otherwise = generalize bound (removeEffectVars pureVars body)
  where
    (bound, body) = unforall' t
    candidates = freeEffectVars body `Set.difference` ABT.freeVars t
    -- If an effect variable is mentioned only once, it is on a single
    -- arrow `a ->{e} b`. Generalizing this to `∀ e . a ->{e} b` gives us
    -- the pure arrow `a -> b`.
    pureVars = Set.filter (\v -> ABT.occurrences v body <= 1) candidates
|
||||
|
||||
-- | Apply @f@ to the result (rightmost) position of a function type,
-- descending under foralls, arrows, and abstractions to reach it.
editFunctionResult
  :: forall v a
   . Ord v
  => (Type v a -> Type v a)
  -> Type v a
  -> Type v a
editFunctionResult f = go
  where
    go :: Type v a -> Type v a
    go (ABT.Term s a body) = case body of
      ABT.Tm (Forall r)  -> rebuild (ABT.Tm . Forall) (go r)
      ABT.Tm (Arrow i o) -> rebuild (ABT.Tm . Arrow i) (go o)
      ABT.Abs v r        -> rebuild (ABT.Abs v) (go r)
      _                  -> f (ABT.Term s a body)
      where
        -- Rewrap an edited subterm, refreshing the cached free-variable set.
        rebuild k x = ABT.Term (s <> freeVars x) a (k x)
|
||||
|
||||
-- | The result (rightmost) type of a function type, or Nothing if the
-- type contains no arrows at all.
functionResult :: Type v a -> Maybe (Type v a)
functionResult = walk False
  where
    walk sawArrow (ForallNamed' _ body) = walk sawArrow body
    walk _ (Arrow' _i o) = walk True o
    walk sawArrow ty
      | sawArrow = Just ty
      | otherwise = Nothing
|
||||
|
||||
|
||||
-- | Bind all free variables (not in `except`) that start with a lowercase
-- letter and are unqualified with an outer `forall`.
-- `a -> a` becomes `∀ a . a -> a`
-- `B -> B` becomes `B -> B` (not changed)
-- `.foo -> .foo` becomes `.foo -> .foo` (not changed)
-- `.foo.bar -> blarrg.woot` becomes `.foo.bar -> blarrg.woot` (unchanged)
generalizeLowercase :: Var v => Set v -> Type v a -> Type v a
generalizeLowercase except t = foldr (forall ann) t candidates
  where
    ann = ABT.annotation t
    candidates =
      filter
        Var.universallyQuantifyIfFree
        (Set.toList (ABT.freeVars t `Set.difference` except))
|
||||
|
||||
-- | Bind every free variable of @t@ that appears in @allowed@ with an
-- `introOuter`.
freeVarsToOuters :: Ord v => Set v -> Type v a -> Type v a
freeVarsToOuters allowed t = foldr (introOuter (ABT.annotation t)) t candidates
  where
    candidates = Set.toList (ABT.freeVars t `Set.intersection` allowed)
|
||||
|
||||
-- | Remove all variable shadowing from the types and reduce fresh ids to
-- the minimum possible to avoid ambiguity. Useful when showing two
-- different types together.
cleanupVars :: Var v => [Type v a] -> [Type v a]
cleanupVars ts
  | not Settings.cleanupTypes = ts
  | otherwise = map (cleanupVars1' (cleanupVarsMap ts)) ts
|
||||
|
||||
-- | Compute a variable replacement map from a collection of types, which
-- can be passed to `cleanupVars1'`. This is used to clean up variable ids
-- for multiple related types, like when reporting a type error.
cleanupVarsMap :: Var v => [Type v a] -> Map.Map v v
cleanupVarsMap ts = Map.fromList replacements
  where
    -- Group every variable occurrence by its reset (id-stripped) name.
    byName =
      foldl'
        (\m v -> Map.insertWith (++) (Var.name (Var.reset v)) [v] m)
        Map.empty
        (ts >>= ABT.allVars)
    -- Within each name group, renumber distinct variables from 0.
    replacements =
      [ (v, Var.freshenId i v)
      | (_, vs) <- Map.toList byName
      , (v, i) <- nubOrd vs `zip` [0 ..]
      ]
|
||||
|
||||
-- | Apply a variable-replacement map (see 'cleanupVarsMap') to a type.
cleanupVars1' :: Var v => Map.Map v v -> Type v a -> Type v a
cleanupVars1' replacements t = ABT.changeVars replacements t
|
||||
|
||||
-- | Remove all variable shadowing from a single type and reduce fresh ids
-- to the minimum possible to avoid ambiguity.
cleanupVars1 :: Var v => Type v a -> Type v a
cleanupVars1 t
  | not Settings.cleanupTypes = t
  | otherwise = cleaned
  where
    -- `cleanupVars` maps each input type to one output, so this is total.
    [cleaned] = cleanupVars [t]
|
||||
|
||||
-- | Deduplicate and normalize the order of ability lists. An explicitly
-- written empty `{}` is deliberately left alone.
cleanupAbilityLists :: Var v => Type v a -> Type v a
cleanupAbilityLists = ABT.visitPure step
  where
    -- leave explicitly empty `{}` alone
    step (Effect1' (Effects' []) _v) = Nothing
    step t@(Effect1' e v) =
      -- Round-tripping through a Set sorts and dedupes the abilities.
      case Set.toList (Set.fromList (flattenEffects e)) of
        [] -> Just (ABT.visitPure step v)
        es -> Just (effect (ABT.annotation t) es (ABT.visitPure step v))
    step _ = Nothing
|
||||
|
||||
-- | 'cleanupAbilityLists' on each type, then 'cleanupVars' across the group.
cleanups :: Var v => [Type v a] -> [Type v a]
cleanups = cleanupVars . map cleanupAbilityLists
|
||||
|
||||
-- | Normalize ability lists and variable ids in a single type.
cleanup :: Var v => Type v a -> Type v a
cleanup t
  | not Settings.cleanupTypes = t
  | otherwise = cleanupVars1 (cleanupAbilityLists t)
|
||||
|
||||
-- | The 'Reference' for a type: a direct `Ref'` yields its reference;
-- anything else is hashed. As a bit of normalization, outer type
-- parameters that are unused in the body are dropped first, so they
-- aren't part of the hash.
toReference :: (ABT.Var v, Show v) => Type v a -> Reference
toReference ty = case ty of
  Ref' r -> r
  ForallNamed' v body
    | not (Set.member v (ABT.freeVars body)) -> toReference body
  _ -> Reference.Derived (ABT.hash ty) 0 1
|
||||
|
||||
-- | The references of every subterm of @ty@, with each subterm generalized
-- (over its own free variables as well as the outer foralls) before hashing.
toReferenceMentions :: (ABT.Var v, Show v) => Type v a -> Set Reference
toReferenceMentions ty =
  Set.fromList (map (toReference . gen) (ABT.subterms ty))
  where
    (vs, _) = unforall' ty
    gen sub = generalize (Set.toList (freeVars sub)) (generalize vs sub)
|
||||
|
||||
-- | Hash a (possibly mutually recursive) component of type definitions,
-- assigning each a 'Reference.Id' alongside its type.
hashComponents
  :: Var v => Map v (Type v a) -> Map v (Reference.Id, Type v a)
hashComponents types = ReferenceUtil.hashComponents (refId ()) types
|
||||
|
||||
instance Hashable1 F where
  hash1 hashCycle hash e =
    -- Start each layer with a leading `0` byte, to avoid collisions with
    -- terms, which start each layer with a leading `1`. See `Hashable1 Term.F`.
    Hashable.accumulate (Hashable.Tag 0 : body)
    where
      sub = Hashable.Hashed . hash
      body = case e of
        Ref r -> [Hashable.Tag 0, Hashable.accumulateToken r]
        Arrow a b -> [Hashable.Tag 1, sub a, sub b]
        App a b -> [Hashable.Tag 2, sub a, sub b]
        Ann a k -> [Hashable.Tag 3, sub a, Hashable.accumulateToken k]
        -- Ability lists hash as a cycle so that the outer list is
        -- order-insensitive while nested occurrences stay ordered. Example:
        -- a) {Remote, Abort} (() -> {Remote} ()) should hash the same as
        -- b) {Abort, Remote} (() -> {Remote} ()) but should hash differently from
        -- c) {Remote, Abort} (() -> {Abort} ())
        Effects es ->
          let (hs, _) = hashCycle es
          in Hashable.Tag 4 : map Hashable.Hashed hs
        Effect e' t -> [Hashable.Tag 5, sub e', sub t]
        Forall a -> [Hashable.Tag 6, sub a]
        IntroOuter a -> [Hashable.Tag 7, sub a]
||||
|
||||
instance Show a => Show (F a) where
  -- Renders one layer of type structure; subterms are shown via their
  -- own Show instances.
  showsPrec p ty = case ty of
    Ref r -> shows r
    Arrow i o ->
      showParen (p > 0) $
        showsPrec (p + 1) i . showString " -> " . showsPrec p o
    Ann t k ->
      showParen (p > 1) $ shows t . showString ":" . shows k
    App f x ->
      showParen (p > 9) $ showsPrec 9 f . showString " " . showsPrec 10 x
    Effects es ->
      showParen (p > 0) $ showString "{" . shows es . showString "}"
    Effect e t ->
      showParen (p > 0) . showParen True $
        shows e . showString " " . showsPrec p t
    Forall body
      | p == 0 -> showsPrec p body
      | otherwise -> showParen True $ showString "∀ " . shows body
    IntroOuter body
      | p == 0 -> showsPrec p body
      | otherwise -> showParen True $ showString "outer " . shows body
||||
|
@ -0,0 +1,20 @@
|
||||
module Unison.Codebase.FileCodebase.TypeEdit where

import Unison.Codebase.FileCodebase.Reference (Reference)
import Unison.Hashable (Hashable)
import qualified Unison.Hashable as H

-- | An edit to a type definition: either replace it with another type,
-- or deprecate it outright.
data TypeEdit = Replace Reference | Deprecate
  deriving (Eq, Ord, Show)

-- | Every reference mentioned by this edit.
references :: TypeEdit -> [Reference]
references Deprecate   = []
references (Replace r) = [r]

instance Hashable TypeEdit where
  tokens (Replace r) = H.Tag 0 : H.tokens r
  tokens Deprecate   = [H.Tag 1]

-- | The replacement reference, if this edit has one.
toReference :: TypeEdit -> Maybe Reference
toReference Deprecate   = Nothing
toReference (Replace r) = Just r
|
@ -3,26 +3,25 @@ module Unison.Codebase.GitError where
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo)
|
||||
import U.Codebase.Sqlite.DbId (SchemaVersion)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo, ReadRemoteNamespace)
|
||||
|
||||
type CodebasePath = FilePath
|
||||
|
||||
data GitError = NoGit
|
||||
| UnrecognizableCacheDir Text CodebasePath
|
||||
| UnrecognizableCheckoutDir Text CodebasePath
|
||||
| CloneException ReadRepo String
|
||||
| PushException WriteRepo String
|
||||
| PushNoOp WriteRepo
|
||||
-- url commit Diff of what would change on merge with remote
|
||||
| PushDestinationHasNewStuff WriteRepo
|
||||
| NoRemoteNamespaceWithHash ReadRepo ShortBranchHash
|
||||
| RemoteNamespaceHashAmbiguous ReadRepo ShortBranchHash (Set Branch.Hash)
|
||||
| CouldntLoadRootBranch ReadRepo Branch.Hash
|
||||
| CouldntParseRootBranch ReadRepo String
|
||||
| CouldntOpenCodebase ReadRepo CodebasePath
|
||||
| UnrecognizedSchemaVersion ReadRepo CodebasePath SchemaVersion
|
||||
| SomeOtherError String
|
||||
| CouldntLoadSyncedBranch Branch.Hash
|
||||
deriving Show
|
||||
-- | Errors from the git transport/working-directory layer (clone, push,
-- cache and checkout directories) — independent of codebase format.
data GitProtocolError
  = NoGit
  | UnrecognizableCacheDir ReadRepo CodebasePath
  | UnrecognizableCheckoutDir ReadRepo CodebasePath
  | CloneException ReadRepo String
  | PushException WriteRepo String
  | PushNoOp WriteRepo
  -- url commit Diff of what would change on merge with remote
  | PushDestinationHasNewStuff WriteRepo
  | CleanupError SomeException
  deriving Show
|
||||
|
||||
-- | Errors about the *contents* of a remote codebase, parameterized over
-- the branch-hash type @h@ so the type stays codebase-format agnostic.
data GitCodebaseError h
  = NoRemoteNamespaceWithHash ReadRepo ShortBranchHash
  | RemoteNamespaceHashAmbiguous ReadRepo ShortBranchHash (Set h)
  | CouldntLoadRootBranch ReadRepo h
  | CouldntLoadSyncedBranch ReadRemoteNamespace h
  deriving Show
|
||||
|
@ -1,45 +1,40 @@
|
||||
{-# LANGUAGE ApplicativeDo #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
|
||||
module Unison.Codebase.Init where
|
||||
module Unison.Codebase.Init
|
||||
( Init (..),
|
||||
DebugName,
|
||||
Pretty,
|
||||
createCodebase,
|
||||
initCodebaseAndExit,
|
||||
openNewUcmCodebaseOrExit,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Codebase.Init.Type
|
||||
import System.Exit (exitFailure)
|
||||
import Unison.Codebase (Codebase, CodebasePath)
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import qualified Unison.PrettyTerminal as PT
|
||||
import Unison.Symbol (Symbol)
|
||||
import qualified Unison.Util.Pretty as P
|
||||
import UnliftIO.Directory (canonicalizePath)
|
||||
|
||||
type Pretty = P.Pretty P.ColorText
|
||||
|
||||
data CreateCodebaseError
|
||||
= CreateCodebaseAlreadyExists
|
||||
| CreateCodebaseOther Pretty
|
||||
import qualified Unison.Codebase.Init.CreateCodebaseError as E
|
||||
import Unison.Codebase.Init.CreateCodebaseError (Pretty)
|
||||
|
||||
type DebugName = String
|
||||
|
||||
data Init m v a = Init
|
||||
{ -- | open an existing codebase
|
||||
openCodebase :: DebugName -> CodebasePath -> m (Either Pretty (m (), Codebase m v a)),
|
||||
-- | create a new codebase
|
||||
createCodebase' :: DebugName -> CodebasePath -> m (Either CreateCodebaseError (m (), Codebase m v a)),
|
||||
-- | given a codebase root, and given that the codebase root may have other junk in it,
|
||||
-- give the path to the "actual" files; e.g. what a forked transcript should clone.
|
||||
codebasePath :: CodebasePath -> CodebasePath
|
||||
}
|
||||
|
||||
createCodebase :: MonadIO m => Init m v a -> DebugName -> CodebasePath -> m (Either Pretty (m (), Codebase m v a))
|
||||
createCodebase debugName cbInit path = do
|
||||
prettyDir <- P.string <$> canonicalizePath path
|
||||
createCodebase' debugName cbInit path <&> mapLeft \case
|
||||
CreateCodebaseAlreadyExists ->
|
||||
E.CreateCodebaseAlreadyExists ->
|
||||
P.wrap $
|
||||
"It looks like there's already a codebase in: "
|
||||
<> prettyDir
|
||||
CreateCodebaseOther message ->
|
||||
E.CreateCodebaseOther message ->
|
||||
P.wrap ("I ran into an error when creating the codebase in: " <> prettyDir)
|
||||
<> P.newline
|
||||
<> P.newline
|
||||
|
@ -0,0 +1,12 @@
|
||||
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE OverloadedStrings #-}

-- | The error type shared by codebase-creation implementations
-- (pulled out of "Unison.Codebase.Init" to break an import cycle).
module Unison.Codebase.Init.CreateCodebaseError (CreateCodebaseError(..), Pretty) where

import qualified Unison.Util.Pretty as P

-- | Pretty-printable colored text, used for user-facing error messages.
type Pretty = P.Pretty P.ColorText

-- | Why creating a codebase failed.
data CreateCodebaseError
  = -- | a codebase already exists at the requested path
    CreateCodebaseAlreadyExists
  | -- | some other failure, with a message for the user
    CreateCodebaseOther Pretty
|
20
parser-typechecker/src/Unison/Codebase/Init/Type.hs
Normal file
20
parser-typechecker/src/Unison/Codebase/Init/Type.hs
Normal file
@ -0,0 +1,20 @@
|
||||
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE OverloadedStrings #-}

-- | The 'Init' record: an abstract interface for opening and creating
-- codebases, independent of any particular codebase implementation.
module Unison.Codebase.Init.Type (Init(..)) where

import Unison.Codebase.Init.CreateCodebaseError (CreateCodebaseError, Pretty)
import Unison.Codebase (Codebase, CodebasePath)

-- | A label used in debug output to identify which implementation is in use.
type DebugName = String

data Init m v a = Init
  { -- | open an existing codebase
    openCodebase :: DebugName -> CodebasePath -> m (Either Pretty (m (), Codebase m v a)),
    -- | create a new codebase
    createCodebase' :: DebugName -> CodebasePath -> m (Either CreateCodebaseError (m (), Codebase m v a)),
    -- | given a codebase root, and given that the codebase root may have other junk in it,
    -- give the path to the "actual" files; e.g. what a forked transcript should clone.
    codebasePath :: CodebasePath -> CodebasePath
  }
|
||||
|
@ -8,7 +8,7 @@ module Unison.Codebase.MainTerm where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parser as Parser
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Term ( Term )
|
||||
@ -22,6 +22,7 @@ import Unison.Reference ( Reference )
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.Type ( Type )
|
||||
import qualified Unison.Typechecker as Typechecker
|
||||
import qualified Unison.Parser.Ann as Parser.Ann
|
||||
|
||||
data MainTerm v
|
||||
= NotAFunctionName String
|
||||
@ -41,7 +42,7 @@ getMainTerm loadTypeOfTerm parseNames0 mainName mainType =
|
||||
Nothing -> pure (NotAFunctionName mainName)
|
||||
Just hq -> do
|
||||
let refs = Names3.lookupHQTerm hq (Names3.Names parseNames0 mempty)
|
||||
let a = Parser.External
|
||||
let a = Parser.Ann.External
|
||||
case toList refs of
|
||||
[Referent.Ref ref] -> do
|
||||
typ <- loadTypeOfTerm ref
|
||||
|
@ -1,15 +0,0 @@
|
||||
module Unison.Codebase.NameEdit where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.Reference (Reference)
|
||||
import Unison.Hashable (Hashable, tokens)
|
||||
|
||||
data NameEdit =
|
||||
NameEdit { added :: Set Reference, removed :: Set Reference }
|
||||
|
||||
instance Semigroup NameEdit where
|
||||
NameEdit add1 del1 <> NameEdit add2 del2 = NameEdit (add1 <> add2) (del1 <> del2)
|
||||
|
||||
instance Hashable NameEdit where
|
||||
tokens (NameEdit added removed) = tokens (toList added, toList removed)
|
@ -4,13 +4,69 @@
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Path where
|
||||
module Unison.Codebase.Path
|
||||
( Path (..),
|
||||
Path' (..),
|
||||
Absolute (..),
|
||||
Relative (..),
|
||||
Resolve (..),
|
||||
pattern Empty,
|
||||
singleton,
|
||||
Unison.Codebase.Path.uncons,
|
||||
empty,
|
||||
absoluteEmpty,
|
||||
relativeEmpty',
|
||||
currentPath,
|
||||
prefix,
|
||||
unprefix,
|
||||
prefixName,
|
||||
unprefixName,
|
||||
HQSplit,
|
||||
Split,
|
||||
Split',
|
||||
HQSplit',
|
||||
ancestors,
|
||||
|
||||
-- * tests
|
||||
isCurrentPath,
|
||||
isRoot,
|
||||
isRoot',
|
||||
|
||||
-- * things that could be replaced with `Convert` instances
|
||||
absoluteToPath',
|
||||
fromAbsoluteSplit,
|
||||
fromList,
|
||||
fromName,
|
||||
fromName',
|
||||
fromPath',
|
||||
fromText,
|
||||
toAbsoluteSplit,
|
||||
toList,
|
||||
toName,
|
||||
toName',
|
||||
toText,
|
||||
unsplit,
|
||||
unsplit',
|
||||
unsplitHQ,
|
||||
unsplitHQ',
|
||||
|
||||
-- * things that could be replaced with `Parse` instances
|
||||
splitFromName,
|
||||
hqSplitFromName',
|
||||
|
||||
-- * things that could be replaced with `Cons` instances
|
||||
cons,
|
||||
|
||||
-- * things that could be replaced with `Snoc` instances
|
||||
snoc,
|
||||
unsnoc,
|
||||
)
|
||||
where
|
||||
import Unison.Prelude hiding (empty, toList)
|
||||
|
||||
import Data.Bifunctor ( first )
|
||||
import Data.List.Extra ( stripPrefix, dropPrefix )
|
||||
import Control.Lens hiding (unsnoc, cons, snoc)
|
||||
import Control.Lens hiding (Empty, unsnoc, cons, snoc)
|
||||
import qualified Control.Lens as Lens
|
||||
import qualified Data.Foldable as Foldable
|
||||
import qualified Data.Text as Text
|
||||
@ -83,7 +139,7 @@ type HQSplit' = (Path', HQ'.HQSegment)
|
||||
type SplitAbsolute = (Absolute, NameSegment)
|
||||
type HQSplitAbsolute = (Absolute, HQ'.HQSegment)
|
||||
|
||||
-- examples:
|
||||
-- | examples:
|
||||
-- unprefix .foo.bar .blah == .blah (absolute paths left alone)
|
||||
-- unprefix .foo.bar id == id (relative paths starting w/ nonmatching prefix left alone)
|
||||
-- unprefix .foo.bar foo.bar.baz == baz (relative paths w/ common prefix get stripped)
|
||||
@ -282,6 +338,7 @@ hqSplitFromName' = fmap (fmap HQ'.fromName) . Lens.unsnoc . fromName'
|
||||
splitFromName :: Name -> Maybe Split
|
||||
splitFromName = unsnoc . fromName
|
||||
|
||||
-- | Strip a leading absolute prefix from a name via 'unprefix'; per
-- 'unprefix', absolute names and names without the prefix are unchanged.
|
||||
unprefixName :: Absolute -> Name -> Name
|
||||
unprefixName prefix = toName . unprefix prefix . fromName'
|
||||
|
||||
|
258
parser-typechecker/src/Unison/Codebase/Path/Parse.hs
Normal file
258
parser-typechecker/src/Unison/Codebase/Path/Parse.hs
Normal file
@ -0,0 +1,258 @@
|
||||
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
|
||||
{-# LANGUAGE OverloadedLists #-}
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE PatternSynonyms #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Path.Parse
|
||||
( parsePath',
|
||||
parsePathImpl',
|
||||
parseSplit',
|
||||
definitionNameSegment,
|
||||
parseHQSplit,
|
||||
parseHQSplit',
|
||||
parseShortHashOrHQSplit',
|
||||
wordyNameSegment,
|
||||
)
|
||||
where
|
||||
|
||||
import Unison.Prelude hiding (empty, toList)
|
||||
|
||||
import Unison.Codebase.Path
|
||||
|
||||
import Data.Bifunctor ( first )
|
||||
import Data.List.Extra ( stripPrefix, dropPrefix )
|
||||
import Control.Lens hiding (unsnoc, cons, snoc)
|
||||
import qualified Control.Lens as Lens
|
||||
import qualified Data.Foldable as Foldable
|
||||
import qualified Data.Text as Text
|
||||
import Data.Sequence (Seq((:<|),(:|>) ))
|
||||
import qualified Data.Sequence as Seq
|
||||
import Unison.Name ( Name, Convert, Parse )
|
||||
import qualified Unison.Name as Name
|
||||
import Unison.Util.Monoid (intercalateMap)
|
||||
import qualified Unison.Lexer as Lexer
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import qualified Unison.ShortHash as SH
|
||||
|
||||
import Unison.NameSegment ( NameSegment(NameSegment))
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
|
||||
-- .libs.blah.poo is Absolute
|
||||
-- libs.blah.poo is Relative
|
||||
-- Left is some parse error tbd
|
||||
-- | Parse a path: ".libs.blah.poo" is Absolute, "libs.blah.poo" is
-- Relative. Left carries a parse-error message.
parsePath' :: String -> Either String Path'
parsePath' input = do
  (path, rest) <- parsePathImpl' input
  case rest of
    "" -> Right path
    _ -> do
      -- The final segment is left unconsumed by parsePathImpl'; parse it
      -- here and reattach it to the path.
      (seg, trailing) <- parseSegment rest
      case trailing of
        "" -> Right (unsplit' (path, NameSegment (Text.pack seg)))
        _ ->
          Left ("extra characters after " <> show path <> ": " <> show trailing)
|
||||
|
||||
-- implementation detail of parsePath' and parseSplit'
-- foo.bar.baz.34 becomes `Right (foo.bar.baz, "34")
-- foo.bar.baz becomes `Right (foo.bar, "baz")
-- baz becomes `Right (, "baz")
-- foo.bar.baz#a8fj becomes `Left`; we don't hash-qualify paths.
-- TODO: Get rid of this thing.
parsePathImpl' :: String -> Either String (Path', String)
parsePathImpl' p = case p of
  -- "." alone is the absolute root with no trailing segment
  "." -> Right (Path' . Left $ absoluteEmpty, "")
  -- a leading dot means absolute; anything else is relative
  '.' : p -> over _1 (Path' . Left . Absolute . fromList) <$> segs p
  p -> over _1 (Path' . Right . Relative . fromList) <$> segs p
  where
  -- Consume dot-separated segments, returning them plus the unconsumed
  -- remainder (the final segment, or whatever followed it).
  go f p = case f p of
    -- whole input was one dotted run: split off the last segment
    Right (a, "") -> case Lens.unsnoc (Name.segments' $ Text.pack a) of
      Nothing -> Left "empty path"
      Just (segs, last) -> Right (NameSegment <$> segs, Text.unpack last)
    -- stopped at a '.': everything before it is segments, after it is rest
    Right (segs, '.' : rem) ->
      let segs' = Name.segments' (Text.pack segs)
      in Right (NameSegment <$> segs', rem)
    Right (segs, rem) ->
      Left $ "extra characters after " <> segs <> ": " <> show rem
    Left e -> Left e
  segs p = go parseSegment p
|
||||
|
||||
-- | Parse one name segment from the front of the input: a wordy or
-- symboly identifier, or the unit literal "()". Returns the segment and
-- the unconsumed rest. Alternatives are tried in order via the Either
-- semigroup, which keeps the first Right.
parseSegment :: String -> Either String (String, String)
parseSegment s =
  first show (Lexer.wordyId s <> Lexer.symbolyId s)
    <> unit' s
    <> Left ("I expected an identifier but found " <> s)
|
||||
|
||||
wordyNameSegment, definitionNameSegment :: String -> Either String NameSegment

-- | Parse a wordy identifier that must consume the entire input.
wordyNameSegment s = case Lexer.wordyId0 s of
  Left e -> Left (show e)
  Right (a, rest)
    | null rest -> Right (NameSegment (Text.pack a))
    | otherwise ->
        Left $ "trailing characters after " <> show a <> ": " <> show rest
|
||||
|
||||
-- | Like 'wordyNameSegment', but the empty string parses as an empty
-- segment. NOTE(review): appears unused and is not in this module's
-- export list — confirm before relying on it.
optionalWordyNameSegment :: String -> Either String NameSegment
optionalWordyNameSegment s
  | null s = Right (NameSegment "")
  | otherwise = wordyNameSegment s
|
||||
|
||||
-- Parse a name segment like "()", returning it and the rest of the input.
unit' :: String -> Either String (String, String)
unit' s =
  maybe
    (Left ("Expected () but found: " <> s))
    (\rest -> Right ("()", rest))
    (stripPrefix "()" s)
|
||||
|
||||
-- | Parse exactly "()" as a name segment; trailing characters or any
-- other input are errors.
unit :: String -> Either String NameSegment
unit s = case unit' s of
  Right (_, "") -> Right (NameSegment "()")
  Right (_, rest) -> Left ("trailing characters after (): " <> show rest)
  Left _ -> Left ("I don't know how to parse " <> s)
|
||||
|
||||
|
||||
-- | Parse a definition name segment: wordy, symboly, or the unit "()".
-- Alternatives are tried in order via the Either semigroup (a Left is
-- replaced by the next candidate's result).
definitionNameSegment s = wordyNameSegment s <> symboly s <> unit s
  where
    symboly input = case Lexer.symbolyId0 input of
      Left e -> Left (show e)
      Right (a, "") -> Right (NameSegment (Text.pack a))
      Right (a, rest) ->
        Left $ "trailing characters after " <> show a <> ": " <> show rest
|
||||
|
||||
-- parseSplit' wordyNameSegment "foo.bar.baz" returns Right (foo.bar, baz)
-- parseSplit' wordyNameSegment "foo.bar.+" returns Left err
-- parseSplit' definitionNameSegment "foo.bar.+" returns Right (foo.bar, +)
parseSplit' :: (String -> Either String NameSegment)
            -> String
            -> Either String Split'
parseSplit' lastSegment input = do
  (path, rest) <- parsePathImpl' input
  seg <- lastSegment rest
  Right (path, seg)
|
||||
|
||||
-- | Parse either a standalone short hash ("#abc") or a possibly
-- hash-qualified split ("foo.bar#abc" or "foo.bar").
parseShortHashOrHQSplit' :: String -> Either String (Either SH.ShortHash HQSplit')
parseShortHashOrHQSplit' s =
  case Text.breakOn "#" $ Text.pack s of
    -- fix: this case previously called `error`, bottoming out on empty
    -- input; a parser of user-supplied strings should fail via Left.
    ("", "") -> Left $ "encountered empty string parsing '" <> s <> "'"
    -- no hash present: a plain (hash-unqualified) name
    (n, "") -> do
      (p, rem) <- parsePathImpl' (Text.unpack n)
      seg <- definitionNameSegment rem
      pure $ Right (p, HQ'.NameOnly seg)
    -- hash only, no name
    ("", sh) -> do
      sh' <- maybeToRight (shError s) (SH.fromText sh)
      pure $ Left sh'
    -- name followed by hash
    (n, sh) -> do
      (p, rem) <- parsePathImpl' (Text.unpack n)
      seg <- definitionNameSegment rem
      hq <-
        maybeToRight (shError s)
          . fmap (\sh' -> (p, HQ'.HashQualified seg sh'))
          . SH.fromText
          $ sh
      pure $ Right hq
  where
    shError s' = "couldn't parse shorthash from " <> s'
|
||||
|
||||
-- | Like 'parseHQSplit'', but the path must be relative.
parseHQSplit :: String -> Either String HQSplit
parseHQSplit s = do
  (path, hqseg) <- parseHQSplit' s
  case path of
    Path' (Right (Relative p)) -> Right (p, hqseg)
    Path' Left{} ->
      Left $ "Sorry, you can't use an absolute name like " <> s <> " here."
|
||||
|
||||
-- | Parse a possibly hash-qualified split like "foo.bar#abc" or
-- "foo.bar". Hash-only references ("#abc") are rejected here.
parseHQSplit' :: String -> Either String HQSplit'
parseHQSplit' s = case Text.breakOn "#" $ Text.pack s of
  -- fix: this case previously called `error`, bottoming out on empty
  -- input; a parser of user-supplied strings should fail via Left.
  ("", "") -> Left $ "encountered empty string parsing '" <> s <> "'"
  ("", _ ) -> Left "Sorry, you can't use a hash-only reference here."
  -- no hash present
  (n , "") -> do
    (p, rem) <- parsePath n
    seg <- definitionNameSegment rem
    pure (p, HQ'.NameOnly seg)
  -- name followed by hash
  (n, sh) -> do
    (p, rem) <- parsePath n
    seg <- definitionNameSegment rem
    maybeToRight (shError s)
      . fmap (\sh' -> (p, HQ'.HashQualified seg sh'))
      . SH.fromText
      $ sh
  where
    shError s' = "couldn't parse shorthash from " <> s'
    -- A bare "." parses as the absolute root; present it as the relative
    -- root with "." left over so the caller still gets a final segment.
    parsePath n = do
      x <- parsePathImpl' (Text.unpack n)
      pure $ case x of
        (Path' (Left e), "") | e == absoluteEmpty -> (relativeEmpty', ".")
        x' -> x'
|
||||
|
||||
-- | Resolve a relative-or-absolute split against a base absolute path.
toAbsoluteSplit :: Absolute -> (Path', a) -> (Absolute, a)
toAbsoluteSplit a (p, s) = (resolve a p, s)

-- | Drop the absolute/relative distinction from a split.
fromSplit' :: (Path', a) -> (Path, a)
fromSplit' (Path' (Left (Absolute p)), a) = (p, a)
fromSplit' (Path' (Right (Relative p)), a) = (p, a)

-- | Unwrap an absolute split to its underlying 'Path'.
fromAbsoluteSplit :: (Absolute, a) -> (Path, a)
fromAbsoluteSplit (Absolute p, a) = (p, a)
|
||||
|
||||
-- splitFromName :: Name -> Maybe Split
|
||||
-- splitFromName = unsnoc . fromName
|
||||
|
||||
-- | Strip a leading absolute prefix from a name via 'unprefix'; names
-- that don't share the prefix come back unchanged.
unprefixName :: Absolute -> Name -> Name
unprefixName prefix = toName . unprefix prefix . fromName'

-- | Attach an absolute prefix to a name via 'prefix'.
prefixName :: Absolute -> Name -> Name
prefixName p = toName . prefix p . fromName'
|
||||
|
||||
-- | A path of exactly one segment.
singleton :: NameSegment -> Path
singleton n = fromList [n]

-- | Prepend a segment (monomorphic 'Lens.cons').
cons :: NameSegment -> Path -> Path
cons = Lens.cons

-- | Append a segment (monomorphic 'Lens.snoc').
snoc :: Path -> NameSegment -> Path
snoc = Lens.snoc

-- | Append a segment to a relative-or-absolute path.
snoc' :: Path' -> NameSegment -> Path'
snoc' = Lens.snoc

-- | Split off the last segment, if any.
unsnoc :: Path -> Maybe (Path, NameSegment)
unsnoc = Lens.unsnoc

-- | Split off the first segment, if any.
uncons :: Path -> Maybe (NameSegment, Path)
uncons = Lens.uncons
|
||||
|
||||
--asDirectory :: Path -> Text
|
||||
--asDirectory p = case toList p of
|
||||
-- NameSegment "_root_" : (Seq.fromList -> tail) ->
|
||||
-- "/" <> asDirectory (Path tail)
|
||||
-- other -> Text.intercalate "/" . fmap NameSegment.toText $ other
|
||||
|
||||
-- -- > Path.fromName . Name.unsafeFromText $ ".Foo.bar"
|
||||
-- -- /Foo/bar
|
||||
-- -- Int./ -> "Int"/"/"
|
||||
-- -- pkg/Int.. -> "pkg"/"Int"/"."
|
||||
-- -- Int./foo -> error because "/foo" is not a valid NameSegment
|
||||
-- -- and "Int." is not a valid NameSegment
|
||||
-- -- and "Int" / "" / "foo" is not a valid path (internal "")
|
||||
-- -- todo: fromName needs to be a little more complicated if we want to allow
|
||||
-- -- identifiers called Function.(.)
|
||||
-- fromName :: Name -> Path
|
||||
-- fromName = fromList . Name.segments
|
||||
|
||||
-- fromName' :: Name -> Path'
|
||||
-- fromName' n = case take 1 (Name.toString n) of
|
||||
-- "." -> Path' . Left . Absolute $ Path seq
|
||||
-- _ -> Path' . Right $ Relative path
|
||||
-- where
|
||||
-- path = fromName n
|
||||
-- seq = toSeq path
|
||||
|
||||
-- toName :: Path -> Name
|
||||
-- toName = Name.unsafeFromText . toText
|
||||
|
||||
-- | Convert a Path' to a Name
toName' :: Path' -> Name
toName' = Name.unsafeFromText . toText'

-- | Parse dot-separated text into a 'Path'; "" is the empty path.
fromText :: Text -> Path
fromText = \case
  "" -> empty
  t -> fromList $ NameSegment <$> Name.segments' t

-- | Render a 'Path'' as text; absolute paths get a leading '.'.
toText' :: Path' -> Text
toText' = \case
  Path' (Left (Absolute path)) -> Text.cons '.' (toText path)
  Path' (Right (Relative path)) -> toText path
|
@ -1,30 +1,29 @@
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Reflog where
|
||||
module Unison.Codebase.Reflog (Entry(..), fromText, toText) where
|
||||
|
||||
import Data.Coerce (Coercible, coerce)
|
||||
import Data.Text (Text)
|
||||
import qualified Data.Text as Text
|
||||
import Unison.Codebase.Branch (Hash)
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Hash as Hash
|
||||
|
||||
data Entry =
|
||||
Entry
|
||||
{ from :: Hash
|
||||
, to :: Hash
|
||||
, reason :: Text
|
||||
}
|
||||
data Entry h = Entry
|
||||
{ from :: h,
|
||||
to :: h,
|
||||
reason :: Text
|
||||
}
|
||||
|
||||
fromText :: Text -> Maybe Entry
|
||||
fromText :: Coercible h Hash.Hash => Text -> Maybe (Entry h)
|
||||
fromText t =
|
||||
case Text.words t of
|
||||
(Hash.fromBase32Hex -> Just old) : (Hash.fromBase32Hex -> Just new) : (Text.unwords -> reason) ->
|
||||
Just $ Entry (Causal.RawHash old) (Causal.RawHash new) reason
|
||||
Just $ Entry (coerce old) (coerce new) reason
|
||||
_ -> Nothing
|
||||
|
||||
|
||||
toText :: Entry -> Text
|
||||
toText :: Coercible h Hash.Hash => Entry h -> Text
|
||||
toText (Entry old new reason) =
|
||||
Text.unwords [ Hash.base32Hex . Causal.unRawHash $ old
|
||||
, Hash.base32Hex . Causal.unRawHash $ new
|
||||
, reason ]
|
||||
Text.unwords
|
||||
[ Hash.base32Hex . coerce $ old,
|
||||
Hash.base32Hex . coerce $ new,
|
||||
reason
|
||||
]
|
||||
|
@ -10,9 +10,9 @@ import Data.Bifunctor (first)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.Codebase.CodeLookup as CL
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import qualified Unison.Codebase.CodeLookup.Util as CL
|
||||
import Unison.UnisonFile ( UnisonFile )
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type ( Type )
|
||||
import Unison.Var ( Var )
|
||||
@ -20,9 +20,12 @@ import qualified Unison.Var as Var
|
||||
import Unison.Reference ( Reference )
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Type as UF
|
||||
import Unison.Builtin.Decls (pattern TupleTerm', tupleTerm)
|
||||
import qualified Unison.Util.Pretty as P
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import Unison.WatchKind (WatchKind)
|
||||
import qualified Unison.WatchKind as WK
|
||||
|
||||
type Error = P.Pretty P.ColorText
|
||||
type Term v = Term.Term v ()
|
||||
@ -36,7 +39,6 @@ data Runtime v = Runtime
|
||||
-> IO (Either Error (Term v))
|
||||
, mainType :: Type v Ann
|
||||
, ioTestType :: Type v Ann
|
||||
, needsContainment :: Bool
|
||||
}
|
||||
|
||||
type IsCacheHit = Bool
|
||||
@ -48,7 +50,7 @@ type WatchResults v a = (Either Error
|
||||
-- Bindings:
|
||||
( [(v, Term v)]
|
||||
-- Map watchName (loc, hash, expression, value, isHit)
|
||||
, Map v (a, UF.WatchKind, Reference, Term v, Term v, IsCacheHit)
|
||||
, Map v (a, WatchKind, Reference, Term v, Term v, IsCacheHit)
|
||||
))
|
||||
|
||||
-- Evaluates the watch expressions in the file, returning a `Map` of their
|
||||
@ -74,7 +76,7 @@ evaluateWatches code ppe evaluationCache rt uf = do
|
||||
m = first Reference.DerivedId <$>
|
||||
Term.hashComponents (Map.fromList (UF.terms uf <> UF.allWatches uf))
|
||||
watches = Set.fromList (fst <$> UF.allWatches uf)
|
||||
watchKinds :: Map v UF.WatchKind
|
||||
watchKinds :: Map v WatchKind
|
||||
watchKinds = Map.fromList [ (v, k) | (k, ws) <- Map.toList (UF.watches uf)
|
||||
, (v,_) <- ws ]
|
||||
unann = Term.amap (const ())
|
||||
@ -134,12 +136,8 @@ evaluateTerm' codeLookup cache ppe rt tm = do
|
||||
Just r -> pure (Right r)
|
||||
Nothing -> do
|
||||
let uf = UF.UnisonFileId mempty mempty mempty
|
||||
(Map.singleton UF.RegularWatch [(Var.nameds "result", tm)])
|
||||
runnable <-
|
||||
if needsContainment rt
|
||||
then Codebase.makeSelfContained' codeLookup uf
|
||||
else pure uf
|
||||
r <- evaluateWatches codeLookup ppe cache rt runnable
|
||||
(Map.singleton WK.RegularWatch [(Var.nameds "result", tm)])
|
||||
r <- evaluateWatches codeLookup ppe cache rt uf
|
||||
pure $ r <&> \(_,map) ->
|
||||
let [(_loc, _kind, _hash, _src, value, _isHit)] = Map.elems map
|
||||
in value
|
||||
|
@ -1,57 +0,0 @@
|
||||
module Unison.Codebase.Serialization.PutT where
|
||||
|
||||
import Data.Bytes.Put
|
||||
import qualified Data.Serialize.Put as Ser
|
||||
import Data.Serialize.Put ( PutM
|
||||
, runPutM
|
||||
)
|
||||
|
||||
newtype PutT m a = PutT { unPutT :: m (PutM a) }
|
||||
|
||||
instance Monad m => MonadPut (PutT m) where
|
||||
putWord8 = PutT . pure . putWord8
|
||||
{-# INLINE putWord8 #-}
|
||||
putByteString = PutT . pure . putByteString
|
||||
{-# INLINE putByteString #-}
|
||||
putLazyByteString = PutT . pure . putLazyByteString
|
||||
{-# INLINE putLazyByteString #-}
|
||||
flush = PutT $ pure flush
|
||||
{-# INLINE flush #-}
|
||||
putWord16le = PutT . pure . putWord16le
|
||||
{-# INLINE putWord16le #-}
|
||||
putWord16be = PutT . pure . putWord16be
|
||||
{-# INLINE putWord16be #-}
|
||||
putWord16host = PutT . pure . putWord16host
|
||||
{-# INLINE putWord16host #-}
|
||||
putWord32le = PutT . pure . putWord32le
|
||||
{-# INLINE putWord32le #-}
|
||||
putWord32be = PutT . pure . putWord32be
|
||||
{-# INLINE putWord32be #-}
|
||||
putWord32host = PutT . pure . putWord32host
|
||||
{-# INLINE putWord32host #-}
|
||||
putWord64le = PutT . pure . putWord64le
|
||||
{-# INLINE putWord64le #-}
|
||||
putWord64be = PutT . pure . putWord64be
|
||||
{-# INLINE putWord64be #-}
|
||||
putWord64host = PutT . pure . putWord64host
|
||||
{-# INLINE putWord64host #-}
|
||||
putWordhost = PutT . pure . putWordhost
|
||||
{-# INLINE putWordhost #-}
|
||||
|
||||
instance Functor m => Functor (PutT m) where
|
||||
fmap f (PutT m) = PutT $ fmap (fmap f) m
|
||||
|
||||
instance Applicative m => Applicative (PutT m) where
|
||||
pure = PutT . pure . pure
|
||||
(PutT f) <*> (PutT a) = PutT $ (<*>) <$> f <*> a
|
||||
|
||||
instance Monad m => Monad (PutT m) where
|
||||
(PutT m) >>= f = PutT $ do
|
||||
putm <- m
|
||||
let (a, bs) = runPutM putm
|
||||
putm' <- unPutT $ f a
|
||||
let (b, bs') = runPutM putm'
|
||||
pure $ do
|
||||
Ser.putByteString bs
|
||||
Ser.putByteString bs'
|
||||
pure b
|
@ -4,11 +4,10 @@
|
||||
module Unison.Codebase.ShortBranchHash where
|
||||
|
||||
import Unison.Prelude
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Hash as Hash
|
||||
import qualified Data.Text as Text
|
||||
import qualified Data.Set as Set
|
||||
import Data.Coerce (Coercible, coerce)
|
||||
|
||||
newtype ShortBranchHash =
|
||||
ShortBranchHash { toText :: Text } -- base32hex characters
|
||||
@ -17,15 +16,15 @@ newtype ShortBranchHash =
|
||||
toString :: ShortBranchHash -> String
|
||||
toString = Text.unpack . toText
|
||||
|
||||
toHash :: ShortBranchHash -> Maybe Branch.Hash
|
||||
toHash = fmap Causal.RawHash . Hash.fromBase32Hex . toText
|
||||
toHash :: Coercible Hash.Hash h => ShortBranchHash -> Maybe h
|
||||
toHash = fmap coerce . Hash.fromBase32Hex . toText
|
||||
|
||||
fromHash :: Int -> Branch.Hash -> ShortBranchHash
|
||||
fromHash :: Coercible h Hash.Hash => Int -> h -> ShortBranchHash
|
||||
fromHash len =
|
||||
ShortBranchHash . Text.take len . Hash.base32Hex . Causal.unRawHash
|
||||
ShortBranchHash . Text.take len . Hash.base32Hex . coerce
|
||||
|
||||
fullFromHash :: Branch.Hash -> ShortBranchHash
|
||||
fullFromHash = ShortBranchHash . Hash.base32Hex . Causal.unRawHash
|
||||
fullFromHash :: Coercible h Hash.Hash => h -> ShortBranchHash
|
||||
fullFromHash = ShortBranchHash . Hash.base32Hex . coerce
|
||||
|
||||
-- abc -> SBH abc
|
||||
-- #abc -> SBH abc
|
||||
|
@ -15,8 +15,9 @@ where
|
||||
|
||||
import qualified Control.Concurrent
|
||||
import qualified Control.Exception
|
||||
import Control.Exception.Safe (MonadCatch)
|
||||
import Control.Monad (filterM, unless, when, (>=>))
|
||||
import Control.Monad.Except (ExceptT(ExceptT), MonadError (throwError), runExceptT)
|
||||
import Control.Monad.Except (ExceptT (ExceptT), MonadError (throwError), runExceptT, withExceptT)
|
||||
import qualified Control.Monad.Except as Except
|
||||
import Control.Monad.Extra (ifM, unlessM)
|
||||
import qualified Control.Monad.Extra as Monad
|
||||
@ -26,10 +27,10 @@ import qualified Control.Monad.State as State
|
||||
import Control.Monad.Trans (MonadTrans (lift))
|
||||
import Control.Monad.Trans.Maybe (MaybeT (MaybeT))
|
||||
import Data.Bifunctor (Bifunctor (bimap, first), second)
|
||||
import qualified Data.Either.Combinators as Either
|
||||
import qualified Data.Char as Char
|
||||
import qualified Data.Either.Combinators as Either
|
||||
import Data.Foldable (Foldable (toList), for_, traverse_)
|
||||
import Data.Functor (void, (<&>), ($>))
|
||||
import Data.Functor (void, ($>), (<&>))
|
||||
import qualified Data.List as List
|
||||
import Data.Map (Map)
|
||||
import qualified Data.Map as Map
|
||||
@ -47,12 +48,13 @@ import qualified System.Console.ANSI as ANSI
|
||||
import System.FilePath ((</>))
|
||||
import qualified System.FilePath as FilePath
|
||||
import U.Codebase.HashTags (CausalHash (CausalHash, unCausalHash))
|
||||
import U.Codebase.Sqlite.Operations (EDB)
|
||||
import qualified U.Codebase.Reference as C.Reference
|
||||
import U.Codebase.Sqlite.Connection (Connection (Connection))
|
||||
import qualified U.Codebase.Sqlite.Connection as Connection
|
||||
import U.Codebase.Sqlite.DbId (SchemaVersion (SchemaVersion))
|
||||
import qualified U.Codebase.Sqlite.JournalMode as JournalMode
|
||||
import qualified U.Codebase.Sqlite.ObjectType as OT
|
||||
import U.Codebase.Sqlite.Operations (EDB)
|
||||
import qualified U.Codebase.Sqlite.Operations as Ops
|
||||
import qualified U.Codebase.Sqlite.Queries as Q
|
||||
import qualified U.Codebase.Sqlite.Sync22 as Sync22
|
||||
@ -62,6 +64,7 @@ import qualified U.Util.Cache as Cache
|
||||
import qualified U.Util.Hash as H2
|
||||
import qualified U.Util.Monoid as Monoid
|
||||
import qualified U.Util.Set as Set
|
||||
import U.Util.Timing (time)
|
||||
import qualified Unison.Builtin as Builtins
|
||||
import Unison.Codebase (Codebase, CodebasePath)
|
||||
import qualified Unison.Codebase as Codebase1
|
||||
@ -69,23 +72,25 @@ import Unison.Codebase.Branch (Branch (..))
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import Unison.Codebase.Editor.Git (gitIn, gitTextIn, pullBranch)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), writeToRead, printWriteRepo)
|
||||
import Unison.Codebase.GitError (GitError)
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo (WriteGitRepo), printWriteRepo, writeToRead)
|
||||
import qualified Unison.Codebase.GitError as GitError
|
||||
import qualified Unison.Codebase.Init as Codebase
|
||||
import qualified Unison.Codebase.Init as Codebase1
|
||||
import qualified Unison.Codebase.Init.CreateCodebaseError as Codebase1
|
||||
import Unison.Codebase.Patch (Patch)
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import qualified Unison.Codebase.SqliteCodebase.Branch.Dependencies as BD
|
||||
import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv
|
||||
import qualified Unison.Codebase.SqliteCodebase.GitError as GitError
|
||||
import qualified Unison.Codebase.SqliteCodebase.SyncEphemeral as SyncEphemeral
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import qualified Unison.Codebase.Type as C
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import qualified Unison.DataDeclaration as Decl
|
||||
import Unison.Hash (Hash)
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude (MaybeT (runMaybeT), fromMaybe, isJust, trace, traceM)
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
@ -98,14 +103,11 @@ import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Util.Pretty as P
|
||||
import U.Util.Timing (time)
|
||||
import qualified Unison.WatchKind as UF
|
||||
import UnliftIO (MonadIO, catchIO, finally, liftIO)
|
||||
import UnliftIO.Directory (canonicalizePath, createDirectoryIfMissing, doesDirectoryExist, doesFileExist)
|
||||
import UnliftIO.STM
|
||||
import U.Codebase.Sqlite.DbId (SchemaVersion(SchemaVersion))
|
||||
import Control.Exception.Safe (MonadCatch)
|
||||
|
||||
debug, debugProcessBranches, debugCommitFailedTransaction :: Bool
|
||||
debug = False
|
||||
@ -626,7 +628,7 @@ sqliteCodebase debugName root = do
|
||||
clearWatches :: MonadIO m => m ()
|
||||
clearWatches = runDB conn Ops.clearWatches
|
||||
|
||||
getReflog :: MonadIO m => m [Reflog.Entry]
|
||||
getReflog :: MonadIO m => m [Reflog.Entry Branch.Hash]
|
||||
getReflog =
|
||||
liftIO $
|
||||
( do
|
||||
@ -695,7 +697,7 @@ sqliteCodebase debugName root = do
|
||||
>>= traverse (Cv.referentid2to1 (getCycleLen "referentsByPrefix") getDeclType)
|
||||
declReferents' <- Ops.declReferentsByPrefix prefix cycle (read . Text.unpack <$> cid)
|
||||
let declReferents =
|
||||
[ Referent.Con' (Reference.Id (Cv.hash2to1 h) pos len) (fromIntegral cid) (Cv.decltype2to1 ct)
|
||||
[ Referent.ConId (Reference.Id (Cv.hash2to1 h) pos len) (fromIntegral cid) (Cv.decltype2to1 ct)
|
||||
| (h, pos, len, ct, cids) <- declReferents',
|
||||
cid <- cids
|
||||
]
|
||||
@ -993,15 +995,15 @@ viewRemoteBranch' ::
|
||||
forall m.
|
||||
(MonadIO m, MonadCatch m) =>
|
||||
ReadRemoteNamespace ->
|
||||
m (Either GitError (m (), Branch m, CodebasePath))
|
||||
viewRemoteBranch' (repo, sbh, path) = runExceptT do
|
||||
m (Either C.GitError (m (), Branch m, CodebasePath))
|
||||
viewRemoteBranch' (repo, sbh, path) = runExceptT @C.GitError do
|
||||
-- set up the cache dir
|
||||
remotePath <- time "Git fetch" $ pullBranch repo
|
||||
ifM
|
||||
remotePath <- time "Git fetch" . withExceptT C.GitProtocolError $ pullBranch repo
|
||||
ifM @(ExceptT C.GitError m)
|
||||
(codebaseExists remotePath)
|
||||
do
|
||||
lift (sqliteCodebase "viewRemoteBranch.gitCache" remotePath) >>= \case
|
||||
Left sv -> ExceptT . pure . Left $ GitError.UnrecognizedSchemaVersion repo remotePath sv
|
||||
Left sv -> ExceptT . pure . Left . C.GitSqliteCodebaseError $ GitError.UnrecognizedSchemaVersion repo remotePath sv
|
||||
Right (closeCodebase, codebase) -> do
|
||||
-- try to load the requested branch from it
|
||||
branch <- time "Git fetch (sbh)" $ case sbh of
|
||||
@ -1011,20 +1013,20 @@ viewRemoteBranch' (repo, sbh, path) = runExceptT do
|
||||
-- this NoRootBranch case should probably be an error too.
|
||||
Left Codebase1.NoRootBranch -> pure Branch.empty
|
||||
Left (Codebase1.CouldntLoadRootBranch h) ->
|
||||
throwError $ GitError.CouldntLoadRootBranch repo h
|
||||
throwError . C.GitCodebaseError $ GitError.CouldntLoadRootBranch repo h
|
||||
Left (Codebase1.CouldntParseRootBranch s) ->
|
||||
throwError $ GitError.CouldntParseRootBranch repo s
|
||||
throwError . C.GitSqliteCodebaseError $ GitError.GitCouldntParseRootBranchHash repo s
|
||||
Right b -> pure b
|
||||
-- load from a specific `ShortBranchHash`
|
||||
Just sbh -> do
|
||||
branchCompletions <- lift $ Codebase1.branchHashesByPrefix codebase sbh
|
||||
case toList branchCompletions of
|
||||
[] -> throwError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
[] -> throwError . C.GitCodebaseError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
[h] ->
|
||||
lift (Codebase1.getBranchForHash codebase h) >>= \case
|
||||
Just b -> pure b
|
||||
Nothing -> throwError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
_ -> throwError $ GitError.RemoteNamespaceHashAmbiguous repo sbh branchCompletions
|
||||
Nothing -> throwError . C.GitCodebaseError $ GitError.NoRemoteNamespaceWithHash repo sbh
|
||||
_ -> throwError . C.GitCodebaseError $ GitError.RemoteNamespaceHashAmbiguous repo sbh branchCompletions
|
||||
pure (closeCodebase, Branch.getAt' path branch, remotePath)
|
||||
-- else there's no initialized codebase at this repo; we pretend there's an empty one.
|
||||
-- I'm thinking we should probably return an error value instead.
|
||||
@ -1037,8 +1039,8 @@ pushGitRootBranch ::
|
||||
Connection ->
|
||||
Branch m ->
|
||||
WriteRepo ->
|
||||
m (Either GitError ())
|
||||
pushGitRootBranch srcConn branch repo = runExceptT @GitError do
|
||||
m (Either C.GitError ())
|
||||
pushGitRootBranch srcConn branch repo = runExceptT @C.GitError do
|
||||
-- pull the remote repo to the staging directory
|
||||
-- open a connection to the staging codebase
|
||||
-- create a savepoint on the staging codebase
|
||||
@ -1048,7 +1050,7 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do
|
||||
-- if it fails, rollback to the savepoint and clean up.
|
||||
|
||||
-- set up the cache dir
|
||||
remotePath <- time "Git fetch" $ pullBranch (writeToRead repo)
|
||||
remotePath <- time "Git fetch" $ withExceptT C.GitProtocolError $ pullBranch (writeToRead repo)
|
||||
destConn <- openOrCreateCodebaseConnection "push.dest" remotePath
|
||||
|
||||
flip runReaderT destConn $ Q.savepoint "push"
|
||||
@ -1073,7 +1075,7 @@ pushGitRootBranch srcConn branch repo = runExceptT @GitError do
|
||||
++ "."
|
||||
Just False -> do
|
||||
Q.rollbackRelease "push"
|
||||
throwError $ GitError.PushDestinationHasNewStuff repo
|
||||
throwError . C.GitProtocolError $ GitError.PushDestinationHasNewStuff repo
|
||||
|
||||
Just True -> do
|
||||
setRepoRoot newRootHash
|
||||
|
@ -48,8 +48,9 @@ import Unison.Hash (Hash)
|
||||
import qualified Unison.Hash as V1
|
||||
import qualified Unison.Kind as V1.Kind
|
||||
import qualified Unison.NameSegment as V1
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parser as Ann
|
||||
import qualified Unison.Parser.Ann as Ann
|
||||
import qualified Unison.Pattern as V1.Pattern
|
||||
import qualified Unison.Reference as V1
|
||||
import qualified Unison.Reference as V1.Reference
|
||||
@ -62,6 +63,7 @@ import qualified Unison.Util.Relation as Relation
|
||||
import qualified Unison.Util.Star3 as V1.Star3
|
||||
import qualified Unison.Var as V1.Var
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.WatchKind as V1.WK
|
||||
|
||||
sbh1to2 :: V1.ShortBranchHash -> V2.ShortBranchHash
|
||||
sbh1to2 (V1.ShortBranchHash b32) = V2.ShortBranchHash b32
|
||||
@ -76,16 +78,16 @@ decltype1to2 = \case
|
||||
CT.Data -> V2.Decl.Data
|
||||
CT.Effect -> V2.Decl.Effect
|
||||
|
||||
watchKind1to2 :: V1.Var.WatchKind -> V2.WatchKind
|
||||
watchKind1to2 :: V1.WK.WatchKind -> V2.WatchKind
|
||||
watchKind1to2 = \case
|
||||
V1.Var.RegularWatch -> V2.WatchKind.RegularWatch
|
||||
V1.Var.TestWatch -> V2.WatchKind.TestWatch
|
||||
V1.WK.RegularWatch -> V2.WatchKind.RegularWatch
|
||||
V1.WK.TestWatch -> V2.WatchKind.TestWatch
|
||||
other -> error $ "What kind of watchkind is " ++ other ++ "?"
|
||||
|
||||
watchKind2to1 :: V2.WatchKind -> V1.Var.WatchKind
|
||||
watchKind2to1 :: V2.WatchKind -> V1.WK.WatchKind
|
||||
watchKind2to1 = \case
|
||||
V2.WatchKind.RegularWatch -> V1.Var.RegularWatch
|
||||
V2.WatchKind.TestWatch -> V1.Var.TestWatch
|
||||
V2.WatchKind.RegularWatch -> V1.WK.RegularWatch
|
||||
V2.WatchKind.TestWatch -> V1.WK.TestWatch
|
||||
|
||||
term1to2 :: Hash -> V1.Term.Term V1.Symbol Ann -> V2.Term.Term V2.Symbol
|
||||
term1to2 h =
|
||||
@ -342,9 +344,9 @@ referent1to2 = \case
|
||||
|
||||
referentid2to1 :: Applicative m => (Hash -> m V1.Reference.Size) -> (V2.Reference -> m CT.ConstructorType) -> V2.Referent.Id -> m V1.Referent.Id
|
||||
referentid2to1 lookupSize lookupCT = \case
|
||||
V2.RefId r -> V1.Ref' <$> referenceid2to1 lookupSize r
|
||||
V2.RefId r -> V1.RefId <$> referenceid2to1 lookupSize r
|
||||
V2.ConId r i ->
|
||||
V1.Con' <$> referenceid2to1 lookupSize r
|
||||
V1.ConId <$> referenceid2to1 lookupSize r
|
||||
<*> pure (fromIntegral i)
|
||||
<*> lookupCT (V2.ReferenceDerived r)
|
||||
|
||||
|
@ -0,0 +1,10 @@
|
||||
module Unison.Codebase.SqliteCodebase.GitError where
|
||||
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRepo)
|
||||
import Unison.CodebasePath (CodebasePath)
|
||||
import U.Codebase.Sqlite.DbId (SchemaVersion)
|
||||
|
||||
data GitSqliteCodebaseError
|
||||
= GitCouldntParseRootBranchHash ReadRepo String
|
||||
| UnrecognizedSchemaVersion ReadRepo CodebasePath SchemaVersion
|
||||
deriving Show
|
@ -3,9 +3,6 @@ module Unison.Codebase.TermEdit where
|
||||
import Unison.Hashable (Hashable)
|
||||
import qualified Unison.Hashable as H
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Typechecker as Typechecker
|
||||
import Unison.Type (Type)
|
||||
import Unison.Var (Var)
|
||||
|
||||
data TermEdit = Replace Reference Typing | Deprecate
|
||||
deriving (Eq, Ord, Show)
|
||||
@ -43,9 +40,3 @@ isSame :: TermEdit -> Bool
|
||||
isSame e = case e of
|
||||
Replace _ Same -> True
|
||||
_ -> False
|
||||
|
||||
typing :: Var v => Type v loc -> Type v loc -> Typing
|
||||
typing newType oldType | Typechecker.isEqual newType oldType = Same
|
||||
| Typechecker.isSubtype newType oldType = Subtype
|
||||
| otherwise = Different
|
||||
|
||||
|
12
parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs
Normal file
12
parser-typechecker/src/Unison/Codebase/TermEdit/Typing.hs
Normal file
@ -0,0 +1,12 @@
|
||||
module Unison.Codebase.TermEdit.Typing where
|
||||
|
||||
import Unison.Codebase.TermEdit (Typing (Different, Same, Subtype))
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Typechecker as Typechecker
|
||||
import Unison.Var (Var)
|
||||
|
||||
typing :: Var v => Type v loc -> Type v loc -> Typing
|
||||
typing newType oldType
|
||||
| Typechecker.isEqual newType oldType = Same
|
||||
| Typechecker.isSubtype newType oldType = Subtype
|
||||
| otherwise = Different
|
@ -25,7 +25,7 @@ import Unison.CommandLine
|
||||
import Unison.CommandLine.InputPattern (InputPattern (aliases, patternName))
|
||||
import Unison.CommandLine.InputPatterns (validInputs)
|
||||
import Unison.CommandLine.OutputMessages (notifyUser, notifyNumbered)
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import Unison.PrettyTerminal
|
||||
import Unison.Symbol (Symbol)
|
||||
@ -41,6 +41,7 @@ import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Editor.HandleCommand as HandleCommand
|
||||
import qualified Unison.Codebase.Editor.HandleInput as HandleInput
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.Codebase.Runtime as Runtime
|
||||
import qualified Unison.CommandLine.InputPattern as IP
|
||||
import qualified Unison.Runtime.Interface as RTI
|
||||
|
101
parser-typechecker/src/Unison/Codebase/Type.hs
Normal file
101
parser-typechecker/src/Unison/Codebase/Type.hs
Normal file
@ -0,0 +1,101 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Codebase.Type (Codebase (..), CodebasePath, GitError(..), GetRootBranchError (..), SyncToDir) where
|
||||
|
||||
import Unison.Codebase.Branch (Branch)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteRepo)
|
||||
import Unison.Codebase.Patch (Patch)
|
||||
import qualified Unison.Codebase.Reflog as Reflog
|
||||
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
|
||||
import Unison.Codebase.SyncMode (SyncMode)
|
||||
import Unison.CodebasePath (CodebasePath)
|
||||
import Unison.DataDeclaration (Decl)
|
||||
import Unison.Prelude
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import Unison.Term (Term)
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.WatchKind as WK
|
||||
import Unison.Codebase.GitError (GitProtocolError, GitCodebaseError)
|
||||
import Unison.Codebase.FileCodebase.Codebase (GitFileCodebaseError)
|
||||
import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError)
|
||||
|
||||
type SyncToDir m =
|
||||
CodebasePath -> -- dest codebase
|
||||
SyncMode ->
|
||||
Branch m -> -- branch to sync to dest codebase
|
||||
m ()
|
||||
|
||||
-- | Abstract interface to a user's codebase.
|
||||
--
|
||||
-- One implementation is 'Unison.Codebase.FileCodebase' which uses the filesystem.
|
||||
data Codebase m v a = Codebase
|
||||
{ getTerm :: Reference.Id -> m (Maybe (Term v a)),
|
||||
getTypeOfTermImpl :: Reference.Id -> m (Maybe (Type v a)),
|
||||
getTypeDeclaration :: Reference.Id -> m (Maybe (Decl v a)),
|
||||
putTerm :: Reference.Id -> Term v a -> Type v a -> m (),
|
||||
putTypeDeclaration :: Reference.Id -> Decl v a -> m (),
|
||||
getRootBranch :: m (Either GetRootBranchError (Branch m)),
|
||||
putRootBranch :: Branch m -> m (),
|
||||
rootBranchUpdates :: m (IO (), IO (Set Branch.Hash)),
|
||||
getBranchForHash :: Branch.Hash -> m (Maybe (Branch m)),
|
||||
putBranch :: Branch m -> m (),
|
||||
branchExists :: Branch.Hash -> m Bool,
|
||||
getPatch :: Branch.EditHash -> m (Maybe Patch),
|
||||
putPatch :: Branch.EditHash -> Patch -> m (),
|
||||
patchExists :: Branch.EditHash -> m Bool,
|
||||
dependentsImpl :: Reference -> m (Set Reference.Id),
|
||||
-- This copies all the dependencies of `b` from the specified Codebase into this one
|
||||
syncFromDirectory :: CodebasePath -> SyncMode -> Branch m -> m (),
|
||||
-- This copies all the dependencies of `b` from this Codebase
|
||||
syncToDirectory :: CodebasePath -> SyncMode -> Branch m -> m (),
|
||||
viewRemoteBranch' :: ReadRemoteNamespace -> m (Either GitError (m (), Branch m, CodebasePath)),
|
||||
pushGitRootBranch :: Branch m -> WriteRepo -> SyncMode -> m (Either GitError ()),
|
||||
-- Watch expressions are part of the codebase, the `Reference.Id` is
|
||||
-- the hash of the source of the watch expression, and the `Term v a`
|
||||
-- is the evaluated result of the expression, decompiled to a term.
|
||||
watches :: WK.WatchKind -> m [Reference.Id],
|
||||
getWatch :: WK.WatchKind -> Reference.Id -> m (Maybe (Term v a)),
|
||||
putWatch :: WK.WatchKind -> Reference.Id -> Term v a -> m (),
|
||||
clearWatches :: m (),
|
||||
getReflog :: m [Reflog.Entry Branch.Hash],
|
||||
appendReflog :: Text -> Branch m -> Branch m -> m (),
|
||||
-- list of terms of the given type
|
||||
termsOfTypeImpl :: Reference -> m (Set Referent.Id),
|
||||
-- list of terms that mention the given type anywhere in their signature
|
||||
termsMentioningTypeImpl :: Reference -> m (Set Referent.Id),
|
||||
-- number of base58 characters needed to distinguish any two references in the codebase
|
||||
hashLength :: m Int,
|
||||
termReferencesByPrefix :: ShortHash -> m (Set Reference.Id),
|
||||
typeReferencesByPrefix :: ShortHash -> m (Set Reference.Id),
|
||||
termReferentsByPrefix :: ShortHash -> m (Set Referent.Id),
|
||||
branchHashLength :: m Int,
|
||||
branchHashesByPrefix :: ShortBranchHash -> m (Set Branch.Hash),
|
||||
-- returns `Nothing` to not implemented, fallback to in-memory
|
||||
-- also `Nothing` if no LCA
|
||||
-- The result is undefined if the two hashes are not in the codebase.
|
||||
-- Use `Codebase.lca` which wraps this in a nice API.
|
||||
lcaImpl :: Maybe (Branch.Hash -> Branch.Hash -> m (Maybe Branch.Hash)),
|
||||
-- `beforeImpl` returns `Nothing` if not implemented by the codebase
|
||||
-- `beforeImpl b1 b2` is undefined if `b2` not in the codebase
|
||||
--
|
||||
-- Use `Codebase.before` which wraps this in a nice API.
|
||||
beforeImpl :: Maybe (Branch.Hash -> Branch.Hash -> m Bool)
|
||||
}
|
||||
|
||||
data GetRootBranchError
|
||||
= NoRootBranch
|
||||
| CouldntParseRootBranch String
|
||||
| CouldntLoadRootBranch Branch.Hash
|
||||
deriving Show
|
||||
|
||||
data GitError
|
||||
= GitProtocolError GitProtocolError
|
||||
| GitCodebaseError (GitCodebaseError Branch.Hash)
|
||||
| GitFileCodebaseError GitFileCodebaseError
|
||||
| GitSqliteCodebaseError GitSqliteCodebaseError
|
||||
deriving Show
|
13
parser-typechecker/src/Unison/CodebasePath.hs
Normal file
13
parser-typechecker/src/Unison/CodebasePath.hs
Normal file
@ -0,0 +1,13 @@
|
||||
module Unison.CodebasePath
|
||||
( CodebasePath,
|
||||
getCodebaseDir,
|
||||
)
|
||||
where
|
||||
|
||||
import Control.Monad.IO.Class (MonadIO)
|
||||
import UnliftIO.Directory (getHomeDirectory)
|
||||
|
||||
type CodebasePath = FilePath
|
||||
|
||||
getCodebaseDir :: MonadIO m => Maybe CodebasePath -> m CodebasePath
|
||||
getCodebaseDir = maybe getHomeDirectory pure
|
@ -19,6 +19,8 @@ import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.DeclPrinter as DP
|
||||
import qualified Unison.NamePrinter as NP
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnv.Util as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.ShortHash as SH
|
||||
|
@ -27,8 +27,11 @@ import qualified Data.Set as Set
|
||||
import qualified Data.Text as Text
|
||||
import qualified Text.Megaparsec as P
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Merge as Branch
|
||||
import qualified Unison.Codebase.Branch.Names as Branch
|
||||
import qualified Unison.Codebase.Editor.Input as Input
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.CommandLine.InputPattern as I
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
|
@ -27,7 +27,7 @@ import Unison.PrettyTerminal
|
||||
import Unison.CommandLine.InputPattern (ArgumentType (suggestions), InputPattern (aliases, patternName))
|
||||
import Unison.CommandLine.InputPatterns (validInputs)
|
||||
import Unison.CommandLine.OutputMessages (notifyUser, notifyNumbered, shortenDirectory)
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Symbol (Symbol)
|
||||
import qualified Control.Concurrent.Async as Async
|
||||
import qualified Data.Map as Map
|
||||
|
@ -38,6 +38,7 @@ import System.Directory ( canonicalizePath
|
||||
)
|
||||
import qualified Unison.ABT as ABT
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import Unison.Codebase.Type (GitError(GitSqliteCodebaseError, GitProtocolError, GitCodebaseError))
|
||||
import Unison.Codebase.GitError
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Patch as Patch
|
||||
@ -73,8 +74,10 @@ import Unison.NamePrinter (prettyHashQualified,
|
||||
import Unison.Names2 (Names'(..), Names0)
|
||||
import qualified Unison.Names2 as Names
|
||||
import qualified Unison.Names3 as Names
|
||||
import Unison.Parser (Ann, startingLine)
|
||||
import Unison.Parser.Ann (Ann, startingLine)
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnv.Util as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.Codebase.Runtime as Runtime
|
||||
import Unison.PrintError ( prettyParseError
|
||||
, printNoteWithSource
|
||||
@ -112,6 +115,9 @@ import qualified Unison.ShortHash as SH
|
||||
import Unison.LabeledDependency as LD
|
||||
import Unison.Codebase.Editor.RemoteRepo (ReadRepo, WriteRepo)
|
||||
import U.Codebase.Sqlite.DbId (SchemaVersion(SchemaVersion))
|
||||
import Unison.Codebase.SqliteCodebase.GitError (GitSqliteCodebaseError(UnrecognizedSchemaVersion, GitCouldntParseRootBranchHash))
|
||||
import qualified Unison.Referent' as Referent
|
||||
import qualified Unison.WatchKind as WK
|
||||
|
||||
type Pretty = P.Pretty P.ColorText
|
||||
|
||||
@ -669,75 +675,78 @@ notifyUser dir o = case o of
|
||||
|
||||
TodoOutput names todo -> pure (todoOutput names todo)
|
||||
GitError input e -> pure $ case e of
|
||||
CouldntOpenCodebase repo localPath -> P.wrap $ "I couldn't open the repository at"
|
||||
<> prettyReadRepo repo <> "in the cache directory at"
|
||||
<> P.backticked' (P.string localPath) "."
|
||||
UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> P.wrap
|
||||
$ "I don't know how to interpret schema version " <> P.shown v
|
||||
<> "in the repository at" <> prettyReadRepo repo
|
||||
<> "in the cache directory at" <> P.backticked' (P.string localPath) "."
|
||||
CouldntParseRootBranch repo s -> P.wrap $ "I couldn't parse the string"
|
||||
<> P.red (P.string s) <> "into a namespace hash, when opening the repository at"
|
||||
<> P.group (prettyReadRepo repo <> ".")
|
||||
CouldntLoadSyncedBranch h -> P.wrap $ "I just finished importing the branch"
|
||||
<> P.red (P.shown h) <> "but now I can't find it."
|
||||
NoGit -> P.wrap $
|
||||
"I couldn't find git. Make sure it's installed and on your path."
|
||||
CloneException repo msg -> P.wrap $
|
||||
"I couldn't clone the repository at" <> prettyReadRepo repo <> ";"
|
||||
<> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg
|
||||
PushNoOp repo -> P.wrap $
|
||||
"The repository at" <> prettyWriteRepo repo <> "is already up-to-date."
|
||||
PushException repo msg -> P.wrap $
|
||||
"I couldn't push to the repository at" <> prettyWriteRepo repo <> ";"
|
||||
<> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg
|
||||
UnrecognizableCacheDir uri localPath -> P.wrap $ "A cache directory for"
|
||||
<> P.backticked (P.text uri) <> "already exists at"
|
||||
<> P.backticked' (P.string localPath) "," <> "but it doesn't seem to"
|
||||
<> "be a git repository, so I'm not sure what to do next. Delete it?"
|
||||
UnrecognizableCheckoutDir uri localPath -> P.wrap $ "I tried to clone"
|
||||
<> P.backticked (P.text uri) <> "into a cache directory at"
|
||||
<> P.backticked' (P.string localPath) "," <> "but I can't recognize the"
|
||||
<> "result as a git repository, so I'm not sure what to do next."
|
||||
PushDestinationHasNewStuff repo ->
|
||||
P.callout "⏸" . P.lines $ [
|
||||
P.wrap $ "The repository at" <> prettyWriteRepo repo
|
||||
<> "has some changes I don't know about.",
|
||||
"",
|
||||
P.wrap $ "If you want to " <> push <> "you can do:", "",
|
||||
P.indentN 2 pull, "",
|
||||
P.wrap $
|
||||
"to merge these changes locally," <>
|
||||
"then try your" <> push <> "again."
|
||||
]
|
||||
where
|
||||
push = P.group . P.backticked . P.string . IP1.patternName $ IP.patternFromInput input
|
||||
pull = P.group . P.backticked $ IP.inputStringFromInput input
|
||||
CouldntLoadRootBranch repo hash -> P.wrap
|
||||
$ "I couldn't load the designated root hash"
|
||||
<> P.group ("(" <> fromString (Hash.showBase32Hex hash) <> ")")
|
||||
<> "from the repository at" <> prettyReadRepo repo
|
||||
NoRemoteNamespaceWithHash repo sbh -> P.wrap
|
||||
$ "The repository at" <> prettyReadRepo repo
|
||||
<> "doesn't contain a namespace with the hash prefix"
|
||||
<> (P.blue . P.text . SBH.toText) sbh
|
||||
RemoteNamespaceHashAmbiguous repo sbh hashes -> P.lines [
|
||||
P.wrap $ "The namespace hash" <> prettySBH sbh
|
||||
<> "at" <> prettyReadRepo repo
|
||||
<> "is ambiguous."
|
||||
<> "Did you mean one of these hashes?",
|
||||
"",
|
||||
P.indentN 2 $ P.lines
|
||||
(prettySBH . SBH.fromHash ((Text.length . SBH.toText) sbh * 2)
|
||||
<$> Set.toList hashes),
|
||||
"",
|
||||
P.wrap "Try again with a few more hash characters to disambiguate."
|
||||
]
|
||||
SomeOtherError msg -> P.callout "‼" . P.lines $ [
|
||||
P.wrap "I ran into an error:", "",
|
||||
P.indentN 2 (P.string msg), "",
|
||||
P.wrap $ "Check the logging messages above for more info."
|
||||
]
|
||||
-- CouldntOpenCodebase repo localPath -> P.wrap $ "I couldn't open the repository at"
|
||||
-- <> prettyReadRepo repo <> "in the cache directory at"
|
||||
-- <> P.backticked' (P.string localPath) "."
|
||||
GitSqliteCodebaseError e -> case e of
|
||||
UnrecognizedSchemaVersion repo localPath (SchemaVersion v) -> P.wrap
|
||||
$ "I don't know how to interpret schema version " <> P.shown v
|
||||
<> "in the repository at" <> prettyReadRepo repo
|
||||
<> "in the cache directory at" <> P.backticked' (P.string localPath) "."
|
||||
GitCouldntParseRootBranchHash repo s -> P.wrap $ "I couldn't parse the string"
|
||||
<> P.red (P.string s) <> "into a namespace hash, when opening the repository at"
|
||||
<> P.group (prettyReadRepo repo <> ".")
|
||||
-- CouldntLoadSyncedBranch h -> P.wrap $ "I just finished importing the branch"
|
||||
-- <> P.red (P.shown h) <> "but now I can't find it."
|
||||
GitProtocolError e -> case e of
|
||||
NoGit -> P.wrap $
|
||||
"I couldn't find git. Make sure it's installed and on your path."
|
||||
CloneException repo msg -> P.wrap $
|
||||
"I couldn't clone the repository at" <> prettyReadRepo repo <> ";"
|
||||
<> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg
|
||||
PushNoOp repo -> P.wrap $
|
||||
"The repository at" <> prettyWriteRepo repo <> "is already up-to-date."
|
||||
PushException repo msg -> P.wrap $
|
||||
"I couldn't push to the repository at" <> prettyWriteRepo repo <> ";"
|
||||
<> "the error was:" <> (P.indentNAfterNewline 2 . P.group . P.string) msg
|
||||
UnrecognizableCacheDir uri localPath -> P.wrap $ "A cache directory for"
|
||||
<> P.backticked (P.text $ RemoteRepo.printReadRepo uri) <> "already exists at"
|
||||
<> P.backticked' (P.string localPath) "," <> "but it doesn't seem to"
|
||||
<> "be a git repository, so I'm not sure what to do next. Delete it?"
|
||||
UnrecognizableCheckoutDir uri localPath -> P.wrap $ "I tried to clone"
|
||||
<> P.backticked (P.text $ RemoteRepo.printReadRepo uri) <> "into a cache directory at"
|
||||
<> P.backticked' (P.string localPath) "," <> "but I can't recognize the"
|
||||
<> "result as a git repository, so I'm not sure what to do next."
|
||||
PushDestinationHasNewStuff repo ->
|
||||
P.callout "⏸" . P.lines $ [
|
||||
P.wrap $ "The repository at" <> prettyWriteRepo repo
|
||||
<> "has some changes I don't know about.",
|
||||
"",
|
||||
P.wrap $ "If you want to " <> push <> "you can do:", "",
|
||||
P.indentN 2 pull, "",
|
||||
P.wrap $
|
||||
"to merge these changes locally," <>
|
||||
"then try your" <> push <> "again."
|
||||
]
|
||||
where
|
||||
push = P.group . P.backticked . P.string . IP1.patternName $ IP.patternFromInput input
|
||||
pull = P.group . P.backticked $ IP.inputStringFromInput input
|
||||
GitCodebaseError e -> case e of
|
||||
CouldntLoadRootBranch repo hash -> P.wrap
|
||||
$ "I couldn't load the designated root hash"
|
||||
<> P.group ("(" <> fromString (Hash.showBase32Hex hash) <> ")")
|
||||
<> "from the repository at" <> prettyReadRepo repo
|
||||
NoRemoteNamespaceWithHash repo sbh -> P.wrap
|
||||
$ "The repository at" <> prettyReadRepo repo
|
||||
<> "doesn't contain a namespace with the hash prefix"
|
||||
<> (P.blue . P.text . SBH.toText) sbh
|
||||
RemoteNamespaceHashAmbiguous repo sbh hashes -> P.lines [
|
||||
P.wrap $ "The namespace hash" <> prettySBH sbh
|
||||
<> "at" <> prettyReadRepo repo
|
||||
<> "is ambiguous."
|
||||
<> "Did you mean one of these hashes?",
|
||||
"",
|
||||
P.indentN 2 $ P.lines
|
||||
(prettySBH . SBH.fromHash ((Text.length . SBH.toText) sbh * 2)
|
||||
<$> Set.toList hashes),
|
||||
"",
|
||||
P.wrap "Try again with a few more hash characters to disambiguate."
|
||||
]
|
||||
-- SomeOtherError msg -> P.callout "‼" . P.lines $ [
|
||||
-- P.wrap "I ran into an error:", "",
|
||||
-- P.indentN 2 (P.string msg), "",
|
||||
-- P.wrap $ "Check the logging messages above for more info."
|
||||
-- ]
|
||||
ListEdits patch ppe -> do
|
||||
let
|
||||
types = Patch._typeEdits patch
|
||||
@ -1880,7 +1889,7 @@ watchPrinter
|
||||
=> Text
|
||||
-> PPE.PrettyPrintEnv
|
||||
-> Ann
|
||||
-> UF.WatchKind
|
||||
-> WK.WatchKind
|
||||
-> Term v ()
|
||||
-> Runtime.IsCacheHit
|
||||
-> Pretty
|
||||
@ -1911,7 +1920,7 @@ watchPrinter src ppe ann kind term isHit =
|
||||
P.lines
|
||||
[ fromString (show lineNum) <> " | " <> P.text line
|
||||
, case (kind, term) of
|
||||
(UF.TestWatch, Term.List' tests) -> foldMap renderTest tests
|
||||
(WK.TestWatch, Term.List' tests) -> foldMap renderTest tests
|
||||
_ -> P.lines
|
||||
[ fromString (replicate lineNumWidth ' ')
|
||||
<> fromString extra
|
||||
|
@ -23,7 +23,6 @@ import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Reference ( Reference(DerivedId) )
|
||||
import qualified Unison.Util.SyntaxText as S
|
||||
import Unison.Util.SyntaxText ( SyntaxText )
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.Type as Type
|
||||
import qualified Unison.TypePrinter as TypePrinter
|
||||
@ -32,6 +31,8 @@ import qualified Unison.Util.Pretty as P
|
||||
import Unison.Var ( Var )
|
||||
import qualified Unison.Var as Var
|
||||
|
||||
type SyntaxText = S.SyntaxText' Reference
|
||||
|
||||
prettyDecl
|
||||
:: Var v
|
||||
=> PrettyPrintEnv
|
||||
|
@ -15,18 +15,23 @@ import Unison.DataDeclaration (DataDeclaration, EffectDeclaration)
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.Lexer as L
|
||||
import Unison.Parser
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Term (Term)
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.TermParser as TermParser
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
import qualified Unison.TypeParser as TypeParser
|
||||
import Unison.UnisonFile (UnisonFile(..), environmentFor)
|
||||
import Unison.UnisonFile (UnisonFile(..))
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Env as UF
|
||||
import Unison.UnisonFile.Names (environmentFor)
|
||||
import qualified Unison.Util.List as List
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.WatchKind as UF
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
import qualified Unison.Name as Name
|
||||
|
||||
resolutionFailures :: Ord v => [Names.ResolutionFailure v Ann] -> P v x
|
||||
|
@ -21,7 +21,7 @@ import qualified Unison.ABT as ABT
|
||||
import qualified Unison.Blank as Blank
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names3 as Names
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parsers as Parsers
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Reference (Reference)
|
||||
@ -33,6 +33,8 @@ import qualified Unison.Typechecker as Typechecker
|
||||
import qualified Unison.Typechecker.TypeLookup as TL
|
||||
import qualified Unison.Typechecker.Context as Context
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Names as UF
|
||||
import qualified Unison.UnisonFile.Type as UF
|
||||
import qualified Unison.Util.List as List
|
||||
import qualified Unison.Util.Relation as Rel
|
||||
import Unison.Var (Var)
|
||||
|
@ -36,14 +36,9 @@ import qualified Text.Megaparsec.Error as EP
|
||||
import qualified Text.Megaparsec.Char as CP
|
||||
import Text.Megaparsec.Char (char)
|
||||
import qualified Text.Megaparsec.Char.Lexer as LP
|
||||
import Unison.Lexer.Pos (Pos (Pos), Column, Line, column, line)
|
||||
import qualified Unison.Util.Bytes as Bytes
|
||||
|
||||
type Line = Int
|
||||
type Column = Int
|
||||
|
||||
data Pos = Pos {-# Unpack #-} !Line {-# Unpack #-} !Column deriving (Eq,Ord)
|
||||
instance Show Pos where show (Pos line col) = "line " <> show line <> ", column " <> show col
|
||||
|
||||
type BlockName = String
|
||||
type Layout = [(BlockName,Column)]
|
||||
|
||||
@ -908,12 +903,6 @@ notLayout t = case payload t of
|
||||
Open _ -> False
|
||||
_ -> True
|
||||
|
||||
line :: Pos -> Line
|
||||
line (Pos line _) = line
|
||||
|
||||
column :: Pos -> Column
|
||||
column (Pos _ column) = column
|
||||
|
||||
-- `True` if the tokens are adjacent, with no space separating the two
|
||||
touches :: Token a -> Token b -> Bool
|
||||
touches (end -> t) (start -> t2) =
|
||||
@ -1178,12 +1167,3 @@ instance ShowToken (Token Lexeme) where
|
||||
instance Applicative Token where
|
||||
pure a = Token a (Pos 0 0) (Pos 0 0)
|
||||
Token f start _ <*> Token a _ end = Token (f a) start end
|
||||
|
||||
instance Semigroup Pos where (<>) = mappend
|
||||
|
||||
instance Monoid Pos where
|
||||
mempty = Pos 0 0
|
||||
Pos line col `mappend` Pos line2 col2 =
|
||||
if line2 == 0 then Pos line (col + col2)
|
||||
else Pos (line + line2) col2
|
||||
|
||||
|
29
parser-typechecker/src/Unison/Lexer/Pos.hs
Normal file
29
parser-typechecker/src/Unison/Lexer/Pos.hs
Normal file
@ -0,0 +1,29 @@
|
||||
{-# LANGUAGE BangPatterns #-}
|
||||
{-# LANGUAGE DeriveFoldable #-}
|
||||
{-# LANGUAGE DeriveTraversable #-}
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Lexer.Pos (Pos (..), Line, Column, line, column) where
|
||||
|
||||
type Line = Int
|
||||
type Column = Int
|
||||
|
||||
data Pos = Pos {-# UNPACK #-} !Line {-# UNPACK #-} !Column deriving (Eq, Ord)
|
||||
|
||||
line :: Pos -> Line
|
||||
line (Pos line _) = line
|
||||
|
||||
column :: Pos -> Column
|
||||
column (Pos _ column) = column
|
||||
|
||||
instance Show Pos where show (Pos line col) = "line " <> show line <> ", column " <> show col
|
||||
|
||||
instance Semigroup Pos where (<>) = mappend
|
||||
|
||||
instance Monoid Pos where
|
||||
mempty = Pos 0 0
|
||||
Pos line col `mappend` Pos line2 col2 =
|
||||
if line2 == 0
|
||||
then Pos line (col + col2)
|
||||
else Pos (line + line2) col2
|
@ -12,11 +12,12 @@ import Unison.Reference (Reference)
|
||||
import Unison.Referent (Referent)
|
||||
import Unison.ShortHash (ShortHash)
|
||||
import qualified Unison.ShortHash as SH
|
||||
import Unison.Util.SyntaxText (SyntaxText)
|
||||
import qualified Unison.Util.SyntaxText as S
|
||||
import Unison.Util.Pretty (Pretty)
|
||||
import qualified Unison.Util.Pretty as PP
|
||||
|
||||
type SyntaxText = S.SyntaxText' Reference
|
||||
|
||||
prettyName :: IsString s => Name -> Pretty s
|
||||
prettyName = PP.text . Name.toText
|
||||
|
||||
|
@ -29,15 +29,17 @@ import qualified Unison.Pattern as Pattern
|
||||
import Unison.Term (MatchCase (..))
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Error as UF
|
||||
import Unison.Util.Bytes (Bytes)
|
||||
import Unison.Name as Name
|
||||
import Unison.Names3 (Names)
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
import Control.Monad.Reader.Class (asks)
|
||||
import qualified Unison.Hashable as Hashable
|
||||
import Unison.Referent (Referent)
|
||||
import Unison.Reference (Reference)
|
||||
import Unison.Parser.Ann (Ann(..))
|
||||
|
||||
debug :: Bool
|
||||
debug = False
|
||||
@ -107,28 +109,6 @@ data Error v
|
||||
| PatternArityMismatch Int Int Ann -- PatternArityMismatch expectedArity actualArity location
|
||||
deriving (Show, Eq, Ord)
|
||||
|
||||
data Ann
|
||||
= Intrinsic -- { sig :: String, start :: L.Pos, end :: L.Pos }
|
||||
| External
|
||||
| Ann { start :: L.Pos, end :: L.Pos }
|
||||
deriving (Eq, Ord, Show)
|
||||
|
||||
startingLine :: Ann -> Maybe L.Line
|
||||
startingLine (Ann (L.line -> line) _) = Just line
|
||||
startingLine _ = Nothing
|
||||
|
||||
instance Monoid Ann where
|
||||
mempty = External
|
||||
mappend = (<>)
|
||||
|
||||
instance Semigroup Ann where
|
||||
Ann s1 _ <> Ann _ e2 = Ann s1 e2
|
||||
-- If we have a concrete location from a file, use it
|
||||
External <> a = a
|
||||
a <> External = a
|
||||
Intrinsic <> a = a
|
||||
a <> Intrinsic = a
|
||||
|
||||
tokenToPair :: L.Token a -> (Ann, a)
|
||||
tokenToPair t = (ann t, L.payload t)
|
||||
|
||||
|
29
parser-typechecker/src/Unison/Parser/Ann.hs
Normal file
29
parser-typechecker/src/Unison/Parser/Ann.hs
Normal file
@ -0,0 +1,29 @@
|
||||
{-# LANGUAGE OverloadedStrings #-}
|
||||
{-# LANGUAGE TypeFamilies #-}
|
||||
{-# LANGUAGE ViewPatterns #-}
|
||||
|
||||
module Unison.Parser.Ann where
|
||||
|
||||
import qualified Unison.Lexer.Pos as L
|
||||
|
||||
data Ann
|
||||
= Intrinsic -- { sig :: String, start :: L.Pos, end :: L.Pos }
|
||||
| External
|
||||
| Ann {start :: L.Pos, end :: L.Pos}
|
||||
deriving (Eq, Ord, Show)
|
||||
|
||||
startingLine :: Ann -> Maybe L.Line
|
||||
startingLine (Ann (L.line -> line) _) = Just line
|
||||
startingLine _ = Nothing
|
||||
|
||||
instance Monoid Ann where
|
||||
mempty = External
|
||||
mappend = (<>)
|
||||
|
||||
instance Semigroup Ann where
|
||||
Ann s1 _ <> Ann _ e2 = Ann s1 e2
|
||||
-- If we have a concrete location from a file, use it
|
||||
External <> a = a
|
||||
a <> External = a
|
||||
Intrinsic <> a = a
|
||||
a <> Intrinsic = a
|
@ -8,7 +8,7 @@ import Prelude hiding ( readFile )
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Builtin as Builtin
|
||||
import qualified Unison.FileParser as FileParser
|
||||
import Unison.Parser ( Ann )
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Parser as Parser
|
||||
import Unison.PrintError ( prettyParseError
|
||||
, defaultWidth )
|
||||
|
@ -1,54 +0,0 @@
|
||||
-- |
|
||||
-- Provides a typeclass for a general concept of a path into
|
||||
-- a treelike structure. We have a root or empty path, paths
|
||||
-- may be concatenated, and a pair of paths may be factored into
|
||||
-- paths relative to their lowest common ancestor in the tree.
|
||||
|
||||
module Unison.Path where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
-- | Satisfies:
|
||||
-- * `extend root p == p` and `extend p root == p`
|
||||
-- * `extend` is associative, `extend (extend p1 p2) p3 == extend p1 (extend p2 p3)`
|
||||
-- * `lca root p == root` and `lca p root == root`
|
||||
-- * `case factor p p2 of (r,p',p2') -> extend r p' == p && extend r p2' == p2`
|
||||
class Path p where
|
||||
-- | The root or empty path
|
||||
root :: p
|
||||
-- | Concatenate two paths
|
||||
extend :: p -> p -> p
|
||||
-- | Extract the lowest common ancestor and the path from the LCA to each argument
|
||||
factor :: p -> p -> (p,(p,p))
|
||||
-- | Satisfies `factor (parent p) p == (parent p, (root, tl)` and
|
||||
-- `extend (parent p) tl == p`
|
||||
parent :: p -> p
|
||||
|
||||
-- | Compute the lowest common ancestor of two paths
|
||||
lca :: Path p => p -> p -> p
|
||||
lca p p2 = fst (factor p p2)
|
||||
|
||||
-- | `isSubpath p1 p2` is true if `p2 == extend p1 x` for some `x`
|
||||
isSubpath :: (Eq p, Path p) => p -> p -> Bool
|
||||
isSubpath p1 p2 = lca p1 p2 == p1
|
||||
|
||||
instance Eq a => Path (Maybe a) where
|
||||
root = Nothing
|
||||
extend = (<|>)
|
||||
parent _ = Nothing
|
||||
factor p1 p2 | p1 == p2 = (p1, (Nothing, Nothing))
|
||||
factor p1 p2 = (Nothing, (p1,p2))
|
||||
|
||||
instance Eq a => Path [a] where
|
||||
root = []
|
||||
extend = (++)
|
||||
parent p | null p = []
|
||||
parent p = init p
|
||||
factor p1 p2 = (take shared p1, (drop shared p1, drop shared p2))
|
||||
where shared = length (takeWhile id $ zipWith (==) p1 p2)
|
||||
|
||||
instance Path () where
|
||||
root = ()
|
||||
parent _ = ()
|
||||
extend _ _ = ()
|
||||
factor u _ = (u,(u,u))
|
@ -1,24 +1,17 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnv where
|
||||
module Unison.PrettyPrintEnv (PrettyPrintEnv(..), patterns, patternName, termName, typeName) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.HashQualified ( HashQualified )
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Names3 ( Names )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
import Unison.Util.List (safeHead)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import qualified Data.Set as Set
|
||||
|
||||
data PrettyPrintEnv = PrettyPrintEnv {
|
||||
-- names for terms, constructors, and requests
|
||||
@ -33,49 +26,6 @@ patterns ppe r cid = terms ppe (Referent.Con r cid CT.Data)
|
||||
instance Show PrettyPrintEnv where
|
||||
show _ = "PrettyPrintEnv"
|
||||
|
||||
fromNames :: Int -> Names -> PrettyPrintEnv
|
||||
fromNames len names = PrettyPrintEnv terms' types' where
|
||||
terms' r = shortestName . Set.map HQ'.toHQ $ Names.termName len r names
|
||||
types' r = shortestName . Set.map HQ'.toHQ $ Names.typeName len r names
|
||||
shortestName ns = safeHead $ HQ.sortByLength (toList ns)
|
||||
|
||||
fromSuffixNames :: Int -> Names -> PrettyPrintEnv
|
||||
fromSuffixNames len names = fromNames len (Names.suffixify names)
|
||||
|
||||
fromNamesDecl :: Int -> Names -> PrettyPrintEnvDecl
|
||||
fromNamesDecl len names =
|
||||
PrettyPrintEnvDecl (fromNames len names) (fromSuffixNames len names)
|
||||
|
||||
-- A pair of PrettyPrintEnvs:
|
||||
-- - suffixifiedPPE uses the shortest unique suffix
|
||||
-- - unsuffixifiedPPE uses the shortest full name
|
||||
--
|
||||
-- Generally, we want declarations LHS (the `x` in `x = 23`) to use the
|
||||
-- unsuffixified names, so the LHS is an accurate description of where in the
|
||||
-- namespace the definition lives. For everywhere else, we can use the
|
||||
-- suffixified version.
|
||||
data PrettyPrintEnvDecl = PrettyPrintEnvDecl {
|
||||
unsuffixifiedPPE :: PrettyPrintEnv,
|
||||
suffixifiedPPE :: PrettyPrintEnv
|
||||
} deriving Show
|
||||
|
||||
-- declarationPPE uses the full name for references that are
|
||||
-- part the same cycle as the input reference, used to ensures
|
||||
-- recursive definitions are printed properly, for instance:
|
||||
--
|
||||
-- foo.bar x = foo.bar x
|
||||
-- and not
|
||||
-- foo.bar x = bar x
|
||||
declarationPPE :: PrettyPrintEnvDecl -> Reference -> PrettyPrintEnv
|
||||
declarationPPE ppe rd = PrettyPrintEnv tm ty where
|
||||
comp = Reference.members (Reference.componentFor rd)
|
||||
tm r0@(Referent.Ref r) = if Set.member r comp
|
||||
then terms (unsuffixifiedPPE ppe) r0
|
||||
else terms (suffixifiedPPE ppe) r0
|
||||
tm r = terms (suffixifiedPPE ppe) r
|
||||
ty r = if Set.member r comp then types (unsuffixifiedPPE ppe) r
|
||||
else types (suffixifiedPPE ppe) r
|
||||
|
||||
-- Left-biased union of environments
|
||||
unionLeft :: PrettyPrintEnv -> PrettyPrintEnv -> PrettyPrintEnv
|
||||
unionLeft e1 e2 = PrettyPrintEnv
|
||||
@ -117,25 +67,3 @@ instance Monoid PrettyPrintEnv where
|
||||
mappend = unionLeft
|
||||
instance Semigroup PrettyPrintEnv where
|
||||
(<>) = mappend
|
||||
|
||||
-- Type aliases relating to Fully-Qualified Names, e.g. 'Acme.API.foo'
|
||||
-- Used primarily by the FQN elision code - see TermPrinter.PrintAnnotation.
|
||||
|
||||
-- Note that a Suffix can include dots.
|
||||
type Suffix = Text
|
||||
-- Each member of a Prefix list is dot-free.
|
||||
type Prefix = [Text]
|
||||
-- Keys are FQNs, values are shorter names which are equivalent, thanks to use
|
||||
-- statements that are in scope.
|
||||
type Imports = Map Name Suffix
|
||||
|
||||
-- Give the shortened version of an FQN, if there's been a `use` statement for that FQN.
|
||||
elideFQN :: Imports -> HQ.HashQualified Name -> HQ.HashQualified Name
|
||||
elideFQN imports hq =
|
||||
let hash = HQ.toHash hq
|
||||
name' = do name <- HQ.toName hq
|
||||
let hit = fmap Name.unsafeFromText (Map.lookup name imports)
|
||||
-- Cut out the "const id $" to get tracing of FQN elision attempts.
|
||||
let t = const id $ trace ("hit: " ++ show hit ++ " finding: " ++ show hq ++ " in imports: " ++ show imports)
|
||||
t (pure $ fromMaybe name hit)
|
||||
in HQ.fromNameHash name' hash
|
||||
|
32
parser-typechecker/src/Unison/PrettyPrintEnv/FQN.hs
Normal file
32
parser-typechecker/src/Unison/PrettyPrintEnv/FQN.hs
Normal file
@ -0,0 +1,32 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnv.FQN (Imports, Prefix, Suffix, elideFQN) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import qualified Data.Map as Map
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import Unison.Name (Name)
|
||||
import qualified Unison.Name as Name
|
||||
|
||||
-- Type aliases relating to Fully-Qualified Names, e.g. 'Acme.API.foo'
|
||||
-- Used primarily by the FQN elision code - see TermPrinter.PrintAnnotation.
|
||||
|
||||
-- Note that a Suffix can include dots.
|
||||
type Suffix = Text
|
||||
-- Each member of a Prefix list is dot-free.
|
||||
type Prefix = [Text]
|
||||
-- Keys are FQNs, values are shorter names which are equivalent, thanks to use
|
||||
-- statements that are in scope.
|
||||
type Imports = Map Name Suffix
|
||||
|
||||
-- Give the shortened version of an FQN, if there's been a `use` statement for that FQN.
|
||||
elideFQN :: Imports -> HQ.HashQualified Name -> HQ.HashQualified Name
|
||||
elideFQN imports hq =
|
||||
let hash = HQ.toHash hq
|
||||
name' = do name <- HQ.toName hq
|
||||
let hit = fmap Name.unsafeFromText (Map.lookup name imports)
|
||||
-- Cut out the "const id $" to get tracing of FQN elision attempts.
|
||||
let t = const id $ trace ("hit: " ++ show hit ++ " finding: " ++ show hq ++ " in imports: " ++ show imports)
|
||||
t (pure $ fromMaybe name hit)
|
||||
in HQ.fromNameHash name' hash
|
144
parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs
Normal file
144
parser-typechecker/src/Unison/PrettyPrintEnv/Names.hs
Normal file
@ -0,0 +1,144 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnv.Names (fromNames, fromSuffixNames) where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.HashQualified ( HashQualified )
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Names3 ( Names )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
import Unison.Util.List (safeHead)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.ConstructorType as CT
|
||||
import qualified Data.Set as Set
|
||||
import Unison.PrettyPrintEnv (PrettyPrintEnv (PrettyPrintEnv))
|
||||
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (PrettyPrintEnvDecl))
|
||||
|
||||
-- data PrettyPrintEnv = PrettyPrintEnv {
|
||||
-- -- names for terms, constructors, and requests
|
||||
-- terms :: Referent -> Maybe (HashQualified Name),
|
||||
-- -- names for types
|
||||
-- types :: Reference -> Maybe (HashQualified Name) }
|
||||
|
||||
-- patterns :: PrettyPrintEnv -> Reference -> Int -> Maybe (HashQualified Name)
|
||||
-- patterns ppe r cid = terms ppe (Referent.Con r cid CT.Data)
|
||||
-- <|>terms ppe (Referent.Con r cid CT.Effect)
|
||||
|
||||
-- instance Show PrettyPrintEnv where
|
||||
-- show _ = "PrettyPrintEnv"
|
||||
|
||||
fromNames :: Int -> Names -> PrettyPrintEnv
|
||||
fromNames len names = PrettyPrintEnv terms' types' where
|
||||
terms' r = shortestName . Set.map Name.convert $ Names.termName len r names
|
||||
types' r = shortestName . Set.map Name.convert $ Names.typeName len r names
|
||||
shortestName ns = safeHead $ HQ.sortByLength (toList ns)
|
||||
|
||||
fromSuffixNames :: Int -> Names -> PrettyPrintEnv
|
||||
fromSuffixNames len names = PrettyPrintEnv terms' types' where
|
||||
terms' r = safeHead $ Names.suffixedTermName len r names
|
||||
types' r = safeHead $ Names.suffixedTypeName len r names
|
||||
|
||||
fromNamesDecl :: Int -> Names -> PrettyPrintEnvDecl
|
||||
fromNamesDecl len names =
|
||||
PrettyPrintEnvDecl (fromNames len names) (fromSuffixNames len names)
|
||||
|
||||
-- -- A pair of PrettyPrintEnvs:
|
||||
-- -- - suffixifiedPPE uses the shortest unique suffix
|
||||
-- -- - unsuffixifiedPPE uses the shortest full name
|
||||
-- --
|
||||
-- -- Generally, we want declarations LHS (the `x` in `x = 23`) to use the
|
||||
-- -- unsuffixified names, so the LHS is an accurate description of where in the
|
||||
-- -- namespace the definition lives. For everywhere else, we can use the
|
||||
-- -- suffixified version.
|
||||
-- data PrettyPrintEnvDecl = PrettyPrintEnvDecl {
|
||||
-- unsuffixifiedPPE :: PrettyPrintEnv,
|
||||
-- suffixifiedPPE :: PrettyPrintEnv
|
||||
-- } deriving Show
|
||||
|
||||
-- -- declarationPPE uses the full name for references that are
|
||||
-- -- part the same cycle as the input reference, used to ensures
|
||||
-- -- recursive definitions are printed properly, for instance:
|
||||
-- --
|
||||
-- -- foo.bar x = foo.bar x
|
||||
-- -- and not
|
||||
-- -- foo.bar x = bar x
|
||||
-- declarationPPE :: PrettyPrintEnvDecl -> Reference -> PrettyPrintEnv
|
||||
-- declarationPPE ppe rd = PrettyPrintEnv tm ty where
|
||||
-- comp = Reference.members (Reference.componentFor rd)
|
||||
-- tm r0@(Referent.Ref r) = if Set.member r comp
|
||||
-- then terms (unsuffixifiedPPE ppe) r0
|
||||
-- else terms (suffixifiedPPE ppe) r0
|
||||
-- tm r = terms (suffixifiedPPE ppe) r
|
||||
-- ty r = if Set.member r comp then types (unsuffixifiedPPE ppe) r
|
||||
-- else types (suffixifiedPPE ppe) r
|
||||
|
||||
-- -- Left-biased union of environments
|
||||
-- unionLeft :: PrettyPrintEnv -> PrettyPrintEnv -> PrettyPrintEnv
|
||||
-- unionLeft e1 e2 = PrettyPrintEnv
|
||||
-- (\r -> terms e1 r <|> terms e2 r)
|
||||
-- (\r -> types e1 r <|> types e2 r)
|
||||
|
||||
-- assignTermName
|
||||
-- :: Referent -> HashQualified Name -> PrettyPrintEnv -> PrettyPrintEnv
|
||||
-- assignTermName r name = (fromTermNames [(r, name)] `unionLeft`)
|
||||
|
||||
-- fromTypeNames :: [(Reference, HashQualified Name)] -> PrettyPrintEnv
|
||||
-- fromTypeNames types =
|
||||
-- let m = Map.fromList types in PrettyPrintEnv (const Nothing) (`Map.lookup` m)
|
||||
|
||||
-- fromTermNames :: [(Referent, HashQualified Name)] -> PrettyPrintEnv
|
||||
-- fromTermNames tms =
|
||||
-- let m = Map.fromList tms in PrettyPrintEnv (`Map.lookup` m) (const Nothing)
|
||||
|
||||
-- -- todo: these need to be a dynamic length, but we need additional info
|
||||
-- todoHashLength :: Int
|
||||
-- todoHashLength = 10
|
||||
|
||||
-- termName :: PrettyPrintEnv -> Referent -> HashQualified Name
|
||||
-- termName env r =
|
||||
-- fromMaybe (HQ.take todoHashLength $ HQ.fromReferent r) (terms env r)
|
||||
|
||||
-- typeName :: PrettyPrintEnv -> Reference -> HashQualified Name
|
||||
-- typeName env r =
|
||||
-- fromMaybe (HQ.take todoHashLength $ HQ.fromReference r) (types env r)
|
||||
|
||||
-- patternName :: PrettyPrintEnv -> Reference -> Int -> HashQualified Name
|
||||
-- patternName env r cid =
|
||||
-- case patterns env r cid of
|
||||
-- Just name -> name
|
||||
-- Nothing -> HQ.take todoHashLength $ HQ.fromPattern r cid
|
||||
|
||||
-- instance Monoid PrettyPrintEnv where
|
||||
-- mempty = PrettyPrintEnv (const Nothing) (const Nothing)
|
||||
-- mappend = unionLeft
|
||||
-- instance Semigroup PrettyPrintEnv where
|
||||
-- (<>) = mappend
|
||||
|
||||
-- -- Type aliases relating to Fully-Qualified Names, e.g. 'Acme.API.foo'
|
||||
-- -- Used primarily by the FQN elision code - see TermPrinter.PrintAnnotation.
|
||||
|
||||
-- -- Note that a Suffix can include dots.
|
||||
-- type Suffix = Text
|
||||
-- -- Each member of a Prefix list is dot-free.
|
||||
-- type Prefix = [Text]
|
||||
-- -- Keys are FQNs, values are shorter names which are equivalent, thanks to use
|
||||
-- -- statements that are in scope.
|
||||
-- type Imports = Map Name Suffix
|
||||
|
||||
-- -- Give the shortened version of an FQN, if there's been a `use` statement for that FQN.
|
||||
-- elideFQN :: Imports -> HQ.HashQualified Name -> HQ.HashQualified Name
|
||||
-- elideFQN imports hq =
|
||||
-- let hash = HQ.toHash hq
|
||||
-- name' = do name <- HQ.toName hq
|
||||
-- let hit = fmap Name.unsafeFromText (Map.lookup name imports)
|
||||
-- -- Cut out the "const id $" to get tracing of FQN elision attempts.
|
||||
-- let t = const id $ trace ("hit: " ++ show hit ++ " finding: " ++ show hq ++ " in imports: " ++ show imports)
|
||||
-- t (pure $ fromMaybe name hit)
|
||||
-- in HQ.fromNameHash name' hash
|
110
parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs
Normal file
110
parser-typechecker/src/Unison/PrettyPrintEnv/Util.hs
Normal file
@ -0,0 +1,110 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnv.Util where
|
||||
|
||||
import Unison.Prelude
|
||||
|
||||
import Unison.HashQualified ( HashQualified )
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Names3 ( Names )
|
||||
import Unison.Reference ( Reference )
|
||||
import Unison.Referent ( Referent )
|
||||
import Unison.Util.List (safeHead)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names3 as Names
|
||||
import Unison.PrettyPrintEnv
|
||||
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (PrettyPrintEnvDecl, suffixifiedPPE, unsuffixifiedPPE))
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
|
||||
-- fromNames :: Int -> Names -> PrettyPrintEnv
|
||||
-- fromNames len names = PrettyPrintEnv terms' types' where
|
||||
-- terms' r = shortestName . Set.map HQ'.toHQ $ Names.termName len r names
|
||||
-- types' r = shortestName . Set.map HQ'.toHQ $ Names.typeName len r names
|
||||
-- shortestName ns = safeHead $ HQ.sortByLength (toList ns)
|
||||
|
||||
-- fromSuffixNames :: Int -> Names -> PrettyPrintEnv
|
||||
-- fromSuffixNames len names = fromNames len (Names.suffixify names)
|
||||
|
||||
-- fromNamesDecl :: Int -> Names -> PrettyPrintEnvDecl
|
||||
-- fromNamesDecl len names =
|
||||
-- PrettyPrintEnvDecl (fromNames len names) (fromSuffixNames len names)
|
||||
|
||||
-- declarationPPE uses the full name for references that are
|
||||
-- part the same cycle as the input reference, used to ensures
|
||||
-- recursive definitions are printed properly, for instance:
|
||||
--
|
||||
-- foo.bar x = foo.bar x
|
||||
-- and not
|
||||
-- foo.bar x = bar x
|
||||
declarationPPE :: PrettyPrintEnvDecl -> Reference -> PrettyPrintEnv
|
||||
declarationPPE ppe rd = PrettyPrintEnv tm ty where
|
||||
comp = Reference.members (Reference.componentFor rd)
|
||||
tm r0@(Referent.Ref r) = if Set.member r comp
|
||||
then terms (unsuffixifiedPPE ppe) r0
|
||||
else terms (suffixifiedPPE ppe) r0
|
||||
tm r = terms (suffixifiedPPE ppe) r
|
||||
ty r = if Set.member r comp then types (unsuffixifiedPPE ppe) r
|
||||
else types (suffixifiedPPE ppe) r
|
||||
|
||||
-- Left-biased union of environments
|
||||
unionLeft :: PrettyPrintEnv -> PrettyPrintEnv -> PrettyPrintEnv
|
||||
unionLeft e1 e2 = PrettyPrintEnv
|
||||
(\r -> terms e1 r <|> terms e2 r)
|
||||
(\r -> types e1 r <|> types e2 r)
|
||||
|
||||
assignTermName
|
||||
:: Referent -> HashQualified Name -> PrettyPrintEnv -> PrettyPrintEnv
|
||||
assignTermName r name = (fromTermNames [(r, name)] `unionLeft`)
|
||||
|
||||
fromTypeNames :: [(Reference, HashQualified Name)] -> PrettyPrintEnv
|
||||
fromTypeNames types =
|
||||
let m = Map.fromList types in PrettyPrintEnv (const Nothing) (`Map.lookup` m)
|
||||
|
||||
fromTermNames :: [(Referent, HashQualified Name)] -> PrettyPrintEnv
|
||||
fromTermNames tms =
|
||||
let m = Map.fromList tms in PrettyPrintEnv (`Map.lookup` m) (const Nothing)
|
||||
|
||||
-- todo: these need to be a dynamic length, but we need additional info
|
||||
todoHashLength :: Int
|
||||
todoHashLength = 10
|
||||
|
||||
-- termName :: PrettyPrintEnv -> Referent -> HashQualified Name
|
||||
-- termName env r =
|
||||
-- fromMaybe (HQ.take todoHashLength $ HQ.fromReferent r) (terms env r)
|
||||
|
||||
-- typeName :: PrettyPrintEnv -> Reference -> HashQualified Name
|
||||
-- typeName env r =
|
||||
-- fromMaybe (HQ.take todoHashLength $ HQ.fromReference r) (types env r)
|
||||
|
||||
patternName :: PrettyPrintEnv -> Reference -> Int -> HashQualified Name
|
||||
patternName env r cid =
|
||||
case patterns env r cid of
|
||||
Just name -> name
|
||||
Nothing -> HQ.take todoHashLength $ HQ.fromPattern r cid
|
||||
|
||||
-- Type aliases relating to Fully-Qualified Names, e.g. 'Acme.API.foo'
|
||||
-- Used primarily by the FQN elision code - see TermPrinter.PrintAnnotation.
|
||||
|
||||
-- Note that a Suffix can include dots.
|
||||
type Suffix = Text
|
||||
-- Each member of a Prefix list is dot-free.
|
||||
type Prefix = [Text]
|
||||
-- Keys are FQNs, values are shorter names which are equivalent, thanks to use
|
||||
-- statements that are in scope.
|
||||
type Imports = Map Name Suffix
|
||||
|
||||
-- Give the shortened version of an FQN, if there's been a `use` statement for that FQN.
|
||||
elideFQN :: Imports -> HQ.HashQualified Name -> HQ.HashQualified Name
|
||||
elideFQN imports hq =
|
||||
let hash = HQ.toHash hq
|
||||
name' = do name <- HQ.toName hq
|
||||
let hit = fmap Name.unsafeFromText (Map.lookup name imports)
|
||||
-- Cut out the "const id $" to get tracing of FQN elision attempts.
|
||||
let t = const id $ trace ("hit: " ++ show hit ++ " finding: " ++ show hq ++ " in imports: " ++ show imports)
|
||||
t (pure $ fromMaybe name hit)
|
||||
in HQ.fromNameHash name' hash
|
18
parser-typechecker/src/Unison/PrettyPrintEnvDecl.hs
Normal file
18
parser-typechecker/src/Unison/PrettyPrintEnvDecl.hs
Normal file
@ -0,0 +1,18 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl(..)) where
|
||||
|
||||
import Unison.PrettyPrintEnv (PrettyPrintEnv)
|
||||
|
||||
-- A pair of PrettyPrintEnvs:
|
||||
-- - suffixifiedPPE uses the shortest unique suffix
|
||||
-- - unsuffixifiedPPE uses the shortest full name
|
||||
--
|
||||
-- Generally, we want declarations LHS (the `x` in `x = 23`) to use the
|
||||
-- unsuffixified names, so the LHS is an accurate description of where in the
|
||||
-- namespace the definition lives. For everywhere else, we can use the
|
||||
-- suffixified version.
|
||||
data PrettyPrintEnvDecl = PrettyPrintEnvDecl {
|
||||
unsuffixifiedPPE :: PrettyPrintEnv,
|
||||
suffixifiedPPE :: PrettyPrintEnv
|
||||
} deriving Show
|
11
parser-typechecker/src/Unison/PrettyPrintEnvDecl/Names.hs
Normal file
11
parser-typechecker/src/Unison/PrettyPrintEnvDecl/Names.hs
Normal file
@ -0,0 +1,11 @@
|
||||
{-# Language OverloadedStrings #-}
|
||||
|
||||
module Unison.PrettyPrintEnvDecl.Names where
|
||||
|
||||
import Unison.Names3 (Names)
|
||||
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (PrettyPrintEnvDecl))
|
||||
import Unison.PrettyPrintEnv.Names (fromNames, fromSuffixNames)
|
||||
|
||||
fromNamesDecl :: Int -> Names -> PrettyPrintEnvDecl
|
||||
fromNamesDecl len names =
|
||||
PrettyPrintEnvDecl (fromNames len names) (fromSuffixNames len names)
|
@ -24,9 +24,11 @@ import qualified Unison.HashQualified as HQ
|
||||
import Unison.Kind (Kind)
|
||||
import qualified Unison.Kind as Kind
|
||||
import qualified Unison.Lexer as L
|
||||
import qualified Unison.Lexer.Pos as L
|
||||
import Unison.Name ( Name )
|
||||
import Unison.Parser (Ann (..), Annotated, ann)
|
||||
import qualified Unison.Parser as Parser
|
||||
import Unison.Parser (Annotated, ann)
|
||||
import qualified Unison.Parser as Parser
|
||||
import Unison.Parser.Ann (Ann (..))
|
||||
import qualified Unison.Reference as R
|
||||
import Unison.Referent (Referent, pattern Ref)
|
||||
import Unison.Result (Note (..))
|
||||
@ -38,6 +40,7 @@ import qualified Unison.Typechecker.Context as C
|
||||
import Unison.Typechecker.TypeError
|
||||
import qualified Unison.Typechecker.TypeVar as TypeVar
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.UnisonFile.Error as UF
|
||||
import Unison.Util.AnnotatedText (AnnotatedText)
|
||||
import qualified Unison.Util.AnnotatedText as AT
|
||||
import Unison.Util.ColorText (Color)
|
||||
@ -51,6 +54,7 @@ import qualified Unison.TermPrinter as TermPrinter
|
||||
import qualified Unison.Util.Pretty as Pr
|
||||
import Unison.Util.Pretty (Pretty, ColorText)
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
import qualified Unison.Name as Name
|
||||
import Unison.HashQualified (HashQualified)
|
||||
import Unison.Type (Type)
|
||||
|
@ -19,7 +19,7 @@ import Unison.Paths ( Path )
|
||||
import Unison.Term ( Term )
|
||||
import qualified Unison.Typechecker.Context as Context
|
||||
import Control.Error.Util ( note)
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Names.ResolutionResult as Names
|
||||
|
||||
type Result notes = ResultT notes Identity
|
||||
|
||||
|
@ -1,34 +1,53 @@
|
||||
{-# language LambdaCase #-}
|
||||
{-# language BangPatterns #-}
|
||||
{-# language PatternSynonyms #-}
|
||||
|
||||
module Unison.Runtime.ANF.Serialize where
|
||||
|
||||
import Prelude hiding (putChar, getChar)
|
||||
|
||||
import Basement.Block (Block)
|
||||
|
||||
import Control.Applicative (liftA2)
|
||||
import Control.Monad
|
||||
|
||||
import Data.Bits (Bits)
|
||||
import Data.Bytes.Put
|
||||
import Data.Bytes.Get hiding (getBytes)
|
||||
import qualified Data.Bytes.Get as Ser
|
||||
import Data.Bytes.VarInt
|
||||
import Data.Bytes.Serial
|
||||
import Data.Bytes.Signed (Unsigned)
|
||||
import Data.ByteString (ByteString)
|
||||
import qualified Data.ByteString as B
|
||||
import Data.Foldable (traverse_)
|
||||
import Data.Functor ((<&>))
|
||||
import Data.Map as Map (Map, fromList, lookup)
|
||||
import Data.Map as Map (Map, fromList, lookup, toList)
|
||||
import Data.Serialize.Put (runPutLazy)
|
||||
import Data.Text (Text)
|
||||
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
|
||||
import Data.Word (Word8, Word16, Word64)
|
||||
import Data.Int (Int64)
|
||||
|
||||
import qualified Data.ByteArray as BA
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
|
||||
import GHC.Stack
|
||||
|
||||
import Unison.Codebase.Serialization.V1 as V1
|
||||
import Unison.Hash (Hash)
|
||||
import Unison.Util.EnumContainers as EC
|
||||
import Unison.Reference (Reference)
|
||||
import Unison.Reference (Reference(..), pattern Derived, Id(..))
|
||||
import Unison.Referent (Referent, pattern Ref, pattern Con)
|
||||
import Unison.ABT.Normalized (Term(..))
|
||||
import Unison.Runtime.Exception
|
||||
import Unison.Runtime.ANF as ANF hiding (Tag)
|
||||
import Unison.Var (Var(..), Type(ANFBlank))
|
||||
|
||||
import qualified Unison.Util.Bytes as Bytes
|
||||
import qualified Unison.Hash as Hash
|
||||
import qualified Unison.ConstructorType as CT
|
||||
|
||||
data TmTag
|
||||
= VarT | ForceT | AppT | HandleT
|
||||
| ShiftT | MatchT | LitT
|
||||
@ -49,6 +68,9 @@ data BLTag = TextT | ListT | TmLinkT | TyLinkT | BytesT
|
||||
data VaTag = PartialT | DataT | ContT | BLitT
|
||||
data CoTag = KET | MarkT | PushT
|
||||
|
||||
unknownTag :: String -> a
|
||||
unknownTag t = exn $ "unknown " ++ t ++ " word"
|
||||
|
||||
class Tag t where
|
||||
tag2word :: t -> Word8
|
||||
word2tag :: Word8 -> t
|
||||
@ -78,7 +100,7 @@ instance Tag TmTag where
|
||||
9 -> NameVarT
|
||||
10 -> LetDirT
|
||||
11 -> LetIndT
|
||||
_ -> exn "unknown TmTag word"
|
||||
_ -> unknownTag "TmTag"
|
||||
|
||||
instance Tag FnTag where
|
||||
tag2word = \case
|
||||
@ -96,7 +118,7 @@ instance Tag FnTag where
|
||||
3 -> FConT
|
||||
4 -> FReqT
|
||||
5 -> FPrimT
|
||||
_ -> exn "unknown FnTag word"
|
||||
_ -> unknownTag "FnTag"
|
||||
|
||||
instance Tag MtTag where
|
||||
tag2word = \case
|
||||
@ -114,7 +136,7 @@ instance Tag MtTag where
|
||||
3 -> MEmptyT
|
||||
4 -> MDataT
|
||||
5 -> MSumT
|
||||
_ -> exn "unknown MtTag word"
|
||||
_ -> unknownTag "MtTag"
|
||||
|
||||
instance Tag LtTag where
|
||||
tag2word = \case
|
||||
@ -134,7 +156,7 @@ instance Tag LtTag where
|
||||
4 -> CT
|
||||
5 -> LMT
|
||||
6 -> LYT
|
||||
_ -> exn "unknown LtTag word"
|
||||
_ -> unknownTag "LtTag"
|
||||
|
||||
instance Tag BLTag where
|
||||
tag2word = \case
|
||||
@ -150,7 +172,7 @@ instance Tag BLTag where
|
||||
2 -> TmLinkT
|
||||
3 -> TyLinkT
|
||||
4 -> BytesT
|
||||
_ -> exn "unknown BLTag word"
|
||||
t -> unknownTag "BLTag" t
|
||||
|
||||
instance Tag VaTag where
|
||||
tag2word = \case
|
||||
@ -164,7 +186,7 @@ instance Tag VaTag where
|
||||
1 -> DataT
|
||||
2 -> ContT
|
||||
3 -> BLitT
|
||||
_ -> exn "unknown VaTag word"
|
||||
t -> unknownTag "VaTag" t
|
||||
|
||||
instance Tag CoTag where
|
||||
tag2word = \case
|
||||
@ -175,7 +197,7 @@ instance Tag CoTag where
|
||||
0 -> KET
|
||||
1 -> MarkT
|
||||
2 -> PushT
|
||||
_ -> exn "unknown CoTag word"
|
||||
t -> unknownTag "CoTag" t
|
||||
|
||||
putTag :: MonadPut m => Tag t => t -> m ()
|
||||
putTag = putWord8 . tag2word
|
||||
@ -402,7 +424,7 @@ putLit (I i) = putTag IT *> putInt i
|
||||
putLit (N n) = putTag NT *> putNat n
|
||||
putLit (F f) = putTag FT *> putFloat f
|
||||
putLit (T t) = putTag TT *> putText t
|
||||
putLit (C c) = putTag CT *> V1.putChar c
|
||||
putLit (C c) = putTag CT *> putChar c
|
||||
putLit (LM r) = putTag LMT *> putReferent r
|
||||
putLit (LY r) = putTag LYT *> putReference r
|
||||
|
||||
@ -412,7 +434,7 @@ getLit = getTag >>= \case
|
||||
NT -> N <$> getNat
|
||||
FT -> F <$> getFloat
|
||||
TT -> T <$> getText
|
||||
CT -> C <$> V1.getChar
|
||||
CT -> C <$> getChar
|
||||
LMT -> LM <$> getReferent
|
||||
LYT -> LY <$> getReference
|
||||
|
||||
@ -461,7 +483,7 @@ putBranches ctx bs = case bs of
|
||||
putTag MReqT
|
||||
putMap putReference (putEnumMap putCTag (putCase ctx)) m
|
||||
putNormal (v:ctx) df
|
||||
where
|
||||
where
|
||||
MatchData r m df -> do
|
||||
putTag MDataT
|
||||
putReference r
|
||||
@ -605,3 +627,157 @@ serializeValue v = runPutS (putVersion *> putValue v)
|
||||
serializeValueLazy :: Value -> L.ByteString
|
||||
serializeValueLazy v = runPutLazy (putVersion *> putValue v)
|
||||
where putVersion = putWord32be 1
|
||||
|
||||
-- Some basics, moved over from V1 serialization
|
||||
putChar :: MonadPut m => Char -> m ()
|
||||
putChar = serialize . VarInt . fromEnum
|
||||
|
||||
getChar :: MonadGet m => m Char
|
||||
getChar = toEnum . unVarInt <$> deserialize
|
||||
|
||||
putFloat :: MonadPut m => Double -> m ()
|
||||
putFloat = serializeBE
|
||||
|
||||
getFloat :: MonadGet m => m Double
|
||||
getFloat = deserializeBE
|
||||
|
||||
putNat :: MonadPut m => Word64 -> m ()
|
||||
putNat = putWord64be
|
||||
|
||||
getNat :: MonadGet m => m Word64
|
||||
getNat = getWord64be
|
||||
|
||||
putInt :: MonadPut m => Int64 -> m ()
|
||||
putInt = serializeBE
|
||||
|
||||
getInt :: MonadGet m => m Int64
|
||||
getInt = deserializeBE
|
||||
|
||||
putLength ::
|
||||
(MonadPut m, Integral n, Integral (Unsigned n),
|
||||
Bits n, Bits (Unsigned n))
|
||||
=> n -> m ()
|
||||
putLength = serialize . VarInt
|
||||
|
||||
getLength ::
|
||||
(MonadGet m, Integral n, Integral (Unsigned n),
|
||||
Bits n, Bits (Unsigned n))
|
||||
=> m n
|
||||
getLength = unVarInt <$> deserialize
|
||||
|
||||
putFoldable
|
||||
:: (Foldable f, MonadPut m) => (a -> m ()) -> f a -> m ()
|
||||
putFoldable putA as = do
|
||||
putLength (length as)
|
||||
traverse_ putA as
|
||||
|
||||
putMap :: MonadPut m => (a -> m ()) -> (b -> m ()) -> Map a b -> m ()
|
||||
putMap putA putB m = putFoldable (putPair putA putB) (Map.toList m)
|
||||
|
||||
getList :: MonadGet m => m a -> m [a]
|
||||
getList a = getLength >>= (`replicateM` a)
|
||||
|
||||
getMap :: (MonadGet m, Ord a) => m a -> m b -> m (Map a b)
|
||||
getMap getA getB = Map.fromList <$> getList (getPair getA getB)
|
||||
|
||||
putMaybe :: MonadPut m => Maybe a -> (a -> m ()) -> m ()
|
||||
putMaybe Nothing _ = putWord8 0
|
||||
putMaybe (Just a) putA = putWord8 1 *> putA a
|
||||
|
||||
getMaybe :: MonadGet m => m a -> m (Maybe a)
|
||||
getMaybe getA = getWord8 >>= \tag -> case tag of
|
||||
0 -> pure Nothing
|
||||
1 -> Just <$> getA
|
||||
_ -> unknownTag "Maybe" tag
|
||||
|
||||
putPair :: MonadPut m => (a -> m ()) -> (b -> m ()) -> (a,b) -> m ()
|
||||
putPair putA putB (a,b) = putA a *> putB b
|
||||
|
||||
getPair :: MonadGet m => m a -> m b -> m (a,b)
|
||||
getPair = liftA2 (,)
|
||||
|
||||
getBytes :: MonadGet m => m Bytes.Bytes
|
||||
getBytes = Bytes.fromChunks <$> getList getBlock
|
||||
|
||||
putBytes :: MonadPut m => Bytes.Bytes -> m ()
|
||||
putBytes = putFoldable putBlock . Bytes.chunks
|
||||
|
||||
getBlock :: MonadGet m => m (Bytes.View (Block Word8))
|
||||
getBlock = getLength >>= fmap (Bytes.view . BA.convert) . getByteString
|
||||
|
||||
putBlock :: MonadPut m => Bytes.View (Block Word8) -> m ()
|
||||
putBlock b = putLength (BA.length b) *> putByteString (BA.convert b)
|
||||
|
||||
putHash :: MonadPut m => Hash -> m ()
|
||||
putHash h = do
|
||||
let bs = Hash.toBytes h
|
||||
putLength (B.length bs)
|
||||
putByteString bs
|
||||
|
||||
getHash :: MonadGet m => m Hash
|
||||
getHash = do
|
||||
len <- getLength
|
||||
bs <- B.copy <$> Ser.getBytes len
|
||||
pure $ Hash.fromBytes bs
|
||||
|
||||
putReferent :: MonadPut m => Referent -> m ()
|
||||
putReferent = \case
|
||||
Ref r -> do
|
||||
putWord8 0
|
||||
putReference r
|
||||
Con r i ct -> do
|
||||
putWord8 1
|
||||
putReference r
|
||||
putLength i
|
||||
putConstructorType ct
|
||||
|
||||
getReferent :: MonadGet m => m Referent
|
||||
getReferent = do
|
||||
tag <- getWord8
|
||||
case tag of
|
||||
0 -> Ref <$> getReference
|
||||
1 -> Con <$> getReference <*> getLength <*> getConstructorType
|
||||
_ -> unknownTag "getReferent" tag
|
||||
|
||||
getConstructorType :: MonadGet m => m CT.ConstructorType
|
||||
getConstructorType = getWord8 >>= \case
|
||||
0 -> pure CT.Data
|
||||
1 -> pure CT.Effect
|
||||
t -> unknownTag "getConstructorType" t
|
||||
|
||||
putConstructorType :: MonadPut m => CT.ConstructorType -> m ()
|
||||
putConstructorType = \case
|
||||
CT.Data -> putWord8 0
|
||||
CT.Effect -> putWord8 1
|
||||
|
||||
putText :: MonadPut m => Text -> m ()
|
||||
putText text = do
|
||||
let bs = encodeUtf8 text
|
||||
putLength $ B.length bs
|
||||
putByteString bs
|
||||
|
||||
getText :: MonadGet m => m Text
|
||||
getText = do
|
||||
len <- getLength
|
||||
bs <- B.copy <$> Ser.getBytes len
|
||||
pure $ decodeUtf8 bs
|
||||
|
||||
putReference :: MonadPut m => Reference -> m ()
|
||||
putReference r = case r of
|
||||
Builtin name -> do
|
||||
putWord8 0
|
||||
putText name
|
||||
Derived hash i n -> do
|
||||
putWord8 1
|
||||
putHash hash
|
||||
putLength i
|
||||
putLength n
|
||||
|
||||
getReference :: MonadGet m => m Reference
|
||||
getReference = do
|
||||
tag <- getWord8
|
||||
case tag of
|
||||
0 -> Builtin <$> getText
|
||||
1 -> DerivedId <$> (Id <$> getHash <*> getLength <*> getLength)
|
||||
_ -> unknownTag "Reference" tag
|
||||
|
||||
|
@ -14,12 +14,13 @@ import Data.List (elemIndex, genericIndex)
|
||||
import Text.RawString.QQ (r)
|
||||
import Unison.Codebase.CodeLookup (CodeLookup(..))
|
||||
import Unison.FileParsers (parseAndSynthesizeFile)
|
||||
import Unison.Parser (Ann(..))
|
||||
import Unison.Parser.Ann (Ann(..))
|
||||
import Unison.Symbol (Symbol)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Unison.Builtin as Builtin
|
||||
import qualified Unison.Codebase.CodeLookup as CL
|
||||
import qualified Unison.Codebase.CodeLookup.Util as CL
|
||||
import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.DataDeclaration.ConstructorId as DD
|
||||
import qualified Unison.Parser as Parser
|
||||
import qualified Unison.Reference as R
|
||||
import qualified Unison.Result as Result
|
||||
|
@ -43,7 +43,7 @@ import Unison.Codebase.CodeLookup (CodeLookup(..))
|
||||
import Unison.Codebase.Runtime (Runtime(..), Error)
|
||||
import Unison.Codebase.MainTerm (builtinMain, builtinTest)
|
||||
|
||||
import Unison.Parser (Ann(External))
|
||||
import Unison.Parser.Ann (Ann(External))
|
||||
import Unison.PrettyPrintEnv
|
||||
import Unison.Util.Pretty as P
|
||||
import Unison.Symbol (Symbol)
|
||||
@ -334,5 +334,4 @@ startRuntime = do
|
||||
evalInContext ppe ctx init
|
||||
, mainType = builtinMain External
|
||||
, ioTestType = builtinTest External
|
||||
, needsContainment = False
|
||||
}
|
||||
|
@ -25,6 +25,7 @@ import Unison.ABT
|
||||
(absChain', visitPure, pattern AbsN', renames)
|
||||
import Unison.Builtin.Decls (builtinDataDecls, builtinEffectDecls)
|
||||
import Unison.DataDeclaration (declFields)
|
||||
import Unison.DataDeclaration.ConstructorId (ConstructorId)
|
||||
import Unison.Pattern
|
||||
import qualified Unison.Pattern as P
|
||||
import Unison.Reference (Reference(..))
|
||||
@ -350,7 +351,7 @@ splitRowSeq
|
||||
-> [([P.Pattern v], PatternRow v)]
|
||||
splitRowSeq avoid0 v m r@(PR (break ((==v).loc) -> (pl, sp : pr)) g b)
|
||||
= case decomposeSeqP avoid m sp of
|
||||
Cover sps ->
|
||||
Cover sps ->
|
||||
[(sps, PR (pl ++ filter refutable sps ++ pr) g b)]
|
||||
Disjoint -> []
|
||||
Overlap -> [([], r)]
|
||||
@ -541,7 +542,7 @@ prepareAs p u = pure $ u <$ p
|
||||
preparePattern :: Var v => P.Pattern a -> PPM v (P.Pattern v)
|
||||
preparePattern p = prepareAs p =<< freshVar
|
||||
|
||||
buildPattern :: Bool -> Reference -> Int -> [v] -> Int -> P.Pattern ()
|
||||
buildPattern :: Bool -> Reference -> ConstructorId -> [v] -> Int -> P.Pattern ()
|
||||
buildPattern effect r t vs nfields
|
||||
| effect, [] <- vps = internalBug "too few patterns for effect bind"
|
||||
| effect = P.EffectBind () r t (init vps) (last vps)
|
||||
|
@ -31,6 +31,8 @@ import Unison.Codebase (Codebase)
|
||||
import qualified Unison.Codebase as Codebase
|
||||
import Unison.Codebase.Branch (Branch, Branch0)
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Branch.Names as Branch
|
||||
import qualified Unison.Codebase.Causal (RawHash(RawHash))
|
||||
import Unison.Codebase.Editor.DisplayObject
|
||||
import qualified Unison.Codebase.Metadata as Metadata
|
||||
import Unison.Codebase.Path (Path)
|
||||
@ -57,9 +59,11 @@ import Unison.Names3
|
||||
Names0,
|
||||
)
|
||||
import qualified Unison.Names3 as Names3
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl.Names as PPE
|
||||
import Unison.Reference (Reference)
|
||||
import qualified Unison.Reference as Reference
|
||||
import Unison.Referent (Referent)
|
||||
@ -87,6 +91,10 @@ import Unison.Var (Var)
|
||||
import qualified Unison.Server.Doc as Doc
|
||||
import qualified Unison.UnisonFile as UF
|
||||
import qualified Unison.Codebase.Editor.DisplayObject as DisplayObject
|
||||
import qualified Unison.WatchKind as WK
|
||||
import qualified Unison.PrettyPrintEnv.Util as PPE
|
||||
|
||||
type SyntaxText = UST.SyntaxText' Reference
|
||||
|
||||
data ShallowListEntry v a
|
||||
= ShallowTermEntry (TermEntry v a)
|
||||
@ -286,7 +294,7 @@ formatTypeName :: PPE.PrettyPrintEnv -> Reference -> Syntax.SyntaxText
|
||||
formatTypeName ppe =
|
||||
fmap Syntax.convertElement . formatTypeName' ppe
|
||||
|
||||
formatTypeName' :: PPE.PrettyPrintEnv -> Reference -> UST.SyntaxText
|
||||
formatTypeName' :: PPE.PrettyPrintEnv -> Reference -> SyntaxText
|
||||
formatTypeName' ppe r =
|
||||
Pretty.renderUnbroken .
|
||||
NP.styleHashQualified id $
|
||||
@ -547,7 +555,7 @@ expandShortBranchHash codebase hash = do
|
||||
_ ->
|
||||
throwError . AmbiguousBranchHash hash $ Set.map (SBH.fromHash len) hashSet
|
||||
|
||||
formatType' :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> UST.SyntaxText
|
||||
formatType' :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> SyntaxText
|
||||
formatType' ppe w =
|
||||
Pretty.render w . TypePrinter.pretty0 ppe mempty (-1)
|
||||
|
||||
@ -601,7 +609,7 @@ prettyDefinitionsBySuffixes relativeTo root renderWidth suffixifyBindings rt cod
|
||||
where
|
||||
rel = Names.terms $ currentNames parseNames
|
||||
f k _ = Set.fromList . fmap Name.toText . filter isAbsolute . toList
|
||||
$ R.lookupRan (Referent.Ref' k) rel
|
||||
$ R.lookupRan (Referent.Ref k) rel
|
||||
typeFqns :: Map Reference (Set Text)
|
||||
typeFqns = Map.mapWithKey f types
|
||||
where
|
||||
@ -641,7 +649,7 @@ prettyDefinitionsBySuffixes relativeTo root renderWidth suffixifyBindings rt cod
|
||||
let cache r = fmap Term.unannotate <$> Codebase.lookupWatchCache codebase r
|
||||
r <- fmap hush . liftIO $ Rt.evaluateTerm' codeLookup cache ppes rt tm
|
||||
lift $ case r of
|
||||
Just tmr -> Codebase.putWatch codebase UF.RegularWatch
|
||||
Just tmr -> Codebase.putWatch codebase WK.RegularWatch
|
||||
(Term.hashClosedTerm tm)
|
||||
(Term.amap (const mempty) tmr)
|
||||
Nothing -> pure ()
|
||||
@ -791,7 +799,7 @@ termsToSyntax
|
||||
-> Width
|
||||
-> PPE.PrettyPrintEnvDecl
|
||||
-> Map Reference.Reference (DisplayObject (Type v a) (Term v a))
|
||||
-> Map Reference.Reference (DisplayObject UST.SyntaxText UST.SyntaxText)
|
||||
-> Map Reference.Reference (DisplayObject SyntaxText SyntaxText)
|
||||
termsToSyntax suff width ppe0 terms =
|
||||
Map.fromList . map go . Map.toList $ Map.mapKeys
|
||||
(first (PPE.termName ppeDecl . Referent.Ref) . dupe)
|
||||
@ -816,7 +824,7 @@ typesToSyntax
|
||||
-> Width
|
||||
-> PPE.PrettyPrintEnvDecl
|
||||
-> Map Reference.Reference (DisplayObject () (DD.Decl v a))
|
||||
-> Map Reference.Reference (DisplayObject UST.SyntaxText UST.SyntaxText)
|
||||
-> Map Reference.Reference (DisplayObject SyntaxText SyntaxText)
|
||||
typesToSyntax suff width ppe0 types =
|
||||
Map.fromList $ map go . Map.toList $ Map.mapKeys
|
||||
(first (PPE.typeName ppeDecl) . dupe)
|
||||
|
@ -93,7 +93,7 @@ import qualified System.FilePath as FilePath
|
||||
import System.Random.Stateful (getStdGen, newAtomicGenM, uniformByteStringM)
|
||||
import Unison.Codebase (Codebase)
|
||||
import qualified Unison.Codebase.Runtime as Rt
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import Unison.Server.Endpoints.FuzzyFind (FuzzyFindAPI, serveFuzzyFind)
|
||||
import Unison.Server.Endpoints.GetDefinitions
|
||||
|
@ -34,6 +34,7 @@ import qualified Unison.DataDeclaration as DD
|
||||
import qualified Unison.DeclPrinter as DeclPrinter
|
||||
import qualified Unison.NamePrinter as NP
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.PrettyPrintEnvDecl as PPE
|
||||
import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import qualified Unison.Runtime.IOSource as DD
|
||||
@ -153,7 +154,7 @@ renderDoc pped terms typeOf eval types tm = eval tm >>= \case
|
||||
source :: Term v () -> m SyntaxText
|
||||
source tm = (pure . formatPretty . TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped)) tm
|
||||
|
||||
goSignatures :: [Referent] -> m [P.Pretty S.SyntaxText]
|
||||
goSignatures :: [Referent] -> m [P.Pretty (S.SyntaxText' Reference)]
|
||||
goSignatures rs = runMaybeT (traverse (MaybeT . typeOf) rs) >>= \case
|
||||
Nothing -> pure ["🆘 codebase is missing type signature for these definitions"]
|
||||
Just types -> pure . fmap P.group $
|
||||
@ -184,9 +185,9 @@ renderDoc pped terms typeOf eval types tm = eval tm >>= \case
|
||||
-- Link (Either Link.Type Doc2.Term)
|
||||
DD.Doc2SpecialFormLink e -> let
|
||||
ppe = PPE.suffixifiedPPE pped
|
||||
tm :: Referent -> P.Pretty S.SyntaxText
|
||||
tm :: Referent -> P.Pretty (S.SyntaxText' Reference)
|
||||
tm r = (NP.styleHashQualified'' (NP.fmt (S.Referent r)) . PPE.termName ppe) r
|
||||
ty :: Reference -> P.Pretty S.SyntaxText
|
||||
ty :: Reference -> P.Pretty (S.SyntaxText' Reference)
|
||||
ty r = (NP.styleHashQualified'' (NP.fmt (S.Reference r)) . PPE.typeName ppe) r
|
||||
in Link <$> case e of
|
||||
DD.EitherLeft' (Term.TypeLink' r) -> (pure . formatPretty . ty) r
|
||||
|
@ -38,10 +38,11 @@ import qualified Unison.Codebase as Codebase
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import Unison.Codebase.Editor.DisplayObject
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.Codebase.ShortBranchHash as SBH
|
||||
import qualified Unison.HashQualified' as HQ'
|
||||
import Unison.NameSegment
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import qualified Unison.Server.Backend as Backend
|
||||
import Unison.Server.Errors
|
||||
|
@ -24,12 +24,13 @@ import Servant.Docs
|
||||
import Servant.Server (Handler)
|
||||
import Unison.Codebase (Codebase)
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.Codebase.Runtime as Rt
|
||||
import Unison.Codebase.ShortBranchHash
|
||||
( ShortBranchHash,
|
||||
)
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import qualified Unison.Server.Backend as Backend
|
||||
import Unison.Server.Errors
|
||||
|
@ -33,12 +33,13 @@ import qualified Unison.Codebase as Codebase
|
||||
import qualified Unison.Codebase.Branch as Branch
|
||||
import qualified Unison.Codebase.Causal as Causal
|
||||
import qualified Unison.Codebase.Path as Path
|
||||
import qualified Unison.Codebase.Path.Parse as Path
|
||||
import qualified Unison.Codebase.ShortBranchHash as SBH
|
||||
import qualified Unison.Hash as Hash
|
||||
import qualified Unison.HashQualified as HQ
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.NameSegment as NameSegment
|
||||
import Unison.Parser (Ann)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import Unison.Prelude
|
||||
import qualified Unison.PrettyPrintEnv as PPE
|
||||
import qualified Unison.Server.Backend as Backend
|
||||
|
@ -39,6 +39,7 @@ import qualified Unison.Lexer as L
|
||||
import qualified Unison.Name as Name
|
||||
import qualified Unison.Names3 as Names
|
||||
import qualified Unison.Parser as Parser (seq, uniqueName)
|
||||
import Unison.Parser.Ann (Ann)
|
||||
import qualified Unison.Pattern as Pattern
|
||||
import qualified Unison.Term as Term
|
||||
import qualified Unison.Type as Type
|
||||
|
@ -31,7 +31,6 @@ import qualified Unison.Reference as Reference
|
||||
import qualified Unison.Referent as Referent
|
||||
import Unison.Referent ( Referent )
|
||||
import qualified Unison.Util.SyntaxText as S
|
||||
import Unison.Util.SyntaxText ( SyntaxText )
|
||||
import Unison.Term
|
||||
import Unison.Type ( Type )
|
||||
import qualified Unison.Type as Type
|
||||
@ -42,12 +41,15 @@ import qualified Unison.Util.Bytes as Bytes
|
||||
import Unison.Util.Monoid ( intercalateMap )
|
||||
import qualified Unison.Util.Pretty as PP
|
||||
import Unison.Util.Pretty ( Pretty, ColorText, Width )
|
||||
import Unison.PrettyPrintEnv ( PrettyPrintEnv, Suffix, Prefix, Imports, elideFQN )
|
||||
import qualified Unison.PrettyPrintEnv as PrettyPrintEnv
|
||||
import Unison.PrettyPrintEnv (PrettyPrintEnv)
|
||||
import qualified Unison.PrettyPrintEnv as PrettyPrintEnv
|
||||
import Unison.PrettyPrintEnv.FQN (Imports, Prefix, Suffix, elideFQN)
|
||||
import qualified Unison.Builtin.Decls as DD
|
||||
import Unison.Builtin.Decls (pattern TuplePattern, pattern TupleTerm')
|
||||
import qualified Unison.ConstructorType as CT
|
||||
|
||||
type SyntaxText = S.SyntaxText' Reference
|
||||
|
||||
pretty :: Var v => PrettyPrintEnv -> Term v a -> Pretty ColorText
|
||||
pretty env = PP.syntaxToColor . pretty0 env emptyAc . printAnnotate env
|
||||
|
||||
|
@ -7,6 +7,7 @@ import Unison.Prelude
|
||||
import qualified Text.Megaparsec as P
|
||||
import qualified Unison.Lexer as L
|
||||
import Unison.Parser
|
||||
import Unison.Parser.Ann (Ann(..))
|
||||
import Unison.Type (Type)
|
||||
import qualified Unison.Type as Type
|
||||
import Unison.Var (Var)
|
||||
|
@ -9,19 +9,21 @@ import qualified Data.Map as Map
|
||||
import Unison.HashQualified (HashQualified)
|
||||
import Unison.Name ( Name )
|
||||
import Unison.NamePrinter (styleHashQualified'')
|
||||
import Unison.PrettyPrintEnv (PrettyPrintEnv, Imports, elideFQN)
|
||||
import Unison.PrettyPrintEnv (PrettyPrintEnv)
|
||||
import qualified Unison.PrettyPrintEnv as PrettyPrintEnv
|
||||
import Unison.Reference (pattern Builtin)
|
||||
import Unison.PrettyPrintEnv.FQN (Imports, elideFQN)
|
||||
import Unison.Reference (Reference, pattern Builtin)
|
||||
import Unison.Type
|
||||
import Unison.Util.Pretty (ColorText, Pretty, Width)
|
||||
import Unison.Util.ColorText (toPlain)
|
||||
import qualified Unison.Util.SyntaxText as S
|
||||
import Unison.Util.SyntaxText (SyntaxText)
|
||||
import qualified Unison.Util.Pretty as PP
|
||||
import Unison.Var (Var)
|
||||
import qualified Unison.Var as Var
|
||||
import qualified Unison.Builtin.Decls as DD
|
||||
|
||||
type SyntaxText = S.SyntaxText' Reference
|
||||
|
||||
pretty :: forall v a . (Var v) => PrettyPrintEnv -> Type v a -> Pretty ColorText
|
||||
pretty ppe = PP.syntaxToColor . prettySyntax ppe
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user