⅄ trunk → 22-10-18-unison-syntax

This commit is contained in:
Mitchell Rosen 2022-10-28 11:24:17 +05:30
commit 56c80904ff
66 changed files with 2399 additions and 1193 deletions

View File

@ -11,6 +11,10 @@ on:
description: 'Release Version (E.g. M4 or M4a)'
required: true
type: string
share_base_path:
description: 'Path to base version that UCM should pull by default (E.g. `unison.public.base.releases.M4`)'
required: true
type: string
target:
description: 'Ref to use for this release, defaults to trunk'
required: true
@ -57,6 +61,8 @@ jobs:
name: "build_linux"
runs-on: ubuntu-20.04
env:
UNISON_BASE_PATH: "${{inputs.share_base_path}}"
steps:
- uses: actions/checkout@v2
with:
@ -102,7 +108,11 @@ jobs:
echo "$PWD/stack-"* >> $GITHUB_PATH
- name: build
run: stack --no-terminal build --flag unison-parser-typechecker:optimized
run: |
# unison-cli checks env vars for which base version to automatically pull,
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
@ -123,6 +133,8 @@ jobs:
build_macos:
name: "build_macos"
runs-on: macos-11
env:
UNISON_BASE_PATH: "${{inputs.share_base_path}}"
steps:
- uses: actions/checkout@v2
with:
@ -171,7 +183,11 @@ jobs:
run: rm -rf ~/.stack/setup-exe-cache
- name: build
run: stack --no-terminal build --flag unison-parser-typechecker:optimized
run: |
# unison-cli checks env vars for which base version to automatically pull,
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
stack --no-terminal build --flag unison-parser-typechecker:optimized
- name: fetch latest Unison Local UI and package with ucm
run: |
@ -192,6 +208,8 @@ jobs:
build_windows:
name: "build_windows"
runs-on: windows-2019
env:
UNISON_BASE_PATH: "${{inputs.share_base_path}}"
steps:
- uses: actions/checkout@v2
@ -239,6 +257,10 @@ jobs:
- name: build
run: |
# unison-cli checks env vars for which base version to automatically pull,
# so it needs to be forced to rebuild to ensure those are updated.
stack clean unison-cli
# Windows will crash on build intermittently because the filesystem
# sucks at managing concurrent file access;
# Just keep retrying on these failures.

View File

@ -4,11 +4,13 @@ module U.Codebase.Sqlite.Operations
loadRootCausalHash,
expectRootCausalHash,
expectRootCausal,
expectRootBranchHash,
loadCausalHashAtPath,
expectCausalHashAtPath,
saveBranch,
loadCausalBranchByCausalHash,
expectCausalBranchByCausalHash,
expectBranchByBranchHash,
expectNamespaceStatsByHash,
expectNamespaceStatsByHashId,
@ -65,7 +67,7 @@ module U.Codebase.Sqlite.Operations
termsMentioningType,
-- ** name lookup index
rebuildNameIndex,
updateNameIndex,
rootNamesByPath,
NamesByPath (..),
@ -196,6 +198,11 @@ expectValueHashByCausalHashId = loadValueHashById <=< Q.expectCausalValueHashId
expectRootCausalHash :: Transaction CausalHash
expectRootCausalHash = Q.expectCausalHash =<< Q.expectNamespaceRoot
expectRootBranchHash :: Transaction BranchHash
expectRootBranchHash = do
rootCausalHashId <- Q.expectNamespaceRoot
expectValueHashByCausalHashId rootCausalHashId
loadRootCausalHash :: Transaction (Maybe CausalHash)
loadRootCausalHash =
runMaybeT $
@ -716,6 +723,16 @@ loadDbBranchByCausalHashId causalHashId =
Nothing -> pure Nothing
Just branchObjectId -> Just <$> expectDbBranch branchObjectId
expectBranchByBranchHashId :: Db.BranchHashId -> Transaction (C.Branch.Branch Transaction)
expectBranchByBranchHashId bhId = do
boId <- Q.expectBranchObjectIdByBranchHashId bhId
expectBranch boId
expectBranchByBranchHash :: BranchHash -> Transaction (C.Branch.Branch Transaction)
expectBranchByBranchHash bh = do
bhId <- Q.saveBranchHash bh
expectBranchByBranchHashId bhId
-- | Expect a branch value given its causal hash id.
expectDbBranchByCausalHashId :: Db.CausalHashId -> Transaction S.DbBranch
expectDbBranchByCausalHashId causalHashId = do
@ -1038,13 +1055,19 @@ derivedDependencies cid = do
cids <- traverse s2cReferenceId sids
pure $ Set.fromList cids
-- | Given the list of term and type names from the root branch, rebuild the name lookup
-- table.
rebuildNameIndex :: [S.NamedRef (C.Referent, Maybe C.ConstructorType)] -> [S.NamedRef C.Reference] -> Transaction ()
rebuildNameIndex termNames typeNames = do
Q.resetNameLookupTables
Q.insertTermNames ((fmap (c2sTextReferent *** fmap c2sConstructorType) <$> termNames))
Q.insertTypeNames ((fmap c2sTextReference <$> typeNames))
-- | Given lists of names to add and remove, update the index accordingly.
updateNameIndex ::
-- | (add terms, remove terms)
([S.NamedRef (C.Referent, Maybe C.ConstructorType)], [S.NamedRef C.Referent]) ->
-- | (add types, remove types)
([S.NamedRef C.Reference], [S.NamedRef C.Reference]) ->
Transaction ()
updateNameIndex (newTermNames, removedTermNames) (newTypeNames, removedTypeNames) = do
Q.ensureNameLookupTables
Q.removeTermNames ((fmap c2sTextReferent <$> removedTermNames))
Q.removeTypeNames ((fmap c2sTextReference <$> removedTypeNames))
Q.insertTermNames (fmap (c2sTextReferent *** fmap c2sConstructorType) <$> newTermNames)
Q.insertTypeNames (fmap c2sTextReference <$> newTypeNames)
data NamesByPath = NamesByPath
{ termNamesInPath :: [S.NamedRef (C.Referent, Maybe C.ConstructorType)],

View File

@ -80,6 +80,7 @@ module U.Codebase.Sqlite.Queries
expectCausalByCausalHash,
loadBranchObjectIdByCausalHashId,
expectBranchObjectIdByCausalHashId,
expectBranchObjectIdByBranchHashId,
-- ** causal_parent table
saveCausalParents,
@ -130,9 +131,12 @@ module U.Codebase.Sqlite.Queries
causalHashIdByBase32Prefix,
-- * Name Lookup
resetNameLookupTables,
ensureNameLookupTables,
dropNameLookupTables,
insertTermNames,
insertTypeNames,
removeTermNames,
removeTypeNames,
rootTermNamesByPath,
rootTypeNamesByPath,
getNamespaceDefinitionCount,
@ -1038,6 +1042,16 @@ loadBranchObjectIdByCausalHashIdSql =
WHERE causal.self_hash_id = ?
|]
expectBranchObjectIdByBranchHashId :: BranchHashId -> Transaction BranchObjectId
expectBranchObjectIdByBranchHashId id = queryOneCol loadBranchObjectIdByBranchHashIdSql (Only id)
loadBranchObjectIdByBranchHashIdSql :: Sql
loadBranchObjectIdByBranchHashIdSql =
[here|
SELECT object_id FROM hash_object
WHERE hash_id = ?
|]
saveCausalParents :: CausalHashId -> [CausalHashId] -> Transaction ()
saveCausalParents child parents = executeMany sql $ (child,) <$> parents where
sql = [here|
@ -1387,7 +1401,7 @@ getDependencyIdsForDependent dependent@(C.Reference.Id oid0 _) =
FROM dependents_index
WHERE dependency_builtin IS NULL
AND dependent_object_id = ?
AND dependen_component_index = ?
AND dependent_component_index = ?
|]
isNotSelfReference :: Reference.Id -> Bool
@ -1441,14 +1455,26 @@ removeHashObjectsByHashingVersion hashVersion =
WHERE hash_version = ?
|]
-- | Drop and recreate the name lookup tables. Use this when resetting names to a new branch.
resetNameLookupTables :: Transaction ()
resetNameLookupTables = do
execute_ "DROP TABLE IF EXISTS term_name_lookup"
execute_ "DROP TABLE IF EXISTS type_name_lookup"
-- | Not used in typical operations, but if we ever end up in a situation where a bug
-- has caused the name lookup index to go out of sync, this can be used to get back to a clean
-- slate.
dropNameLookupTables :: Transaction ()
dropNameLookupTables = do
execute_
[here|
CREATE TABLE term_name_lookup (
DROP TABLE IF EXISTS term_name_lookup
|]
execute_
[here|
DROP TABLE IF EXISTS type_name_lookup
|]
-- | Ensure the name lookup tables exist.
ensureNameLookupTables :: Transaction ()
ensureNameLookupTables = do
execute_
[here|
CREATE TABLE IF NOT EXISTS term_name_lookup (
-- The name of the term: E.g. map.List.base
reversed_name TEXT NOT NULL,
-- The namespace containing this term, not reversed: E.g. base.List
@ -1463,7 +1489,7 @@ resetNameLookupTables = do
|]
execute_
[here|
CREATE INDEX term_names_by_namespace ON term_name_lookup(namespace)
CREATE INDEX IF NOT EXISTS term_names_by_namespace ON term_name_lookup(namespace)
|]
-- Don't need this index at the moment, but will likely be useful later.
-- execute_
@ -1472,7 +1498,7 @@ resetNameLookupTables = do
-- |]
execute_
[here|
CREATE TABLE type_name_lookup (
CREATE TABLE IF NOT EXISTS type_name_lookup (
-- The name of the type: E.g. List.base
reversed_name TEXT NOT NULL,
-- The namespace containing this type, not reversed: E.g. base.List
@ -1485,7 +1511,7 @@ resetNameLookupTables = do
|]
execute_
[here|
CREATE INDEX type_names_by_namespace ON type_name_lookup(namespace)
CREATE INDEX IF NOT EXISTS type_names_by_namespace ON type_name_lookup(namespace)
|]
-- Don't need this index at the moment, but will likely be useful later.
@ -1508,6 +1534,37 @@ insertTermNames names = do
ON CONFLICT DO NOTHING
|]
-- | Remove the given set of term names from the name lookup table
removeTermNames :: [NamedRef Referent.TextReferent] -> Transaction ()
removeTermNames names = do
executeMany sql names
where
sql =
[here|
DELETE FROM term_name_lookup
WHERE
reversed_name IS ?
AND referent_builtin IS ?
AND referent_component_hash IS ?
AND referent_component_index IS ?
AND referent_constructor_index IS ?
|]
-- | Remove the given set of type names from the name lookup table
removeTypeNames :: [NamedRef (Reference.TextReference)] -> Transaction ()
removeTypeNames names = do
executeMany sql names
where
sql =
[here|
DELETE FROM type_name_lookup
WHERE
reversed_name IS ?
AND reference_builtin IS ?
AND reference_component_hash IS ?
AND reference_component_index IS ?
|]
-- | We need to escape any special characters for globbing.
--
-- >>> globEscape "Nat.*.doc"

View File

@ -1,6 +1,7 @@
module U.Util.Monoid where
import Control.Monad (foldM)
import Control.Monad.Extra ((>=>))
import Data.Foldable (toList)
import Data.List (intersperse)
@ -10,6 +11,9 @@ intercalateMap :: (Foldable t, Monoid a) => a -> (b -> a) -> t b -> a
intercalateMap separator renderer elements =
mconcat $ intersperse separator (renderer <$> toList elements)
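-- | Like 'intercalateMap', but the rendering function runs in a monad.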
intercalateMapM :: (Traversable t, Monad m, Monoid a) => a -> (b -> m a) -> t b -> m a
intercalateMapM separator renderer = traverse renderer >=> return . intercalateMap separator id
fromMaybe :: Monoid a => Maybe a -> a
fromMaybe Nothing = mempty
fromMaybe (Just a) = a
@ -24,4 +28,4 @@ isEmpty a = a == mempty
nonEmpty = not . isEmpty
foldMapM :: (Monad m, Foldable f, Monoid b) => (a -> m b) -> f a -> m b
foldMapM f as = foldM (\b a -> fmap (b <>) (f a)) mempty as
foldMapM f = foldM (\b a -> fmap (b <>) (f a)) mempty

View File

@ -6,9 +6,9 @@
Supported features:
* Show type on hover
* Autocompletion
* Inline type and parser error messages
* NO autocomplete yet, but soon.
* Show type on hover
Notes:
@ -30,7 +30,8 @@ Configuration for [coc-nvim](https://github.com/neoclide/coc.nvim), enter the fo
"unison": {
"filetypes": ["unison"],
"host": "127.0.0.1",
"port": 5757
"port": 5757,
"settings": {}
}
}
```
@ -39,5 +40,25 @@ Note that you'll need to start UCM _before_ you try connecting to it in your edi
### VSCode
VSCode doesn't allow customizing LSP implementations without an extension.
Hang tight, one will be available soon!
Simply install the [Unison Language VSCode extension](https://marketplace.visualstudio.com/items?itemName=unison-lang.unison).
### Other Editors
If your editor provides a mechanism for connecting to a host and port, provide a host of `127.0.0.1` and port `5757`.
If your editor requires a command to run, you can provide the command `nc localhost 5757` on Mac, or `netcat localhost 5757` on Linux.
Note that some editors require passing the command and arguments as separate parameters.
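For clients that take the command and its arguments as separate fields, the configuration typically looks something like the following (the exact field names vary by client and are illustrative only):

```json
{
  "unison": {
    "command": "nc",
    "args": ["localhost", "5757"],
    "filetypes": ["unison"]
  }
}
```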
## Configuration
Supported settings and their defaults. See information for your language server client about where to provide these.
```json
{
// The number of completions the server should collect and send based on a single query.
// Increasing this limit will provide more completion results, but at the cost of being slower to respond.
// If explicitly set to `null` the server will return ALL completions available.
"maxCompletions": 100
}
```

View File

@ -51,6 +51,12 @@ cradle:
- path: "lib/unison-util-base32hex-orphans-sqlite/src"
component: "unison-util-base32hex-orphans-sqlite:lib"
- path: "lib/unison-util-bytes/src"
component: "unison-util-bytes:lib"
- path: "lib/unison-util-bytes/test"
component: "unison-util-bytes:test:util-bytes-tests"
- path: "lib/unison-util-relation/src"
component: "unison-util-relation:lib"
@ -60,6 +66,9 @@ cradle:
- path: "lib/unison-util-relation/benchmarks/relation/Main.hs"
component: "unison-util-relation:bench:relation"
- path: "lib/unison-util-rope/src"
component: "unison-util-rope:lib"
- path: "parser-typechecker/src"
component: "unison-parser-typechecker:lib"

View File

@ -34,6 +34,9 @@ data DebugFlag
LSP
| -- | Timing how long things take
Timing
| -- | Useful for adding temporary debugging statements during development.
-- Remove uses of Debug.Temp before merging to keep things clean for the next person :)
Temp
deriving (Eq, Ord, Show, Bounded, Enum)
debugFlags :: Set DebugFlag
@ -54,6 +57,7 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of
"SYNC" -> pure Sync
"LSP" -> pure LSP
"TIMING" -> pure Timing
"TEMP" -> pure Temp
_ -> empty
{-# NOINLINE debugFlags #-}
@ -93,6 +97,10 @@ debugTiming :: Bool
debugTiming = Timing `Set.member` debugFlags
{-# NOINLINE debugTiming #-}
debugTemp :: Bool
debugTemp = Temp `Set.member` debugFlags
{-# NOINLINE debugTemp #-}
-- | Use for trace-style selective debugging.
-- E.g. 1 + (debug Git "The second number" 2)
--
@ -142,3 +150,4 @@ shouldDebug = \case
Sync -> debugSync
LSP -> debugLSP
Timing -> debugTiming
Temp -> debugTemp

View File

@ -40,6 +40,7 @@ module Unison.Util.Pretty
column3UnzippedM,
column3sep,
column3Header,
columnNHeader,
commas,
commented,
oxfordCommas,
@ -101,6 +102,7 @@ module Unison.Util.Pretty
spaceIfNeeded,
spaced,
spacedMap,
spacedTraverse,
spacesIfBreak,
string,
surroundCommas,
@ -405,6 +407,9 @@ spaced = intercalateMap softbreak id
spacedMap :: (Foldable f, IsString s) => (a -> Pretty s) -> f a -> Pretty s
spacedMap f as = spaced . fmap f $ toList as
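-- | Like 'spacedMap', but the rendering function runs in an 'Applicative'.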
spacedTraverse :: (Traversable f, IsString s, Applicative m) => (a -> m (Pretty s)) -> f a -> m (Pretty s)
spacedTraverse f as = spaced <$> traverse f as
commas :: (Foldable f, IsString s) => f (Pretty s) -> Pretty s
commas = intercalateMap ("," <> softbreak) id
@ -469,7 +474,7 @@ excerptSep' ::
excerptSep' maxCount summarize s ps = case maxCount of
Just max
| length ps > max ->
sep s (take max ps) <> summarize (length ps - max)
sep s (take max ps) <> summarize (length ps - max)
_ -> sep s ps
nonEmpty :: (Foldable f, IsString s) => f (Pretty s) -> [Pretty s]
@ -579,7 +584,7 @@ excerptColumn2Headed ::
excerptColumn2Headed max hd cols = case max of
Just max
| len > max ->
lines [column2 (hd : take max cols), "... " <> shown (len - max) <> " more"]
lines [column2 (hd : take max cols), "... " <> shown (len - max) <> " more"]
_ -> column2 (hd : cols)
where
len = length cols

View File

@ -2,8 +2,7 @@ name: unison-util-rope
github: unisonweb/unison
copyright: Copyright (C) 2013-2022 Unison Computing, PBC and contributors
ghc-options: -Wall -O0
# -fno-warn-name-shadowing -fno-warn-missing-pattern-synonym-signatures
ghc-options: -Wall
dependencies:
- base

View File

@ -49,7 +49,7 @@ library
TypeApplications
TypeFamilies
ViewPatterns
ghc-options: -Wall -O0
ghc-options: -Wall
build-depends:
base
, deepseq

View File

@ -45,6 +45,7 @@ dependencies:
- fingertree
- fsnotify
- fuzzyfind
- free
- generic-lens
- generic-monoid
- hashable

View File

@ -0,0 +1,174 @@
module U.Codebase.Branch.Diff
( TreeDiff (..),
NameChanges (..),
DefinitionDiffs (..),
Diff (..),
diffBranches,
nameChanges,
)
where
import Control.Comonad.Cofree
import Control.Lens (ifoldMap)
import qualified Control.Lens as Lens
import qualified Data.Map as Map
import qualified Data.Semialign as Align
import qualified Data.Set as Set
import Data.These
import U.Codebase.Branch
import qualified U.Codebase.Branch.Type as Branch
import qualified U.Codebase.Causal as Causal
import U.Codebase.Reference (Reference)
import U.Codebase.Referent (Referent)
import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.Prelude
data Diff a = Diff
{ adds :: Set a,
removals :: Set a
}
deriving (Show, Eq, Ord)
-- | Represents the changes to definitions at a given path, not including child paths.
--
-- Note: doesn't yet include any info on metadata or patch diffs. Feel free to add it.
data DefinitionDiffs = DefinitionDiffs
{ termDiffs :: Map NameSegment (Diff Referent),
typeDiffs :: Map NameSegment (Diff Reference)
-- termMetadataDiffs :: Map (NameSegment, Referent) (Diff Reference),
-- typeMetadataDiffs :: Map (NameSegment, Reference) (Diff Reference)
-- patchDiffs :: Map NameSegment (Diff ())
}
deriving stock (Show, Eq, Ord)
instance Semigroup DefinitionDiffs where
a <> b =
DefinitionDiffs
{ termDiffs = termDiffs a <> termDiffs b,
typeDiffs = typeDiffs a <> typeDiffs b
}
instance Monoid DefinitionDiffs where
mempty = DefinitionDiffs mempty mempty
-- | A tree of local diffs. Each node of the tree contains the definition diffs at that path.
newtype TreeDiff = TreeDiff
{ unTreeDiff :: Cofree (Map NameSegment) DefinitionDiffs
}
deriving stock (Show, Eq, Ord)
instance Semigroup TreeDiff where
TreeDiff (a :< as) <> TreeDiff (b :< bs) =
TreeDiff $ (a <> b) :< (Map.unionWith mergeCofrees as bs)
where
mergeCofrees x y = unTreeDiff (TreeDiff x <> TreeDiff y)
instance Monoid TreeDiff where
mempty = TreeDiff (mempty :< mempty)
instance Lens.AsEmpty TreeDiff where
_Empty = Lens.only mempty
-- | A summary of a 'TreeDiff', containing all names added and removed.
-- Note that there isn't a clear notion of a name "changing" since conflicts might muddy the notion
-- by having multiple copies of both the from and to names, so we just talk about adds and
-- removals instead.
data NameChanges = NameChanges
{ termNameAdds :: [(Name, Referent)],
termNameRemovals :: [(Name, Referent)],
typeNameAdds :: [(Name, Reference)],
typeNameRemovals :: [(Name, Reference)]
}
instance Semigroup NameChanges where
(NameChanges a b c d) <> (NameChanges a2 b2 c2 d2) =
NameChanges (a <> a2) (b <> b2) (c <> c2) (d <> d2)
instance Monoid NameChanges where
mempty = NameChanges mempty mempty mempty mempty
-- | Diff two Branches, returning a tree containing all of the changes
diffBranches :: forall m. Monad m => Branch m -> Branch m -> m TreeDiff
diffBranches from to = do
let termDiffs = diffMap (terms from) (terms to)
let typeDiffs = diffMap (types from) (types to)
let defDiff = DefinitionDiffs {termDiffs, typeDiffs}
childDiff <- do
Align.align (children from) (children to)
& wither
( \case
This ca -> do
-- TODO: For the names index we really don't need to know which exact
-- names were removed; we just need to delete from the index using a
-- prefix query, which would be faster than crawling to get all the deletes.
removedChildBranch <- Causal.value ca
Just . unTreeDiff <$> diffBranches removedChildBranch Branch.empty
That ca -> do
newChildBranch <- Causal.value ca
Just . unTreeDiff <$> diffBranches Branch.empty newChildBranch
These fromC toC
| Causal.valueHash fromC == Causal.valueHash toC -> do
-- This child didn't change.
pure Nothing
| otherwise -> do
fromChildBranch <- Causal.value fromC
toChildBranch <- Causal.value toC
diffBranches fromChildBranch toChildBranch >>= \case
Lens.Empty -> pure Nothing
TreeDiff cfr -> pure . Just $ cfr
)
pure $ TreeDiff (defDiff :< childDiff)
where
diffMap :: forall ref. Ord ref => Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Map ref (m MdValues)) -> Map NameSegment (Diff ref)
diffMap l r =
Align.align l r
& fmap
( \case
(This refs) -> (Diff {removals = Map.keysSet refs, adds = mempty})
(That refs) -> (Diff {removals = mempty, adds = Map.keysSet refs})
(These l' r') ->
let lRefs = Map.keysSet l'
rRefs = Map.keysSet r'
in (Diff {removals = lRefs `Set.difference` rRefs, adds = rRefs `Set.difference` lRefs})
)
-- | Get a summary of all of the name adds and removals from a tree diff.
--
-- The provided name will be prepended to all names in the output diff, and can be useful if diffing branches at a
-- specific sub-tree, but you can pass 'Nothing' if you're diffing from the root.
nameChanges ::
Maybe Name ->
TreeDiff ->
NameChanges
nameChanges namePrefix (TreeDiff (DefinitionDiffs {termDiffs, typeDiffs} :< children)) =
let (termNameAdds, termNameRemovals) =
( termDiffs
& ifoldMap \ns diff ->
let name = appendName ns
in (listifyNames name $ adds diff, listifyNames name $ removals diff)
)
(typeNameAdds, typeNameRemovals) =
( typeDiffs
& ifoldMap \ns diff ->
let name = appendName ns
in (listifyNames name $ adds diff, listifyNames name $ removals diff)
)
childNameChanges =
( children
& ifoldMap \ns childTree ->
nameChanges (Just $ appendName ns) (TreeDiff childTree)
)
in NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} <> childNameChanges
where
appendName :: NameSegment -> Name
appendName ns =
case namePrefix of
Nothing -> Name.fromSegment . Cv.namesegment2to1 $ ns
Just prefix -> prefix Lens.|> Cv.namesegment2to1 ns
listifyNames :: (Name -> Set ref -> [(Name, ref)])
listifyNames name xs =
xs
& Set.toList
& fmap (name,)

View File

@ -27,7 +27,7 @@ import qualified System.Console.ANSI as ANSI
import qualified System.FilePath as FilePath
import qualified System.FilePath.Posix as FilePath.Posix
import qualified U.Codebase.Branch as V2Branch
import U.Codebase.HashTags (CausalHash (CausalHash))
import U.Codebase.HashTags (BranchHash, CausalHash (CausalHash))
import qualified U.Codebase.Reflog as Reflog
import qualified U.Codebase.Sqlite.Operations as Ops
import qualified U.Codebase.Sqlite.Queries as Q
@ -276,6 +276,10 @@ sqliteCodebase debugName root localOrRemote migrationStrategy action = do
putTerm id tm tp =
runTransaction (CodebaseOps.putTerm termBuffer declBuffer id tm tp)
putTermComponent :: Hash -> [(Term Symbol Ann, Type Symbol Ann)] -> Sqlite.Transaction ()
putTermComponent =
CodebaseOps.putTermComponent termBuffer declBuffer
putTypeDeclaration :: Reference.Id -> Decl Symbol Ann -> m ()
putTypeDeclaration id decl =
runTransaction (CodebaseOps.putTypeDeclaration termBuffer declBuffer id decl)
@ -430,9 +434,9 @@ sqliteCodebase debugName root localOrRemote migrationStrategy action = do
namesAtPath path =
runTransaction (CodebaseOps.namesAtPath path)
updateNameLookup :: m ()
updateNameLookup =
runTransaction (CodebaseOps.updateNameLookupIndexFromV2Root getDeclType)
updateNameLookup :: Path -> Maybe BranchHash -> BranchHash -> m ()
updateNameLookup pathPrefix fromBH toBH =
runTransaction (CodebaseOps.updateNameLookupIndex getDeclType pathPrefix fromBH toBH)
let codebase =
C.Codebase
@ -444,6 +448,7 @@ sqliteCodebase debugName root localOrRemote migrationStrategy action = do
withConn \conn ->
Sqlite.runReadOnlyTransaction conn \run -> run (getDeclType r),
putTerm,
putTermComponent,
putTypeDeclaration,
putTypeDeclarationComponent,
getTermComponentWithTypes,

View File

@ -18,8 +18,9 @@ import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified U.Codebase.Branch as V2Branch
import qualified U.Codebase.Branch.Diff as BranchDiff
import qualified U.Codebase.Causal as V2Causal
import U.Codebase.HashTags (CausalHash (unCausalHash))
import U.Codebase.HashTags (BranchHash, CausalHash (unCausalHash))
import qualified U.Codebase.Reference as C.Reference
import qualified U.Codebase.Referent as C.Referent
import U.Codebase.Sqlite.DbId (ObjectId)
@ -553,7 +554,13 @@ referentsByPrefix doGetDeclType (SH.ShortHash prefix (fmap Cv.shortHashSuffix1to
termReferents <-
Ops.termReferentsByPrefix prefix cycle
>>= traverse (Cv.referentid2to1 doGetDeclType)
declReferents' <- Ops.declReferentsByPrefix prefix cycle (read . Text.unpack <$> cid)
cid' <- case cid of
Nothing -> pure Nothing
Just c ->
case readMaybe (Text.unpack c) of
Nothing -> error $ reportBug "994787297" "cid of ShortHash must be an integer but got: " <> show cid
Just cInt -> pure $ Just cInt
declReferents' <- Ops.declReferentsByPrefix prefix cycle cid'
let declReferents =
[ Referent.ConId (ConstructorReference (Reference.Id h pos) (fromIntegral cid)) (Cv.decltype2to1 ct)
| (h, pos, ct, cids) <- declReferents',
@ -638,11 +645,49 @@ namesAtPath path = do
-- | Update the root namespace names index which is used by the share server for serving api
-- requests.
updateNameLookupIndex ::
(C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) ->
Path ->
-- | "from" branch, if 'Nothing' use the empty branch
Maybe BranchHash ->
-- | "to" branch
BranchHash ->
Sqlite.Transaction ()
updateNameLookupIndex getDeclType pathPrefix mayFromBranchHash toBranchHash = do
fromBranch <- case mayFromBranchHash of
Nothing -> pure V2Branch.empty
Just fromBH -> Ops.expectBranchByBranchHash fromBH
toBranch <- Ops.expectBranchByBranchHash toBranchHash
treeDiff <- BranchDiff.diffBranches fromBranch toBranch
let namePrefix = case pathPrefix of
Path.Empty -> Nothing
(p Path.:< ps) -> Just $ Name.fromSegments (p :| Path.toList ps)
let BranchDiff.NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} = BranchDiff.nameChanges namePrefix treeDiff
termNameAddsWithCT <- do
for termNameAdds \(name, ref) -> do
refWithCT <- addReferentCT ref
pure $ toNamedRef (name, refWithCT)
Ops.updateNameIndex (termNameAddsWithCT, toNamedRef <$> termNameRemovals) (toNamedRef <$> typeNameAdds, toNamedRef <$> typeNameRemovals)
where
toNamedRef :: (Name, ref) -> S.NamedRef ref
toNamedRef (name, ref) = S.NamedRef {reversedSegments = coerce $ Name.reverseSegments name, ref = ref}
addReferentCT :: C.Referent.Referent -> Transaction (C.Referent.Referent, Maybe C.Referent.ConstructorType)
addReferentCT referent = case referent of
C.Referent.Ref {} -> pure (referent, Nothing)
C.Referent.Con ref _conId -> do
ct <- getDeclType ref
pure (referent, Just $ Cv.constructorType1to2 ct)
-- | Compute the root namespace names index which is used by the share server for serving api
-- requests. Using 'updateNameLookupIndex' is preferred whenever possible, since it's
-- considerably faster. This can be used to reset the index if it ever gets out of sync due to
-- a bug.
--
-- This version should be used if you've already got the root Branch pre-loaded, otherwise
-- it's faster to use 'updateNameLookupIndexFromV2Branch'
updateNameLookupIndexFromV1Branch :: Branch Transaction -> Sqlite.Transaction ()
updateNameLookupIndexFromV1Branch root = do
-- This version can be used if you've already got the root Branch pre-loaded, otherwise
-- it's faster to use 'initializeNameLookupIndexFromV2Root'
initializeNameLookupIndexFromV1Branch :: Branch Transaction -> Sqlite.Transaction ()
initializeNameLookupIndexFromV1Branch root = do
Q.dropNameLookupTables
saveRootNamesIndexV1 (V1Branch.toNames . Branch.head $ root)
where
saveRootNamesIndexV1 :: Names -> Transaction ()
@ -655,7 +700,7 @@ updateNameLookupIndexFromV1Branch root = do
<&> ( \(name, ref) ->
S.NamedRef {reversedSegments = nameSegments name, ref = Cv.reference1to2 ref}
)
Ops.rebuildNameIndex termNames typeNames
Ops.updateNameIndex (termNames, []) (typeNames, [])
where
nameSegments :: Name -> NonEmpty Text
nameSegments = coerce @(NonEmpty NameSegment) @(NonEmpty Text) . Name.reverseSegments
@ -664,13 +709,16 @@ updateNameLookupIndexFromV1Branch root = do
Referent.Ref {} -> (Cv.referent1to2 referent, Nothing)
Referent.Con _ref ct -> (Cv.referent1to2 referent, Just (Cv.constructorType1to2 ct))
-- | Update the root namespace names index which is used by the share server for serving api
-- requests.
-- | Compute the root namespace names index which is used by the share server for serving api
-- requests. Using 'updateNameLookupIndex' is preferred whenever possible, since it's
-- considerably faster. This can be used to reset the index if it ever gets out of sync due to
-- a bug.
--
-- This version should be used if you don't already have the root Branch pre-loaded,
-- If you do, use 'updateNameLookupIndexFromV2Branch' instead.
updateNameLookupIndexFromV2Root :: (C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) -> Sqlite.Transaction ()
updateNameLookupIndexFromV2Root getDeclType = do
-- If you do, use 'initializeNameLookupIndexFromV1Branch' instead.
initializeNameLookupIndexFromV2Root :: (C.Reference.Reference -> Sqlite.Transaction CT.ConstructorType) -> Sqlite.Transaction ()
initializeNameLookupIndexFromV2Root getDeclType = do
Q.dropNameLookupTables
rootHash <- Ops.expectRootCausalHash
causalBranch <- Ops.expectCausalBranchByCausalHash rootHash
(termNameMap, typeNameMap) <- nameMapsFromV2Branch [] causalBranch
@ -683,7 +731,7 @@ updateNameLookupIndexFromV2Root getDeclType = do
(name, refs) <- Map.toList typeNameMap
ref <- Set.toList refs
pure $ S.NamedRef {S.reversedSegments = coerce name, S.ref = ref}
Ops.rebuildNameIndex termNameList typeNameList
Ops.updateNameIndex (termNameList, []) (typeNameList, [])
where
addReferentCT :: C.Referent.Referent -> Transaction (C.Referent.Referent, Maybe C.Referent.ConstructorType)
addReferentCT referent = case referent of

View File

@ -14,6 +14,7 @@ module Unison.Codebase.Type
where
import qualified U.Codebase.Branch as V2
import U.Codebase.HashTags (BranchHash)
import qualified U.Codebase.Reference as V2
import qualified U.Codebase.Reflog as Reflog
import qualified U.Codebase.Sqlite.Queries as Queries
@ -72,6 +73,7 @@ data Codebase m v a = Codebase
-- implementation may choose to delay the put until all of the term's (and its type's) references are stored as
-- well.
putTerm :: Reference.Id -> Term v a -> Type v a -> m (),
putTermComponent :: Hash -> [(Term v a, Type v a)] -> Sqlite.Transaction (),
-- | Enqueue the put of a type declaration into the codebase, if it doesn't already exist. The implementation may
-- choose to delay the put until all of the type declaration's references are stored as well.
putTypeDeclaration :: Reference.Id -> Decl v a -> m (),
@ -170,9 +172,21 @@ data Codebase m v a = Codebase
-- NOTE: this method requires an up-to-date name lookup index, which is
-- currently not kept up-to-date automatically (because it's slow to do so).
namesAtPath :: Path -> m ScopedNames,
-- Updates the root namespace names index.
-- Updates the root namespace names index from an old BranchHash to a new one.
-- This isn't run automatically because it can be a bit slow.
updateNameLookup :: m (),
updateNameLookup ::
-- Path to the root of the _changes_.
-- E.g. if you know that all the changes occur at "base.List", you can pass "base.List"
-- here, and pass the old and new branch hashes for the branch at "base.List".
-- This allows us to avoid searching for changes in areas where they couldn't
-- have occurred.
Path ->
-- The branch hash at 'Path' which the existing index was built from.
-- Pass 'Nothing' to build the index from scratch (i.e. compute a diff from an empty branch).
Maybe BranchHash ->
-- The new branch
BranchHash ->
m (),
-- | Acquire a new connection to the same underlying database file this codebase object connects to.
withConnection :: forall x. (Sqlite.Connection -> m x) -> m x,
-- | Acquire a new connection to the same underlying database file this codebase object connects to.

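A minimal sketch of how a caller might drive the new `updateNameLookup` field, assuming a `codebase` value and the relevant branch hashes are already in hand (the helper names below are illustrative, not part of this change):

  -- Rebuild the whole index from scratch by diffing from the empty branch at the root.
  rebuildNamesIndex :: Codebase m v a -> BranchHash -> m ()
  rebuildNamesIndex codebase newRootHash =
    updateNameLookup codebase Path.empty Nothing newRootHash

  -- If all edits happened under a known prefix, restrict the diff to that subtree.
  refreshNamesIndexAt :: Codebase m v a -> Path -> BranchHash -> BranchHash -> m ()
  refreshNamesIndexAt codebase prefix oldHash newHash =
    updateNameLookup codebase prefix (Just oldHash) newHash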
View File

@ -185,9 +185,8 @@ synthesizeFile ambient tl fqnsByShortName uf term = do
in -- use tlcsFromTypechecker to inform annotation-stripping decisions
traverse (traverse strippedTopLevelBinding) tlcsFromTypechecker
let doTdnr = applyTdnrDecisions infos
doTdnrInComponent (v, t, tp) = (\t -> (v, t, tp)) <$> doTdnr t
_ <- doTdnr tdnrTerm
tdnredTlcs <- (traverse . traverse) doTdnrInComponent topLevelComponents
let doTdnrInComponent (v, t, tp) = (v, doTdnr t, tp)
let tdnredTlcs = (fmap . fmap) doTdnrInComponent topLevelComponents
let (watches', terms') = partition isWatch tdnredTlcs
isWatch = all (\(v, _, _) -> Set.member v watchedVars)
watchedVars = Set.fromList [v | (v, _) <- UF.allWatches uf]
@ -208,19 +207,15 @@ synthesizeFile ambient tl fqnsByShortName uf term = do
applyTdnrDecisions ::
[Context.InfoNote v Ann] ->
Term v ->
Result' v (Term v)
applyTdnrDecisions infos tdnrTerm = foldM go tdnrTerm decisions
Term v
applyTdnrDecisions infos tdnrTerm = ABT.visitPure resolve tdnrTerm
where
-- UF data/effect ctors + builtins + TLC Term.vars
go term _decision@(shortv, loc, replacement) =
ABT.visit (resolve shortv loc replacement) term
decisions =
[(v, loc, replacement) | Context.Decision v loc replacement <- infos]
decisions = Map.fromList [((Var.nameStr v, loc), replacement) | Context.Decision v loc replacement <- infos]
-- resolve (v,loc) in a matching Blank to whatever `fqn` maps to in `names`
resolve shortv loc replacement t = case t of
resolve t = case t of
Term.Blank' (Blank.Recorded (Blank.Resolve loc' name))
| loc' == loc && Var.nameStr shortv == name ->
-- loc of replacement already chosen correctly by whatever made the
-- Decision
pure . pure $ replacement
| Just replacement <- Map.lookup (name, loc') decisions ->
-- loc of replacement already chosen correctly by whatever made the
-- Decision
Just $ replacement
_ -> Nothing

View File

@ -0,0 +1,47 @@
{-# LANGUAGE ConstraintKinds #-}
module Unison.PrettyPrintEnv.MonadPretty where
import Control.Lens (over, set, view, views, _1, _2)
import Control.Monad.Reader (MonadReader, Reader, local, runReader)
import qualified Data.Set as Set
import Unison.Prelude (Set)
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.Var (Var)
type MonadPretty v m = (Var v, MonadReader (PrettyPrintEnv, Set v) m)
getPPE :: MonadPretty v m => m PrettyPrintEnv
getPPE = view _1
-- | Run a computation with a modified PrettyPrintEnv, restoring the original
withPPE :: MonadPretty v m => PrettyPrintEnv -> m a -> m a
withPPE p = local (set _1 p)
applyPPE :: MonadPretty v m => (PrettyPrintEnv -> a) -> m a
applyPPE = views _1
applyPPE2 :: MonadPretty v m => (PrettyPrintEnv -> a -> b) -> a -> m b
applyPPE2 f a = views _1 (`f` a)
applyPPE3 :: MonadPretty v m => (PrettyPrintEnv -> a -> b -> c) -> a -> b -> m c
applyPPE3 f a b = views _1 (\ppe -> f ppe a b)
-- | Run a computation with a modified PrettyPrintEnv, restoring the original
modifyPPE :: MonadPretty v m => (PrettyPrintEnv -> PrettyPrintEnv) -> m a -> m a
modifyPPE = local . over _1
modifyTypeVars :: MonadPretty v m => (Set v -> Set v) -> m a -> m a
modifyTypeVars = local . over _2
-- | Add type variables to the set of variables that need to be avoided
addTypeVars :: MonadPretty v m => [v] -> m a -> m a
addTypeVars = modifyTypeVars . Set.union . Set.fromList
-- | Check if a list of type variables contains any variables that need to be
-- avoided
willCapture :: MonadPretty v m => [v] -> m Bool
willCapture vs = views _2 (not . Set.null . Set.intersection (Set.fromList vs))
runPretty :: Var v => PrettyPrintEnv -> Reader (PrettyPrintEnv, Set v) a -> a
runPretty ppe m = runReader m (ppe, mempty)

View File

@ -6,10 +6,11 @@ import Data.Text
import GHC.Stack
import Unison.Runtime.Stack
import Unison.Util.Pretty as P
import Unison.Reference (Reference)
data RuntimeExn
= PE CallStack (P.Pretty P.ColorText)
| BU Text Closure
| BU [(Reference,Int)] Text Closure
deriving (Show)
instance Exception RuntimeExn

View File

@ -92,6 +92,7 @@ import Unison.Runtime.Pattern
import Unison.Runtime.Serialize as SER
import Unison.Runtime.Stack
import Unison.Symbol (Symbol)
import Unison.Syntax.NamePrinter (prettyHashQualified)
import Unison.Syntax.TermPrinter
import qualified Unison.Term as Tm
import Unison.Util.EnumContainers as EC
@ -361,7 +362,7 @@ evalInContext ppe ctx activeThreads w = do
decom = decompile (backReferenceTm crs (decompTm ctx))
prettyError (PE _ p) = p
prettyError (BU nm c) = either id (bugMsg ppe nm) $ decom c
prettyError (BU tr nm c) = either id (bugMsg ppe tr nm) $ decom c
tr tx c = case decom c of
Right dv -> do
@ -394,13 +395,18 @@ executeMainComb init cc = do
Right () -> pure (Right ())
where
formatErr (PE _ msg) = pure msg
formatErr (BU nm c) = do
formatErr (BU tr nm c) = do
crs <- readTVarIO (combRefs cc)
let decom = decompile (backReferenceTm crs (decompTm $ cacheContext cc))
pure . either id (bugMsg PPE.empty nm) $ decom c
pure . either id (bugMsg PPE.empty tr nm) $ decom c
bugMsg :: PrettyPrintEnv -> Text -> Term Symbol -> Pretty ColorText
bugMsg ppe name tm
bugMsg
:: PrettyPrintEnv
-> [(Reference, Int)]
-> Text
-> Term Symbol
-> Pretty ColorText
bugMsg ppe tr name tm
| name == "blank expression" =
P.callout icon . P.lines $
[ P.wrap
@ -409,8 +415,8 @@ bugMsg ppe name tm
),
"",
P.indentN 2 $ pretty ppe tm,
"",
sorryMsg
"\n",
stackTrace ppe tr
]
| "pattern match failure" `isPrefixOf` name =
P.callout icon . P.lines $
@ -423,13 +429,16 @@ bugMsg ppe name tm
"",
"This happens when calling a function that doesn't handle all \
\possible inputs",
sorryMsg
"\n",
stackTrace ppe tr
]
| name == "builtin.raise" =
P.callout icon . P.lines $
[ P.wrap ("The program halted with an unhandled exception:"),
"",
P.indentN 2 $ pretty ppe tm
P.indentN 2 $ pretty ppe tm,
"\n",
stackTrace ppe tr
]
| name == "builtin.bug",
RF.TupleTerm' [Tm.Text' msg, x] <- tm,
@ -444,9 +453,10 @@ bugMsg ppe name tm
"",
"This happens when calling a function that doesn't handle all \
\possible inputs",
sorryMsg
"\n",
stackTrace ppe tr
]
bugMsg ppe name tm =
bugMsg ppe tr name tm =
P.callout icon . P.lines $
[ P.wrap
( "I've encountered a call to" <> P.red (P.text name)
@ -454,18 +464,26 @@ bugMsg ppe name tm =
),
"",
P.indentN 2 $ pretty ppe tm,
"",
sorryMsg
"\n",
stackTrace ppe tr
]
where
icon, sorryMsg :: Pretty ColorText
stackTrace :: PrettyPrintEnv -> [(Reference, Int)] -> Pretty ColorText
stackTrace ppe tr = "Stack trace:\n" <> P.indentN 2 (P.lines $ f <$> tr)
where
f (rf, n) = name <> count
where
count
| n > 1 = " (" <> fromString (show n) <> " copies)"
| otherwise = ""
name =
syntaxToColor .
prettyHashQualified .
PPE.termName ppe .
RF.Ref $ rf
icon :: Pretty ColorText
icon = "💔💥"
sorryMsg =
P.wrap $
"I'm sorry this message doesn't have more detail about"
<> "the location of the failure."
<> "My makers plan to fix this in a future release. 😢"
catchInternalErrors ::
IO (Either Error a) ->
@ -536,7 +554,7 @@ tryM :: IO () -> IO (Maybe Error)
tryM = fmap (either (Just . extract) (const Nothing)) . try
where
extract (PE _ e) = e
extract (BU _ _) = "impossible"
extract (BU _ _ _) = "impossible"
runStandalone :: StoredCache -> Word64 -> IO (Either (Pretty ColorText) ())
runStandalone sc init =

View File

@ -28,6 +28,7 @@ module Unison.Runtime.MCode
emitComb,
emptyRNs,
argsToLists,
combRef,
combDeps,
combTypes,
prettyCombs,
@ -543,6 +544,9 @@ data CombIx
!Word64 -- section
deriving (Eq, Ord, Show)
combRef :: CombIx -> Reference
combRef (CIx r _ _) = r
data RefNums = RN
{ dnum :: Reference -> Word64,
cnum :: Reference -> Word64

View File

@ -152,7 +152,7 @@ eval0 !env !activeThreads !co = do
bstk <- alloc
(denv, k) <-
topDEnv <$> readTVarIO (refTy env) <*> readTVarIO (refTm env)
eval env denv activeThreads ustk bstk (k KE) co
eval env denv activeThreads ustk bstk (k KE) dummyRef co
topDEnv ::
M.Map Reference Word64 ->
@ -241,31 +241,32 @@ exec ::
Stack 'UN ->
Stack 'BX ->
K ->
Reference ->
Instr ->
IO (DEnv, Stack 'UN, Stack 'BX, K)
exec !_ !denv !_activeThreads !ustk !bstk !k (Info tx) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Info tx) = do
info tx ustk
info tx bstk
info tx k
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (Name r args) = do
exec !env !denv !_activeThreads !ustk !bstk !k _ (Name r args) = do
bstk <- name ustk bstk args =<< resolve env denv bstk r
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (SetDyn p i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (SetDyn p i) = do
clo <- peekOff bstk i
pure (EC.mapInsert p clo denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Capture p) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Capture p) = do
(cap, denv, ustk, bstk, k) <- splitCont denv ustk bstk k p
bstk <- bump bstk
poke bstk cap
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (UPrim1 op i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (UPrim1 op i) = do
ustk <- uprim1 ustk op i
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (UPrim2 op i j) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (UPrim2 op i j) = do
ustk <- uprim2 ustk op i j
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 MISS i)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 MISS i)
| sandboxed env = die "attempted to use sandboxed operation: isMissing"
| otherwise = do
clink <- peekOff bstk i
@ -274,7 +275,7 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 MISS i)
ustk <- bump ustk
if (link `M.member` m) then poke ustk 1 else poke ustk 0
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 CACH i)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 CACH i)
| sandboxed env = die "attempted to use sandboxed operation: cache"
| otherwise = do
arg <- peekOffS bstk i
@ -285,7 +286,7 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 CACH i)
bstk
(Sq.fromList $ Foreign . Wrap Rf.termLinkRef . Ref <$> unknown)
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 CVLD i)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 CVLD i)
| sandboxed env = die "attempted to use sandboxed operation: validate"
| otherwise = do
arg <- peekOffS bstk i
@ -303,7 +304,7 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 CVLD i)
pokeOffBi bstk 1 msg
pokeOff bstk 2 clo
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 LKUP i)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 LKUP i)
| sandboxed env = die "attempted to use sandboxed operation: lookup"
| otherwise = do
clink <- peekOff bstk i
@ -323,14 +324,14 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 LKUP i)
bstk <- bump bstk
bstk <$ pokeBi bstk sg
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (BPrim1 TLTT i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim1 TLTT i) = do
clink <- peekOff bstk i
let Ref link = unwrapForeign $ marshalToForeign clink
let sh = Util.Text.fromText . SH.toText $ toShortHash link
bstk <- bump bstk
pokeBi bstk sh
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 LOAD i)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 LOAD i)
| sandboxed env = die "attempted to use sandboxed operation: load"
| otherwise = do
v <- peekOffBi bstk i
@ -345,16 +346,16 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 LOAD i)
poke ustk 1
poke bstk x
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim1 VALU i) = do
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim1 VALU i) = do
m <- readTVarIO (tagRefs env)
c <- peekOff bstk i
bstk <- bump bstk
pokeBi bstk =<< reflectValue m c
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (BPrim1 op i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim1 op i) = do
(ustk, bstk) <- bprim1 ustk bstk op i
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim2 SDBX i j) = do
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim2 SDBX i j) = do
s <- peekOffS bstk i
c <- peekOff bstk j
l <- decodeSandboxArgument s
@ -362,92 +363,96 @@ exec !env !denv !_activeThreads !ustk !bstk !k (BPrim2 SDBX i j) = do
ustk <- bump ustk
poke ustk $ if b then 1 else 0
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (BPrim2 EQLU i j) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim2 EQLU i j) = do
x <- peekOff bstk i
y <- peekOff bstk j
ustk <- bump ustk
poke ustk $ if universalEq (==) x y then 1 else 0
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (BPrim2 CMPU i j) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (BPrim2 CMPU i j) = do
x <- peekOff bstk i
y <- peekOff bstk j
ustk <- bump ustk
poke ustk . fromEnum $ universalCompare compare x y
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (BPrim2 TRCE i j)
exec !_ !_ !_activeThreads !_ !bstk !k r (BPrim2 THRO i j) = do
name <- peekOffBi @Util.Text.Text bstk i
x <- peekOff bstk j
throwIO (BU (traceK r k) (Util.Text.toText name) x)
exec !env !denv !_activeThreads !ustk !bstk !k _ (BPrim2 TRCE i j)
| sandboxed env = die "attempted to use sandboxed operation: trace"
| otherwise = do
tx <- peekOffBi bstk i
clo <- peekOff bstk j
tracer env tx clo
pure (denv, ustk, bstk, k)
exec !_ !denv !_trackThreads !ustk !bstk !k (BPrim2 op i j) = do
exec !_ !denv !_trackThreads !ustk !bstk !k _ (BPrim2 op i j) = do
(ustk, bstk) <- bprim2 ustk bstk op i j
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Pack r t args) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Pack r t args) = do
clo <- buildData ustk bstk r t args
bstk <- bump bstk
poke bstk clo
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Unpack r i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Unpack r i) = do
(ustk, bstk) <- dumpData r ustk bstk =<< peekOff bstk i
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Print i) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Print i) = do
t <- peekOffBi bstk i
Tx.putStrLn (Util.Text.toText t)
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Lit (MI n)) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Lit (MI n)) = do
ustk <- bump ustk
poke ustk n
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Lit (MD d)) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Lit (MD d)) = do
ustk <- bump ustk
pokeD ustk d
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Lit (MT t)) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Lit (MT t)) = do
bstk <- bump bstk
poke bstk (Foreign (Wrap Rf.textRef t))
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Lit (MM r)) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Lit (MM r)) = do
bstk <- bump bstk
poke bstk (Foreign (Wrap Rf.termLinkRef r))
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Lit (MY r)) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Lit (MY r)) = do
bstk <- bump bstk
poke bstk (Foreign (Wrap Rf.typeLinkRef r))
pure (denv, ustk, bstk, k)
exec !_ !denv !_activeThreads !ustk !bstk !k (Reset ps) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Reset ps) = do
(ustk, ua) <- saveArgs ustk
(bstk, ba) <- saveArgs bstk
pure (denv, ustk, bstk, Mark ua ba ps clos k)
where
clos = EC.restrictKeys denv ps
exec !_ !denv !_activeThreads !ustk !bstk !k (Seq as) = do
exec !_ !denv !_activeThreads !ustk !bstk !k _ (Seq as) = do
l <- closureArgs bstk as
bstk <- bump bstk
pokeS bstk $ Sq.fromList l
pure (denv, ustk, bstk, k)
exec !env !denv !_activeThreads !ustk !bstk !k (ForeignCall _ w args)
exec !env !denv !_activeThreads !ustk !bstk !k _ (ForeignCall _ w args)
| Just (FF arg res ev) <- EC.lookup w (foreignFuncs env) =
uncurry (denv,,,k)
<$> (arg ustk bstk args >>= ev >>= res ustk bstk)
| otherwise =
die $ "reference to unknown foreign function: " ++ show w
exec !env !denv !activeThreads !ustk !bstk !k (Fork i)
exec !env !denv !activeThreads !ustk !bstk !k _ (Fork i)
| sandboxed env = die "attempted to use sandboxed operation: fork"
| otherwise = do
tid <- forkEval env activeThreads =<< peekOff bstk i
bstk <- bump bstk
poke bstk . Foreign . Wrap Rf.threadIdRef $ tid
pure (denv, ustk, bstk, k)
exec !env !denv !activeThreads !ustk !bstk !k (Atomically i)
exec !env !denv !activeThreads !ustk !bstk !k _ (Atomically i)
| sandboxed env = die $ "attempted to use sandboxed operation: atomically"
| otherwise = do
c <- peekOff bstk i
bstk <- bump bstk
atomicEval env activeThreads (poke bstk) c
pure (denv, ustk, bstk, k)
exec !env !denv !activeThreads !ustk !bstk !k (TryForce i)
exec !env !denv !activeThreads !ustk !bstk !k _ (TryForce i)
| sandboxed env = die $ "attempted to use sandboxed operation: tryForce"
| otherwise = do
c <- peekOff bstk i
@ -478,7 +483,7 @@ encodeExn ustk bstk (Left exn) = do
| Just re <- fromException exn = case re of
PE _stk msg ->
(Rf.runtimeFailureRef, Util.Text.pack $ toPlainUnbroken msg, unitValue)
BU tx cl -> (Rf.runtimeFailureRef, Util.Text.fromText tx, cl)
BU _ tx cl -> (Rf.runtimeFailureRef, Util.Text.fromText tx, cl)
| Just (ae :: ArithException) <- fromException exn =
(Rf.arithmeticFailureRef, disp ae, unitValue)
| Just (nae :: NestedAtomically) <- fromException exn =
@ -496,15 +501,16 @@ eval ::
Stack 'UN ->
Stack 'BX ->
K ->
Reference ->
Section ->
IO ()
eval !env !denv !activeThreads !ustk !bstk !k (Match i (TestT df cs)) = do
eval !env !denv !activeThreads !ustk !bstk !k r (Match i (TestT df cs)) = do
t <- peekOffBi bstk i
eval env denv activeThreads ustk bstk k $ selectTextBranch t df cs
eval !env !denv !activeThreads !ustk !bstk !k (Match i br) = do
eval env denv activeThreads ustk bstk k r $ selectTextBranch t df cs
eval !env !denv !activeThreads !ustk !bstk !k r (Match i br) = do
n <- peekOffN ustk i
eval env denv activeThreads ustk bstk k $ selectBranch n br
eval !env !denv !activeThreads !ustk !bstk !k (Yield args)
eval env denv activeThreads ustk bstk k r $ selectBranch n br
eval !env !denv !activeThreads !ustk !bstk !k _ (Yield args)
| asize ustk + asize bstk > 0,
BArg1 i <- args =
peekOff bstk i >>= apply env denv activeThreads ustk bstk k False ZArgs
@ -513,23 +519,23 @@ eval !env !denv !activeThreads !ustk !bstk !k (Yield args)
ustk <- frameArgs ustk
bstk <- frameArgs bstk
yield env denv activeThreads ustk bstk k
eval !env !denv !activeThreads !ustk !bstk !k (App ck r args) =
eval !env !denv !activeThreads !ustk !bstk !k _ (App ck r args) =
resolve env denv bstk r
>>= apply env denv activeThreads ustk bstk k ck args
eval !env !denv !activeThreads !ustk !bstk !k (Call ck n args) =
eval !env !denv !activeThreads !ustk !bstk !k _ (Call ck n args) =
combSection env (CIx dummyRef n 0)
>>= enter env denv activeThreads ustk bstk k ck args
eval !env !denv !activeThreads !ustk !bstk !k (Jump i args) =
eval !env !denv !activeThreads !ustk !bstk !k _ (Jump i args) =
peekOff bstk i >>= jump env denv activeThreads ustk bstk k args
eval !env !denv !activeThreads !ustk !bstk !k (Let nw cix) = do
eval !env !denv !activeThreads !ustk !bstk !k r (Let nw cix) = do
(ustk, ufsz, uasz) <- saveFrame ustk
(bstk, bfsz, basz) <- saveFrame bstk
eval env denv activeThreads ustk bstk (Push ufsz bfsz uasz basz cix k) nw
eval !env !denv !activeThreads !ustk !bstk !k (Ins i nx) = do
(denv, ustk, bstk, k) <- exec env denv activeThreads ustk bstk k i
eval env denv activeThreads ustk bstk k nx
eval !_ !_ !_ !_activeThreads !_ !_ Exit = pure ()
eval !_ !_ !_ !_activeThreads !_ !_ (Die s) = die s
eval env denv activeThreads ustk bstk (Push ufsz bfsz uasz basz cix k) r nw
eval !env !denv !activeThreads !ustk !bstk !k r (Ins i nx) = do
(denv, ustk, bstk, k) <- exec env denv activeThreads ustk bstk k r i
eval env denv activeThreads ustk bstk k r nx
eval !_ !_ !_ !_activeThreads !_ !_ _ Exit = pure ()
eval !_ !_ !_ !_activeThreads !_ !_ _ (Die s) = die s
{-# NOINLINE eval #-}
forkEval :: CCache -> ActiveThreads -> Closure -> IO ThreadId
@ -587,7 +593,9 @@ enter !env !denv !activeThreads !ustk !bstk !k !ck !args !comb = do
(ustk, bstk) <- moveArgs ustk bstk args
ustk <- acceptArgs ustk ua
bstk <- acceptArgs bstk ba
eval env denv activeThreads ustk bstk k entry
-- TODO: start putting references in `Call` if we ever start
-- detecting saturated calls.
eval env denv activeThreads ustk bstk k dummyRef entry
where
Lam ua ba uf bf entry = comb
{-# INLINE enter #-}
@ -626,7 +634,7 @@ apply !env !denv !activeThreads !ustk !bstk !k !ck !args (PAp comb useg bseg) =
bstk <- dumpSeg bstk bseg A
ustk <- acceptArgs ustk ua
bstk <- acceptArgs bstk ba
eval env denv activeThreads ustk bstk k entry
eval env denv activeThreads ustk bstk k (combRef comb) entry
| otherwise -> do
(useg, bseg) <- closeArgs C ustk bstk useg bseg args
ustk <- discardFrame =<< frameArgs ustk
@ -1594,10 +1602,7 @@ bprim2 !ustk !bstk CATB i j = do
bstk <- bump bstk
pokeBi bstk (l <> r :: By.Bytes)
pure (ustk, bstk)
bprim2 !_ !bstk THRO i j = do
name <- peekOffBi @Util.Text.Text bstk i
x <- peekOff bstk j
throwIO (BU (Util.Text.toText name) x)
bprim2 !ustk !bstk THRO _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk TRCE _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk CMPU _ _ = pure (ustk, bstk) -- impossible
bprim2 !ustk !bstk SDBX _ _ = pure (ustk, bstk) -- impossible
@ -1626,7 +1631,7 @@ yield !env !denv !activeThreads !ustk !bstk !k = leap denv k
bstk <- restoreFrame bstk bfsz basz
ustk <- ensure ustk uf
bstk <- ensure bstk bf
eval env denv activeThreads ustk bstk k nx
eval env denv activeThreads ustk bstk k (combRef cix) nx
leap _ (CB (Hook f)) = f ustk bstk
leap _ KE = pure ()
{-# INLINE yield #-}
@ -1713,7 +1718,7 @@ resolve env _ _ (Env n i) =
resolve _ _ bstk (Stk i) = peekOff bstk i
resolve _ denv _ (Dyn i) = case EC.lookup i denv of
Just clo -> pure clo
_ -> die $ "resolve: looked up bad dynamic: " ++ show i
_ -> die $ "resolve: unhandled ability request: " ++ show i
combSection :: HasCallStack => CCache -> CombIx -> IO Comb
combSection env (CIx _ n i) =

View File

@ -17,6 +17,7 @@ module Unison.Runtime.Stack
Off,
SZ,
FP,
traceK,
frameDataSize,
marshalToForeign,
unull,
@ -109,6 +110,14 @@ data Closure
| BlackHole
deriving (Show, Eq, Ord)
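-- | Collect the combinator references a continuation passes through, starting from the
-- given reference and run-length encoding consecutive repeats; used to build stack traces.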
traceK :: Reference -> K -> [(Reference, Int)]
traceK begin = dedup (begin, 1) where
dedup p (Mark _ _ _ _ k) = dedup p k
dedup p@(cur,n) (Push _ _ _ _ (CIx r _ _) k)
| cur == r = dedup (cur,1+n) k
| otherwise = p : dedup (r,1) k
dedup p _ = [p]
splitData :: Closure -> Maybe (Reference, Word64, [Int], [Closure])
splitData (Enum r t) = Just (r, t, [], [])
splitData (DataU1 r t i) = Just (r, t, [i], [])

View File

@ -22,6 +22,7 @@ import Unison.Reference (Reference (DerivedId))
import qualified Unison.Referent as Referent
import qualified Unison.Result as Result
import Unison.Syntax.NamePrinter (styleHashQualified'')
import Unison.Syntax.TypePrinter (runPretty)
import qualified Unison.Syntax.TypePrinter as TypePrinter
import qualified Unison.Term as Term
import qualified Unison.Type as Type
@ -73,9 +74,9 @@ prettyGADT env ctorType r name dd =
where
constructor (n, (_, _, t)) =
prettyPattern env ctorType name (ConstructorReference r n)
<> (fmt S.TypeAscriptionColon " :")
`P.hang` TypePrinter.pretty0 env Map.empty (-1) t
header = prettyEffectHeader name (DD.EffectDeclaration dd) <> (fmt S.ControlKeyword " where")
<> fmt S.TypeAscriptionColon " :"
`P.hang` TypePrinter.prettySyntax env t
header = prettyEffectHeader name (DD.EffectDeclaration dd) <> fmt S.ControlKeyword " where"
prettyPattern ::
PrettyPrintEnv ->
@ -86,7 +87,7 @@ prettyPattern ::
prettyPattern env ctorType namespace ref =
styleHashQualified''
(fmt (S.TermReference conRef))
( HQ.stripNamespace (fromMaybe "" $ Name.toText <$> HQ.toName namespace) $
( HQ.stripNamespace (maybe "" Name.toText (HQ.toName namespace)) $
PPE.termName env conRef
)
where
@ -106,26 +107,26 @@ prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
[0 ..]
(DD.constructors' dd)
where
constructor (n, (_, _, (Type.ForallsNamed' _ t))) = constructor' n t
constructor (n, (_, _, Type.ForallsNamed' _ t)) = constructor' n t
constructor (n, (_, _, t)) = constructor' n t
constructor' n t = case Type.unArrows t of
Nothing -> prettyPattern suffixifiedPPE CT.Data name (ConstructorReference r n)
Just ts -> case fieldNames unsuffixifiedPPE r name dd of
Nothing ->
P.group . P.hang' (prettyPattern suffixifiedPPE CT.Data name (ConstructorReference r n)) " " $
P.spaced (TypePrinter.prettyRaw suffixifiedPPE Map.empty 10 <$> init ts)
P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts)))
Just fs ->
P.group $
(fmt S.DelimiterChar "{ ")
fmt S.DelimiterChar "{ "
<> P.sep
((fmt S.DelimiterChar ",") <> " " `P.orElse` "\n ")
(fmt S.DelimiterChar "," <> " " `P.orElse` "\n ")
(field <$> zip fs (init ts))
<> (fmt S.DelimiterChar " }")
<> fmt S.DelimiterChar " }"
field (fname, typ) =
P.group $
styleHashQualified'' (fmt (S.TypeReference r)) fname
<> (fmt S.TypeAscriptionColon " :") `P.hang` TypePrinter.prettyRaw suffixifiedPPE Map.empty (-1) typ
header = prettyDataHeader name dd <> (fmt S.DelimiterChar (" = " `P.orElse` "\n = "))
<> fmt S.TypeAscriptionColon " :" `P.hang` runPretty suffixifiedPPE (TypePrinter.prettyRaw Map.empty (-1) typ)
header = prettyDataHeader name dd <> fmt S.DelimiterChar (" = " `P.orElse` "\n = ")
-- Comes up with field names for a data declaration which has the form of a
-- record, like `type Pt = { x : Int, y : Int }`. Works by generating the

File diff suppressed because it is too large

View File

@ -1,5 +1,6 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Unison.Syntax.TypePrinter
( pretty,
@ -12,6 +13,7 @@ module Unison.Syntax.TypePrinter
prettySignaturesCTCollapsed,
prettySignaturesAlt,
prettySignaturesAlt',
runPretty,
)
where
@ -23,6 +25,7 @@ import Unison.Prelude
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import qualified Unison.PrettyPrintEnv as PrettyPrintEnv
import Unison.PrettyPrintEnv.FQN (Imports, elideFQN)
import Unison.PrettyPrintEnv.MonadPretty (MonadPretty, getPPE, runPretty, willCapture)
import Unison.Reference (Reference, pattern Builtin)
import Unison.Referent (Referent)
import Unison.Syntax.NamePrinter (styleHashQualified'')
@ -36,17 +39,17 @@ import qualified Unison.Var as Var
type SyntaxText = S.SyntaxText' Reference
pretty :: forall v a. (Var v) => PrettyPrintEnv -> Type v a -> Pretty ColorText
pretty ppe = PP.syntaxToColor . prettySyntax ppe
pretty :: Var v => PrettyPrintEnv -> Type v a -> Pretty ColorText
pretty ppe t = PP.syntaxToColor $ prettySyntax ppe t
prettySyntax :: forall v a. (Var v) => PrettyPrintEnv -> Type v a -> Pretty SyntaxText
prettySyntax ppe = pretty0 ppe mempty (-1)
prettySyntax :: Var v => PrettyPrintEnv -> Type v a -> Pretty SyntaxText
prettySyntax ppe = runPretty ppe . pretty0 Map.empty (-1)
prettyStr :: Var v => Maybe Width -> PrettyPrintEnv -> Type v a -> String
prettyStr (Just width) n t =
toPlain $ PP.render width $ PP.syntaxToColor $ pretty0 n Map.empty (-1) t
prettyStr Nothing n t =
toPlain $ PP.render maxBound $ PP.syntaxToColor $ pretty0 n Map.empty (-1) t
prettyStr (Just width) ppe t =
toPlain . PP.render width . PP.syntaxToColor . runPretty ppe $ pretty0 Map.empty (-1) t
prettyStr Nothing ppe t =
toPlain . PP.render maxBound . PP.syntaxToColor . runPretty ppe $ pretty0 Map.empty (-1) t
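The recurring change in this file is that printers no longer thread a PrettyPrintEnv argument explicitly; they run in MonadPretty and are unwrapped once at the boundary with runPretty. A minimal, simplified sketch of that shape (an assumption-laden analogue, not the real implementation: Env stands in for PrettyPrintEnv, and the real MonadPretty in Unison.PrettyPrintEnv.MonadPretty also tracks the bound type variables that willCapture consults):
-- Simplified sketch only; the real monad carries more state than a plain Reader.
import Control.Monad.Reader (Reader, ask, runReader)
data Env = Env -- stand-in for PrettyPrintEnv
type PrettyM = Reader Env
-- analogue of getPPE: fetch the naming environment inside a printer
getEnv :: PrettyM Env
getEnv = ask
-- analogue of runPretty: supply the environment once, at the call boundary
runPrettyM :: Env -> PrettyM a -> a
runPrettyM env m = runReader m env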
{- Explanation of precedence handling
@ -71,90 +74,103 @@ prettyStr Nothing n t =
-}
pretty0 ::
forall v a.
(Var v) =>
PrettyPrintEnv ->
forall v a m.
MonadPretty v m =>
Imports ->
Int ->
Type v a ->
Pretty SyntaxText
pretty0 n im p tp = prettyRaw n im p (cleanup (removePureEffects tp))
m (Pretty SyntaxText)
pretty0 im p tp = prettyRaw im p (cleanup (removePureEffects tp))
prettyRaw ::
forall v a.
(Var v) =>
PrettyPrintEnv ->
forall v a m.
MonadPretty v m =>
Imports ->
Int ->
Type v a ->
Pretty SyntaxText
m (Pretty SyntaxText)
-- p is the operator precedence of the enclosing context (a number from 0 to
-- 11, or -1 to avoid outer parentheses unconditionally). Function
-- application has precedence 10.
prettyRaw n im p tp = go n im p tp
prettyRaw im p tp = go im p tp
where
go :: PrettyPrintEnv -> Imports -> Int -> Type v a -> Pretty SyntaxText
go n im p tp = case stripIntroOuters tp of
Var' v -> fmt S.Var $ PP.text (Var.name v)
DD.TupleType' xs | length xs /= 1 -> PP.parenthesizeCommas $ map (go n im 0) xs
go :: Imports -> Int -> Type v a -> m (Pretty SyntaxText)
go im p tp = case stripIntroOuters tp of
Var' v -> pure . fmt S.Var $ PP.text (Var.name v)
DD.TupleType' xs | length xs /= 1 -> PP.parenthesizeCommas <$> traverse (go im 0) xs
-- Would be nice to use a different SyntaxHighlights color if the reference is an ability.
Ref' r -> styleHashQualified'' (fmt $ S.TypeReference r) $ elideFQN im (PrettyPrintEnv.typeName n r)
Cycle' _ _ -> fromString "error: TypeParser does not currently emit Cycle"
Abs' _ -> fromString "error: TypeParser does not currently emit Abs"
Ann' _ _ -> fromString "error: TypeParser does not currently emit Ann"
App' (Ref' (Builtin "Sequence")) x ->
PP.group $ (fmt S.DelimiterChar "[") <> go n im 0 x <> (fmt S.DelimiterChar "]")
Ref' r -> do
n <- getPPE
pure $ styleHashQualified'' (fmt $ S.TypeReference r) $ elideFQN im (PrettyPrintEnv.typeName n r)
Cycle' _ _ -> pure $ fromString "bug: TypeParser does not currently emit Cycle"
Abs' _ -> pure $ fromString "bug: TypeParser does not currently emit Abs"
Ann' _ _ -> pure $ fromString "bug: TypeParser does not currently emit Ann"
App' (Ref' (Builtin "Sequence")) x -> do
x' <- go im (-1) x
pure $ PP.group $ fmt S.DelimiterChar "[" <> x' <> fmt S.DelimiterChar "]"
Apps' f xs ->
PP.parenthesizeIf (p >= 10) $
go n im 9 f
`PP.hang` PP.spaced
(go n im 10 <$> xs)
PP.parenthesizeIf (p >= 10)
<$> ( PP.hang <$> go im 9 f <*> (PP.spaced <$> traverse (go im 10) xs)
)
Effect1' e t ->
PP.parenthesizeIf (p >= 10) $ go n im 9 e <> " " <> go n im 10 t
PP.parenthesizeIf (p >= 10) <$> ((\x y -> x <> " " <> y) <$> go im 9 e <*> go im 10 t)
Effects' es -> effects (Just es)
ForallsNamed' vs' body ->
let vs = filter (\v -> Var.name v /= "()") vs'
in if p < 0 && all Var.universallyQuantifyIfFree vs
then go n im p body
else
paren (p >= 0) $
let vformatted = PP.sep " " (fmt S.Var . PP.text . Var.name <$> vs)
in (fmt S.TypeOperator "" <> vformatted <> fmt S.TypeOperator ".")
`PP.hang` go n im (-1) body
prettyForall p = do
let vformatted = PP.sep " " (fmt S.Var . PP.text . Var.name <$> vs)
PP.hang (fmt S.TypeOperator "" <> vformatted <> fmt S.TypeOperator ".") <$> go im p body
in -- if we're printing a type signature, and all the type variables
-- are universally quantified, then we can omit the `forall` keyword
-- only if the type variables are not bound in an outer scope
if p < 0 && all Var.universallyQuantifyIfFree vs
then ifM (willCapture vs) (prettyForall p) (go im p body)
else paren (p >= 0) <$> prettyForall (-1)
t@(Arrow' _ _) -> case t of
EffectfulArrows' (Ref' DD.UnitRef) rest ->
PP.parenthesizeIf (p >= 10) $ arrows True True rest
PP.parenthesizeIf (p >= 10) <$> arrows True True rest
EffectfulArrows' fst rest ->
case fst of
Var' v
| Var.name v == "()" ->
PP.parenthesizeIf (p >= 10) $ arrows True True rest
PP.parenthesizeIf (p >= 10) <$> arrows True True rest
_ ->
PP.parenthesizeIf (p >= 0) $
go n im 0 fst <> arrows False False rest
_ -> "error"
_ -> "error"
effects Nothing = mempty
effects (Just es) = PP.group $ fmt S.AbilityBraces "{" <> PP.commas (go n im 0 <$> es) <> (fmt S.AbilityBraces "}")
PP.parenthesizeIf (p >= 0)
<$> ((<>) <$> go im 0 fst <*> arrows False False rest)
_ -> pure . fromString $ "bug: unexpected Arrow form in prettyRaw: " <> show t
_ -> pure . fromString $ "bug: unexpected form in prettyRaw: " <> show tp
effects Nothing = pure mempty
effects (Just es) =
PP.group . (fmt S.AbilityBraces "{" <>) . (<> fmt S.AbilityBraces "}")
<$> (PP.commas <$> traverse (go im 0) es)
-- `first`: is this the first argument?
-- `mes`: list of effects
arrow delay first mes =
(if first then mempty else PP.softbreak <> fmt S.TypeOperator "->")
<> (if delay then (if first then fmt S.DelayForceChar "'" else fmt S.DelayForceChar " '") else mempty)
<> effects mes
<> if isJust mes || not delay && not first then " " else mempty
arrow delay first mes = do
es <- effects mes
pure $
(if first then mempty else PP.softbreak <> fmt S.TypeOperator "->")
<> (if delay then (if first then fmt S.DelayForceChar "'" else fmt S.DelayForceChar " '") else mempty)
<> es
<> if isJust mes || not delay && not first then " " else mempty
arrows delay first [(mes, Ref' DD.UnitRef)] = arrow delay first mes <> fmt S.Unit "()"
arrows delay first ((mes, Ref' DD.UnitRef) : rest) =
arrow delay first mes <> parenNoGroup delay (arrows True True rest)
arrows delay first ((mes, arg) : rest) =
arrow delay first mes
<> parenNoGroup
(delay && not (null rest))
(go n im 0 arg <> arrows False False rest)
arrows False False [] = mempty
arrows False True [] = mempty -- not reachable
arrows True _ [] = mempty -- not reachable
arrows ::
Bool ->
Bool ->
[(Maybe [Type v a], Type v a)] ->
m (Pretty SyntaxText)
arrows delay first [(mes, Ref' DD.UnitRef)] = (<> fmt S.Unit "()") <$> arrow delay first mes
arrows delay first ((mes, Ref' DD.UnitRef) : rest) = do
es <- arrow delay first mes
rest' <- arrows True True rest
pure $ es <> parenNoGroup delay rest'
arrows delay first ((mes, arg) : rest) = do
es <- arrow delay first mes
arg' <- go im 0 arg
rest' <- arrows False False rest
pure $ es <> parenNoGroup (delay && not (null rest)) (arg' <> rest')
arrows False False [] = pure mempty
arrows False True [] = pure mempty -- not reachable
arrows True _ [] = pure mempty -- not reachable
paren True s = PP.group $ fmt S.Parenthesis "(" <> s <> fmt S.Parenthesis ")"
paren False s = PP.group s
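The net effect of the willCapture check in the ForallsNamed' case above shows up in a round-trip test added later in this diff: a top-level signature may drop its forall when every variable is universally quantified, but an inner signature whose variable is already bound by the enclosing signature keeps the forall explicit. Roughly, the rendered Unison looks like:
--   foo : a -> a
--   foo x =
--     bar : ∀ a. a -> a
--     bar x = x
--     bar 10
--     x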
@ -170,30 +186,32 @@ prettySignaturesCT ::
PrettyPrintEnv ->
[(Referent, HashQualified Name, Type v a)] ->
[Pretty ColorText]
prettySignaturesCT env ts = map PP.syntaxToColor $ prettySignaturesST env ts
prettySignaturesCT ppe ts = map PP.syntaxToColor $ prettySignaturesST ppe ts
prettySignaturesCTCollapsed ::
Var v =>
PrettyPrintEnv ->
[(Referent, HashQualified Name, Type v a)] ->
Pretty ColorText
prettySignaturesCTCollapsed env ts =
PP.lines $
PP.group <$> prettySignaturesCT env ts
prettySignaturesCTCollapsed ppe ts =
PP.lines
. map PP.group
$ prettySignaturesCT ppe ts
prettySignaturesST ::
Var v =>
PrettyPrintEnv ->
[(Referent, HashQualified Name, Type v a)] ->
[Pretty SyntaxText]
prettySignaturesST env ts =
PP.align [(name r hq, sig typ) | (r, hq, typ) <- ts]
prettySignaturesST ppe ts =
PP.align . runPretty ppe $ traverse (\(r, hq, typ) -> (name r hq,) <$> sig typ) ts
where
name r hq =
styleHashQualified'' (fmt $ S.TermReference r) hq
sig typ =
(fmt S.TypeAscriptionColon ": " <> pretty0 env Map.empty (-1) typ)
`PP.orElse` (fmt S.TypeAscriptionColon ": " <> PP.indentNAfterNewline 2 (pretty0 env Map.empty (-1) typ))
sig typ = do
t <- pretty0 Map.empty (-1) typ
let col = fmt S.TypeAscriptionColon ": "
pure $ (col <> t) `PP.orElse` (col <> PP.indentNAfterNewline 2 t)
-- todo: provide sample output in comment; different from prettySignatures'
prettySignaturesAlt' ::
@ -201,19 +219,21 @@ prettySignaturesAlt' ::
PrettyPrintEnv ->
[([HashQualified Name], Type v a)] ->
[Pretty ColorText]
prettySignaturesAlt' env ts =
map PP.syntaxToColor $
PP.align
[ ( PP.commas . fmap (\name -> styleHashQualified'' (fmt $ S.HashQualifier name) name) $ names,
(fmt S.TypeAscriptionColon ": " <> pretty0 env Map.empty (-1) typ)
`PP.orElse` ( fmt S.TypeAscriptionColon ": "
<> PP.indentNAfterNewline 2 (pretty0 env Map.empty (-1) typ)
)
prettySignaturesAlt' ppe ts = runPretty ppe $
do
ts' <- traverse f ts
pure $ map PP.syntaxToColor $ PP.align ts'
where
f :: MonadPretty v m => ([HashQualified Name], Type v a) -> m (Pretty SyntaxText, Pretty SyntaxText)
f (names, typ) = do
typ' <- pretty0 Map.empty (-1) typ
let col = fmt S.TypeAscriptionColon ": "
pure
( PP.commas . fmap (\name -> styleHashQualified'' (fmt $ S.HashQualifier name) name) $ names,
(col <> typ') `PP.orElse` (col <> PP.indentNAfterNewline 2 typ')
)
| (names, typ) <- ts
]
-- prettySignatures'' :: Var v => PrettyPrintEnv -> [(Name, Type v a)] -> [Pretty ColorText]
-- prettySignatures'' :: Var v => [(Name, Type v a)] -> [Pretty ColorText]
-- prettySignatures'' env ts = prettySignatures' env (first HQ.fromName <$> ts)
prettySignaturesAlt ::
@ -221,6 +241,7 @@ prettySignaturesAlt ::
PrettyPrintEnv ->
[([HashQualified Name], Type v a)] ->
Pretty ColorText
prettySignaturesAlt env ts =
PP.lines $
PP.group <$> prettySignaturesAlt' env ts
prettySignaturesAlt ppe ts =
PP.lines
. map PP.group
$ prettySignaturesAlt' ppe ts

View File

@ -0,0 +1,63 @@
module Unison.Util.Pretty.MegaParsec where
import qualified Data.List.NonEmpty as NE
import Data.Proxy
import Data.Void
import qualified Text.Megaparsec as Parser
import Unison.Prelude
import qualified Unison.Util.Pretty as P
prettyPrintParseError :: String -> Parser.ParseErrorBundle Text Void -> P.Pretty P.ColorText
prettyPrintParseError input errBundle =
let (firstError, sp) = NE.head . fst $ Parser.attachSourcePos Parser.errorOffset (Parser.bundleErrors errBundle) (Parser.bundlePosState errBundle)
in case firstError of
Parser.TrivialError _errorOffset ue ee ->
P.lines
[ printLocation sp,
P.newline,
printTrivial ue ee
]
Parser.FancyError _errorOffset ee ->
let errors = foldMap (P.string . mappend "\n" . showErrorFancy) ee
in P.lines
[ printLocation sp,
errors
]
where
printLocation :: Parser.SourcePos -> P.Pretty P.ColorText
printLocation sp =
let col = (Parser.unPos $ Parser.sourceColumn sp) - 1
row = (Parser.unPos $ Parser.sourceLine sp) - 1
errorLine = lines input !! row
in P.lines
[ P.newline,
P.string errorLine,
P.string $ Prelude.replicate col ' ' <> "^-- This is where I gave up."
]
printTrivial :: (Maybe (Parser.ErrorItem Char)) -> (Set (Parser.ErrorItem Char)) -> P.Pretty P.ColorText
printTrivial ue ee =
let expected = "I expected " <> foldMap (P.singleQuoted . P.string . showErrorItem) ee
found = P.string . mappend "I found " . showErrorItem <$> ue
message = [expected] <> catMaybes [found]
in P.oxfordCommasWith "." message
showErrorFancy :: Parser.ShowErrorComponent e => Parser.ErrorFancy e -> String
showErrorFancy (Parser.ErrorFail msg) = msg
showErrorFancy (Parser.ErrorIndentation ord ref actual) =
"incorrect indentation (got " <> show (Parser.unPos actual)
<> ", should be "
<> p
<> show (Parser.unPos ref)
<> ")"
where
p = case ord of
LT -> "less than "
EQ -> "equal to "
GT -> "greater than "
showErrorFancy (Parser.ErrorCustom a) = Parser.showErrorComponent a
showErrorItem :: Parser.ErrorItem (Parser.Token Text) -> String
showErrorItem (Parser.Tokens ts) = Parser.showTokens (Proxy @Text) ts
showErrorItem (Parser.Label label) = NE.toList label
showErrorItem Parser.EndOfInput = "end of input"
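A hypothetical usage sketch for this new module (the parser, input, and names below are made up, and OverloadedStrings is assumed); it only shows how a megaparsec ParseErrorBundle flows into prettyPrintParseError:
-- Hypothetical usage; not part of the diff.
demo :: P.Pretty P.ColorText
demo =
  case Parser.parse parser "<demo>" "abd" of
    Left bundle -> prettyPrintParseError "abd" bundle
    Right _ -> P.string "parsed fine"
  where
    parser :: Parser.Parsec Void Text Text
    parser = Parser.chunk "abc"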

View File

@ -47,11 +47,47 @@ cpattern p = CP p (run p)
run :: Pattern -> Text -> Maybe ([Text], Text)
run p =
let cp = compile p (\_ _ -> Nothing) (\acc rem -> Just (reverse acc, rem))
in \t -> cp [] t
let cp = compile p (\_ _ -> Nothing) (\acc rem -> Just (s acc, rem))
s = reverse . capturesToList . stackCaptures
in \t -> cp (Empty emptyCaptures) t
-- Pattern a -> ([a] -> a -> r) -> ... -- might need a takeable and droppable interface if we go this route
compile :: Pattern -> ([Text] -> Text -> r) -> ([Text] -> Text -> r) -> [Text] -> Text -> r
-- Stack used to track captures and to support backtracking.
-- A `try` will push a `Mark` that allows the old state
-- (both the list of captures and the current remainder)
-- to be restored on failure.
data Stack = Empty !Captures | Mark !Captures !Text !Stack
-- A difference list for representing the captures of a pattern,
-- so that capture lists can be appended in O(1).
type Captures = [Text] -> [Text]
stackCaptures :: Stack -> Captures
stackCaptures (Mark cs _ _) = cs
stackCaptures (Empty cs) = cs
{-# INLINE stackCaptures #-}
pushCaptures :: Captures -> Stack -> Stack
pushCaptures c (Empty cs) = Empty (appendCaptures c cs)
pushCaptures c (Mark cs t s) = Mark (appendCaptures c cs) t s
{-# INLINE pushCaptures #-}
pushCapture :: Text -> Stack -> Stack
pushCapture txt = pushCaptures (txt :)
{-# INLINE pushCapture #-}
appendCaptures :: Captures -> Captures -> Captures
appendCaptures c1 c2 = c1 . c2
{-# INLINE appendCaptures #-}
emptyCaptures :: Captures
emptyCaptures = id
capturesToList :: Captures -> [Text]
capturesToList c = c []
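A standalone sketch of the difference-list trick above (using String instead of Text; not part of the diff): appending is function composition, so it costs O(1), and the list is only materialized once at the end; run then reverses it to recover left-to-right capture order.
-- Standalone sketch of the Captures difference list (String for brevity).
type Caps = [String] -> [String]
pushCap :: String -> Caps -> Caps
pushCap s caps = (s :) . caps -- mirrors pushCapture/appendCaptures
materialize :: Caps -> [String]
materialize caps = caps [] -- mirrors capturesToList
-- materialize (pushCap "c" (pushCap "b" (pushCap "a" id))) == ["c", "b", "a"]
-- (run reverses this at the end to recover ["a", "b", "c"])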
type Compiled r = (Stack -> Text -> r) -> (Stack -> Text -> r) -> Stack -> Text -> r
compile :: Pattern -> Compiled r
compile !Eof !err !success = go
where
go acc t
@ -68,17 +104,17 @@ compile AnyChar !err !success = go
rem
| Text.size t > Text.size rem -> success acc rem
| otherwise -> err acc rem
compile (Capture (Many AnyChar)) !_ !success = \acc t -> success (t : acc) Text.empty
compile (Capture (Many AnyChar)) !_ !success = \acc t -> success (pushCapture t acc) Text.empty
compile (Capture c) !err !success = go
where
err' _ _ acc0 t0 = err acc0 t0
success' _ rem acc0 t0 = success (Text.take (Text.size t0 - Text.size rem) t0 : acc0) rem
success' _ rem acc0 t0 = success (pushCapture (Text.take (Text.size t0 - Text.size rem) t0) acc0) rem
compiled = compile c err' success'
go acc t = compiled acc t acc t
compile (Or p1 p2) err success = cp1
where
cp2 = compile p2 err success
cp1 = compile p1 cp2 success
cp1 = try "Or" (compile p1) cp2 success
compile (Join ps) !err !success = go ps
where
go [] = success
@ -112,6 +148,8 @@ compile (Many p) !_ !success = case p of
AnyChar -> (\acc _ -> success acc Text.empty)
CharIn cs -> walker (charInPred cs)
NotCharIn cs -> walker (charNotInPred cs)
CharRange c1 c2 -> walker (\ch -> ch >= c1 && ch <= c2)
NotCharRange c1 c2 -> walker (\ch -> ch < c1 || ch > c2)
Digit -> walker isDigit
Letter -> walker isLetter
Punctuation -> walker isPunctuation
@ -144,16 +182,18 @@ compile (Replicate m n p) !err !success = case p of
else success acc (Text.drop n t)
CharIn cs -> dropper (charInPred cs)
NotCharIn cs -> dropper (charNotInPred cs)
CharRange c1 c2 -> dropper (\ch -> ch >= c1 && ch <= c2)
NotCharRange c1 c2 -> dropper (\ch -> ch < c1 || ch > c2)
Digit -> dropper isDigit
Letter -> dropper isLetter
Punctuation -> dropper isPunctuation
Space -> dropper isSpace
_ -> go1 m
_ -> try "Replicate" (go1 m) err (go2 (n - m))
where
go1 0 = go2 (n - m)
go1 n = compile p err (go1 (n - 1))
go1 0 = \_err success stk rem -> success stk rem
go1 n = \err success -> compile p err (go1 (n - 1) err success)
go2 0 = success
go2 n = compile p success (go2 (n - 1))
go2 n = try "Replicate" (compile p) success (go2 (n - 1))
dropper ok acc t
| (i, rest) <- Text.dropWhileMax ok n t, i >= m = success acc rest
@ -184,3 +224,16 @@ charInPred [] = const False
charInPred (c : chs) = let ok = charInPred chs in \ci -> ci == c || ok ci
charNotInPred [] = const True
charNotInPred (c : chs) = let ok = charNotInPred chs in (\ci -> ci /= c && ok ci)
-- runs c and if it fails, restores state to what it was before
try :: String -> Compiled r -> Compiled r
try msg c err success stk rem =
c err' success' (Mark id rem stk) rem
where
success' stk rem = case stk of
Mark caps _ stk -> success (pushCaptures caps stk) rem
_ -> error $ "Pattern compiler error in: " <> msg
err' stk _ = case stk of
Mark _ rem stk -> err stk rem
_ -> error $ "Pattern compiler error in: " <> msg
{-# INLINE try #-}

View File

@ -463,6 +463,12 @@ test =
tcBinding 50 "+" Nothing "a b -> foo a b" "a + b = foo a b",
tcBinding 50 "+" Nothing "a b c -> foo a b c" "(+) a b c = foo a b c",
tcBinding 50 "." Nothing "f g x -> f (g x)" "(.) f g x = f (g x)",
tcBinding
50
"foo"
(Just "forall a. a -> a")
"x -> let\n bar : forall a. a -> a\n bar x = x\n bar 10\n x"
"foo : a -> a\nfoo x =\n bar : \8704 a. a -> a\n bar x = x\n bar 10\n x",
tcBreaks
32
"let\n\

View File

@ -1,3 +1,7 @@
{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
{-# HLINT ignore "Use camelCase" #-}
module Unison.Test.Syntax.TypePrinter where
import qualified Data.Map as Map
@ -18,7 +22,7 @@ tc_diff_rtt :: Bool -> String -> String -> PP.Width -> Test ()
tc_diff_rtt rtt s expected width =
let input_type = Common.t s
get_names = PPE.fromNames Common.hqLength Unison.Builtin.names
prettied = fmap toPlain $ PP.syntaxToColor $ prettyRaw get_names Map.empty (-1) input_type
prettied = fmap toPlain $ PP.syntaxToColor . runPretty get_names $ prettyRaw Map.empty (-1) input_type
actual =
if width == 0
then PP.renderUnbroken $ prettied
@ -26,26 +30,24 @@ tc_diff_rtt rtt s expected width =
actual_reparsed = Common.t actual
in scope s $
tests
[ ( if actual == expected
then ok
else do
note $ "expected: " ++ show expected
note $ "actual : " ++ show actual
note $ "expectedS:\n" ++ expected
note $ "actualS:\n" ++ actual
note $ "show(input) : " ++ show input_type
note $ "prettyprint : " ++ show prettied
crash "actual != expected"
),
( if (not rtt) || (input_type == actual_reparsed)
then ok
else do
note $ "round trip test..."
note $ "single parse: " ++ show input_type
note $ "double parse: " ++ show actual_reparsed
note $ "prettyprint : " ++ show prettied
crash "single parse != double parse"
)
[ if actual == expected
then ok
else do
note $ "expected: " ++ show expected
note $ "actual : " ++ show actual
note $ "expectedS:\n" ++ expected
note $ "actualS:\n" ++ actual
note $ "show(input) : " ++ show input_type
note $ "prettyprint : " ++ show prettied
crash "actual != expected",
if not rtt || (input_type == actual_reparsed)
then ok
else do
note $ "round trip test..."
note $ "single parse: " ++ show input_type
note $ "double parse: " ++ show actual_reparsed
note $ "prettyprint : " ++ show prettied
crash "single parse != double parse"
]
-- As above, but do the round-trip test unconditionally.
@ -87,6 +89,9 @@ test =
tc "Pair (a -> b) (c -> d)",
tc "Pair a b ->{e1, e2} Pair a b ->{} Pair (a -> b) d -> Pair c d",
tc "[Pair a a]",
tc "[a]",
tc "[a -> b]",
tc "[a ->{g} b]",
tc "'a",
tc "'Pair a a",
tc "a -> 'b",

View File

@ -124,6 +124,57 @@ test =
dpart = P.Join [P.Literal ".", part]
ip = P.Join [part, P.Replicate 3 3 dpart, P.Eof]
in P.run ip "127.0.0.1" == Just (["127", "0", "0", "1"], "")
expect' $
let p = P.Replicate 5 8 (P.Capture P.Digit)
in P.run p "12345" == Just (["1", "2", "3", "4", "5"], "")
expect' $
let p = P.Replicate 5 8 (P.Capture P.Digit) `P.Or` P.Join []
in P.run p "1234" == Just ([], "1234")
expect' $
let p = P.Replicate 5 8 (P.Capture (P.Join [P.Digit, P.Literal "z"])) `P.Or` P.Join []
in P.run p "1z2z3z4z5z6a" == Just (["1z", "2z", "3z", "4z", "5z"], "6a")
-- https://github.com/unisonweb/unison/issues/3530
expectEqual Nothing $
let p =
P.Or
(P.Join [P.Literal "a", P.Literal "b"])
(P.Join [P.Literal "a", P.Literal "c"])
in P.run p "aac"
expectEqual (Just ([], "")) $
let p =
P.Or
( P.Capture $
( P.Or
(P.Join [P.Literal "a", P.Literal "b"])
(P.Join [P.Literal "a", P.Literal "c"])
)
)
(P.Join [P.Literal "aa", P.Literal "cd"])
in P.run p "aacd"
-- this is just making sure we don't duplicate captures to our left
-- when entering an `Or` node
expectEqual (Just (["@"], "")) $
let p = P.Join [P.Capture P.AnyChar, P.Or (P.Literal "c") (P.Join []), P.Literal "d"]
in P.run p "@cd"
expectEqual (Just (["%", "c"], "")) $
let p = P.Join [P.Capture P.AnyChar, (P.Or (P.Capture (P.Literal "c")) (P.Join [])), P.Literal "d"]
in P.run p "%cd"
expectEqual (Just ([""], "ac")) $
let p = P.Capture (P.Or (P.Join [P.Literal "a", P.Literal "b"]) (P.Join []))
in P.run p "ac"
expectEqual (Just ([""], "ac")) $
let p = P.Capture (P.Replicate 0 1 (P.Join [P.Literal "a", P.Literal "b"]))
in P.run p "ac"
-- nested or tests
expectEqual (Just (["zzzaaa", "!"], "!!")) $
let p =
P.Or
( P.Or
(P.Literal "a")
(P.Join [P.Literal "z", P.Replicate 3 5 (P.Literal "z")])
)
(P.Join [P.Capture (P.Literal "zzzaaa"), P.Capture (P.Literal "!")])
in P.run p "zzzaaa!!!"
ok
]
where

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.34.4.
-- This file has been generated from package.yaml by hpack version 0.35.0.
--
-- see: https://github.com/sol/hpack
@ -23,6 +23,7 @@ flag optimized
library
exposed-modules:
U.Codebase.Branch.Diff
Unison.Builtin
Unison.Builtin.Decls
Unison.Builtin.Terms
@ -91,6 +92,7 @@ library
Unison.Parsers
Unison.PrettyPrintEnv
Unison.PrettyPrintEnv.FQN
Unison.PrettyPrintEnv.MonadPretty
Unison.PrettyPrintEnv.Names
Unison.PrettyPrintEnv.Util
Unison.PrettyPrintEnvDecl
@ -142,6 +144,7 @@ library
Unison.Util.Exception
Unison.Util.Logger
Unison.Util.PinBoard
Unison.Util.Pretty.MegaParsec
Unison.Util.Star3
Unison.Util.Text
Unison.Util.Text.Pattern
@ -207,6 +210,7 @@ library
, extra
, filepath
, fingertree
, free
, fsnotify
, fuzzyfind
, generic-lens
@ -293,9 +297,9 @@ library
, x509-system
, yaml
, zlib
default-language: Haskell2010
if flag(optimized)
ghc-options: -funbox-strict-fields -O2
default-language: Haskell2010
test-suite parser-typechecker-tests
type: exitcode-stdio-1.0
@ -392,6 +396,7 @@ test-suite parser-typechecker-tests
, filemanip
, filepath
, fingertree
, free
, fsnotify
, fuzzyfind
, generic-lens
@ -480,6 +485,6 @@ test-suite parser-typechecker-tests
, x509-system
, yaml
, zlib
default-language: Haskell2010
if flag(optimized)
ghc-options: -funbox-strict-fields -O2
default-language: Haskell2010

View File

@ -9,15 +9,16 @@ fi
usage() {
echo "NOTE: must be run from the root of the project."
echo "Usage: $0 VERSION [TARGET]"
echo "Usage: $0 VERSION SHARE_BASE_PATH [TARGET]"
echo "VERSION: The version you're releasing, e.g. M4a"
echo "SHARE_BASE_PATH: Which base version to pull from share, e.g. 'unison.public.base.releases.M4'"
echo "TARGET: The revision to make the release from, defaults to 'trunk'"
echo ""
echo "E.g."
echo "$0 M4a"
}
if [[ -z "$1" ]] ; then
if [[ -z "$1" || -z "$2" ]] ; then
usage
exit 1
fi
@ -35,7 +36,8 @@ fi
version="${1}"
prev_version=$(./scripts/previous-tag.sh "$version")
target=${2:-trunk}
share_base_path=${2}
target=${3:-trunk}
tag="release/${version}"
echo "Creating release in unison-local-ui..."
@ -44,7 +46,7 @@ gh release create "release/${version}" --repo unisonweb/unison-local-ui --target
echo "Kicking off release workflow in unisonweb/unison"
git tag "${tag}" "${target}"
git push origin "${tag}"
gh workflow run release --repo unisonweb/unison --field "version=${version}"
gh workflow run release --repo unisonweb/unison --field "version=${version}" --field "share_base_path=${share_base_path}"
echo "Kicking off Homebrew update task"
gh workflow run release --repo unisonweb/homebrew-unison --field "version=${version}"

View File

@ -28,6 +28,7 @@ dependencies:
- exceptions
- extra
- filepath
- free
- fuzzyfind
- friendly-time
- generic-lens

View File

@ -19,7 +19,6 @@ import qualified Data.List as List
import Data.List.Extra (nubOrd)
import qualified Data.List.NonEmpty as Nel
import qualified Data.Map as Map
import Unison.Cli.TypeCheck (typecheck)
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import Data.Set.NonEmpty (NESet)
@ -30,6 +29,8 @@ import Data.Tuple.Extra (uncurry3)
import qualified System.Console.Regions as Console.Regions
import System.Environment (withArgs)
import qualified Text.Megaparsec as P
import qualified U.Codebase.Branch.Diff as V2Branch
import qualified U.Codebase.Causal as V2Causal
import U.Codebase.HashTags (CausalHash (..))
import qualified U.Codebase.Reflog as Reflog
import qualified U.Codebase.Sqlite.Operations as Ops
@ -45,6 +46,7 @@ import Unison.Cli.Monad (Cli)
import qualified Unison.Cli.Monad as Cli
import qualified Unison.Cli.MonadUtils as Cli
import Unison.Cli.NamesUtils (basicParseNames, basicPrettyPrintNamesA, displayNames, findHistoricalHQs, getBasicPrettyPrintNames, makeHistoricalParsingNames, makePrintNamesFromLabeled', makeShadowedPrintNamesFromHQ)
import Unison.Cli.TypeCheck (typecheck)
import Unison.Cli.UnisonConfigUtils (gitUrlKey, remoteMappingKey)
import Unison.Codebase (Codebase, Preprocessing (..), PushGitBranchOpts (..))
import qualified Unison.Codebase as Codebase
@ -1185,9 +1187,9 @@ loop e = do
case filtered of
[(Referent.Ref ref, ty)]
| Typechecker.fitsScheme ty mainType -> do
let codeLookup = () <$ Codebase.toCodeLookup codebase
whenJustM (liftIO (Runtime.compileTo runtime codeLookup ppe ref (output <> ".uc"))) \err ->
Cli.returnEarly (EvaluationFailure err)
let codeLookup = () <$ Codebase.toCodeLookup codebase
whenJustM (liftIO (Runtime.compileTo runtime codeLookup ppe ref (output <> ".uc"))) \err ->
Cli.returnEarly (EvaluationFailure err)
| otherwise -> Cli.returnEarly (BadMainFunction smain ty ppe [mainType])
_ -> Cli.returnEarly (NoMainFunction smain ppe [mainType])
IOTestI main -> do
@ -1435,6 +1437,24 @@ loop e = do
Cli.Env {codebase} <- ask
r <- liftIO (Codebase.runTransaction codebase IntegrityCheck.integrityCheckFullCodebase)
Cli.respond (IntegrityCheck r)
DebugNameDiffI fromSBH toSBH -> do
Cli.Env {codebase} <- ask
sbhLen <- liftIO $ Codebase.branchHashLength codebase
fromCHs <- liftIO $ Codebase.branchHashesByPrefix codebase fromSBH
toCHs <- liftIO $ Codebase.branchHashesByPrefix codebase toSBH
(fromCH, toCH) <- case (Set.toList fromCHs, Set.toList toCHs) of
((_ : _ : _), _) -> Cli.returnEarly $ Output.BranchHashAmbiguous fromSBH (Set.map (SBH.fromHash sbhLen) fromCHs)
([], _) -> Cli.returnEarly $ Output.NoBranchWithHash fromSBH
(_, []) -> Cli.returnEarly $ Output.NoBranchWithHash toSBH
(_, (_ : _ : _)) -> Cli.returnEarly $ Output.BranchHashAmbiguous toSBH (Set.map (SBH.fromHash sbhLen) toCHs)
([fromCH], [toCH]) -> pure (fromCH, toCH)
output <- liftIO do
fromBranch <- (Codebase.getShallowCausalForHash codebase $ Cv.causalHash1to2 fromCH) >>= V2Causal.value
toBranch <- (Codebase.getShallowCausalForHash codebase $ Cv.causalHash1to2 toCH) >>= V2Causal.value
treeDiff <- V2Branch.diffBranches fromBranch toBranch
let nameChanges = V2Branch.nameChanges Nothing treeDiff
pure (DisplayDebugNameDiff nameChanges)
Cli.respond output
DeprecateTermI {} -> Cli.respond NotImplemented
DeprecateTypeI {} -> Cli.respond NotImplemented
RemoveTermReplacementI from patchPath -> doRemoveReplacement from patchPath True
@ -1600,6 +1620,7 @@ inputDescription input =
CreatePullRequestI {} -> wat
DebugClearWatchI {} -> wat
DebugDoctorI {} -> wat
DebugNameDiffI {} -> wat
DebugDumpNamespaceSimpleI {} -> wat
DebugDumpNamespacesI {} -> wat
DebugNumberedArgsI {} -> wat
@ -2446,10 +2467,10 @@ searchBranchScored names0 score queries =
pair qn
HQ.HashQualified qn h
| h `SH.isPrefixOf` Referent.toShortHash ref ->
pair qn
pair qn
HQ.HashOnly h
| h `SH.isPrefixOf` Referent.toShortHash ref ->
Set.singleton (Nothing, result)
Set.singleton (Nothing, result)
_ -> mempty
where
result = SR.termSearchResult names0 name ref
@ -2466,10 +2487,10 @@ searchBranchScored names0 score queries =
pair qn
HQ.HashQualified qn h
| h `SH.isPrefixOf` Reference.toShortHash ref ->
pair qn
pair qn
HQ.HashOnly h
| h `SH.isPrefixOf` Reference.toShortHash ref ->
Set.singleton (Nothing, result)
Set.singleton (Nothing, result)
_ -> mempty
where
result = SR.typeSearchResult names0 name ref

View File

@ -32,7 +32,6 @@ import qualified Unison.PrettyPrintEnv as PPE
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import qualified Unison.Server.Backend as Backend
import Unison.Symbol (Symbol)
import qualified Unison.Util.Monoid as Monoid
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Set as Set
@ -40,7 +39,7 @@ import qualified Unison.Util.Set as Set
-- Add default metadata to all added types and terms in a slurp component.
--
-- No-op if the slurp component is empty.
addDefaultMetadata :: SlurpComponent Symbol -> Cli ()
addDefaultMetadata :: SlurpComponent -> Cli ()
addDefaultMetadata adds =
when (not (SC.isEmpty adds)) do
Cli.time "add-default-metadata" do

View File

@ -69,7 +69,6 @@ import qualified Unison.Util.Map as Map (remap)
import Unison.Util.Monoid (foldMapM)
import qualified Unison.Util.Relation as R
import qualified Unison.Util.Set as Set
import Unison.Var (Var)
import qualified Unison.Var as Var
import Unison.WatchKind (WatchKind)
import qualified Unison.WatchKind as WK
@ -86,7 +85,7 @@ handleUpdate input optionalPatch requestedNames = do
UsePatch p -> Just p
slurpCheckNames <- Branch.toNames <$> Cli.getCurrentBranch0
sr <- getSlurpResultForUpdate requestedNames slurpCheckNames
let addsAndUpdates :: SlurpComponent Symbol
let addsAndUpdates :: SlurpComponent
addsAndUpdates = Slurp.updates sr <> Slurp.adds sr
fileNames :: Names
fileNames = UF.typecheckedToNames (Slurp.originalFile sr)
@ -202,9 +201,9 @@ handleUpdate input optionalPatch requestedNames = do
& Path.resolve @_ @_ @Path.Absolute currentPath'
& tShow
getSlurpResultForUpdate :: Set Name -> Names -> Cli (SlurpResult Symbol)
getSlurpResultForUpdate :: Set Name -> Names -> Cli SlurpResult
getSlurpResultForUpdate requestedNames slurpCheckNames = do
let slurp :: TypecheckedUnisonFile Symbol Ann -> SlurpResult Symbol
let slurp :: TypecheckedUnisonFile Symbol Ann -> SlurpResult
slurp file =
Slurp.slurpFile file (Set.map Name.toVar requestedNames) Slurp.UpdateOp slurpCheckNames
@ -513,10 +512,10 @@ rewriteTermReferences mapping =
-- updates the namespace for adding `slurp`
doSlurpAdds ::
forall m v.
(Monad m, Var v) =>
SlurpComponent v ->
TypecheckedUnisonFile v Ann ->
forall m.
Monad m =>
SlurpComponent ->
TypecheckedUnisonFile Symbol Ann ->
(Branch0 m -> Branch0 m)
doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions)
where
@ -531,7 +530,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions)
if Set.member v tests
then Metadata.singleton isTestType isTestValue
else Metadata.empty
doTerm :: v -> (Path, Branch0 m -> Branch0 m)
doTerm :: Symbol -> (Path, Branch0 m -> Branch0 m)
doTerm v = case toList (Names.termsNamed names (Name.unsafeFromVar v)) of
[] -> errorMissingVar v
[r] ->
@ -543,7 +542,7 @@ doSlurpAdds slurp uf = Branch.batchUpdates (typeActions <> termActions)
<> Var.nameStr v
<> ": "
<> show wha
doType :: v -> (Path, Branch0 m -> Branch0 m)
doType :: Symbol -> (Path, Branch0 m -> Branch0 m)
doType v = case toList (Names.typesNamed names (Name.unsafeFromVar v)) of
[] -> errorMissingVar v
[r] ->

View File

@ -9,6 +9,7 @@ module Unison.Codebase.Editor.Input
BranchId,
AbsBranchId,
parseBranchId,
parseShortBranchHash,
HashOrHQSplit',
Insistence (..),
PullMode (..),
@ -67,6 +68,10 @@ parseBranchId ('#' : s) = case SBH.fromText (Text.pack s) of
Just h -> pure $ Left h
parseBranchId s = Right <$> Path.parsePath' s
parseShortBranchHash :: String -> Either String ShortBranchHash
parseShortBranchHash ('#' : s) | Just sbh <- SBH.fromText (Text.pack s) = Right sbh
parseShortBranchHash _ = Left "Invalid hash, expected a base32hex string."
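A hypothetical smoke test for the new parser (the hash text is made up; it only needs to be base32hex characters after the leading '#'):
-- Hypothetical smoke test; not part of the diff.
checkParseSBH :: Bool
checkParseSBH =
  case (parseShortBranchHash "#ab01", parseShortBranchHash "ab01") of
    (Right _, Left _) -> True -- '#' plus base32hex parses; missing '#' is rejected
    _ -> False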
data PullMode
= PullWithHistory
| PullWithoutHistory
@ -186,6 +191,7 @@ data Input
| DebugDumpNamespaceSimpleI
| DebugClearWatchI
| DebugDoctorI
| DebugNameDiffI ShortBranchHash ShortBranchHash
| QuitI
| ApiI
| UiI

View File

@ -20,6 +20,7 @@ import Data.Set.NonEmpty (NESet)
import Data.Time (UTCTime)
import Network.URI (URI)
import qualified System.Console.Haskeline as Completion
import U.Codebase.Branch.Diff (NameChanges)
import Unison.Auth.Types (CredentialFailure)
import qualified Unison.Codebase.Branch as Branch
import Unison.Codebase.Editor.DisplayObject (DisplayObject)
@ -186,7 +187,7 @@ data Output
| ListShallow (IO PPE.PrettyPrintEnv) [ShallowListEntry Symbol Ann]
| ListOfPatches (Set Name)
| -- show the result of add/update
SlurpOutput Input PPE.PrettyPrintEnv (SlurpResult Symbol)
SlurpOutput Input PPE.PrettyPrintEnv SlurpResult
| -- Original source, followed by the errors:
ParseErrors Text [Parser.Err Symbol]
| TypeErrors Path.Absolute Text PPE.PrettyPrintEnv [Context.ErrorNote Symbol Ann]
@ -199,7 +200,7 @@ data Output
[(Symbol, Term Symbol ())]
(Map Symbol (Ann, WK.WatchKind, Term Symbol (), Runtime.IsCacheHit))
| RunResult PPE.PrettyPrintEnv (Term Symbol ())
| Typechecked SourceName PPE.PrettyPrintEnv (SlurpResult Symbol) (UF.TypecheckedUnisonFile Symbol Ann)
| Typechecked SourceName PPE.PrettyPrintEnv SlurpResult (UF.TypecheckedUnisonFile Symbol Ann)
| DisplayRendered (Maybe FilePath) (P.Pretty P.ColorText)
| -- "display" definitions, possibly to a FilePath on disk (e.g. editing)
DisplayDefinitions
@ -274,6 +275,7 @@ data Output
| CredentialFailureMsg CredentialFailure
| PrintVersion Text
| IntegrityCheck IntegrityResult
| DisplayDebugNameDiff NameChanges
| DisplayDebugCompletions [Completion.Completion]
data ShareError
@ -420,6 +422,7 @@ isFailure o = case o of
ShareError {} -> True
ViewOnShare {} -> False
DisplayDebugCompletions {} -> False
DisplayDebugNameDiff {} -> False
isNumberedFailure :: NumberedOutput -> Bool
isNumberedFailure = \case

View File

@ -23,6 +23,7 @@ import Unison.Prelude
import Unison.Referent (Referent)
import qualified Unison.Referent as Referent
import qualified Unison.Referent' as Referent
import Unison.Symbol (Symbol)
import qualified Unison.UnisonFile as UF
import qualified Unison.UnisonFile.Names as UF
import qualified Unison.Util.Map as Map
@ -41,11 +42,11 @@ data SlurpOp
deriving (Eq, Show)
-- | Tag a variable as representing a term, type, or constructor
data TaggedVar v = TermVar v | TypeVar v | ConstructorVar v
data TaggedVar = TermVar Symbol | TypeVar Symbol | ConstructorVar Symbol
deriving (Eq, Ord, Show)
-- | Extract the var from a TaggedVar
untagged :: TaggedVar v -> v
untagged :: TaggedVar -> Symbol
untagged (TermVar v) = v
untagged (TypeVar v) = v
untagged (ConstructorVar v) = v
@ -95,24 +96,22 @@ mostSevereDepStatus =
-- | Analyze a file and determine the status of all of its definitions with respect to a set
-- of vars to analyze and an operation you wish to perform.
slurpFile ::
forall v.
Var v =>
UF.TypecheckedUnisonFile v Ann ->
Set v ->
UF.TypecheckedUnisonFile Symbol Ann ->
Set Symbol ->
SlurpOp ->
Names ->
SR.SlurpResult v
SR.SlurpResult
slurpFile uf defsToConsider slurpOp unalteredCodebaseNames =
let -- A mapping of all vars in the file to their references.
-- TypeVars are keyed to Type references
-- TermVars are keyed to Term references
-- ConstructorVars are keyed to Constructor references
varReferences :: Map (TaggedVar v) LD.LabeledDependency
varReferences :: Map TaggedVar LD.LabeledDependency
varReferences = buildVarReferences uf
-- All variables which were either:
-- 1. specified explicitly by the end-user
-- 2. An in-file transitive dependency (within the file) of a var specified by the end-user.
involvedVars :: Set (TaggedVar v)
involvedVars :: Set TaggedVar
involvedVars = computeInvolvedVars uf defsToConsider varReferences
-- The set of names after removing any constructors which would
-- be removed by the requested operation.
@ -121,14 +120,14 @@ slurpFile uf defsToConsider slurpOp unalteredCodebaseNames =
-- A mapping of every involved variable to its transitive dependencies.
-- Dependency here is any type or term referenced within the definition (transitively).
-- This also includes all Constructors of any type used by a term.
varDeps :: Map (TaggedVar v) (Set (TaggedVar v))
varDeps :: Map TaggedVar (Set TaggedVar)
varDeps = computeVarDeps uf involvedVars
-- Compute the status of each definition on its own.
-- This doesn't consider the vars dependencies.
selfStatuses :: Map (TaggedVar v) DefnStatus
selfStatuses :: Map TaggedVar DefnStatus
selfStatuses = computeSelfStatuses involvedVars varReferences codebaseNames
-- A mapping from each definition's name to the most severe status of it plus its transitive dependencies.
depStatuses :: Map (TaggedVar v) DepStatus
depStatuses :: Map TaggedVar DepStatus
depStatuses = computeDepStatuses varDeps selfStatuses
in toSlurpResult uf slurpOp defsToConsider involvedVars fileNames codebaseNames selfStatuses depStatuses
where
@ -138,10 +137,9 @@ slurpFile uf defsToConsider slurpOp unalteredCodebaseNames =
-- | Return a modified set of names, with constructors that would be deprecated by possible
-- updates removed.
computeNamesWithDeprecations ::
Var v =>
UF.TypecheckedUnisonFile v Ann ->
UF.TypecheckedUnisonFile Symbol Ann ->
Names ->
Set (TaggedVar v) ->
Set TaggedVar ->
SlurpOp ->
Names
computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = \case
@ -183,16 +181,14 @@ computeNamesWithDeprecations uf unalteredCodebaseNames involvedVars = \case
-- | Compute a mapping of each definition to its status.
computeSelfStatuses ::
forall v.
(Ord v, Var v) =>
Set (TaggedVar v) ->
Map (TaggedVar v) LD.LabeledDependency ->
Set TaggedVar ->
Map TaggedVar LD.LabeledDependency ->
Names ->
Map (TaggedVar v) DefnStatus
Map TaggedVar DefnStatus
computeSelfStatuses vars varReferences codebaseNames =
Map.fromSet definitionStatus vars
where
definitionStatus :: TaggedVar v -> DefnStatus
definitionStatus :: TaggedVar -> DefnStatus
definitionStatus tv =
let ld = case Map.lookup tv varReferences of
Just r -> r
@ -232,12 +228,10 @@ computeDepStatuses varDeps selfStatuses =
-- I.e. any variable requested by the user and all of their dependencies,
-- component peers, and component peers of dependencies.
computeInvolvedVars ::
forall v.
Var v =>
UF.TypecheckedUnisonFile v Ann ->
Set v ->
Map (TaggedVar v) LD.LabeledDependency ->
Set (TaggedVar v)
UF.TypecheckedUnisonFile Symbol Ann ->
Set Symbol ->
Map TaggedVar LD.LabeledDependency ->
Set TaggedVar
computeInvolvedVars uf defsToConsider varReferences
-- If nothing was specified, consider every var in the file.
| Set.null defsToConsider = Map.keysSet varReferences
@ -245,8 +239,8 @@ computeInvolvedVars uf defsToConsider varReferences
where
-- The user specifies _untyped_ names, which may not even exist in the file.
-- We need to figure out which vars exist, and what type they are if they do.
requestedVarsWhichActuallyExist :: Set (TaggedVar v)
requestedVarsWhichActuallyExist = Set.fromList $ do
requestedVarsWhichActuallyExist :: Set TaggedVar
requestedVarsWhichActuallyExist = Set.fromList do
v <- Set.toList defsToConsider
-- We don't know whether each var is a type or term, so we try both.
-- We don't test ConstructorVar because you can't request to add/update a Constructor in
@ -257,11 +251,9 @@ computeInvolvedVars uf defsToConsider varReferences
-- | Compute transitive dependencies for all relevant variables.
computeVarDeps ::
forall v.
Var v =>
UF.TypecheckedUnisonFile v Ann ->
Set (TaggedVar v) ->
Map (TaggedVar v) (Set (TaggedVar v))
UF.TypecheckedUnisonFile Symbol Ann ->
Set TaggedVar ->
Map TaggedVar (Set TaggedVar)
computeVarDeps uf allInvolvedVars =
allInvolvedVars
& Set.toList
@ -272,17 +264,17 @@ computeVarDeps uf allInvolvedVars =
-- | Compute the closure of all vars which the provided vars depend on.
-- A type depends on its constructors.
varClosure :: (Var v) => UF.TypecheckedUnisonFile v a -> Set (TaggedVar v) -> Set (TaggedVar v)
varClosure :: UF.TypecheckedUnisonFile Symbol a -> Set TaggedVar -> Set TaggedVar
varClosure uf (partitionVars -> sc) =
let deps = SC.closeWithDependencies uf sc
in mingleVars deps
-- | Collect a relation of term or type var to labelled dependency for all definitions mentioned in a file.
buildVarReferences :: forall v a. (Ord v, Show v) => UF.TypecheckedUnisonFile v a -> Map (TaggedVar v) LD.LabeledDependency
buildVarReferences :: UF.TypecheckedUnisonFile Symbol a -> Map TaggedVar LD.LabeledDependency
buildVarReferences uf =
decls <> effects <> terms <> constructors
where
terms :: Map (TaggedVar v) LD.LabeledDependency
terms :: Map TaggedVar LD.LabeledDependency
terms =
UF.hashTermsId uf
-- Filter out non-test watch expressions
@ -295,30 +287,30 @@ buildVarReferences uf =
& Map.bimap
TermVar
(\(refId, _, _, _) -> LD.derivedTerm refId)
decls :: Map (TaggedVar v) LD.LabeledDependency
decls :: Map TaggedVar LD.LabeledDependency
decls =
UF.dataDeclarationsId' uf
& Map.bimap
TypeVar
(\(refId, _) -> LD.derivedType refId)
effects :: Map (TaggedVar v) LD.LabeledDependency
effects :: Map TaggedVar LD.LabeledDependency
effects =
UF.effectDeclarationsId' uf
& Map.bimap
TypeVar
(\(refId, _) -> LD.derivedType refId)
constructors :: Map (TaggedVar v) LD.LabeledDependency
constructors :: Map TaggedVar LD.LabeledDependency
constructors =
let effectConstructors :: Map (TaggedVar v) LD.LabeledDependency
let effectConstructors :: Map TaggedVar LD.LabeledDependency
effectConstructors = Map.fromList $ do
(_, (typeRefId, effect)) <- Map.toList (UF.effectDeclarations' uf)
let decl = DD.toDataDecl effect
(conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl)
pure $ (ConstructorVar constructorV, LD.effectConstructor (CR.ConstructorReference typeRefId conId))
dataConstructors :: Map (TaggedVar v) LD.LabeledDependency
dataConstructors :: Map TaggedVar LD.LabeledDependency
dataConstructors = Map.fromList $ do
(_, (typeRefId, decl)) <- Map.toList (UF.dataDeclarations' uf)
(conId, constructorV) <- zip (DD.constructorIds decl) (DD.constructorVars decl)
@ -326,16 +318,16 @@ buildVarReferences uf =
in effectConstructors <> dataConstructors
-- A helper type just used by 'toSlurpResult' for partitioning results.
data SlurpingSummary v = SlurpingSummary
{ adds :: !(SlurpComponent v),
duplicates :: !(SlurpComponent v),
updates :: !(SlurpComponent v),
termCtorColl :: !(SlurpComponent v),
ctorTermColl :: !(SlurpComponent v),
blocked :: !(SlurpComponent v)
data SlurpingSummary = SlurpingSummary
{ adds :: !SlurpComponent,
duplicates :: !SlurpComponent,
updates :: !SlurpComponent,
termCtorColl :: !SlurpComponent,
ctorTermColl :: !SlurpComponent,
blocked :: !SlurpComponent
}
instance Ord v => Semigroup (SlurpingSummary v) where
instance Semigroup SlurpingSummary where
SlurpingSummary a b c d e f
<> SlurpingSummary a' b' c' d' e' f' =
SlurpingSummary
@ -346,22 +338,20 @@ instance Ord v => Semigroup (SlurpingSummary v) where
(e <> e')
(f <> f')
instance Ord v => Monoid (SlurpingSummary v) where
instance Monoid SlurpingSummary where
mempty = SlurpingSummary mempty mempty mempty mempty mempty mempty
-- | Convert a 'VarsByStatus' mapping into a 'SR.SlurpResult'
toSlurpResult ::
forall v.
(Var v) =>
UF.TypecheckedUnisonFile v Ann ->
UF.TypecheckedUnisonFile Symbol Ann ->
SlurpOp ->
Set v ->
Set (TaggedVar v) ->
Set Symbol ->
Set TaggedVar ->
Names ->
Names ->
Map (TaggedVar v) DefnStatus ->
Map (TaggedVar v) DepStatus ->
SR.SlurpResult v
Map TaggedVar DefnStatus ->
Map TaggedVar DepStatus ->
SR.SlurpResult
toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatuses depStatuses =
SR.SlurpResult
{ SR.originalFile = uf,
@ -393,7 +383,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
-- Compute a singleton summary for a single definition, per its own status and the most severe status of its
-- transitive dependencies.
summarize1 :: TaggedVar v -> DefnStatus -> SlurpingSummary v
summarize1 :: TaggedVar -> DefnStatus -> SlurpingSummary
summarize1 name = \case
CtorTermCollision -> mempty {ctorTermColl = sc}
Duplicated -> mempty {duplicates = sc}
@ -413,7 +403,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
DepNeedsUpdate -> mempty {updates = sc}
DepCollision -> mempty {blocked = sc}
where
sc :: SlurpComponent v
sc :: SlurpComponent
sc =
scFromTaggedVar name
@ -421,7 +411,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
depStatus =
Map.findWithDefault DepOk name depStatuses
scFromTaggedVar :: TaggedVar v -> SlurpComponent v
scFromTaggedVar :: TaggedVar -> SlurpComponent
scFromTaggedVar = \case
TermVar v -> SC.fromTerms (Set.singleton v)
TypeVar v -> SC.fromTypes (Set.singleton v)
@ -430,8 +420,8 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
buildAliases ::
Rel.Relation Name Referent ->
Rel.Relation Name Referent ->
Set v ->
Map v SR.Aliases
Set Symbol ->
Map Symbol SR.Aliases
buildAliases existingNames namesFromFile dups =
Map.fromList
[ ( varFromName n,
@ -452,14 +442,14 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
Set.notMember (varFromName n) dups
]
termAliases :: Map v SR.Aliases
termAliases :: Map Symbol SR.Aliases
termAliases =
buildAliases
(Names.terms codebaseNames)
(Names.terms fileNames)
(SC.terms duplicates)
typeAliases :: Map v SR.Aliases
typeAliases :: Map Symbol SR.Aliases
typeAliases =
buildAliases
(Rel.mapRan Referent.Ref $ Names.types codebaseNames)
@ -470,7 +460,7 @@ toSlurpResult uf op requestedVars involvedVars fileNames codebaseNames selfStatu
varFromName name = Var.named (Name.toText name)
-- | Sort out a set of variables by whether it is a term or type.
partitionVars :: (Foldable f, Ord v) => f (TaggedVar v) -> SlurpComponent v
partitionVars :: Foldable f => f TaggedVar -> SlurpComponent
partitionVars =
foldMap
( \case
@ -480,7 +470,7 @@ partitionVars =
)
-- | Collapse a SlurpComponent into a tagged set.
mingleVars :: Ord v => SlurpComponent v -> Set (TaggedVar v)
mingleVars :: SlurpComponent -> Set TaggedVar
mingleVars SlurpComponent {terms, types, ctors} =
Set.map TypeVar types
<> Set.map TermVar terms

View File

@ -1,41 +1,64 @@
{-# LANGUAGE PatternSynonyms #-}
module Unison.Codebase.Editor.SlurpComponent
( -- * Slurp component
SlurpComponent (..),
module Unison.Codebase.Editor.SlurpComponent where
-- ** Basic constructors
empty,
fromTerms,
fromTypes,
fromCtors,
-- ** Predicates
isEmpty,
-- ** Set operations
difference,
intersection,
-- ** Closure
closeWithDependencies,
)
where
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Tuple (swap)
import qualified Unison.DataDeclaration as DD
import Unison.Prelude
import Unison.Prelude hiding (empty)
import Unison.Reference (Reference)
import Unison.Symbol (Symbol)
import qualified Unison.Term as Term
import Unison.UnisonFile (TypecheckedUnisonFile)
import qualified Unison.UnisonFile as UF
data SlurpComponent v = SlurpComponent {types :: Set v, terms :: Set v, ctors :: Set v}
data SlurpComponent = SlurpComponent
{ types :: Set Symbol,
terms :: Set Symbol,
ctors :: Set Symbol
}
deriving (Eq, Ord, Show)
isEmpty :: SlurpComponent v -> Bool
isEmpty :: SlurpComponent -> Bool
isEmpty sc = Set.null (types sc) && Set.null (terms sc) && Set.null (ctors sc)
empty :: Ord v => SlurpComponent v
empty = SlurpComponent {types = mempty, terms = mempty, ctors = mempty}
empty :: SlurpComponent
empty = SlurpComponent {types = Set.empty, terms = Set.empty, ctors = Set.empty}
difference :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v
difference :: SlurpComponent -> SlurpComponent -> SlurpComponent
difference c1 c2 = SlurpComponent {types = types', terms = terms', ctors = ctors'}
where
types' = types c1 `Set.difference` types c2
terms' = terms c1 `Set.difference` terms c2
ctors' = ctors c1 `Set.difference` ctors c2
intersection :: Ord v => SlurpComponent v -> SlurpComponent v -> SlurpComponent v
intersection :: SlurpComponent -> SlurpComponent -> SlurpComponent
intersection c1 c2 = SlurpComponent {types = types', terms = terms', ctors = ctors'}
where
types' = types c1 `Set.intersection` types c2
terms' = terms c1 `Set.intersection` terms c2
ctors' = ctors c1 `Set.intersection` ctors c2
instance Ord v => Semigroup (SlurpComponent v) where
instance Semigroup SlurpComponent where
c1 <> c2 =
SlurpComponent
{ types = types c1 <> types c2,
@ -43,36 +66,35 @@ instance Ord v => Semigroup (SlurpComponent v) where
ctors = ctors c1 <> ctors c2
}
instance Ord v => Monoid (SlurpComponent v) where
mempty = SlurpComponent {types = mempty, terms = mempty, ctors = mempty}
instance Monoid SlurpComponent where
mempty = empty
-- I'm calling this `closeWithDependencies` because it doesn't just compute
-- the dependencies of the inputs, it mixes them together. Make sure this
-- is what you want.
closeWithDependencies ::
forall v a.
Ord v =>
TypecheckedUnisonFile v a ->
SlurpComponent v ->
SlurpComponent v
forall a.
TypecheckedUnisonFile Symbol a ->
SlurpComponent ->
SlurpComponent
closeWithDependencies uf inputs = seenDefns {ctors = constructorDeps}
where
seenDefns = foldl' termDeps (SlurpComponent {terms = mempty, types = seenTypes, ctors = mempty}) (terms inputs)
seenTypes = foldl' typeDeps mempty (types inputs)
constructorDeps :: Set v
constructorDeps :: Set Symbol
constructorDeps = UF.constructorsForDecls seenTypes uf
termDeps :: SlurpComponent v -> v -> SlurpComponent v
termDeps :: SlurpComponent -> Symbol -> SlurpComponent
termDeps seen v | Set.member v (terms seen) = seen
termDeps seen v = fromMaybe seen $ do
term <- findTerm v
let -- get the `v`s for the transitive dependency types
-- (the ones for terms are just the `freeVars` below)
-- although this isn't how you'd do it for a term that's already in codebase
tdeps :: [v]
tdeps :: [Symbol]
tdeps = resolveTypes $ Term.dependencies term
seenTypes :: Set v
seenTypes :: Set Symbol
seenTypes = foldl' typeDeps (types seen) tdeps
seenTerms = Set.insert v (terms seen)
pure $
@ -85,7 +107,7 @@ closeWithDependencies uf inputs = seenDefns {ctors = constructorDeps}
)
(Term.freeVars term)
typeDeps :: Set v -> v -> Set v
typeDeps :: Set Symbol -> Symbol -> Set Symbol
typeDeps seen v | Set.member v seen = seen
typeDeps seen v = fromMaybe seen $ do
dd <-
@ -93,25 +115,25 @@ closeWithDependencies uf inputs = seenDefns {ctors = constructorDeps}
<|> fmap (DD.toDataDecl . snd) (Map.lookup v (UF.effectDeclarations' uf))
pure $ foldl' typeDeps (Set.insert v seen) (resolveTypes $ DD.dependencies dd)
resolveTypes :: Set Reference -> [v]
resolveTypes :: Set Reference -> [Symbol]
resolveTypes rs = [v | r <- Set.toList rs, Just v <- [Map.lookup r typeNames]]
findTerm :: v -> Maybe (Term.Term v a)
findTerm :: Symbol -> Maybe (Term.Term Symbol a)
findTerm v = Map.lookup v allTerms
allTerms = UF.allTerms uf
typeNames :: Map Reference v
typeNames :: Map Reference Symbol
typeNames = invert (fst <$> UF.dataDeclarations' uf) <> invert (fst <$> UF.effectDeclarations' uf)
invert :: forall k v. Ord k => Ord v => Map k v -> Map v k
invert m = Map.fromList (swap <$> Map.toList m)
fromTypes :: Ord v => Set v -> SlurpComponent v
fromTypes :: Set Symbol -> SlurpComponent
fromTypes vs = mempty {types = vs}
fromTerms :: Ord v => Set v -> SlurpComponent v
fromTerms :: Set Symbol -> SlurpComponent
fromTerms vs = mempty {terms = vs}
fromCtors :: Ord v => Set v -> SlurpComponent v
fromCtors :: Set Symbol -> SlurpComponent
fromCtors vs = mempty {ctors = vs}
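A small usage sketch of the now-monomorphic SlurpComponent (hypothetical: it assumes OverloadedStrings and a qualified import of Unison.Var as Var to build Symbols; the names are made up):
-- Hypothetical usage; not part of the diff.
exampleComponent :: SlurpComponent
exampleComponent =
  fromTerms (Set.fromList [Var.named "foo", Var.named "bar"])
    <> fromTypes (Set.singleton (Var.named "Pt"))
-- difference exampleComponent (fromTerms (Set.singleton (Var.named "bar")))
-- keeps the term foo and the type Pt, and drops bar.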

View File

@ -33,6 +33,7 @@ import Unison.Name (Name)
import Unison.Parser.Ann (Ann)
import Unison.Prelude
import qualified Unison.PrettyPrintEnv as PPE
import Unison.Symbol (Symbol)
import qualified Unison.Syntax.DeclPrinter as DeclPrinter
import qualified Unison.Syntax.TypePrinter as TP
import qualified Unison.UnisonFile as UF
@ -52,36 +53,36 @@ data Aliases
}
deriving (Show, Eq, Ord)
data SlurpResult v = SlurpResult
data SlurpResult = SlurpResult
{ -- The file that we tried to add from
originalFile :: UF.TypecheckedUnisonFile v Ann,
originalFile :: UF.TypecheckedUnisonFile Symbol Ann,
-- Extra definitions that were added to satisfy transitive closure,
-- beyond what the user specified.
extraDefinitions :: SlurpComponent v,
extraDefinitions :: SlurpComponent,
-- Previously existed only in the file; now added to the codebase.
adds :: SlurpComponent v,
adds :: SlurpComponent,
-- Exists in the branch and the file, with the same name and contents.
duplicates :: SlurpComponent v,
duplicates :: SlurpComponent,
-- Not added to codebase due to the name already existing
-- in the branch with a different definition.
-- I.e. an update is required but we're performing an add.
collisions :: SlurpComponent v,
collisions :: SlurpComponent,
-- Names that already exist in the branch, but whose definitions
-- in `originalFile` are treated as updates.
updates :: SlurpComponent v,
updates :: SlurpComponent,
-- Names of terms in `originalFile` that couldn't be updated because
-- they refer to existing constructors. (User should instead do a find/replace,
-- a constructor rename, or refactor the type that the name comes from).
termExistingConstructorCollisions :: Set v,
constructorExistingTermCollisions :: Set v,
termExistingConstructorCollisions :: Set Symbol,
constructorExistingTermCollisions :: Set Symbol,
-- -- Already defined in the branch, but with a different name.
termAlias :: Map v Aliases,
typeAlias :: Map v Aliases,
defsWithBlockedDependencies :: SlurpComponent v
termAlias :: Map Symbol Aliases,
typeAlias :: Map Symbol Aliases,
defsWithBlockedDependencies :: SlurpComponent
}
deriving (Show)
hasAddsOrUpdates :: Ord v => SlurpResult v -> Bool
hasAddsOrUpdates :: SlurpResult -> Bool
hasAddsOrUpdates s =
-- We intentionally ignore constructors here since they are added as part of adding their
-- types.
@ -120,11 +121,9 @@ aliasesToShow :: Int
aliasesToShow = 5
pretty ::
forall v.
Var v =>
IsPastTense ->
PPE.PrettyPrintEnv ->
SlurpResult v ->
SlurpResult ->
P.Pretty P.ColorText
pretty isPast ppe sr =
let tms = UF.hashTerms (originalFile sr)
@ -190,7 +189,7 @@ pretty isPast ppe sr =
++ (if null newNames then mempty else [newMessage])
-- The second field in the result is an optional second column.
okTerm :: v -> [(P.Pretty P.ColorText, Maybe (P.Pretty P.ColorText))]
okTerm :: Symbol -> [(P.Pretty P.ColorText, Maybe (P.Pretty P.ColorText))]
okTerm v = case Map.lookup v tms of
Nothing ->
[(P.bold (prettyVar v), Just $ P.red "(Unison bug, unknown term)")]
@ -303,14 +302,14 @@ pretty isPast ppe sr =
sr
]
isOk :: Ord v => SlurpResult v -> Bool
isOk :: SlurpResult -> Bool
isOk SlurpResult {..} =
SC.isEmpty collisions
&& Set.null termExistingConstructorCollisions
&& Set.null constructorExistingTermCollisions
&& SC.isEmpty defsWithBlockedDependencies
isAllDuplicates :: Ord v => SlurpResult v -> Bool
isAllDuplicates :: SlurpResult -> Bool
isAllDuplicates SlurpResult {..} =
emptyIgnoringConstructors adds
&& emptyIgnoringConstructors updates
@ -322,15 +321,14 @@ isAllDuplicates SlurpResult {..} =
&& Set.null constructorExistingTermCollisions
&& emptyIgnoringConstructors defsWithBlockedDependencies
where
emptyIgnoringConstructors :: SlurpComponent v -> Bool
emptyIgnoringConstructors :: SlurpComponent -> Bool
emptyIgnoringConstructors SlurpComponent {types, terms} =
null types && null terms
filterUnisonFile ::
Ord v =>
SlurpResult v ->
UF.TypecheckedUnisonFile v Ann ->
UF.TypecheckedUnisonFile v Ann
SlurpResult ->
UF.TypecheckedUnisonFile Symbol Ann ->
UF.TypecheckedUnisonFile Symbol Ann
filterUnisonFile
SlurpResult {adds, updates}
( UF.TypecheckedUnisonFileId

View File

@ -5,9 +5,12 @@ module Unison.Codebase.Editor.UriParser
writeGitRepo,
deprecatedWriteGitRemotePath,
writeRemotePath,
parseReadRemoteNamespace,
parseReadShareRemoteNamespace,
)
where
import Data.Bifunctor (first)
import Data.Char (isAlphaNum, isDigit, isSpace)
import Data.Sequence as Seq
import qualified Data.Text as Text
@ -33,6 +36,8 @@ import Unison.NameSegment (NameSegment (..))
import qualified Unison.NameSegment as NameSegment
import Unison.Prelude
import qualified Unison.Syntax.Lexer
import qualified Unison.Util.Pretty as P
import qualified Unison.Util.Pretty.MegaParsec as P
type P = P.Parsec Void Text.Text
@ -60,6 +65,16 @@ repoPath =
fmap ReadRemoteNamespaceGit readGitRemoteNamespace
<|> fmap ReadRemoteNamespaceShare readShareRemoteNamespace
parseReadRemoteNamespace :: String -> String -> Either (P.Pretty P.ColorText) ReadRemoteNamespace
parseReadRemoteNamespace label input =
let printError err = P.lines [P.string "I couldn't parse the repository address given above.", P.prettyPrintParseError input err]
in first printError (P.parse repoPath label (Text.pack input))
parseReadShareRemoteNamespace :: String -> String -> Either (P.Pretty P.ColorText) ReadShareRemoteNamespace
parseReadShareRemoteNamespace label input =
let printError err = P.lines [P.string "I couldn't parse this as a share path.", P.prettyPrintParseError input err]
in first printError (P.parse readShareRemoteNamespace label (Text.pack input))
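-- A usage sketch for the two helpers above (the success value is indicative only; the
-- field names are assumed to mirror the WriteShareRemotePath doctest below):
--
-- > parseReadShareRemoteNamespace "UNISON_BASE_PATH" "unisonweb.base._releases.M4"
-- > -- Right (ReadShareRemoteNamespace {server = ShareRepo, repo = "unisonweb", path = base._releases.M4})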
-- >>> P.parseMaybe writeRemotePath "unisonweb.base._releases.M4"
-- >>> P.parseMaybe writeRemotePath "git(git@github.com:unisonweb/base:v3)._releases.M3"
-- Just (WriteRemotePathShare (WriteShareRemotePath {server = ShareRepo, repo = "unisonweb", path = base._releases.M4}))

View File

@ -1,5 +1,4 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
module Unison.CommandLine.DisplayValues where
@ -329,7 +328,7 @@ displayDoc pped terms typeOf evaluated types = go
let ppe = PPE.declarationPPE pped ref
in terms ref >>= \case
Nothing -> pure $ "😶 Missing term source for: " <> termName ppe r
Just tm -> pure . P.syntaxToColor $ P.group $ TP.prettyBinding ppe (PPE.termName ppe r) tm
Just tm -> pure . P.syntaxToColor . P.group $ TP.prettyBinding ppe (PPE.termName ppe r) tm
Referent.Con (ConstructorReference r _) _ -> prettyType r
prettyType r =
let ppe = PPE.declarationPPE pped r

View File

@ -11,7 +11,6 @@ import qualified Data.Map as Map
import Data.Proxy (Proxy (..))
import qualified Data.Set as Set
import qualified Data.Text as Text
import Data.Void (Void)
import System.Console.Haskeline.Completion (Completion (Completion))
import qualified Text.Megaparsec as P
import qualified Unison.Codebase.Branch as Branch
@ -20,8 +19,9 @@ import Unison.Codebase.Editor.Input (Input)
import qualified Unison.Codebase.Editor.Input as Input
import Unison.Codebase.Editor.Output.PushPull (PushPull (Pull, Push))
import qualified Unison.Codebase.Editor.Output.PushPull as PushPull
import Unison.Codebase.Editor.RemoteRepo (ReadRemoteNamespace, WriteGitRepo, WriteRemotePath)
import Unison.Codebase.Editor.RemoteRepo (WriteGitRepo, WriteRemotePath)
import qualified Unison.Codebase.Editor.SlurpResult as SR
import Unison.Codebase.Editor.UriParser (parseReadRemoteNamespace)
import qualified Unison.Codebase.Editor.UriParser as UriParser
import qualified Unison.Codebase.Path as Path
import qualified Unison.Codebase.Path.Parse as Path
@ -1358,46 +1358,6 @@ loadPullRequest =
_ -> Left (I.help loadPullRequest)
)
parseReadRemoteNamespace :: String -> String -> Either (P.Pretty P.ColorText) ReadRemoteNamespace
parseReadRemoteNamespace label input =
let printError err = P.lines [P.string "I couldn't parse the repository address given above.", prettyPrintParseError input err]
in first printError (P.parse UriParser.repoPath label (Text.pack input))
prettyPrintParseError :: String -> P.ParseErrorBundle Text Void -> P.Pretty P.ColorText
prettyPrintParseError input errBundle =
let (firstError, sp) = NE.head . fst $ P.attachSourcePos P.errorOffset (P.bundleErrors errBundle) (P.bundlePosState errBundle)
in case firstError of
P.TrivialError _errorOffset ue ee ->
P.lines
[ printLocation sp,
P.newline,
printTrivial ue ee
]
P.FancyError _errorOffset ee ->
let errors = foldMap (P.string . mappend "\n" . showErrorFancy) ee
in P.lines
[ printLocation sp,
errors
]
where
printLocation :: P.SourcePos -> P.Pretty P.ColorText
printLocation sp =
let col = (P.unPos $ P.sourceColumn sp) - 1
row = (P.unPos $ P.sourceLine sp) - 1
errorLine = lines input !! row
in P.lines
[ P.newline,
P.string errorLine,
P.string $ replicate col ' ' <> "^-- This is where I gave up."
]
printTrivial :: (Maybe (P.ErrorItem Char)) -> (Set (P.ErrorItem Char)) -> P.Pretty P.ColorText
printTrivial ue ee =
let expected = "I expected " <> foldMap (P.singleQuoted . P.string . showErrorItem) ee
found = P.string . mappend "I found " . showErrorItem <$> ue
message = [expected] <> catMaybes [found]
in P.oxfordCommasWith "." message
parseWriteGitRepo :: String -> String -> Either (P.Pretty P.ColorText) WriteGitRepo
parseWriteGitRepo label input = do
first
@ -2005,6 +1965,24 @@ debugDoctor =
_ -> Left (showPatternHelp debugDoctor)
)
debugNameDiff :: InputPattern
debugNameDiff =
InputPattern
{ patternName = "debug.name-diff",
aliases = [],
visibility = I.Hidden,
argTypes = [(Required, namespaceArg), (Required, namespaceArg)],
help = P.wrap "List all name changes between two causal hashes. Does not detect patch or metadata changes.",
parse =
( \case
[from, to] -> first fromString $ do
fromSBH <- Input.parseShortBranchHash from
toSBH <- Input.parseShortBranchHash to
pure $ Input.DebugNameDiffI fromSBH toSBH
_ -> Left (I.help debugNameDiff)
)
}
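-- A usage sketch for the pattern above, matching the transcript added in this change
-- (the numeric arguments refer to the numbered hashes printed by `history`):
--
-- > .> debug.name-diff 4 1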
test :: InputPattern
test =
InputPattern
@ -2322,6 +2300,7 @@ validInputs =
debugClearWatchCache,
debugDoctor,
debugTabCompletion,
debugNameDiff,
gist,
authLogin,
printVersion

View File

@ -37,6 +37,7 @@ import System.Directory
getHomeDirectory,
)
import U.Codebase.Branch (NamespaceStats (..))
import U.Codebase.Branch.Diff (NameChanges (..))
import U.Codebase.Sqlite.DbId (SchemaVersion (SchemaVersion))
import U.Util.Base32Hex (Base32Hex)
import qualified U.Util.Base32Hex as Base32Hex
@ -74,6 +75,7 @@ import qualified Unison.Codebase.PushBehavior as PushBehavior
import qualified Unison.Codebase.Runtime as Runtime
import Unison.Codebase.ShortBranchHash (ShortBranchHash)
import qualified Unison.Codebase.ShortBranchHash as SBH
import qualified Unison.Codebase.SqliteCodebase.Conversions as Cv
import Unison.Codebase.SqliteCodebase.GitError
( GitSqliteCodebaseError (..),
)
@ -84,6 +86,7 @@ import Unison.CommandLine (bigproblem, note, tip)
import Unison.CommandLine.InputPatterns (makeExample')
import qualified Unison.CommandLine.InputPatterns as IP
import Unison.ConstructorReference (GConstructorReference (..))
import qualified Unison.ConstructorType as CT
import qualified Unison.DataDeclaration as DD
import qualified Unison.Hash as Hash
import qualified Unison.HashQualified as HQ
@ -296,21 +299,20 @@ notifyNumbered o = case o of
else
first
( \p ->
( P.lines
[ P.wrap $
"The changes summarized below are available for you to review,"
<> "using the following command:",
"",
P.indentN 2 $
IP.makeExampleNoBackticks
IP.loadPullRequest
[ (prettyReadRemoteNamespace baseRepo),
(prettyReadRemoteNamespace headRepo)
],
"",
p
]
)
P.lines
[ P.wrap $
"The changes summarized below are available for you to review,"
<> "using the following command:",
"",
P.indentN 2 $
IP.makeExampleNoBackticks
IP.loadPullRequest
[ prettyReadRemoteNamespace baseRepo,
prettyReadRemoteNamespace headRepo
],
"",
p
]
)
(showDiffNamespace HideNumbers ppe (absPathToBranchId Path.absoluteEmpty) (absPathToBranchId Path.absoluteEmpty) diff)
-- todo: these numbers aren't going to work,
@ -663,8 +665,8 @@ notifyUser dir o = case o of
CachedTests 0 _ -> pure . P.callout "😶" $ "No tests to run."
CachedTests n n'
| n == n' ->
pure $
P.lines [cache, "", displayTestResults True ppe oks fails]
pure $
P.lines [cache, "", displayTestResults True ppe oks fails]
CachedTests _n m ->
pure $
if m == 0
@ -673,7 +675,6 @@ notifyUser dir o = case o of
P.indentN 2 $
P.lines ["", cache, "", displayTestResults False ppe oks fails, "", ""]
where
NewlyComputed -> do
clearCurrentLine
pure $
@ -687,7 +688,7 @@ notifyUser dir o = case o of
TestIncrementalOutputStart ppe (n, total) r _src -> do
putPretty' $
P.shown (total - n) <> " tests left to run, current test: "
<> (P.syntaxToColor $ prettyHashQualified (PPE.termName ppe $ Referent.Ref r))
<> P.syntaxToColor (prettyHashQualified (PPE.termName ppe $ Referent.Ref r))
pure mempty
TestIncrementalOutputEnd _ppe (_n, _total) _r result -> do
clearCurrentLine
@ -1037,7 +1038,7 @@ notifyUser dir o = case o of
P.bracket . P.lines $
P.wrap "The watch expression(s) reference these definitions:" :
"" :
[ (P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b)
[ P.syntaxToColor $ TermPrinter.prettyBinding ppe (HQ.unsafeFromVar v) b
| (v, b) <- bindings
]
prettyWatches =
@ -1805,6 +1806,20 @@ notifyUser dir o = case o of
IntegrityCheck result -> pure $ case result of
NoIntegrityErrors -> "🎉 No issues detected 🎉"
IntegrityErrorDetected ns -> prettyPrintIntegrityErrors ns
DisplayDebugNameDiff NameChanges {termNameAdds, termNameRemovals, typeNameAdds, typeNameRemovals} -> do
let referentText =
-- We don't use the constructor type in the actual output here, so there's no
-- point in looking up the correct one.
P.text . Referent.toText . runIdentity . Cv.referent2to1 (\_ref -> Identity CT.Data)
let referenceText = P.text . Reference.toText . Cv.reference2to1
pure $
P.columnNHeader
["Kind", "Name", "Change", "Ref"]
( (termNameAdds <&> \(n, ref) -> ["Term", prettyName n, "Added", referentText ref])
<> (termNameRemovals <&> \(n, ref) -> ["Term", prettyName n, "Removed", referentText ref])
<> (typeNameAdds <&> \(n, ref) -> ["Type", prettyName n, "Added", referenceText ref])
<> (typeNameRemovals <&> \(n, ref) -> ["Type", prettyName n, "Removed", referenceText ref])
)
DisplayDebugCompletions completions ->
pure $
P.column2
@ -2149,7 +2164,7 @@ unsafePrettyTermResultSigFull' ppe = \case
[ P.hiBlack "-- " <> greyHash (HQ.fromReferent r),
P.group $
P.commas (fmap greyHash $ hq : map HQ'.toHQ (toList aliases)) <> " : "
<> (P.syntaxToColor $ TypePrinter.pretty0 ppe mempty (-1) typ),
<> P.syntaxToColor (TypePrinter.prettySyntax ppe typ),
mempty
]
_ -> error "Don't pass Nothing"
@ -2447,7 +2462,7 @@ showDiffNamespace ::
(Pretty, NumberedArgs)
showDiffNamespace _ _ _ _ diffOutput
| OBD.isEmpty diffOutput =
("The namespaces are identical.", mempty)
("The namespaces are identical.", mempty)
showDiffNamespace sn ppe oldPath newPath OBD.BranchDiffOutput {..} =
(P.sepNonEmpty "\n\n" p, toList args)
where

View File

@ -8,7 +8,6 @@ module Unison.LSP where
import Colog.Core (LogAction (LogAction))
import qualified Colog.Core as Colog
import Control.Monad.Reader
import Data.Aeson hiding (Options, defaultOptions)
import GHC.IO.Exception (ioe_errno)
import qualified Ki
import qualified Language.LSP.Logging as LSP
@ -27,6 +26,8 @@ import Unison.Codebase.Runtime (Runtime)
import qualified Unison.Debug as Debug
import Unison.LSP.CancelRequest (cancelRequestHandler)
import Unison.LSP.CodeAction (codeActionHandler)
import Unison.LSP.Completion (completionHandler)
import qualified Unison.LSP.Configuration as Config
import qualified Unison.LSP.FileAnalysis as Analysis
import Unison.LSP.FoldingRange (foldingRangeRequest)
import qualified Unison.LSP.HandlerUtils as Handlers
@ -65,7 +66,7 @@ spawnLsp codebase runtime latestBranch latestPath = TCP.withSocketsDo do
case Errno <$> ioe_errno ioerr of
Just errNo
| errNo == eADDRINUSE -> do
putStrLn $ "Note: Port " <> lspPort <> " is already bound by another process or another UCM. The LSP server will not be started."
putStrLn $ "Note: Port " <> lspPort <> " is already bound by another process or another UCM. The LSP server will not be started."
_ -> do
Debug.debugM Debug.LSP "LSP Exception" ioerr
Debug.debugM Debug.LSP "LSP Errno" (ioe_errno ioerr)
@ -85,21 +86,14 @@ serverDefinition ::
ServerDefinition Config
serverDefinition vfsVar codebase runtime scope latestBranch latestPath =
ServerDefinition
{ defaultConfig = lspDefaultConfig,
onConfigurationChange = lspOnConfigurationChange,
{ defaultConfig = defaultLSPConfig,
onConfigurationChange = Config.updateConfig,
doInitialize = lspDoInitialize vfsVar codebase runtime scope latestBranch latestPath,
staticHandlers = lspStaticHandlers,
interpretHandler = lspInterpretHandler,
options = lspOptions
}
-- | Detect user LSP configuration changes.
lspOnConfigurationChange :: Config -> Value -> Either Text Config
lspOnConfigurationChange _ _ = pure Config
lspDefaultConfig :: Config
lspDefaultConfig = Config
-- | Initialize any context needed by the LSP server
lspDoInitialize ::
MVar VFS ->
@ -120,6 +114,7 @@ lspDoInitialize vfsVar codebase runtime scope latestBranch latestPath lspContext
parseNamesCacheVar <- newTVarIO mempty
currentPathCacheVar <- newTVarIO Path.absoluteEmpty
cancellationMapVar <- newTVarIO mempty
completionsVar <- newTVarIO mempty
let env = Env {ppeCache = readTVarIO ppeCacheVar, parseNamesCache = readTVarIO parseNamesCacheVar, currentPathCache = readTVarIO currentPathCacheVar, ..}
let lspToIO = flip runReaderT lspContext . unLspT . flip runReaderT env . runLspM
Ki.fork scope (lspToIO Analysis.fileAnalysisWorker)
@ -141,6 +136,7 @@ lspRequestHandlers =
& SMM.insert STextDocumentHover (mkHandler hoverHandler)
& SMM.insert STextDocumentCodeAction (mkHandler codeActionHandler)
& SMM.insert STextDocumentFoldingRange (mkHandler foldingRangeRequest)
& SMM.insert STextDocumentCompletion (mkHandler completionHandler)
where
defaultTimeout = 10_000 -- 10s
mkHandler ::
@ -167,6 +163,7 @@ lspNotificationHandlers =
& SMM.insert STextDocumentDidChange (ClientMessageHandler VFS.lspChangeFile)
& SMM.insert SInitialized (ClientMessageHandler Notifications.initializedHandler)
& SMM.insert SCancelRequest (ClientMessageHandler $ Notifications.withDebugging cancelRequestHandler)
& SMM.insert SWorkspaceDidChangeConfiguration (ClientMessageHandler Config.workspaceConfigurationChanged)
-- | A natural transformation into IO, required by the LSP lib.
lspInterpretHandler :: Env -> Lsp <~> IO

View File

@ -4,122 +4,238 @@
module Unison.LSP.Completion where
import Control.Lens hiding (List)
import Control.Comonad.Cofree
import Control.Lens hiding (List, (:<))
import Control.Monad.Reader
import Data.String.Here.Uninterpolated (here)
import Data.Bifunctor (second)
import Data.List.Extra (nubOrdOn)
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import Language.LSP.Types
import Language.LSP.Types.Lens
import qualified Text.FuzzyFind as Fuzzy
import Unison.Codebase.Path (Path)
import qualified Unison.Codebase.Path as Path
import qualified Unison.HashQualified' as HQ'
import Unison.LSP.Types
import qualified Unison.LSP.VFS as VFS
import Unison.LabeledDependency (LabeledDependency)
import qualified Unison.LabeledDependency as LD
import Unison.Name (Name)
import qualified Unison.Name as Name
import Unison.NameSegment (NameSegment (..))
import qualified Unison.NameSegment as NameSegment
import Unison.Names (Names (..))
import Unison.Prelude
import qualified Unison.Server.Endpoints.FuzzyFind as FZF
import qualified Unison.Server.Syntax as Server
import qualified Unison.Server.Types as Backend
import qualified Unison.PrettyPrintEnv as PPE
import qualified Unison.PrettyPrintEnvDecl as PPED
import qualified Unison.Referent as Referent
import qualified Unison.Util.Monoid as Monoid
import qualified Unison.Util.Relation as Relation
-- | Rudimentary auto-completion handler
--
-- TODO:
-- * Rewrite this to use an index rather than fuzzy searching ALL names
-- * Respect ucm's current path
-- * Provide namespaces as auto-complete targets
-- * Auto-complete minimally suffixed names
-- * Include docs in completion details?
completionHandler :: RequestMessage 'TextDocumentCompletion -> (Either ResponseError (ResponseResult 'TextDocumentCompletion) -> Lsp ()) -> Lsp ()
completionHandler m respond =
respond =<< do
mayPrefix <- VFS.completionPrefix (m ^. params)
case mayPrefix of
Nothing -> pure . Right . InL . List $ []
Just (range, prefix) -> do
matches <- expand range prefix
let isIncomplete = True -- TODO: be smarter about this
pure . Right . InR . CompletionList isIncomplete . List $ snippetCompletions prefix range <> matches
respond . maybe (Right $ InL mempty) (Right . InR) =<< runMaybeT do
(range, prefix) <- MaybeT $ VFS.completionPrefix (m ^. params)
ppe <- PPED.suffixifiedPPE <$> lift globalPPE
completions <- lift getCompletions
Config {maxCompletions} <- lift getConfig
let defMatches = matchCompletions completions prefix
let (isIncomplete, defCompletions) =
defMatches
& nubOrdOn (\(p, _name, ref) -> (p, ref))
& fmap (over _1 Path.toText)
& case maxCompletions of
Nothing -> (False,)
Just n -> takeCompletions n
let defCompletionItems =
defCompletions
& mapMaybe \(path, fqn, dep) ->
let biasedPPE = PPE.biasTo [fqn] ppe
hqName = LD.fold (PPE.types biasedPPE) (PPE.terms biasedPPE) dep
in hqName <&> \hqName -> mkDefCompletionItem range (Name.toText fqn) path (HQ'.toText hqName) dep
pure . CompletionList isIncomplete . List $ defCompletionItems
where
resultToCompletion :: Range -> Text -> FZF.FoundResult -> CompletionItem
resultToCompletion range prefix = \case
FZF.FoundTermResult (FZF.FoundTerm {namedTerm = Backend.NamedTerm {termName, termType}}) -> do
(mkCompletionItem (HQ'.toText termName))
{ _detail = (": " <>) . Text.pack . Server.toPlain <$> termType,
_kind = Just CiVariable,
_insertText = Text.stripPrefix prefix (HQ'.toText termName),
_textEdit = Just $ CompletionEditText (TextEdit range (HQ'.toText termName))
}
FZF.FoundTypeResult (FZF.FoundType {namedType = Backend.NamedType {typeName, typeTag}}) ->
let (detail, kind) = case typeTag of
Backend.Ability -> ("Ability", CiInterface)
Backend.Data -> ("Data", CiClass)
in (mkCompletionItem (HQ'.toText typeName))
{ _detail = Just detail,
_kind = Just kind
}
expand :: Range -> Text -> Lsp [CompletionItem]
expand range prefix = do
-- We should probably write a different fzf specifically for completion, but for now, it
-- expects the unique pieces of the query to be different "words".
let query = Text.unwords . Text.splitOn "." $ prefix
cb <- asks codebase
lspBackend (FZF.serveFuzzyFind cb Nothing Nothing Nothing Nothing (Just $ Text.unpack query)) >>= \case
Left _be -> pure []
Right results ->
pure . fmap (resultToCompletion range prefix . snd) . take 15 . sortOn (Fuzzy.score . fst) $ results
-- Takes at most the specified number of completions, but also indicates with a boolean
-- whether there were more completions remaining so we can pass that along to the client.
takeCompletions :: Int -> [a] -> (Bool, [a])
takeCompletions 0 xs = (not $ null xs, [])
takeCompletions _ [] = (False, [])
takeCompletions n (x : xs) = second (x :) $ takeCompletions (pred n) xs
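-- A couple of hand-checked examples of the definition above:
--
-- >>> takeCompletions 2 ["a", "b", "c"]
-- (True,["a","b"])
--
-- >>> takeCompletions 5 ["a", "b"]
-- (False,["a","b"])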
snippetCompletions :: Text -> Range -> [CompletionItem]
snippetCompletions prefix range =
[ ("handler", handlerTemplate),
("cases", casesTemplate),
("match-with", matchWithTemplate)
]
& filter (Text.isPrefixOf prefix . fst)
& fmap toCompletion
where
toCompletion :: (Text, Text) -> CompletionItem
toCompletion (pat, snippet) =
(mkCompletionItem pat)
{ _insertTextFormat = Just Snippet,
_insertTextMode = Just AdjustIndentation,
_textEdit = Just $ CompletionEditText (TextEdit range snippet)
}
handlerTemplate =
[here|
handle${1:Ability} : Request (${1:Ability} ${2}) a -> a
handle${1:Ability} = cases
{${3} -> continue} -> do
${4}
|]
casesTemplate =
[here|
cases
${1} -> do
${2}
|]
matchWithTemplate =
[here|
match ${1} with
${2} -> do
${3}
|]
mkCompletionItem :: Text -> CompletionItem
mkCompletionItem lbl =
mkDefCompletionItem :: Range -> Text -> Text -> Text -> LabeledDependency -> CompletionItem
mkDefCompletionItem range fqn path suffixified dep =
CompletionItem
{ _label = lbl,
_kind = Nothing,
_kind = case dep of
LD.TypeReference _ref -> Just CiClass
LD.TermReferent ref -> case ref of
Referent.Con {} -> Just CiConstructor
Referent.Ref {} -> Just CiValue,
_tags = Nothing,
_detail = Nothing,
_detail = Just fqn,
_documentation = Nothing,
_deprecated = Nothing,
_preselect = Nothing,
_sortText = Nothing,
_filterText = Nothing,
_filterText = Just path,
_insertText = Nothing,
_insertTextFormat = Nothing,
_insertTextMode = Nothing,
_textEdit = Nothing,
_textEdit = Just (CompletionEditText $ TextEdit range suffixified),
_additionalTextEdits = Nothing,
_commitCharacters = Nothing,
_command = Nothing,
_xdata = Nothing
}
where
-- We should generally show the longer of the path or suffixified name in the label,
-- since it helps the user understand the difference between options which may otherwise look
-- the same.
--
-- E.g. if I type "ma" then the suffixified options might be: List.map, Bag.map, but the
-- path matches are just "map" and "map" since the query starts at that segment, so we
-- show the suffixified version to disambiguate.
--
-- However, if the user types "base.List.ma" then the matching path is "base.List.map" and
-- the suffixification is just "List.map", so we use the path in this case because it more
-- closely matches what the user actually typed.
--
-- This is what's felt best to me, anecdotally.
lbl =
if Text.length path > Text.length suffixified
then path
else suffixified
-- | Generate a completion tree from a set of names.
-- A completion tree is a suffix tree over the path segments of each name it contains.
-- The goal is to allow fast completion of names by any partial path suffix.
--
-- The tree is generated by building a trie where all possible suffixes of a name are
-- reachable from the root of the trie, with sharing over subtrees to improve memory
-- residency.
--
-- Currently we don't "summarize" all of the children of a node in the node itself, and
-- instead you have to crawl all the children to get the actual completions.
--
-- TODO: Would it be worthwhile to perform compression or include child summaries on the suffix tree?
-- I suspect most namespace trees won't actually compress very well since each node is likely
-- to have terms/types at it.
--
-- E.g. From the names:
-- * alpha.beta.Nat
-- * alpha.Text
-- * foxtrot.Text
--
-- It will generate a tree like the following, where each bullet is a possible completion:
--
-- .
-- ├── foxtrot
-- │   └── Text
-- │   └── * foxtrot.Text (##Text)
-- ├── beta
-- │   └── Nat
-- │   └── * alpha.beta.Nat (##Nat)
-- ├── alpha
-- │   ├── beta
-- │   │   └── Nat
-- │   │   └── * alpha.beta.Nat (##Nat)
-- │   └── Text
-- │   └── * alpha.Text (##Text)
-- ├── Text
-- │   ├── * foxtrot.Text (##Text)
-- │   └── * alpha.Text (##Text)
-- └── Nat
-- └── * alpha.beta.Nat (##Nat)
namesToCompletionTree :: Names -> CompletionTree
namesToCompletionTree Names {terms, types} =
let typeCompls =
Relation.domain types
& ifoldMap
( \name refs ->
refs
& Monoid.whenM (not . isDefinitionDoc $ name)
& Set.map \ref -> (name, LD.typeRef ref)
)
termCompls =
Relation.domain terms
& ifoldMap
( \name refs ->
refs
& Monoid.whenM (not . isDefinitionDoc $ name)
& Set.map \ref -> (name, LD.referent ref)
)
in foldMap (uncurry nameToCompletionTree) (typeCompls <> termCompls)
where
-- It's annoying to see _all_ the definition docs in autocomplete so we filter them out.
-- Special docs like "README" will still appear since they're not named 'doc'
isDefinitionDoc name =
case Name.reverseSegments name of
("doc" :| _) -> True
_ -> False
nameToCompletionTree :: Name -> LabeledDependency -> CompletionTree
nameToCompletionTree name ref =
let (lastSegment :| prefix) = Name.reverseSegments name
complMap = helper (Map.singleton lastSegment (Set.singleton (name, ref) :< mempty)) prefix
in CompletionTree (mempty :< complMap)
where
-- We build the tree bottom-up rather than top-down so we can share submaps for
-- improved memory residency; each call is passed the submap that we built under the
-- current reversed path prefix.
helper ::
Map
NameSegment
(Cofree (Map NameSegment) (Set (Name, LabeledDependency))) ->
[NameSegment] ->
Map
NameSegment
(Cofree (Map NameSegment) (Set (Name, LabeledDependency)))
helper subMap revPrefix = case revPrefix of
[] -> subMap
(ns : rest) ->
mergeSubmaps (helper (Map.singleton ns (mempty :< subMap)) rest) subMap
where
mergeSubmaps = Map.unionWith (\a b -> unCompletionTree $ CompletionTree a <> CompletionTree b)
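-- A concrete illustration of the construction above (a hand-derived sketch, not a
-- doctest run; `nat` and `natDep` are hypothetical stand-ins for the Name
-- "alpha.beta.Nat" and its LabeledDependency):
--
-- > let (_ :< rootMap) = unCompletionTree (nameToCompletionTree nat natDep)
-- > Map.keys rootMap
-- > -- ["Nat", "alpha", "beta"]   (the definition is reachable from every suffix)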
-- | Crawl the completion tree and return all valid prefix-based completions alongside their
-- Path from the provided prefix, and their full name.
--
-- E.g. if the term "alpha.beta.gamma.map (#abc)" exists in the completion map, and the query is "beta" the result would
-- be:
--
-- @@
-- [(["beta", "gamma", "map"], "alpha.beta.gamma.map", TermReferent #abc)]
-- @@
matchCompletions :: CompletionTree -> Text -> [(Path, Name, LabeledDependency)]
matchCompletions (CompletionTree tree) txt =
matchSegments segments (Set.toList <$> tree)
where
segments :: [Text]
segments =
Text.splitOn "." txt
& filter (not . Text.null)
matchSegments :: [Text] -> Cofree (Map NameSegment) [(Name, LabeledDependency)] -> [(Path, Name, LabeledDependency)]
matchSegments xs (currentMatches :< subtreeMap) =
case xs of
[] ->
let current = currentMatches <&> (\(name, def) -> (Path.empty, name, def))
in (current <> mkDefMatches subtreeMap)
[prefix] ->
Map.dropWhileAntitone ((< prefix) . NameSegment.toText) subtreeMap
& Map.takeWhileAntitone (Text.isPrefixOf prefix . NameSegment.toText)
& \matchingSubtrees ->
let subMatches = ifoldMap (\ns subTree -> matchSegments [] subTree & consPathPrefix ns) matchingSubtrees
in subMatches
(ns : rest) ->
foldMap (matchSegments rest) (Map.lookup (NameSegment ns) subtreeMap)
& consPathPrefix (NameSegment ns)
consPathPrefix :: NameSegment -> ([(Path, Name, LabeledDependency)]) -> [(Path, Name, LabeledDependency)]
consPathPrefix ns = over (mapped . _1) (Path.cons ns)
mkDefMatches :: Map NameSegment (Cofree (Map NameSegment) [(Name, LabeledDependency)]) -> [(Path, Name, LabeledDependency)]
mkDefMatches xs = do
(ns, (matches :< rest)) <- Map.toList xs
let childMatches = mkDefMatches rest <&> over _1 (Path.cons ns)
let currentMatches = matches <&> \(name, dep) -> (Path.singleton ns, name, dep)
currentMatches <> childMatches
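-- One more worked query against the example in the docs above (traced by hand through
-- 'matchSegments'; illustrative only, with `tree` standing in for a CompletionTree that
-- contains "alpha.beta.gamma.map (#abc)"). The query "alpha.be" looks up "alpha"
-- exactly, prefix-matches "beta", and then crawls that whole subtree:
--
-- > matchCompletions tree "alpha.be"
-- > -- [(["alpha", "beta", "gamma", "map"], "alpha.beta.gamma.map", TermReferent #abc)]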

View File

@ -0,0 +1,23 @@
{-# LANGUAGE DataKinds #-}
module Unison.LSP.Configuration where
import Data.Aeson
import qualified Data.Text as Text
import Language.LSP.Types
import qualified Unison.Debug as Debug
import Unison.LSP.Types
import Unison.Prelude
-- | Handle configuration changes
updateConfig :: Config -> Value -> Either Text Config
updateConfig _oldConfig newConfig = Debug.debug Debug.LSP "Configuration Change" $ case fromJSON newConfig of
Error err -> Left $ Text.pack err
Success a -> Right a
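-- For example (a sketch; "maxCompletions" is the only recognized key at the moment):
--
-- > updateConfig defaultLSPConfig (object ["maxCompletions" .= (50 :: Int)])
-- > -- Right (Config {maxCompletions = Just 50})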
-- | We could use this notification to cancel/update work-in-progress,
-- but we don't actually need to update the config here; that's handled by the lsp library
-- automatically.
workspaceConfigurationChanged :: NotificationMessage 'WorkspaceDidChangeConfiguration -> Lsp ()
workspaceConfigurationChanged _m = do
pure ()

View File

@ -33,7 +33,7 @@ reportDiagnostics ::
f Diagnostic ->
Lsp ()
reportDiagnostics docUri fileVersion diags = do
let jsonRPC = "" -- TODO: what's this for?
let jsonRPC = "2.0"
let params = PublishDiagnosticsParams {_uri = docUri, _version = fromIntegral <$> fileVersion, _diagnostics = List . toList $ diags}
sendNotification (NotificationMessage jsonRPC STextDocumentPublishDiagnostics params)

View File

@ -1,19 +1,28 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE UndecidableInstances #-}
module Unison.LSP.Types where
import Colog.Core hiding (Lens')
import Control.Lens hiding (List)
import Control.Comonad.Cofree (Cofree)
import qualified Control.Comonad.Cofree as Cofree
import Control.Lens hiding (List, (:<))
import Control.Monad.Except
import Control.Monad.Reader
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy.Char8 as BSC
import qualified Data.HashMap.Strict as HM
import Data.IntervalMap.Lazy (IntervalMap)
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Data.Text as Text
import qualified Ki
import qualified Language.LSP.Logging as LSP
import Language.LSP.Server
import qualified Language.LSP.Server as LSP
import Language.LSP.Types
import Language.LSP.Types.Lens
import Language.LSP.VFS
@ -21,6 +30,9 @@ import Unison.Codebase
import qualified Unison.Codebase.Path as Path
import Unison.Codebase.Runtime (Runtime)
import Unison.LSP.Orphans ()
import Unison.LabeledDependency (LabeledDependency)
import Unison.Name (Name)
import Unison.NameSegment (NameSegment)
import Unison.NamesWithHistory (NamesWithHistory)
import Unison.Parser.Ann
import Unison.Prelude
@ -65,9 +77,25 @@ data Env = Env
dirtyFilesVar :: TVar (Set Uri),
-- A map of request IDs to an action which kills that request.
cancellationMapVar :: TVar (Map SomeLspId (IO ())),
-- A lazily computed map of all valid completion suffixes from the current path.
completionsVar :: TVar CompletionTree,
scope :: Ki.Scope
}
-- | A suffix tree over path segments of name completions.
-- see 'namesToCompletionTree' for more on how this is built and the invariants it should have.
newtype CompletionTree = CompletionTree
{ unCompletionTree :: Cofree (Map NameSegment) (Set (Name, LabeledDependency))
}
deriving (Show)
instance Semigroup CompletionTree where
CompletionTree (a Cofree.:< subtreeA) <> CompletionTree (b Cofree.:< subtreeB) =
CompletionTree (a <> b Cofree.:< Map.unionWith (\a b -> unCompletionTree $ CompletionTree a <> CompletionTree b) subtreeA subtreeB)
instance Monoid CompletionTree where
mempty = CompletionTree $ mempty Cofree.:< mempty
-- | A monotonically increasing file version tracked by the lsp client.
type FileVersion = Int32
@ -88,6 +116,9 @@ data FileAnalysis = FileAnalysis
getCurrentPath :: Lsp Path.Absolute
getCurrentPath = asks currentPathCache >>= liftIO
getCompletions :: Lsp CompletionTree
getCompletions = asks completionsVar >>= readTVarIO
globalPPE :: Lsp PrettyPrintEnvDecl
globalPPE = asks ppeCache >>= liftIO
@ -95,6 +126,41 @@ getParseNames :: Lsp NamesWithHistory
getParseNames = asks parseNamesCache >>= liftIO
data Config = Config
{ -- 'Nothing' will load ALL available completions, which is slower, but may provide a better
-- solution for some users.
--
-- 'Just n' will only fetch the first 'n' completions and will prompt the client to ask for
-- more completions after more typing.
maxCompletions :: Maybe Int
}
deriving stock (Show)
instance Aeson.FromJSON Config where
parseJSON = Aeson.withObject "Config" \obj -> do
maxCompletions <- obj Aeson..:! "maxCompletions" Aeson..!= maxCompletions defaultLSPConfig
let invalidKeys = Set.fromList (HM.keys obj) `Set.difference` validKeys
when (not . null $ invalidKeys) do
fail . Text.unpack $
"Unrecognized configuration key(s): "
<> Text.intercalate ", " (Set.toList invalidKeys)
<> ".\nThe default configuration is:\n"
<> Text.pack defaultConfigExample
pure Config {..}
where
validKeys = Set.fromList ["maxCompletions"]
defaultConfigExample =
BSC.unpack $ Aeson.encode defaultLSPConfig
instance Aeson.ToJSON Config where
toJSON (Config maxCompletions) =
Aeson.object
[ "maxCompletions" Aeson..= maxCompletions
]
defaultLSPConfig :: Config
defaultLSPConfig = Config {..}
where
maxCompletions = Just 100
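-- A quick sanity check of the instances above (illustrative doctest; the result is a
-- lazy ByteString shown as a string literal):
--
-- >>> Aeson.encode defaultLSPConfig
-- "{\"maxCompletions\":100}"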
-- | Lift a backend computation into the Lsp monad.
lspBackend :: Backend.Backend IO a -> Lsp (Either Backend.BackendError a)
@ -142,3 +208,9 @@ includeEdits uri replacement ranges rca =
_changeAnnotations = Nothing
}
in rca & codeAction . edit ?~ workspaceEdit
getConfig :: Lsp Config
getConfig = LSP.getConfig
setConfig :: Config -> Lsp ()
setConfig = LSP.setConfig

View File

@ -5,9 +5,11 @@ import qualified Unison.Codebase as Codebase
import Unison.Codebase.Branch (Branch)
import qualified Unison.Codebase.Path as Path
import qualified Unison.Debug as Debug
import Unison.LSP.Completion
import Unison.LSP.Types
import qualified Unison.LSP.VFS as VFS
import Unison.NamesWithHistory (NamesWithHistory)
import qualified Unison.NamesWithHistory as NamesWithHistory
import Unison.PrettyPrintEnvDecl
import qualified Unison.PrettyPrintEnvDecl.Names as PPE
import qualified Unison.Server.Backend as Backend
@ -21,7 +23,7 @@ ucmWorker ::
STM Path.Absolute ->
Lsp ()
ucmWorker ppeVar parseNamesVar getLatestRoot getLatestPath = do
Env {codebase} <- ask
Env {codebase, completionsVar} <- ask
let loop :: (Branch IO, Path.Absolute) -> Lsp a
loop (currentRoot, currentPath) = do
Debug.debugM Debug.LSP "LSP path: " currentPath
@ -33,6 +35,8 @@ ucmWorker ppeVar parseNamesVar getLatestRoot getLatestPath = do
writeTVar ppeVar ppe
-- Re-check everything with the new names and ppe
VFS.markAllFilesDirty
atomically do
writeTVar completionsVar (namesToCompletionTree $ NamesWithHistory.currentNames parseNames)
latest <- atomically $ do
latestRoot <- getLatestRoot
latestPath <- getLatestPath

View File

@ -72,6 +72,7 @@ library
Unison.LSP.CancelRequest
Unison.LSP.CodeAction
Unison.LSP.Completion
Unison.LSP.Configuration
Unison.LSP.Conversions
Unison.LSP.Diagnostics
Unison.LSP.FileAnalysis
@ -140,6 +141,7 @@ library
, exceptions
, extra
, filepath
, free
, friendly-time
, fuzzyfind
, generic-lens
@ -262,6 +264,7 @@ executable cli-integration-tests
, exceptions
, extra
, filepath
, free
, friendly-time
, fuzzyfind
, generic-lens
@ -379,6 +382,7 @@ executable transcripts
, exceptions
, extra
, filepath
, free
, friendly-time
, fuzzyfind
, generic-lens
@ -500,6 +504,7 @@ executable unison
, exceptions
, extra
, filepath
, free
, friendly-time
, fuzzyfind
, generic-lens
@ -628,6 +633,7 @@ test-suite cli-tests
, exceptions
, extra
, filepath
, free
, friendly-time
, fuzzyfind
, generic-lens

View File

@ -3,6 +3,7 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-partial-type-signatures #-}
@ -37,10 +38,12 @@ import qualified Data.Text.IO as Text
import GHC.Conc (setUncaughtExceptionHandler)
import qualified GHC.Conc
import qualified Ki
import qualified Language.Haskell.TH as TH
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import System.Directory (canonicalizePath, getCurrentDirectory, removeDirectoryRecursive)
import System.Environment (getProgName, withArgs)
import System.Environment (getProgName, lookupEnv, withArgs)
import qualified System.Exit as Exit
import qualified System.FilePath as FP
import System.IO (stderr)
@ -55,6 +58,7 @@ import qualified Unison.Codebase as Codebase
import Unison.Codebase.Branch (Branch)
import qualified Unison.Codebase.Editor.Input as Input
import Unison.Codebase.Editor.RemoteRepo (ReadShareRemoteNamespace)
import Unison.Codebase.Editor.UriParser (parseReadShareRemoteNamespace)
import qualified Unison.Codebase.Editor.VersionParser as VP
import Unison.Codebase.Execute (execute)
import Unison.Codebase.Init (CodebaseInitOptions (..), InitError (..), InitResult (..), SpecifiedCodebase (..))
@ -468,8 +472,14 @@ getConfigFilePath mcodepath = (FP.</> ".unisonConfig") <$> Codebase.getCodebaseD
defaultBaseLib :: Maybe ReadShareRemoteNamespace
defaultBaseLib =
rightMay $
runParser VP.defaultBaseLib "version" gitRef
let mayBaseSharePath =
$( do
mayPath <- TH.runIO (lookupEnv "UNISON_BASE_PATH")
TH.lift mayPath
)
in mayBaseSharePath & \case
Just s -> eitherToMaybe $ parseReadShareRemoteNamespace "UNISON_BASE_PATH" s
Nothing -> rightMay $ runParser VP.defaultBaseLib "version" gitRef
where
(gitRef, _date) = Version.gitDescribe
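-- Note that the Template Haskell splice above reads UNISON_BASE_PATH at compile time,
-- so changing the default means rebuilding unison-cli, e.g. (hypothetical path shown):
--
-- > UNISON_BASE_PATH="unison.public.base.main" stack build unison-cli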

View File

@ -25,19 +25,42 @@ isConstructor = \case
-- Parse a string like those described in Referent.fromText:
-- examples:
-- `##Text.take` — builtins dont have cycles or cids
-- `#2tWjVAuc7` — term ref, no cycle
-- `#y9ycWkiC1.y9` — term ref, part of cycle
-- `#cWkiC1x89#1` — constructor
-- `#DCxrnCAPS.WD#0` — constructor of a type in a cycle
--
-- builtins don't have cycles or cids
-- >>> fromText "##Text.take"
-- Just (Builtin "Text.take")
--
-- term ref, no cycle
-- >>> fromText "#2tWjVAuc7"
-- Just (ShortHash {prefix = "2tWjVAuc7", cycle = Nothing, cid = Nothing})
--
-- term ref, part of cycle
-- >>> fromText "#y9ycWkiC1.y9"
-- Just (ShortHash {prefix = "y9ycWkiC1", cycle = Just "y9", cid = Nothing})
--
-- constructor
-- >>> fromText "#cWkiC1x89#1"
-- Just (ShortHash {prefix = "cWkiC1x89", cycle = Nothing, cid = Just "1"})
--
-- constructor of a type in a cycle
-- >>> fromText "#DCxrnCAPS.WD#0"
-- Just (ShortHash {prefix = "DCxrnCAPS", cycle = Just "WD", cid = Just "0"})
--
-- A constructor ID on a builtin is ignored:
-- e.g. ##FileIO#2 is parsed as ##FileIO
-- >>> fromText "##FileIO#2"
-- Just (Builtin "FileIO")
--
-- Anything to the left of the first # is ignored:
-- e.g. foo#abc is parsed as #abc
-- >>> fromText "foo#abc "
-- Just (ShortHash {prefix = "abc ", cycle = Nothing, cid = Nothing})
--
-- Anything including and following a third # is ignored.
-- e.g. foo#abc#2#hello is parsed as #abc#2
-- >>> fromText "foo#abc#2#hello"
-- Just (ShortHash {prefix = "abc", cycle = Nothing, cid = Just "2"})
--
-- Anything after a second . before a second # is ignored.
-- e.g. foo#abc.1f.x is parsed as #abc.1f
-- >>> fromText "foo#abc.1f.x"
-- Just (ShortHash {prefix = "abc", cycle = Just "1f", cid = Nothing})
fromText :: Text -> Maybe ShortHash
fromText t = case Text.split (== '#') t of
[_, "", b] -> Just $ Builtin b -- builtin starts with ##

View File

@ -1,8 +1,34 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE ViewPatterns #-}
module Unison.Var where
module Unison.Var
( Var (..),
Type (..),
InferenceType (..),
blank,
freshIn,
inferAbility,
inferInput,
inferOther,
inferOutput,
inferPatternBindE,
inferPatternBindV,
inferPatternPureE,
inferPatternPureV,
inferTypeConstructor,
inferTypeConstructorArg,
joinDot,
missingResult,
name,
nameStr,
named,
nameds,
namespaced,
rawName,
reset,
uncapitalize,
universallyQuantifyIfFree,
unnamedRef,
unnamedTest,
)
where
import Data.Char (isLower, toLower)
import Data.Text (pack)
@ -169,9 +195,6 @@ joinDot prefix v2 =
then named (name prefix `mappend` name v2)
else named (name prefix `mappend` "." `mappend` name v2)
freshNamed :: Var v => Set v -> Text -> v
freshNamed used n = ABT.freshIn used (named n)
universallyQuantifyIfFree :: forall v. Var v => v -> Bool
universallyQuantifyIfFree v =
ok (name $ reset v) && unqualified v == v

View File

@ -144,7 +144,7 @@ data BackendError
| MissingSignatureForTerm Reference
| NoSuchDefinition (HQ.HashQualified Name)
data BackendEnv = BackendEnv
newtype BackendEnv = BackendEnv
{ -- | Whether to use the sqlite name-lookup table to generate Names objects rather than building Names from the root branch.
useNamesIndex :: Bool
}
@ -265,7 +265,7 @@ termEntryDisplayName :: TermEntry v a -> Text
termEntryDisplayName = HQ'.toText . termEntryHQName
termEntryHQName :: TermEntry v a -> HQ'.HashQualified NameSegment
termEntryHQName (TermEntry {termEntryName, termEntryConflicted, termEntryHash}) =
termEntryHQName TermEntry {termEntryName, termEntryConflicted, termEntryHash} =
if termEntryConflicted
then HQ'.HashQualified termEntryName termEntryHash
else HQ'.NameOnly termEntryName
@ -283,7 +283,7 @@ typeEntryDisplayName :: TypeEntry -> Text
typeEntryDisplayName = HQ'.toText . typeEntryHQName
typeEntryHQName :: TypeEntry -> HQ'.HashQualified NameSegment
typeEntryHQName (TypeEntry {typeEntryName, typeEntryConflicted, typeEntryReference}) =
typeEntryHQName TypeEntry {typeEntryName, typeEntryConflicted, typeEntryReference} =
if typeEntryConflicted
then HQ'.HashQualified typeEntryName (Reference.toShortHash typeEntryReference)
else HQ'.NameOnly typeEntryName
@ -528,7 +528,7 @@ formatTypeName' ppe r =
termEntryToNamedTerm ::
Var v => PPE.PrettyPrintEnv -> Maybe Width -> TermEntry v a -> NamedTerm
termEntryToNamedTerm ppe typeWidth te@(TermEntry {termEntryType = mayType, termEntryTag = tag, termEntryHash}) =
termEntryToNamedTerm ppe typeWidth te@TermEntry {termEntryType = mayType, termEntryTag = tag, termEntryHash} =
NamedTerm
{ termName = termEntryHQName te,
termHash = termEntryHash,
@ -537,7 +537,7 @@ termEntryToNamedTerm ppe typeWidth te@(TermEntry {termEntryType = mayType, termE
}
typeEntryToNamedType :: TypeEntry -> NamedType
typeEntryToNamedType te@(TypeEntry {typeEntryTag, typeEntryHash}) =
typeEntryToNamedType te@TypeEntry {typeEntryTag, typeEntryHash} =
NamedType
{ typeName = typeEntryHQName $ te,
typeHash = typeEntryHash,
@ -672,7 +672,7 @@ makeTypeSearch :: Int -> NamesWithHistory -> Search Reference
makeTypeSearch len names =
Search
{ lookupNames = \ref -> NamesWithHistory.typeName len ref names,
lookupRelativeHQRefs' = \name -> NamesWithHistory.lookupRelativeHQType' name names,
lookupRelativeHQRefs' = (`NamesWithHistory.lookupRelativeHQType'` names),
matchesNamedRef = HQ'.matchesNamedReference,
makeResult = SR.typeResult
}
@ -682,7 +682,7 @@ makeTermSearch :: Int -> NamesWithHistory -> Search Referent
makeTermSearch len names =
Search
{ lookupNames = \ref -> NamesWithHistory.termName len ref names,
lookupRelativeHQRefs' = \name -> NamesWithHistory.lookupRelativeHQTerm' name names,
lookupRelativeHQRefs' = (`NamesWithHistory.lookupRelativeHQTerm'` names),
matchesNamedRef = HQ'.matchesNamedReferent,
makeResult = SR.termResult
}
@ -721,7 +721,7 @@ hqNameQuery ::
NameSearch ->
[HQ.HashQualified Name] ->
m QueryResult
hqNameQuery codebase (NameSearch {typeSearch, termSearch}) hqs = do
hqNameQuery codebase NameSearch {typeSearch, termSearch} hqs = do
-- Split the query into hash-only and hash-qualified-name queries.
let (hashes, hqnames) = partitionEithers (map HQ'.fromHQ2 hqs)
-- Find the terms with those hashes.
@ -747,8 +747,12 @@ hqNameQuery codebase (NameSearch {typeSearch, termSearch}) hqs = do
-- Now do the actual name query
resultss = map (\name -> applySearch typeSearch name <> applySearch termSearch name) hqnames
(misses, hits) =
zip hqnames resultss
& map (\(hqname, results) -> if null results then Left hqname else Right results)
zipWith
( \hqname results ->
(if null results then Left hqname else Right results)
)
hqnames
resultss
& partitionEithers
-- Handle query misses correctly
missingRefs =
@ -776,7 +780,7 @@ data DefinitionResults v = DefinitionResults
}
expandShortBranchHash ::
Monad m => Codebase m v a -> ShortBranchHash -> Backend m (Branch.CausalHash)
Monad m => Codebase m v a -> ShortBranchHash -> Backend m Branch.CausalHash
expandShortBranchHash codebase hash = do
hashSet <- lift $ Codebase.branchHashesByPrefix codebase hash
len <- lift $ Codebase.branchHashLength codebase
@ -787,18 +791,17 @@ expandShortBranchHash codebase hash = do
throwError . AmbiguousBranchHash hash $ Set.map (SBH.fromHash len) hashSet
-- | Efficiently resolve a root hash and path to a shallow branch's causal.
getShallowCausalAtPathFromRootHash :: Monad m => Codebase m v a -> Maybe (Branch.CausalHash) -> Path -> Backend m (V2Branch.CausalBranch m)
getShallowCausalAtPathFromRootHash :: Monad m => Codebase m v a -> Maybe Branch.CausalHash -> Path -> Backend m (V2Branch.CausalBranch m)
getShallowCausalAtPathFromRootHash codebase mayRootHash path = do
shallowRoot <- case mayRootHash of
Nothing -> lift (Codebase.getShallowRootCausal codebase)
Just h -> do
lift $ Codebase.getShallowCausalForHash codebase (Cv.causalHash1to2 h)
causal <- lift $ Codebase.getShallowCausalAtPath codebase path (Just shallowRoot)
pure causal
lift $ Codebase.getShallowCausalAtPath codebase path (Just shallowRoot)
formatType' :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> SyntaxText
formatType' ppe w =
Pretty.render w . TypePrinter.pretty0 ppe mempty (-1)
Pretty.render w . TypePrinter.prettySyntax ppe
formatType :: Var v => PPE.PrettyPrintEnv -> Width -> Type v a -> Syntax.SyntaxText
formatType ppe w = mungeSyntaxText . formatType' ppe w
@ -822,7 +825,7 @@ prettyDefinitionsForHQName ::
-- this path.
Path ->
-- | The root branch to use
Maybe (Branch.CausalHash) ->
Maybe Branch.CausalHash ->
Maybe Width ->
-- | Whether to suffixify bindings in the rendered syntax
Suffixify ->
@ -995,7 +998,7 @@ docsInBranchToHtmlFiles runtime codebase root currentPath directory = do
let currentBranch = Branch.getAt' currentPath root
let allTerms = (R.toList . Branch.deepTerms . Branch.head) currentBranch
-- ignores docs inside lib namespace, recursively
let notLib (_, name) = all (/= "lib") (Name.segments name)
let notLib (_, name) = "lib" `notElem` Name.segments name
docTermsWithNames <- filterM (isDoc codebase . fst) (filter notLib allTerms)
let docNamesByRef = Map.fromList docTermsWithNames
hqLength <- Codebase.hashLength codebase
@ -1069,7 +1072,8 @@ bestNameForTerm ppe width =
Text.pack
. Pretty.render width
. fmap UST.toPlain
. TermPrinter.pretty0 @v ppe TermPrinter.emptyAc
. TermPrinter.runPretty ppe
. TermPrinter.pretty0 @v TermPrinter.emptyAc
. Term.fromReferent mempty
bestNameForType ::
@ -1078,7 +1082,7 @@ bestNameForType ppe width =
Text.pack
. Pretty.render width
. fmap UST.toPlain
. TypePrinter.pretty0 @v ppe mempty (-1)
. TypePrinter.prettySyntax @v ppe
. Type.ref ()
-- | Returns (parse, pretty, local, ppe) where:
@ -1096,9 +1100,9 @@ scopedNamesForBranchHash codebase mbh path = do
Nothing
| shouldUseNamesIndex -> indexNames
| otherwise -> do
rootBranch <- lift $ Codebase.getRootBranch codebase
let (parseNames, _prettyNames, localNames) = namesForBranch rootBranch (AllNames path)
pure (parseNames, localNames)
rootBranch <- lift $ Codebase.getRootBranch codebase
let (parseNames, _prettyNames, localNames) = namesForBranch rootBranch (AllNames path)
pure (parseNames, localNames)
Just rootCausal -> do
let ch = V2Causal.causalHash rootCausal
let v1CausalHash = Cv.causalHash2to1 ch
@ -1255,8 +1259,7 @@ termsToSyntax suff width ppe0 terms =
DisplayObject.UserObject tm ->
DisplayObject.UserObject
. Pretty.render width
. TermPrinter.prettyBinding (ppeBody r) n
$ tm
$ TermPrinter.prettyBinding (ppeBody r) n tm
typesToSyntax ::
Var v =>
@ -1296,8 +1299,8 @@ typesToSyntax suff width ppe0 types =
typeToSyntaxHeader ::
Width ->
HQ.HashQualified Name ->
(DisplayObject () (DD.Decl Symbol Ann)) ->
(DisplayObject SyntaxText SyntaxText)
DisplayObject () (DD.Decl Symbol Ann) ->
DisplayObject SyntaxText SyntaxText
typeToSyntaxHeader width hqName obj =
case obj of
BuiltinObject _ ->

View File

@ -1,6 +1,4 @@
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE OverloadedStrings #-}
@ -183,7 +181,7 @@ renderDoc pped terms typeOf eval types tm =
formatPrettyType ppe typ = formatPretty (TypePrinter.prettySyntax ppe typ)
source :: Term v () -> m SyntaxText
source tm = (pure . formatPretty . TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped)) tm
source tm = pure . formatPretty $ TermPrinter.prettyBlock' True (PPE.suffixifiedPPE pped) tm
goSignatures :: [Referent] -> m [P.Pretty SSyntaxText]
goSignatures rs =

View File

@ -50,6 +50,10 @@ test2 = do
(typeLink IOFailure)
"Cannot decode byte '\\xee': Data.Text.Internal.Encoding.decodeUtf8: Invalid UTF-8 stream"
(Any ())
Stack trace:
##raise
```
```ucm
@ -60,5 +64,9 @@ test2 = do
The program halted with an unhandled exception:
Failure (typeLink RuntimeFailure) "builtin.bug" (Any "whoa")
Stack trace:
##raise
```

View File

@ -91,8 +91,9 @@ myServer = unsafeRun! '(hello "127.0.0.1" "0")
Failure (typeLink IOFailure) "problem" (Any ())
I'm sorry this message doesn't have more detail about the
location of the failure. My makers plan to fix this in a
future release. 😢
Stack trace:
bug
#dtd8ccth5f
```

View File

@ -234,6 +234,8 @@ test> Text.tests.patterns =
run (capture (many (notCharIn [?,,]))) "abracadabra,123" == Some (["abracadabra"], ",123"),
run (capture (many (or digit letter))) "11234abc,remainder" == Some (["11234abc"], ",remainder"),
run (capture (replicate 1 5 (or digit letter))) "1a2ba aaa" == Some (["1a2ba"], " aaa"),
-- Regression test for: https://github.com/unisonweb/unison/issues/3530
run (capture (replicate 0 1 (join [literal "a", literal "b"]))) "ac" == Some ([""], "ac"),
isMatch (join [many letter, eof]) "aaaaabbbb" == true,
isMatch (join [many letter, eof]) "aaaaabbbb1" == false,
isMatch (join [l "abra", many (l "cadabra")]) "abracadabracadabra" == true,
@ -273,12 +275,12 @@ test> Bytes.tests.compression =
isLeft (gzip.decompress 0xs201209348750982374593939393939709827345789023457892345)
]
test> Bytes.tests.fromBase64UrlUnpadded =
test> Bytes.tests.fromBase64UrlUnpadded =
checks [Exception.catch
'(fromUtf8
(raiseMessage () (Bytes.fromBase64UrlUnpadded (toUtf8 "aGVsbG8gd29ybGQ")))) == Right "hello world"
, isLeft (Bytes.fromBase64UrlUnpadded (toUtf8 "aGVsbG8gd29ybGQ="))]
```
```ucm:hide

View File

@ -215,6 +215,8 @@ test> Text.tests.patterns =
run (capture (many (notCharIn [?,,]))) "abracadabra,123" == Some (["abracadabra"], ",123"),
run (capture (many (or digit letter))) "11234abc,remainder" == Some (["11234abc"], ",remainder"),
run (capture (replicate 1 5 (or digit letter))) "1a2ba aaa" == Some (["1a2ba"], " aaa"),
-- Regression test for: https://github.com/unisonweb/unison/issues/3530
run (capture (replicate 0 1 (join [literal "a", literal "b"]))) "ac" == Some ([""], "ac"),
isMatch (join [many letter, eof]) "aaaaabbbb" == true,
isMatch (join [many letter, eof]) "aaaaabbbb1" == false,
isMatch (join [l "abra", many (l "cadabra")]) "abracadabracadabra" == true,
@ -250,12 +252,12 @@ test> Bytes.tests.compression =
isLeft (gzip.decompress 0xs201209348750982374593939393939709827345789023457892345)
]
test> Bytes.tests.fromBase64UrlUnpadded =
test> Bytes.tests.fromBase64UrlUnpadded =
checks [Exception.catch
'(fromUtf8
(raiseMessage () (Bytes.fromBase64UrlUnpadded (toUtf8 "aGVsbG8gd29ybGQ")))) == Right "hello world"
, isLeft (Bytes.fromBase64UrlUnpadded (toUtf8 "aGVsbG8gd29ybGQ="))]
```
## `Any` functions

View File

@ -0,0 +1,19 @@
```unison
a.b.one = 1
a.two = 2
a.x.three = 3
a.x.four = 4
structural type a.x.Foo = Foo | Bar
structural type a.b.Baz = Boo
```
```ucm
.> add
.> delete.term a.b.one
.> alias.term a.two a.newtwo
.> move.namespace a.x a.y
.> history
.> debug.name-diff 4 1
```

View File

@ -0,0 +1,107 @@
```unison
a.b.one = 1
a.two = 2
a.x.three = 3
a.x.four = 4
structural type a.x.Foo = Foo | Bar
structural type a.b.Baz = Boo
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
structural type a.b.Baz
structural type a.x.Foo
a.b.one : ##Nat
a.two : ##Nat
a.x.four : ##Nat
a.x.three : ##Nat
```
```ucm
.> add
⍟ I've added these definitions:
structural type a.b.Baz
structural type a.x.Foo
a.b.one : ##Nat
a.two : ##Nat
a.x.four : ##Nat
a.x.three : ##Nat
.> delete.term a.b.one
Removed definitions:
1. a.b.one : ##Nat
Tip: You can use `undo` or `reflog` to undo this change.
.> alias.term a.two a.newtwo
Done.
.> move.namespace a.x a.y
Done.
.> history
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #tteooc9j2d
> Moves:
Original name New name
a.x.Foo a.y.Foo
a.x.Foo.Bar a.y.Foo.Bar
a.x.Foo.Foo a.y.Foo.Foo
a.x.four a.y.four
a.x.three a.y.three
⊙ 2. #bicrtgqj12
+ Adds / updates:
a.newtwo
= Copies:
Original name New name(s)
a.two a.newtwo
⊙ 3. #bofp4huk1j
- Deletes:
a.b.one
□ 4. #gss5s88mo3 (start of history)
.> debug.name-diff 4 1
Kind Name Change Ref
Term a.newtwo Added #dcgdua2lj6upd1ah5v0qp09gjsej0d77d87fu6qn8e2qrssnlnmuinoio46hiu53magr7qn8vnqke8ndt0v76700o5u8gcvo7st28jg
Term a.y.four Added #vcfbbslncd2qloc03kalgsmufl3j5es6cehcrbmlj6t78d4uk5j9gpa3hhf2opln1u2kiepg5n2cn49ianf2oig0mi4c2ldn1r9lf40
Term a.y.three Added #f3lgjvjqoocpt8v6kdgd2bgthh11a7md3qdp9rf5datccmo580btjd5bt5dro3irqs0is7vm7s1dphddjbtufch620te7ef7canmjj8
Term a.y.Foo.Bar Added #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0#d1
Term a.y.Foo.Foo Added #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0#d0
Term a.b.one Removed #gjmq673r1vrurfotlnirv7vutdhm6sa3s02em5g22kk606mv6duvv8be402dv79312i4a0onepq5bo7citsodvq2g720nttj0ee9p0g
Term a.x.four Removed #vcfbbslncd2qloc03kalgsmufl3j5es6cehcrbmlj6t78d4uk5j9gpa3hhf2opln1u2kiepg5n2cn49ianf2oig0mi4c2ldn1r9lf40
Term a.x.three Removed #f3lgjvjqoocpt8v6kdgd2bgthh11a7md3qdp9rf5datccmo580btjd5bt5dro3irqs0is7vm7s1dphddjbtufch620te7ef7canmjj8
Term a.x.Foo.Bar Removed #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0#d1
Term a.x.Foo.Foo Removed #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0#d0
Type a.y.Foo Added #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0
Type a.x.Foo Removed #6kbe32g06nqg93cqub6ohqc4ql4o49ntgnunifds0t75qre6lacnbsr3evn8bkivj68ecbvmhkbak4dbg4fqertcpgb396rmo34tnh0
```

View File

@ -604,6 +604,10 @@ Calling our examples with the wrong number of args will error.
The program halted with an unhandled exception:
Failure (typeLink IOFailure) "called with args" (Any ())
Stack trace:
##raise
```
```ucm
@ -614,6 +618,10 @@ Calling our examples with the wrong number of args will error.
The program halted with an unhandled exception:
Failure (typeLink IOFailure) "called with no args" (Any ())
Stack trace:
##raise
```
```ucm
@ -625,6 +633,10 @@ Calling our examples with the wrong number of args will error.
Failure
(typeLink IOFailure) "called with too many args" (Any ())
Stack trace:
##raise
```
```ucm
@ -635,5 +647,9 @@ Calling our examples with the wrong number of args will error.
The program halted with an unhandled exception:
Failure (typeLink IOFailure) "called with no args" (Any ())
Stack trace:
##raise
```

View File

@ -21,9 +21,10 @@
"implement me later"
I'm sorry this message doesn't have more detail about the
location of the failure. My makers plan to fix this in a
future release. 😢
Stack trace:
todo
#qe5e1lcfn8
```
```unison
@ -46,9 +47,10 @@
"there's a bug in my code"
I'm sorry this message doesn't have more detail about the
location of the failure. My makers plan to fix this in a
future release. 😢
Stack trace:
bug
#m67hcdcoda
```
## Todo

View File

@ -93,5 +93,9 @@ unique type RuntimeError =
The program halted with an unhandled exception:
Failure (typeLink RuntimeError) "oh noes!" (Any ())
Stack trace:
##raise
```