Merge remote-tracking branch 'origin/trunk' into cp/pull-hash-validation-patch

Chris Penner 2023-11-13 14:18:36 -08:00
commit 201865e503
133 changed files with 3844 additions and 349 deletions

View File

@ -73,8 +73,11 @@ The format for this list: name, GitHub handle
* Jesse Looney (@jesselooney)
* Vlad Posmangiu Luchian (@cstml)
* Andrii Uvarov (@unorsk)
* Fabio Labella (@SystemFw)
* Alexis King (@lexi-lambda)
* Mario Bašić (@mabasic)
* Chris Krycho (@chriskrycho)
* Hatim Khambati (@hatimkhambati26)
* Kyle Goetz (@kylegoetz)
* Ethan Morgan (@sixfourtwelve)
* Johan Winther (@JohanWinther)

View File

@ -65,6 +65,7 @@ module U.Codebase.Sqlite.Operations
-- ** dependents index
dependents,
dependentsOfComponent,
dependentsWithinScope,
-- ** type index
Q.addTypeToIndexForTerm,
@ -545,7 +546,6 @@ expectDeclNumConstructors (C.Reference.Id h i) = do
oid <- Q.expectObjectIdForPrimaryHash h
Q.expectDeclObject oid (decodeDeclElementNumConstructors i)
-- * Branch transformation
s2cBranch :: S.DbBranch -> Transaction (C.Branch.Branch Transaction)
@ -1142,6 +1142,20 @@ dependents selector r = do
sIds <- Q.getDependentsForDependency selector r'
Set.traverse s2cReferenceId sIds
-- | `dependentsWithinScope scope query` returns all of the transitive dependents of `query` that are in `scope` (not
-- including `query` itself). Each dependent is also tagged with whether it is a term or decl.
dependentsWithinScope :: Set C.Reference.Id -> Set C.Reference -> Transaction (Map C.Reference.Id C.ReferenceType)
dependentsWithinScope scope query = do
scope' <- Set.traverse c2sReferenceId scope
query' <- Set.traverse c2sReference query
Q.getDependentsWithinScope scope' query'
>>= Map.bitraverse s2cReferenceId (pure . objectTypeToReferenceType)
where
objectTypeToReferenceType = \case
ObjectType.TermComponent -> C.RtTerm
ObjectType.DeclComponent -> C.RtType
_ -> error "Q.getDependentsWithinScope shouldn't return any other types"
-- | returns a list of known definitions referencing `h`
dependentsOfComponent :: H.Hash -> Transaction (Set C.Reference.Id)
dependentsOfComponent h = do

View File

@ -160,6 +160,7 @@ module U.Codebase.Sqlite.Queries
getDependenciesForDependent,
getDependencyIdsForDependent,
getDependenciesBetweenTerms,
getDependentsWithinScope,
-- ** type index
addToTypeIndex,
@ -1776,6 +1777,83 @@ getDependenciesBetweenTerms oid1 oid2 =
WHERE path_elem IS NOT null
|]
-- | `getDependentsWithinScope scope query` returns all of the transitive dependents of `query` that are in `scope` (not
-- including `query` itself). Each dependent is also tagged with whether it is a term or decl.
getDependentsWithinScope :: Set Reference.Id -> Set S.Reference -> Transaction (Map Reference.Id ObjectType)
getDependentsWithinScope scope query = do
-- Populate a temporary table with all of the references in `scope`
execute
[sql|
CREATE TEMPORARY TABLE dependents_search_scope (
dependent_object_id INTEGER NOT NULL,
dependent_component_index INTEGER NOT NULL,
PRIMARY KEY (dependent_object_id, dependent_component_index)
)
|]
for_ scope \r ->
execute [sql|INSERT INTO dependents_search_scope VALUES (@r, @)|]
-- Populate a temporary table with all of the references in `query`
execute
[sql|
CREATE TEMPORARY TABLE dependencies_query (
dependency_builtin INTEGER NULL,
dependency_object_id INTEGER NULL,
dependency_component_index INTEGER NULL,
CHECK ((dependency_builtin IS NULL) = (dependency_object_id IS NOT NULL)),
CHECK ((dependency_object_id IS NULL) = (dependency_component_index IS NULL))
)
|]
for_ query \r ->
execute [sql|INSERT INTO dependencies_query VALUES (@r, @, @)|]
-- Say the query set is { #foo, #bar }, and the scope set is { #foo, #bar, #baz, #qux, #honk }.
--
-- Furthermore, say the dependencies are as follows, where `x -> y` means "x depends on y".
--
-- #honk -> #baz -> #foo
-- #qux -> #bar
--
-- The recursive query below is seeded with direct dependents of the `query` set that are in `scope`, namely:
--
-- #honk -> #baz -> #foo
-- #qux -> #bar
-- ^^^^
-- direct deps of { #foo, #bar } are: { #baz, #qux }
--
-- Then, every iteration of the query expands to that set's dependents (#honk and onwards), until there are no more.
-- We use `UNION` rather than `UNION ALL` so as to not track down the transitive dependents of any particular
-- reference more than once.
result :: [Reference.Id :. Only ObjectType] <- queryListRow [sql|
WITH RECURSIVE transitive_dependents (dependent_object_id, dependent_component_index, type_id) AS (
SELECT d.dependent_object_id, d.dependent_component_index, object.type_id
FROM dependents_index d
JOIN object ON d.dependent_object_id = object.id
JOIN dependencies_query q
ON q.dependency_builtin IS d.dependency_builtin
AND q.dependency_object_id IS d.dependency_object_id
AND q.dependency_component_index IS d.dependency_component_index
JOIN dependents_search_scope s
ON s.dependent_object_id = d.dependent_object_id
AND s.dependent_component_index = d.dependent_component_index
UNION SELECT d.dependent_object_id, d.dependent_component_index, object.type_id
FROM dependents_index d
JOIN object ON d.dependent_object_id = object.id
JOIN transitive_dependents t
ON t.dependent_object_id = d.dependency_object_id
AND t.dependent_component_index = d.dependency_component_index
JOIN dependents_search_scope s
ON s.dependent_object_id = d.dependent_object_id
AND s.dependent_component_index = d.dependent_component_index
)
SELECT * FROM transitive_dependents
|]
execute [sql|DROP TABLE dependents_search_scope|]
execute [sql|DROP TABLE dependencies_query|]
pure . Map.fromList $ [(r, t) | r :. Only t <- result]
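For intuition, here is a hedged, self-contained Haskell model of what the recursive query computes, using the #foo/#bar example above; the `String` stand-ins and helper names are assumptions for illustration only, not part of this change.
import Data.Map.Strict (Map)
import Data.Map.Strict qualified as Map
import Data.Set (Set)
import Data.Set qualified as Set

-- Direct-dependents edges from the example: #baz depends on #foo, #honk on #baz, #qux on #bar.
directDependents :: Map String (Set String)
directDependents =
  Map.fromList
    [ ("#foo", Set.singleton "#baz"),
      ("#baz", Set.singleton "#honk"),
      ("#bar", Set.singleton "#qux")
    ]

-- Mirror of the seeded recursive CTE: seed with the in-scope direct dependents of
-- `query`, then repeatedly expand to in-scope dependents until a fixpoint.
dependentsWithinScopeModel :: Set String -> Set String -> Set String
dependentsWithinScopeModel scope query = go seed seed
  where
    step rs =
      Set.intersection scope $
        Set.unions [Map.findWithDefault Set.empty r directDependents | r <- Set.toList rs]
    seed = step query
    go acc frontier
      | Set.null new = acc
      | otherwise = go (Set.union acc new) new
      where
        new = step frontier `Set.difference` acc

-- >>> dependentsWithinScopeModel (Set.fromList ["#foo","#bar","#baz","#qux","#honk"]) (Set.fromList ["#foo","#bar"])
-- fromList ["#baz","#honk","#qux"]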
objectIdByBase32Prefix :: ObjectType -> Text -> Transaction [ObjectId]
objectIdByBase32Prefix objType prefix =
queryListCol

View File

@ -10,6 +10,7 @@ module U.Codebase.Reference
Reference' (..),
TermReference',
TypeReference',
ReferenceType(..),
pattern Derived,
Id,
Id' (..),
@ -34,11 +35,11 @@ import Data.Bifoldable (Bifoldable (..))
import Data.Bitraversable (Bitraversable (..))
import Data.Text qualified as Text
import Unison.Hash (Hash)
import Unison.Hash qualified as H
import Unison.Hash qualified as Hash
import Unison.Prelude
import Unison.ShortHash (ShortHash)
import Unison.ShortHash qualified as SH
import Unison.Hash qualified as H
-- | This is the canonical representation of Reference
type Reference = Reference' Text Hash
@ -66,6 +67,8 @@ type TermReferenceId = Id
-- | A type declaration reference id.
type TypeReferenceId = Id
data ReferenceType = RtTerm | RtType deriving (Eq, Ord, Show)
-- | Either a builtin or a user defined (hashed) top-level declaration. Used for both terms and types.
data Reference' t h
= ReferenceBuiltin t
@ -163,4 +166,3 @@ component :: H.Hash -> [k] -> [(k, Id)]
component h ks =
let
in [(k, (Id h i)) | (k, i) <- ks `zip` [0 ..]]

View File

@ -8,7 +8,7 @@ import Unison.Util.Alphabetical (Alphabetical, compareAlphabetical)
-- Represents the parts of a name between the `.`s
newtype NameSegment = NameSegment {toText :: Text}
deriving stock (Eq, Ord, Generic, Show)
deriving stock (Eq, Ord, Generic)
instance Alphabetical NameSegment where
compareAlphabetical n1 n2 = compareAlphabetical (toText n1) (toText n2)
@ -58,3 +58,6 @@ toTextBuilder =
instance IsString NameSegment where
fromString = NameSegment . Text.pack
instance Show NameSegment where
show = show . toText
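The hand-written instance changes how segments render; a minimal hedged illustration (`segmentShown` is a hypothetical example value, and the derived output shown is what a stock derived Show would have produced):
import Data.Text qualified as Text

segmentShown :: String
segmentShown = show (NameSegment (Text.pack "base"))
-- With the new instance: "\"base\""
-- With the previously derived instance: "NameSegment {toText = \"base\"}"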

View File

@ -86,11 +86,11 @@
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1673956053,
"narHash": "sha256-4gtG9iQuiKITOjNQQeQIpoIB6b16fm+504Ch3sNKLd8=",
"lastModified": 1696426674,
"narHash": "sha256-kvjfFW7WAETZlt09AgDn1MrtKzP7t90Vf7vypd3OL1U=",
"owner": "edolstra",
"repo": "flake-compat",
"rev": "35bb57c0c8d8b62bbfd284272c928ceb64ddbde9",
"rev": "0f9255e01c2351cc7d116c072cb317785dd33b33",
"type": "github"
},
"original": {
@ -121,11 +121,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1681202837,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
"lastModified": 1694529238,
"narHash": "sha256-zsNZZGTGnMOf9YpHKJqMSsa0dXbfmxeoJ7xHlrt+xmY=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401",
"rev": "ff7b65b44d01cf9ba6a71320833626af21126384",
"type": "github"
},
"original": {
@ -134,22 +134,6 @@
"type": "github"
}
},
"flake-utils_2": {
"locked": {
"lastModified": 1679360468,
"narHash": "sha256-LGnza3cfXF10Biw3ZTg0u9o9t7s680Ww200t5KkHTh8=",
"owner": "hamishmack",
"repo": "flake-utils",
"rev": "e1ea268ff47ad475443dbabcd54744b4e5b9d4f5",
"type": "github"
},
"original": {
"owner": "hamishmack",
"ref": "hkm/nested-hydraJobs",
"repo": "flake-utils",
"type": "github"
}
},
"ghc-8.6.5-iohk": {
"flake": false,
"locked": {
@ -167,14 +151,51 @@
"type": "github"
}
},
"ghc98X": {
"flake": false,
"locked": {
"lastModified": 1696643148,
"narHash": "sha256-E02DfgISH7EvvNAu0BHiPvl1E5FGMDi0pWdNZtIBC9I=",
"ref": "ghc-9.8",
"rev": "443e870d977b1ab6fc05f47a9a17bc49296adbd6",
"revCount": 61642,
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
},
"original": {
"ref": "ghc-9.8",
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
}
},
"ghc99": {
"flake": false,
"locked": {
"lastModified": 1697054644,
"narHash": "sha256-kKarOuXUaAH3QWv7ASx+gGFMHaHKe0pK5Zu37ky2AL4=",
"ref": "refs/heads/master",
"rev": "f383a242c76f90bcca8a4d7ee001dcb49c172a9a",
"revCount": 62040,
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
},
"original": {
"submodules": true,
"type": "git",
"url": "https://gitlab.haskell.org/ghc/ghc"
}
},
"hackage": {
"flake": false,
"locked": {
"lastModified": 1692577366,
"narHash": "sha256-PkMJxz0AOgsmTGUppr9obJaGLHxSJbeNxa8C0t8RUio=",
"lastModified": 1699402991,
"narHash": "sha256-2nQBlA3ygBiIqVPh2J1JwP51rEO0xMjyoOaoJk5PboY=",
"owner": "input-output-hk",
"repo": "hackage.nix",
"rev": "4bb79ccf9e2e80990cf06c96cdf3c61ca1dfa684",
"rev": "9e963602a5b3259dac9cf5e994f0a338fb352b7e",
"type": "github"
},
"original": {
@ -191,11 +212,15 @@
"cabal-36": "cabal-36",
"cardano-shell": "cardano-shell",
"flake-compat": "flake-compat_2",
"flake-utils": "flake-utils_2",
"ghc-8.6.5-iohk": "ghc-8.6.5-iohk",
"ghc98X": "ghc98X",
"ghc99": "ghc99",
"hackage": "hackage",
"hls-1.10": "hls-1.10",
"hls-2.0": "hls-2.0",
"hls-2.2": "hls-2.2",
"hls-2.3": "hls-2.3",
"hls-2.4": "hls-2.4",
"hpc-coveralls": "hpc-coveralls",
"hydra": "hydra",
"iserv-proxy": "iserv-proxy",
@ -214,11 +239,11 @@
"stackage": "stackage"
},
"locked": {
"lastModified": 1692579024,
"narHash": "sha256-alHUQAAmeyKm/aZ8q8/AQSpxv+Uo6P2E9eXJJTjyC2M=",
"lastModified": 1699404571,
"narHash": "sha256-EwI7vKBxCHvIKPWbvGlOF9IZlSFqPODgT/BQy8Z2s/w=",
"owner": "input-output-hk",
"repo": "haskell.nix",
"rev": "884be454d5087a37ecc6f3665de7333e3c2e72a8",
"rev": "cec253ca482301509e9e90cb5c15299dd3550cce",
"type": "github"
},
"original": {
@ -261,6 +286,57 @@
"type": "github"
}
},
"hls-2.2": {
"flake": false,
"locked": {
"lastModified": 1693064058,
"narHash": "sha256-8DGIyz5GjuCFmohY6Fa79hHA/p1iIqubfJUTGQElbNk=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "b30f4b6cf5822f3112c35d14a0cba51f3fe23b85",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.2.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hls-2.3": {
"flake": false,
"locked": {
"lastModified": 1695910642,
"narHash": "sha256-tR58doOs3DncFehHwCLczJgntyG/zlsSd7DgDgMPOkI=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "458ccdb55c9ea22cd5d13ec3051aaefb295321be",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.3.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hls-2.4": {
"flake": false,
"locked": {
"lastModified": 1696939266,
"narHash": "sha256-VOMf5+kyOeOmfXTHlv4LNFJuDGa7G3pDnOxtzYR40IU=",
"owner": "haskell",
"repo": "haskell-language-server",
"rev": "362fdd1293efb4b82410b676ab1273479f6d17ee",
"type": "github"
},
"original": {
"owner": "haskell",
"ref": "2.4.0.0",
"repo": "haskell-language-server",
"type": "github"
}
},
"hpc-coveralls": {
"flake": false,
"locked": {
@ -303,11 +379,11 @@
"iserv-proxy": {
"flake": false,
"locked": {
"lastModified": 1688517130,
"narHash": "sha256-hUqfxSlo+ffqVdkSZ1EDoB7/ILCL25eYkcCXW9/P3Wc=",
"lastModified": 1691634696,
"narHash": "sha256-MZH2NznKC/gbgBu8NgIibtSUZeJ00HTLJ0PlWKCBHb0=",
"ref": "hkm/remote-iserv",
"rev": "9151db2a9a61d7f5fe52ff8836f18bbd0fd8933c",
"revCount": 13,
"rev": "43a979272d9addc29fbffc2e8542c5d96e993d73",
"revCount": 14,
"type": "git",
"url": "https://gitlab.haskell.org/hamishmack/iserv-proxy.git"
},
@ -452,11 +528,11 @@
},
"nixpkgs-2305": {
"locked": {
"lastModified": 1690680713,
"narHash": "sha256-NXCWA8N+GfSQyoN7ZNiOgq/nDJKOp5/BHEpiZP8sUZw=",
"lastModified": 1695416179,
"narHash": "sha256-610o1+pwbSu+QuF3GE0NU5xQdTHM3t9wyYhB9l94Cd8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "b81af66deb21f73a70c67e5ea189568af53b1e8c",
"rev": "715d72e967ec1dd5ecc71290ee072bcaf5181ed6",
"type": "github"
},
"original": {
@ -484,11 +560,11 @@
},
"nixpkgs-unstable": {
"locked": {
"lastModified": 1690720142,
"narHash": "sha256-GywuiZjBKfFkntQwpNQfL+Ksa2iGjPprBGL0/psgRZM=",
"lastModified": 1695318763,
"narHash": "sha256-FHVPDRP2AfvsxAdc+AsgFJevMz5VBmnZglFUMlxBkcY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3acb5c4264c490e7714d503c7166a3fde0c51324",
"rev": "e12483116b3b51a185a33a272bf351e357ba9a99",
"type": "github"
},
"original": {
@ -529,11 +605,11 @@
"stackage": {
"flake": false,
"locked": {
"lastModified": 1692576558,
"narHash": "sha256-cFQs/lSEhKD6oIBPX1SRVvU81sxviB81CF+bwGwGHP0=",
"lastModified": 1699402155,
"narHash": "sha256-fOywUFLuAuZAkIrv1JdjGzfY53uEiMRlu8UpdJtCjh0=",
"owner": "input-output-hk",
"repo": "stackage.nix",
"rev": "ae06057930b59a55b17aee2303ce604ae79b4db6",
"rev": "7c7bfe8cca23c96b850e16f3c0b159aca1850314",
"type": "github"
},
"original": {

View File

@ -44,6 +44,7 @@ data DebugFlag
| PatternCoverage
| PatternCoverageConstraintSolver
| KindInference
| Update
deriving (Eq, Ord, Show, Bounded, Enum)
debugFlags :: Set DebugFlag
@ -70,6 +71,7 @@ debugFlags = case (unsafePerformIO (lookupEnv "UNISON_DEBUG")) of
"PATTERN_COVERAGE" -> pure PatternCoverage
"PATTERN_COVERAGE_CONSTRAINT_SOLVER" -> pure PatternCoverageConstraintSolver
"KIND_INFERENCE" -> pure KindInference
"UPDATE" -> pure Update
_ -> empty
{-# NOINLINE debugFlags #-}
@ -125,6 +127,10 @@ debugKindInference :: Bool
debugKindInference = KindInference `Set.member` debugFlags
{-# NOINLINE debugKindInference #-}
debugUpdate :: Bool
debugUpdate = Update `Set.member` debugFlags
{-# NOINLINE debugUpdate #-}
debugPatternCoverage :: Bool
debugPatternCoverage = PatternCoverage `Set.member` debugFlags
{-# NOINLINE debugPatternCoverage #-}
@ -188,3 +194,4 @@ shouldDebug = \case
PatternCoverage -> debugPatternCoverage
PatternCoverageConstraintSolver -> debugPatternCoverageConstraintSolver
KindInference -> debugKindInference
Update -> debugUpdate
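A hedged usage sketch of the new flag (`traceUpdate` is a hypothetical helper; `debugUpdate` is the accessor added above):
import Control.Monad (when)
import Unison.Debug qualified as Debug

-- Emit update-path diagnostics only when UNISON_DEBUG includes UPDATE.
traceUpdate :: String -> IO ()
traceUpdate msg =
  when Debug.debugUpdate (putStrLn ("update: " <> msg))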

View File

@ -11,6 +11,7 @@ module Unison.Util.Map
traverseKeysWith,
swap,
upsert,
upsertF,
valuesVector,
)
where
@ -45,6 +46,11 @@ upsert :: (Ord k) => (Maybe v -> v) -> k -> Map k v -> Map k v
upsert f =
Map.alter (Just . f)
-- | Like 'upsert', but the update function runs in a functor.
upsertF :: (Functor f, Ord k) => (Maybe v -> f v) -> k -> Map k v -> f (Map k v)
upsertF f =
Map.alterF (fmap Just . f)
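A hedged example of the effectful callback in action (`insertNew` is a hypothetical helper; the decl coherency check later in this change uses `upsertF` similarly with `Either`):
-- Insert a key only if it is absent, reporting the existing value otherwise.
insertNew :: Ord k => k -> v -> Map k v -> Either v (Map k v)
insertNew k v =
  upsertF f k
  where
    f = \case
      Nothing -> Right v
      Just old -> Left old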
valuesVector :: Map k v -> Vector v
valuesVector =
Vector.fromList . Map.elems

View File

@ -0,0 +1,55 @@
name: unison-util-nametree
github: unisonweb/unison
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
ghc-options: -Wall
dependencies:
- base
- containers
- lens
- semialign
- semigroups
- these
- unison-core
- unison-core1
- unison-prelude
- unison-util-relation
library:
source-dirs: src
when:
- condition: false
other-modules: Paths_unison_util_nametree
default-extensions:
- BangPatterns
- BlockArguments
- DeriveAnyClass
- DeriveFoldable
- DeriveFunctor
- DeriveGeneric
- DeriveTraversable
- DerivingStrategies
- DerivingVia
- DoAndIfThenElse
- DuplicateRecordFields
- FlexibleContexts
- FlexibleInstances
- GADTs
- GeneralizedNewtypeDeriving
- ImportQualifiedPost
- InstanceSigs
- LambdaCase
- MultiParamTypeClasses
- MultiWayIf
- NamedFieldPuns
- NumericUnderscores
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes
- ScopedTypeVariables
- TupleSections
- TypeApplications
- ViewPatterns

View File

@ -0,0 +1,167 @@
module Unison.Util.Nametree
( -- * Nametree
Nametree (..),
traverseNametreeWithName,
unfoldNametree,
-- ** Flattening and unflattening
flattenNametree,
unflattenNametree,
-- * Definitions
Defns (..),
mapDefns,
bimapDefns,
)
where
import Data.List.NonEmpty (NonEmpty, pattern (:|))
import Data.List.NonEmpty qualified as List.NonEmpty
import Data.Map.Strict qualified as Map
import Data.Semialign (Semialign (alignWith), Unzip (unzipWith), Zip (zipWith))
import Data.Semigroup.Generic (GenericSemigroupMonoid (..))
import Data.These (These (..), these)
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment
import Unison.Prelude
import Unison.Util.BiMultimap (BiMultimap)
import Unison.Util.BiMultimap qualified as BiMultimap
import Prelude hiding (zipWith)
-- | A nametree has a value, and a collection of children nametrees keyed by name segment.
data Nametree a = Nametree
{ value :: !a,
children :: !(Map NameSegment (Nametree a))
}
deriving stock (Functor, Generic, Show)
instance Semialign Nametree where
alignWith :: (These a b -> c) -> Nametree a -> Nametree b -> Nametree c
alignWith f (Nametree x xs) (Nametree y ys) =
Nametree (f (These x y)) (alignWith (these (fmap (f . This)) (fmap (f . That)) (alignWith f)) xs ys)
instance Zip Nametree where
zipWith :: (a -> b -> c) -> Nametree a -> Nametree b -> Nametree c
zipWith f (Nametree x xs) (Nametree y ys) =
Nametree (f x y) (zipWith (zipWith f) xs ys)
instance Unzip Nametree where
unzipWith :: (c -> (a, b)) -> Nametree c -> (Nametree a, Nametree b)
unzipWith f (Nametree x xs) =
(Nametree y ys, Nametree z zs)
where
(y, z) = f x
(ys, zs) = unzipWith (unzipWith f) xs
-- | Traverse over a nametree, with access to the list of name segments (in reverse order) leading to each value.
traverseNametreeWithName :: Applicative f => ([NameSegment] -> a -> f b) -> Nametree a -> f (Nametree b)
traverseNametreeWithName f =
go []
where
go names (Nametree x xs) =
Nametree <$> f names x <*> Map.traverseWithKey (\name -> go (name : names)) xs
-- | Build a nametree from a seed value.
unfoldNametree :: (a -> (b, Map NameSegment a)) -> a -> Nametree b
unfoldNametree f x =
let (y, ys) = f x
in Nametree y (unfoldNametree f <$> ys)
-- | 'flattenNametree' organizes a nametree like
--
-- > "foo" = #foo
-- > "foo": {
-- > "bar" = #bar
-- > "bar": {
-- > "baz" = #baz
-- > }
-- > }
--
-- into an equivalent-but-flatter association between names and definitions, like
--
-- > {
-- > "foo" = #bar,
-- > "foo.bar" = #bar,
-- > "foo.bar.baz" = #baz
-- > }
flattenNametree ::
forall a b.
Ord b =>
(a -> Map NameSegment b) ->
Nametree a ->
BiMultimap b Name
flattenNametree f =
go []
where
go :: [NameSegment] -> Nametree a -> BiMultimap b Name
go prefix (Nametree node children) =
foldr
( \(name, child) ->
-- This union is safe because the keys are disjoint
BiMultimap.unsafeUnion (go (name : prefix) child)
)
( BiMultimap.fromRange
( Map.mapKeysMonotonic
(\name -> Name.fromReverseSegments (name :| prefix))
(f node)
)
)
(Map.toList children)
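A hedged example of flattening the docstring's tree, with `Text` hashes standing in for real references (string literals assume OverloadedStrings and the IsString instance for NameSegment):
exampleTree :: Nametree (Map NameSegment Text)
exampleTree =
  Nametree
    (Map.singleton "foo" "#foo")
    ( Map.singleton "foo" $
        Nametree
          (Map.singleton "bar" "#bar")
          (Map.singleton "bar" (Nametree (Map.singleton "baz" "#baz") Map.empty))
    )

-- BiMultimap.range (flattenNametree id exampleTree) relates
--   "foo"         to "#foo"
--   "foo.bar"     to "#bar"
--   "foo.bar.baz" to "#baz"
-- and unflattenNametree recovers an equivalent tree from that relation.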
-- | 'unflattenNametree' organizes an association between names and definitions like
--
-- > {
-- > "foo" = #bar,
-- > "foo.bar" = #bar,
-- > "foo.bar.baz" = #baz
-- > }
--
-- into an equivalent-but-less-flat nametree, like
--
-- > "foo" = #foo
-- > "foo": {
-- > "bar" = #bar
-- > "bar": {
-- > "baz" = #baz
-- > }
-- > }
unflattenNametree :: Ord a => BiMultimap a Name -> Nametree (Map NameSegment a)
unflattenNametree =
unfoldNametree unflattenLevel . map (first Name.segments) . Map.toList . BiMultimap.range
where
unflattenLevel :: [(NonEmpty NameSegment, a)] -> (Map NameSegment a, Map NameSegment [(NonEmpty NameSegment, a)])
unflattenLevel =
foldl' phi (Map.empty, Map.empty)
where
phi (!accValue, !accChildren) = \case
(NameHere n, v) -> (Map.insert n v accValue, accChildren)
(NameThere n ns, v) -> (accValue, Map.insertWith (++) n [(ns, v)] accChildren)
-- Helper patterns for switching on "name here" (1 name segment) or "name there" (2+ name segments)
pattern NameHere :: a -> NonEmpty a
pattern NameHere x <- x :| (List.NonEmpty.nonEmpty -> Nothing)
pattern NameThere :: a -> NonEmpty a -> NonEmpty a
pattern NameThere x xs <- x :| (List.NonEmpty.nonEmpty -> Just xs)
{-# COMPLETE NameHere, NameThere #-}
-- | Definitions (terms and types) in a namespace.
--
-- FIXME this doesn't belong in this module
data Defns terms types = Defns
{ terms :: !terms,
types :: !types
}
deriving stock (Generic, Show)
deriving (Semigroup) via GenericSemigroupMonoid (Defns terms types)
mapDefns :: (a -> b) -> Defns a a -> Defns b b
mapDefns f (Defns terms types) =
Defns (f terms) (f types)
bimapDefns :: (terms -> terms') -> (types -> types') -> Defns terms types -> Defns terms' types'
bimapDefns f g (Defns terms types) =
Defns (f terms) (g types)

View File

@ -0,0 +1,66 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
--
-- see: https://github.com/sol/hpack
name: unison-util-nametree
version: 0.0.0
homepage: https://github.com/unisonweb/unison#readme
bug-reports: https://github.com/unisonweb/unison/issues
copyright: Copyright (C) 2013-2018 Unison Computing, PBC and contributors
build-type: Simple
source-repository head
type: git
location: https://github.com/unisonweb/unison
library
exposed-modules:
Unison.Util.Nametree
hs-source-dirs:
src
default-extensions:
BangPatterns
BlockArguments
DeriveAnyClass
DeriveFoldable
DeriveFunctor
DeriveGeneric
DeriveTraversable
DerivingStrategies
DerivingVia
DoAndIfThenElse
DuplicateRecordFields
FlexibleContexts
FlexibleInstances
GADTs
GeneralizedNewtypeDeriving
ImportQualifiedPost
InstanceSigs
LambdaCase
MultiParamTypeClasses
MultiWayIf
NamedFieldPuns
NumericUnderscores
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
ScopedTypeVariables
TupleSections
TypeApplications
ViewPatterns
ghc-options: -Wall
build-depends:
base
, containers
, lens
, semialign
, semigroups
, these
, unison-core
, unison-core1
, unison-prelude
, unison-util-relation
default-language: Haskell2010

View File

@ -39,15 +39,16 @@ benchmarks:
dependencies:
- base
- containers
- extra
- unison-prelude
- deepseq
- extra
- nonempty-containers
- unison-prelude
ghc-options:
-Wall
default-extensions:
- ApplicativeDo
- BangPatterns
- BlockArguments
- DeriveFunctor
- DerivingStrategies

View File

@ -0,0 +1,246 @@
-- | A left-unique relation.
module Unison.Util.BiMultimap
( BiMultimap,
Unison.Util.BiMultimap.empty,
-- ** Lookup
memberDom,
lookupDom,
lookupRan,
lookupPreimage,
-- ** Mapping / traversing
unsafeTraverseDom,
-- ** Filtering
filter,
filterDom,
filterDomain,
restrictDom,
restrictRan,
withoutDom,
withoutRan,
-- ** Maps
domain,
range,
unsafeFromDomain,
fromRange,
-- ** Sets
dom,
ran,
-- ** Insert
insert,
unsafeInsert,
-- ** Union
unsafeUnion,
)
where
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Data.Set.NonEmpty (NESet)
import Data.Set.NonEmpty qualified as Set.NonEmpty
import Unison.Prelude
import Unison.Util.Map qualified as Map
import Prelude hiding (filter)
-- | A left-unique relation.
--
-- "Left-unique" means that for all @(x, y)@ in the relation, @y@ is related only to @x@.
data BiMultimap a b = BiMultimap
{ toMultimap :: !(Map a (NESet b)),
toMapR :: !(Map b a)
}
deriving (Eq, Ord, Show)
-- | An empty left-unique relation.
empty :: (Ord a, Ord b) => BiMultimap a b
empty = BiMultimap mempty mempty
memberDom :: Ord a => a -> BiMultimap a b -> Bool
memberDom x =
Map.member x . domain
-- | Look up the set of @b@ related to an @a@.
--
-- /O(log a)/.
lookupDom :: Ord a => a -> BiMultimap a b -> Set b
lookupDom a =
lookupDom_ a . domain
lookupDom_ :: Ord a => a -> Map a (NESet b) -> Set b
lookupDom_ x xs =
maybe Set.empty Set.NonEmpty.toSet (Map.lookup x xs)
-- | Look up the @a@ related to a @b@.
--
-- /O(log b)/.
lookupRan :: Ord b => b -> BiMultimap a b -> Maybe a
lookupRan b (BiMultimap _ r) =
Map.lookup b r
-- | Look up the preimage of a @b@, that is, the set of @b@ that are related to the same @a@ as the input @b@.
--
-- /O(log a + log b)/.
lookupPreimage :: (Ord a, Ord b) => b -> BiMultimap a b -> Set b
lookupPreimage y (BiMultimap domain range) =
maybe Set.empty (\x -> lookupDom_ x domain) (Map.lookup y range)
-- | Traverse over the domain of a left-unique relation.
--
-- The caller is responsible for maintaining left-uniqueness.
unsafeTraverseDom :: forall a b m x. (Monad m, Ord b, Ord x) => (a -> m b) -> BiMultimap a x -> m (BiMultimap b x)
unsafeTraverseDom f m =
foldr g pure (Map.toList (domain m)) Unison.Util.BiMultimap.empty
where
g :: (a, NESet x) -> (BiMultimap b x -> m (BiMultimap b x)) -> (BiMultimap b x -> m (BiMultimap b x))
g (a, xs) acc (BiMultimap domain0 range0) = do
!b <- f a
acc $! BiMultimap (Map.insert b xs domain0) (deriveRangeFromDomain b xs range0)
-- | Filter a left-unique relation, keeping only members @(a, b)@ that satisfy a predicate.
filter :: (Ord a, Ord b) => (a -> b -> Bool) -> BiMultimap a b -> BiMultimap a b
filter p (BiMultimap domain range) =
BiMultimap
( Map.mapMaybeWithKey
( \x ys ->
ys
& Set.NonEmpty.filter (p x)
& Set.NonEmpty.nonEmptySet
)
domain
)
(Map.filterWithKey (flip p) range)
-- | Filter a left-unique relation, keeping only members @(a, b)@ whose @a@ satisfies a predicate.
filterDom :: (Ord a, Ord b) => (a -> Bool) -> BiMultimap a b -> BiMultimap a b
filterDom f m =
unsafeFromDomain (Map.filterWithKey (\x _ -> f x) (domain m))
-- | Filter a left-unique relation, keeping only members @(a, b)@ whose @a@ and set of @b@ satisfies a predicate.
filterDomain :: (Ord a, Ord b) => (a -> NESet b -> Bool) -> BiMultimap a b -> BiMultimap a b
filterDomain f m =
unsafeFromDomain (Map.filterWithKey f (domain m))
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @a@ is in the given set.
restrictDom :: (Ord a, Ord b) => Set a -> BiMultimap a b -> BiMultimap a b
restrictDom xs m =
unsafeFromDomain (Map.restrictKeys (domain m) xs)
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @b@ is in the given set.
restrictRan :: (Ord a, Ord b) => Set b -> BiMultimap a b -> BiMultimap a b
restrictRan ys m =
fromRange (Map.restrictKeys (range m) ys)
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @a@ is not in the given set.
withoutDom :: (Ord a, Ord b) => Set a -> BiMultimap a b -> BiMultimap a b
withoutDom xs m =
unsafeFromDomain (Map.withoutKeys (domain m) xs)
-- | Restrict a left-unique relation to only those @(a, b)@ members whose @b@ is not in the given set.
withoutRan :: (Ord a, Ord b) => Set b -> BiMultimap a b -> BiMultimap a b
withoutRan ys m =
fromRange (Map.withoutKeys (range m) ys)
domain :: BiMultimap a b -> Map a (NESet b)
domain = toMultimap
range :: BiMultimap a b -> Map b a
range = toMapR
-- | Construct a left-unique relation from a mapping from its left-elements to set-of-right-elements. The caller is
-- responsible for ensuring that no right-element is mapped to by two different left-elements.
unsafeFromDomain :: Ord b => Map a (NESet b) -> BiMultimap a b
unsafeFromDomain domain =
BiMultimap domain (invertDomain domain)
invertDomain :: forall a b. Ord b => Map a (NESet b) -> Map b a
invertDomain =
Map.foldlWithKey' f Map.empty
where
f :: Map b a -> a -> NESet b -> Map b a
f acc x ys =
Set.NonEmpty.foldl' (g x) acc ys
g :: a -> Map b a -> b -> Map b a
g x acc y =
Map.insert y x acc
fromRange :: (Ord a, Ord b) => Map b a -> BiMultimap a b
fromRange m =
BiMultimap (Map.foldlWithKey' f Map.empty m) m
where
f acc k v =
Map.insertWith Set.NonEmpty.union v (Set.NonEmpty.singleton k) acc
-- | Returns the domain of the relation, as a Set, in its entirety.
--
-- /O(a)/.
dom :: BiMultimap a b -> Set a
dom =
Map.keysSet . toMultimap
-- | Returns the range of the relation, as a Set, in its entirety.
--
-- /O(b)/.
ran :: BiMultimap a b -> Set b
ran =
Map.keysSet . toMapR
-- | Insert a pair into a left-unique relation, maintaining left-uniqueness, preferring the latest inserted element.
--
-- That is, if a left-unique relation already contains the pair @(x, y)@, then inserting the pair @(z, y)@ will cause
-- the @(x, y)@ pair to be deleted.
insert :: (Ord a, Ord b) => a -> b -> BiMultimap a b -> BiMultimap a b
insert a b m@(BiMultimap l r) =
case Map.alterF (upsertFunc a) b r of
(Ignored, _) -> m
(Inserted, r') -> BiMultimap l' r'
(Replaced old, r') ->
let l'' = Map.update (Set.NonEmpty.nonEmptySet . Set.NonEmpty.delete b) old l'
in BiMultimap l'' r'
where
l' = Map.upsert (maybe (Set.NonEmpty.singleton b) (Set.NonEmpty.insert b)) a l
-- @upsertFunc x@ returns a function that upserts @x@, suitable for passing to @Map.alterF@.
upsertFunc :: Eq a => a -> Maybe a -> (UpsertResult a, Maybe a)
upsertFunc new existing =
case existing of
Nothing -> (Inserted, Just new)
Just old
| old == new -> (Ignored, existing)
| otherwise -> (Replaced old, Just new)
data UpsertResult old
= Ignored -- Ignored because an equivalent thing was already there
| Inserted -- Inserted something new
| Replaced old -- Replaced what was there, here's the old thing
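A hedged illustration of the replacement semantics described above (expected results shown as comments):
exampleInserts :: BiMultimap String Int
exampleInserts =
  insert "z" 1 (insert "x" 1 (insert "x" 2 Unison.Util.BiMultimap.empty))

-- lookupDom "x" exampleInserts == Set.fromList [2]
-- lookupDom "z" exampleInserts == Set.fromList [1]
-- lookupRan 1 exampleInserts   == Just "z"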
-- | Like @insert x y@, but the caller is responsible for maintaining left-uniqueness.
unsafeInsert :: (Ord a, Ord b) => a -> b -> BiMultimap a b -> BiMultimap a b
unsafeInsert x y (BiMultimap xs ys) =
BiMultimap
(Map.upsert (maybe (Set.NonEmpty.singleton y) (Set.NonEmpty.insert y)) x xs)
(Map.insert y x ys)
-- | Union two left-unique relations together.
--
-- The caller is responsible for maintaining left-uniqueness.
unsafeUnion :: (Ord a, Ord b) => BiMultimap a b -> BiMultimap a b -> BiMultimap a b
unsafeUnion xs ys =
BiMultimap
(Map.unionWith Set.NonEmpty.union (toMultimap xs) (toMultimap ys))
(Map.union (toMapR xs) (toMapR ys))
------------------------------------------------------------------------------------------------------------------------
-- @deriveRangeFromDomain x ys range@ is a helper that inserts @(x, y1)@, @(x, y2)@, ... into the range map @range@.
deriveRangeFromDomain :: Ord b => a -> NESet b -> Map b a -> Map b a
deriveRangeFromDomain x ys acc =
foldr (flip Map.insert x) acc ys
{-# INLINE deriveRangeFromDomain #-}

View File

@ -17,13 +17,14 @@ source-repository head
library
exposed-modules:
Unison.Util.BiMultimap
Unison.Util.Relation
Unison.Util.Relation3
Unison.Util.Relation4
hs-source-dirs:
src
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DerivingStrategies
@ -44,6 +45,7 @@ library
, containers
, deepseq
, extra
, nonempty-containers
, unison-prelude
default-language: Haskell2010
@ -53,7 +55,7 @@ test-suite util-relation-tests
hs-source-dirs:
test
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DerivingStrategies
@ -76,6 +78,7 @@ test-suite util-relation-tests
, deepseq
, easytest
, extra
, nonempty-containers
, random
, unison-prelude
, unison-util-relation
@ -87,7 +90,7 @@ benchmark relation
hs-source-dirs:
benchmarks/relation
default-extensions:
ApplicativeDo
BangPatterns
BlockArguments
DeriveFunctor
DerivingStrategies
@ -109,6 +112,7 @@ benchmark relation
, containers
, deepseq
, extra
, nonempty-containers
, random
, tasty-bench
, unison-prelude

View File

@ -122,6 +122,7 @@ dependencies:
- unison-util-base32hex
- unison-util-bytes
- unison-util-cache
- unison-util-nametree
- unison-util-relation
- unison-util-rope
- unison-util-serialization
@ -132,6 +133,7 @@ dependencies:
- vector
- wai
- warp
- witch
- witherable
- x509
- x509-store
@ -179,6 +181,7 @@ default-extensions:
- LambdaCase
- MultiParamTypeClasses
- NamedFieldPuns
- OverloadedLabels
- OverloadedStrings
- PatternSynonyms
- RankNTypes

View File

@ -51,6 +51,8 @@ module Unison.Codebase.Branch
addTermName,
addTypeName,
deleteTermName,
annihilateTermName,
annihilateTypeName,
deleteTypeName,
setChildBranch,
replacePatch,
@ -693,6 +695,12 @@ deleteTermName r n b
over terms (Star3.deletePrimaryD1 (r, n)) b
deleteTermName _ _ b = b
annihilateTermName :: NameSegment -> Branch0 m -> Branch0 m
annihilateTermName = over terms . Star3.deleteD1
annihilateTypeName :: NameSegment -> Branch0 m -> Branch0 m
annihilateTypeName = over types . Star3.deleteD1
deleteTypeName :: TypeReference -> NameSegment -> Branch0 m -> Branch0 m
deleteTypeName r n b
| Star3.memberD1 (r, n) (view types b) =

View File

@ -0,0 +1,230 @@
-- | The "decl coherency check": a type declaration in a namespace is "coherent" if it satisfies both of the following
-- criteria.
--
-- 1. For each naming of the type decl (say "Foo"#foohash), there exists exactly one name for each of its constructors
-- arbitrarily deep in the corresponding namespace ("Foo" in this example).
--
-- This allows us to render the decl naturally, as in
--
-- structural type Foo
-- = Bar Nat Int
-- | internal.hello.Bonk Nat
--
-- which corresponds to the three names
--
-- "Foo" => #foohash
-- "Foo.Bar" => #foohash#0
-- "Foo.internal.hello.Bonk" => #foohash#1
--
-- We could not do this if there were at least one constructor whose full name did not contain the full name of the type
-- decl itself as a prefix.
--
-- A notable consequence of this requirement is that a second naming of a decl (i.e. an alias) cannot be embedded
-- within the first naming, as in:
--
-- type Foo = ...
-- type Foo.some.inner.namespace = ... -- an alias of Foo
--
-- 2. No constructor has a "stray" name that does not have a prefix that equals the type declaration's name. For
-- example, in the namespace
--
-- "Foo" => #foohash
-- "Foo.Bar" => #foohash#0
-- "Deep.What.SomeAlias" => #foohash#0
--
-- the constructor "What.SomeAlias" is "stray", as the type decl #foohash has no name that matches any prefix
-- (i.e. "Deep.What" nor "Deep").
--
-- On to the implementation. We are going to traverse the namespace depth-first. As we go, we keep a stateful mapping
-- between each decl reference that we *have* seen a name for in one of our parent namespaces, and its corresponding set
-- of constructors that we *haven't* yet seen names for but expect to see before fully searching the corresponding
-- sub-namespace (e.g. the child namespace named "Foo" of the namespace that declares a decl "Foo").
--
-- When processing a namespace, we first process all terms. Each constructor will fall into one of three cases:
--
-- > +----------------------------------------------------------------------------------------------------------------+
-- > | Case | Mapping before | Encountered constructor | Mapping after |
-- > +----------------------------------------------------------------------------------------------------------------+
-- > | Happy path | { #foo : {0, 1, 2} } | #foo#1 | { #foo : {0, 2} } |
-- > | Already seen | { #foo : {0, 1, 2} } | #foo#5 | Error: duplicate naming for constructor #foo#5 |
-- > | Never seen | { #foo : {0, 1, 2} } | #bar#2 | Error: stray constructor #bar#2 |
-- > +----------------------------------------------------------------------------------------------------------------+
--
-- In "happy path", we see a naming of a constructor that we're expecting, and check it off.
-- In "already seen", we see a second naming of a constructor that we're no longer expecting, and fail.
-- In "never seen", we see a naming of a constructor before any naming of its decl, so we fail.
--
-- Next, we process all type decls. Each will again fall into one of three cases:
--
-- > +-----------------------------------------------------------------------------------------------------+
-- > | Case | Mapping before | Declaration | Num constructors | New mapping |
-- > +-----------------------------------------------------------------------------------------------------+
-- > | Uninhabited decl | | #foo | 0 | |
-- > | Inhabited decl | | #foo | 1 or more | { #foo : {0, ..., n-1} } |
-- > | Already seen | { #foo : {0, 1, 2} } | #foo | Irrelevant | Error: nested decl alias |
-- > +-----------------------------------------------------------------------------------------------------+
--
-- In "uninhabited decl", we find a decl with no constructors, so we don't expect anything new.
-- In "already seen", we find a second naming of a decl, whose constructors will necessarily violate coherency condition
-- (1) above.
--
-- In "inhabited decl", we find a decl with N constructors, and handle it by:
-- 1. Adding to our state that we expect a name for each.
-- 2. Recursing into the child namespace whose name matches the decl.
-- 3. (If we return from the recursion without short-circuiting) removing the mapping added in step (1) and asserting
-- that its value is the empty set (meaning we encountered a name for every constructor).
--
-- Note: This check could be moved into SQLite (with sufficient schema support) some day, but for now, we just do this
-- in memory.
--
-- Note: once upon a time, decls could be "incoherent". Then, we decided we want decls to be "coherent". Thus, this
-- machinery was invented.
module Unison.Codebase.Branch.DeclCoherencyCheck
( IncoherentDeclReason (..),
checkDeclCoherency,
)
where
import Control.Lens (view, (%=), (.=))
import Control.Monad.Except (ExceptT)
import Control.Monad.Except qualified as Except
import Control.Monad.State.Strict (StateT)
import Control.Monad.State.Strict qualified as State
import Control.Monad.Trans.Except qualified as Except (except)
import Data.Functor.Compose (Compose (..))
import Data.IntMap.Strict (IntMap)
import Data.IntMap.Strict qualified as IntMap
import Data.List.NonEmpty (pattern (:|))
import Data.Map.Strict qualified as Map
import Data.Maybe (fromJust)
import Data.Set qualified as Set
import U.Codebase.Reference (Reference' (..), TypeReference, TypeReferenceId)
import U.Codebase.Referent (Referent)
import U.Codebase.Referent qualified as Referent
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment)
import Unison.Prelude
import Unison.Sqlite (Transaction)
import Unison.Util.BiMultimap (BiMultimap)
import Unison.Util.BiMultimap qualified as BiMultimap
import Unison.Util.Map qualified as Map (deleteLookup, upsertF)
import Unison.Util.Nametree (Defns (..), Nametree (..))
import Witch (unsafeFrom)
data IncoherentDeclReason
= -- | A second naming of a constructor was discovered underneath a decl's name, e.g.
--
-- Foo#Foo
-- Foo.Bar#Foo#0
-- Foo.Some.Other.Name.For.Bar#Foo#0
IncoherentDeclReason'ConstructorAlias !Name !Name
| IncoherentDeclReason'MissingConstructorName !Name
| IncoherentDeclReason'NestedDeclAlias !Name
| IncoherentDeclReason'NoConstructorNames !Name
| IncoherentDeclReason'StrayConstructor !Name
checkDeclCoherency ::
(TypeReferenceId -> Transaction Int) ->
(Nametree (Defns (Map NameSegment Referent) (Map NameSegment TypeReference))) ->
Transaction (Either IncoherentDeclReason (BiMultimap Name Name))
checkDeclCoherency loadDeclNumConstructors =
Except.runExceptT
. fmap (view #declNames)
. (`State.execStateT` DeclCoherencyCheckState Map.empty BiMultimap.empty)
. go []
where
go ::
[NameSegment] ->
( Nametree
(Defns (Map NameSegment Referent) (Map NameSegment TypeReference))
) ->
StateT DeclCoherencyCheckState (ExceptT IncoherentDeclReason Transaction) ()
go prefix (Nametree Defns {terms, types} children) = do
for_ (Map.toList terms) \case
(_, Referent.Ref _) -> pure ()
(_, Referent.Con (ReferenceBuiltin _) _) -> pure ()
(name, Referent.Con (ReferenceDerived typeRef) conId) -> do
DeclCoherencyCheckState {expectedConstructors} <- State.get
expectedConstructors1 <- lift (Except.except (Map.upsertF f typeRef expectedConstructors))
#expectedConstructors .= expectedConstructors1
where
f :: Maybe (IntMap MaybeConstructorName) -> Either IncoherentDeclReason (IntMap MaybeConstructorName)
f = \case
Nothing -> Left (IncoherentDeclReason'StrayConstructor (fullName name))
Just expected -> IntMap.alterF g (unsafeFrom @Word64 conId) expected
where
g :: Maybe MaybeConstructorName -> Either IncoherentDeclReason (Maybe MaybeConstructorName)
g = \case
Nothing -> error "didnt put expected constructor id"
Just NoConstructorNameYet -> Right (Just (YesConstructorName (fullName name)))
Just (YesConstructorName firstName) -> Left (IncoherentDeclReason'ConstructorAlias firstName (fullName name))
childrenWeWentInto <-
forMaybe (Map.toList types) \case
(_, ReferenceBuiltin _) -> pure Nothing
(name, ReferenceDerived typeRef) -> do
DeclCoherencyCheckState {expectedConstructors} <- State.get
whatHappened <- do
let recordNewDecl ::
Maybe (IntMap MaybeConstructorName) ->
Compose (ExceptT IncoherentDeclReason Transaction) WhatHappened (IntMap MaybeConstructorName)
recordNewDecl =
Compose . \case
Just _ -> Except.throwError (IncoherentDeclReason'NestedDeclAlias typeName)
Nothing ->
lift (loadDeclNumConstructors typeRef) <&> \case
0 -> UninhabitedDecl
n -> InhabitedDecl (IntMap.fromAscList [(i, NoConstructorNameYet) | i <- [0 .. n - 1]])
lift (getCompose (Map.upsertF recordNewDecl typeRef expectedConstructors))
case whatHappened of
UninhabitedDecl -> pure Nothing
InhabitedDecl expectedConstructors1 -> do
child <-
Map.lookup name children & onNothing do
Except.throwError (IncoherentDeclReason'NoConstructorNames typeName)
#expectedConstructors .= expectedConstructors1
go (name : prefix) child
DeclCoherencyCheckState {expectedConstructors} <- State.get
-- fromJust is safe here because we upserted `typeRef` key above
let (fromJust -> maybeConstructorNames, expectedConstructors1) =
Map.deleteLookup typeRef expectedConstructors
constructorNames <-
unMaybeConstructorNames maybeConstructorNames & onNothing do
Except.throwError (IncoherentDeclReason'MissingConstructorName typeName)
#expectedConstructors .= expectedConstructors1
#declNames %= \declNames ->
foldr (BiMultimap.insert typeName) declNames constructorNames
pure (Just name)
where
typeName = fullName name
let childrenWeHaventGoneInto = children `Map.withoutKeys` Set.fromList childrenWeWentInto
for_ (Map.toList childrenWeHaventGoneInto) \(name, child) -> go (name : prefix) child
where
fullName name =
Name.fromReverseSegments (name :| prefix)
data DeclCoherencyCheckState = DeclCoherencyCheckState
{ expectedConstructors :: !(Map TypeReferenceId (IntMap MaybeConstructorName)),
declNames :: !(BiMultimap Name Name)
}
deriving stock (Generic)
data MaybeConstructorName
= NoConstructorNameYet
| YesConstructorName !Name
unMaybeConstructorNames :: IntMap MaybeConstructorName -> Maybe [Name]
unMaybeConstructorNames =
traverse f . IntMap.elems
where
f :: MaybeConstructorName -> Maybe Name
f = \case
NoConstructorNameYet -> Nothing
YesConstructorName name -> Just name
data WhatHappened a
= UninhabitedDecl
| InhabitedDecl !a
deriving stock (Functor, Show)
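To make the per-constructor bookkeeping from the module comment concrete, here is a hedged, stripped-down model of the term-processing cases (happy path / already seen / never seen); `DeclHash`, `Expecting`, and `seeConstructor` are illustrative names only, not part of this module:
import Data.IntMap.Strict (IntMap)
import Data.IntMap.Strict qualified as IntMap
import Data.Map.Strict (Map)
import Data.Map.Strict qualified as Map

type DeclHash = String

-- For each decl we have seen a name for, the constructor indices still awaiting a name.
type Expecting = Map DeclHash (IntMap ())

-- Seeing a name for constructor `i` of `decl`: check it off (happy path), or fail if it
-- was already checked off (constructor alias) or its decl was never seen (stray).
seeConstructor :: DeclHash -> Int -> Expecting -> Either String Expecting
seeConstructor decl i expecting =
  case Map.lookup decl expecting of
    Nothing -> Left ("stray constructor " <> decl <> "#" <> show i)
    Just slots
      | IntMap.member i slots -> Right (Map.insert decl (IntMap.delete i slots) expecting)
      | otherwise -> Left ("duplicate naming for constructor " <> decl <> "#" <> show i)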

View File

@ -15,8 +15,10 @@ module Unison.Codebase.BranchUtil
makeSetBranch,
makeAddTypeName,
makeDeleteTypeName,
makeAnnihilateTypeName,
makeAddTermName,
makeDeleteTermName,
makeAnnihilateTermName,
makeDeletePatch,
makeReplacePatch,
)
@ -91,7 +93,7 @@ getTermMetadataAt (path, _) r b = Set.fromList <$> List.multimap mdList
mdList = Set.toList . R.ran . Star3.d3 . Star3.selectFact (Set.singleton r) $ terms
terms = Branch._terms $ Branch.getAt0 path b
getType :: Path.HQSplit -> Branch0 m -> Set Reference
getType :: Path.HQSplit -> Branch0 m -> Set Reference.TypeReference
getType (p, hq) b = case hq of
NameOnly n -> Star3.lookupD1 n types
HashQualified n sh -> filter sh $ Star3.lookupD1 n types
@ -119,6 +121,12 @@ makeAddTermName (p, name) r md = (p, Branch.addTermName r name md)
makeDeleteTermName :: Path.Split -> Referent -> (Path, Branch0 m -> Branch0 m)
makeDeleteTermName (p, name) r = (p, Branch.deleteTermName r name)
makeAnnihilateTermName :: Path.Split -> (Path, Branch0 m -> Branch0 m)
makeAnnihilateTermName (p, name) = (p, Branch.annihilateTermName name)
makeAnnihilateTypeName :: Path.Split -> (Path, Branch0 m -> Branch0 m)
makeAnnihilateTypeName (p, name) = (p, Branch.annihilateTypeName name)
makeReplacePatch :: (Applicative m) => Path.Split -> Patch -> (Path, Branch0 m -> Branch0 m)
makeReplacePatch (p, name) patch = (p, Branch.replacePatch name patch)

View File

@ -1,5 +1,6 @@
module Unison.Syntax.DeclPrinter (prettyDecl, prettyDeclHeader, prettyDeclOrBuiltinHeader) where
module Unison.Syntax.DeclPrinter (prettyDecl, prettyDeclW, prettyDeclHeader, prettyDeclOrBuiltinHeader, AccessorName) where
import Control.Monad.Writer (Writer, runWriter, tell)
import Data.List (isPrefixOf)
import Data.Map qualified as Map
import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
@ -13,6 +14,7 @@ import Unison.DataDeclaration qualified as DD
import Unison.DataDeclaration.Dependencies qualified as DD
import Unison.HashQualified qualified as HQ
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.PrettyPrintEnv (PrettyPrintEnv)
import Unison.PrettyPrintEnv qualified as PPE
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl (..))
@ -32,6 +34,19 @@ import Unison.Var qualified as Var
type SyntaxText = S.SyntaxText' Reference
type AccessorName = HQ.HashQualified Name
prettyDeclW ::
(Var v) =>
PrettyPrintEnvDecl ->
Reference ->
HQ.HashQualified Name ->
DD.Decl v a ->
Writer [AccessorName] (Pretty SyntaxText)
prettyDeclW ppe r hq d = case d of
Left e -> pure $ prettyEffectDecl ppe r hq e
Right dd -> prettyDataDecl ppe r hq dd
prettyDecl ::
(Var v) =>
PrettyPrintEnvDecl ->
@ -39,9 +54,7 @@ prettyDecl ::
HQ.HashQualified Name ->
DD.Decl v a ->
Pretty SyntaxText
prettyDecl ppe r hq d = case d of
Left e -> prettyEffectDecl ppe r hq e
Right dd -> prettyDataDecl ppe r hq dd
prettyDecl ppe r hq d = fst . runWriter $ prettyDeclW ppe r hq d
prettyEffectDecl ::
(Var v) =>
@ -97,24 +110,34 @@ prettyDataDecl ::
Reference ->
HQ.HashQualified Name ->
DataDeclaration v a ->
Pretty SyntaxText
Writer [AccessorName] (Pretty SyntaxText)
prettyDataDecl (PrettyPrintEnvDecl unsuffixifiedPPE suffixifiedPPE) r name dd =
(header <>) . P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | ")) $
constructor
<$> zip
(header <>) . P.sep (fmt S.DelimiterChar (" | " `P.orElse` "\n | "))
<$> constructor
`traverse` zip
[0 ..]
(DD.constructors' dd)
where
constructor (n, (_, _, Type.ForallsNamed' _ t)) = constructor' n t
constructor (n, (_, _, t)) = constructor' n t
constructor' n t = case Type.unArrows t of
Nothing -> prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)
Nothing -> pure $ prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)
Just ts -> case fieldNames unsuffixifiedPPE r name dd of
Nothing ->
P.group . P.hang' (prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)) " " $
P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts)))
Just fs ->
P.group $
pure
. P.group
. P.hang' (prettyPattern unsuffixifiedPPE CT.Data name (ConstructorReference r n)) " "
$ P.spaced (runPretty suffixifiedPPE (traverse (TypePrinter.prettyRaw Map.empty 10) (init ts)))
Just fs -> do
tell
[ case accessor of
Nothing -> HQ.NameOnly $ declName `Name.joinDot` fieldName
Just accessor -> HQ.NameOnly $ declName `Name.joinDot` fieldName `Name.joinDot` accessor
| HQ.NameOnly declName <- [name],
HQ.NameOnly fieldName <- fs,
accessor <- [Nothing, Just "set", Just "modify"]
]
pure . P.group $
fmt S.DelimiterChar "{ "
<> P.sep
(fmt S.DelimiterChar "," <> " " `P.orElse` "\n ")

View File

@ -1,3 +1,4 @@
{-# LANGUAGE OverloadedRecordDot #-}
{-# LANGUAGE RecordWildCards #-}
module Unison.UnisonFile.Type where

View File

@ -135,6 +135,24 @@ deletePrimaryD1 (f, x) s =
then Star3 (Set.delete f (fact s)) d1' (R.deleteDom f (d2 s)) (R.deleteDom f (d3 s))
else s {d1 = d1'}
-- Deletes tuples of the form (_, d1, _, _).
deleteD1 ::
(Ord fact, Ord d1, Ord d2, Ord d3) =>
d1 ->
Star3 fact d1 d2 d3 ->
Star3 fact d1 d2 d3
deleteD1 x s =
let d1' = R.deleteRan x (d1 s)
deadFacts = R.lookupRan x (d1 s)
newFacts = Set.difference (fact s) deadFacts
d2' = R.subtractDom deadFacts (d2 s)
d3' = R.subtractDom deadFacts (d3 s)
in Star3
newFacts
d1'
d2'
d3'
lookupD1 :: (Ord fact, Ord d1) => d1 -> Star3 fact d1 d2 d3 -> Set fact
lookupD1 x s = R.lookupRan x (d1 s)

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.35.1.
--
-- see: https://github.com/sol/hpack
@ -35,6 +35,7 @@ library
Unison.Codebase
Unison.Codebase.Branch
Unison.Codebase.Branch.BranchDiff
Unison.Codebase.Branch.DeclCoherencyCheck
Unison.Codebase.Branch.Merge
Unison.Codebase.Branch.Names
Unison.Codebase.Branch.Raw
@ -214,6 +215,7 @@ library
LambdaCase
MultiParamTypeClasses
NamedFieldPuns
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
@ -328,6 +330,7 @@ library
, unison-util-base32hex
, unison-util-bytes
, unison-util-cache
, unison-util-nametree
, unison-util-relation
, unison-util-rope
, unison-util-serialization
@ -338,6 +341,7 @@ library
, vector
, wai
, warp
, witch
, witherable
, x509
, x509-store
@ -401,6 +405,7 @@ test-suite parser-typechecker-tests
LambdaCase
MultiParamTypeClasses
NamedFieldPuns
OverloadedLabels
OverloadedStrings
PatternSynonyms
RankNTypes
@ -520,6 +525,7 @@ test-suite parser-typechecker-tests
, unison-util-base32hex
, unison-util-bytes
, unison-util-cache
, unison-util-nametree
, unison-util-relation
, unison-util-rope
, unison-util-serialization
@ -530,6 +536,7 @@ test-suite parser-typechecker-tests
, vector
, wai
, warp
, witch
, witherable
, x509
, x509-store

View File

@ -29,6 +29,7 @@ packages:
- lib/unison-util-bytes
- lib/unison-util-cache
- lib/unison-util-file-embed
- lib/unison-util-nametree
- lib/unison-util-relation
- lib/unison-util-rope
- parser-typechecker

View File

@ -61,6 +61,7 @@ dependencies:
- recover-rtti
- regex-tdfa
- semialign
- semigroups
- servant
- servant-client
- stm

View File

@ -153,6 +153,9 @@ data Env = Env
credentialManager :: CredentialManager,
-- | Generate a unique name.
generateUniqueName :: IO Parser.UniqueName,
-- | Are we currently running a transcript? Sometimes, it is convenient to know this fact, so we can print more
-- information to the terminal to be captured in transcript output.
isTranscript :: Bool,
-- | How to load source code.
loadSource :: Text -> IO LoadSourceResult,
-- | What to do with output for the user.

View File

@ -34,6 +34,7 @@ where
import Control.Lens ((^.))
import Control.Monad.Reader (ask)
import Data.Proxy
import Network.HTTP.Client qualified as Http.Client
import Network.URI (URI)
import Network.URI qualified as URI
import Servant.API ((:<|>) (..), (:>))
@ -255,7 +256,12 @@ servantClientToCli action = do
let clientEnv :: ClientEnv
clientEnv =
mkClientEnv httpManager hardCodedBaseUrl
(mkClientEnv httpManager hardCodedBaseUrl)
{ Servant.makeClientRequest = \url request ->
(Servant.defaultMakeClientRequest url request)
{ Http.Client.responseTimeout = Http.Client.responseTimeoutMicro (60 * 1000 * 1000 {- 60s -})
}
}
liftIO (runClientM action clientEnv)

View File

@ -84,6 +84,7 @@ import Unison.Codebase.Editor.HandleInput.ReleaseDraft (handleReleaseDraft)
import Unison.Codebase.Editor.HandleInput.TermResolution (resolveCon, resolveMainRef, resolveTermRef)
import Unison.Codebase.Editor.HandleInput.UI (openUI)
import Unison.Codebase.Editor.HandleInput.Update (doSlurpAdds, handleUpdate)
import Unison.Codebase.Editor.HandleInput.Update2 (handleUpdate2)
import Unison.Codebase.Editor.Input
import Unison.Codebase.Editor.Input qualified as Input
import Unison.Codebase.Editor.Output
@ -1111,6 +1112,7 @@ loop e = do
let sr = Slurp.slurpFile uf vars Slurp.AddOp currentNames
previewResponse sourceName sr uf
UpdateI optionalPatch requestedNames -> handleUpdate input optionalPatch requestedNames
Update2I -> handleUpdate2
PreviewUpdateI requestedNames -> do
(sourceName, _) <- Cli.expectLatestFile
uf <- Cli.expectLatestTypecheckedFile
@ -1533,7 +1535,8 @@ inputDescription input =
NoPatch -> pure ".nopatch"
DefaultPatch -> (" " <>) <$> ps' Cli.defaultPatchPath
UsePatch p0 -> (" " <>) <$> ps' p0
pure ("update" <> p)
pure ("update.old" <> p)
Update2I -> pure ("update")
PropagatePatchI p0 scope0 -> do
p <- ps' p0
scope <- p' scope0

View File

@ -0,0 +1,373 @@
{-# LANGUAGE OverloadedRecordDot #-}
module Unison.Codebase.Editor.HandleInput.Update2
( handleUpdate2,
)
where
import Control.Lens (over, (^.))
import Control.Monad.RWS (ask)
import Data.Foldable qualified as Foldable
import Data.List.NonEmpty qualified as NonEmpty
import Data.List.NonEmpty.Extra ((|>))
import Data.Map qualified as Map
import Data.Maybe (fromJust)
import Data.Set qualified as Set
import Data.Text qualified as Text
import U.Codebase.Reference (Reference, ReferenceType)
import U.Codebase.Reference qualified as Reference
import U.Codebase.Sqlite.Operations qualified as Ops
import Unison.Cli.Monad (Cli)
import Unison.Cli.Monad qualified as Cli
import Unison.Cli.MonadUtils qualified as Cli
import Unison.Cli.TypeCheck (computeTypecheckingEnvironment)
import Unison.Cli.UniqueTypeGuidLookup qualified as Cli
import Unison.Codebase qualified as Codebase
import Unison.Codebase.Branch qualified as Branch
import Unison.Codebase.Branch.Names qualified as Branch
import Unison.Codebase.Branch.Type (Branch0)
import Unison.Codebase.BranchUtil qualified as BranchUtil
import Unison.Codebase.Editor.Output qualified as Output
import Unison.Codebase.Path (Path)
import Unison.Codebase.Path qualified as Path
import Unison.Codebase.Type (Codebase)
import Unison.CommandLine.OutputMessages qualified as Output
import Unison.ConstructorReference (GConstructorReference (ConstructorReference))
import Unison.DataDeclaration (DataDeclaration, Decl)
import Unison.DataDeclaration qualified as Decl
import Unison.DataDeclaration.ConstructorId (ConstructorId)
import Unison.Debug qualified as Debug
import Unison.FileParsers qualified as FileParsers
import Unison.Hash (Hash)
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.Name.Forward (ForwardName (..))
import Unison.Name.Forward qualified as ForwardName
import Unison.NameSegment (NameSegment (NameSegment))
import Unison.Names (Names)
import Unison.Names qualified as Names
import Unison.NamesWithHistory qualified as Names
import Unison.NamesWithHistory qualified as NamesWithHistory
import Unison.Parser.Ann (Ann)
import Unison.Parser.Ann qualified as Ann
import Unison.Parsers qualified as Parsers
import Unison.Prelude
import Unison.PrettyPrintEnvDecl (PrettyPrintEnvDecl)
import Unison.PrettyPrintEnvDecl qualified as PPED
import Unison.PrettyPrintEnvDecl.Names qualified as PPE
import Unison.Reference qualified as Reference (fromId)
import Unison.Referent (Referent)
import Unison.Referent qualified as Referent
import Unison.Result qualified as Result
import Unison.Server.Backend qualified as Backend
import Unison.Sqlite (Transaction)
import Unison.Sqlite qualified as Sqlite
import Unison.Symbol (Symbol)
import Unison.Syntax.Name qualified as Name
import Unison.Syntax.Parser qualified as Parser
import Unison.Term (Term)
import Unison.Type (Type)
import Unison.UnisonFile qualified as UF
import Unison.UnisonFile.Names qualified as UF
import Unison.UnisonFile.Type (TypecheckedUnisonFile, UnisonFile)
import Unison.Util.Pretty (Pretty)
import Unison.Util.Pretty qualified as Pretty
import Unison.Util.Relation qualified as Relation
import Unison.Util.Set qualified as Set
import Unison.Var (Var)
data Defns terms types = Defns
{ terms :: !terms,
types :: !types
}
deriving stock (Generic, Show)
handleUpdate2 :: Cli ()
handleUpdate2 = do
Cli.Env {codebase} <- ask
-- - confirm all aliases updated together?
tuf <- Cli.expectLatestTypecheckedFile
-- - get add/updates from TUF
let termAndDeclNames :: Defns (Set Name) (Set Name) = getTermAndDeclNames tuf
currentBranch0 <- Cli.getCurrentBranch0
let namesIncludingLibdeps = Branch.toNames currentBranch0
let namesExcludingLibdeps = Branch.toNames (currentBranch0 & over Branch.children (Map.delete Name.libSegment))
let ctorNames = forwardCtorNames namesExcludingLibdeps
(pped, bigUf) <- Cli.runTransactionWithRollback \_abort -> do
dependents <-
Ops.dependentsWithinScope
(namespaceReferences namesExcludingLibdeps)
(getExistingReferencesNamed termAndDeclNames namesExcludingLibdeps)
-- - construct PPE for printing UF* for typechecking (whatever data structure we decide to print)
pped <- Codebase.hashLength <&> (`PPE.fromNamesDecl` (NamesWithHistory.fromCurrentNames namesIncludingLibdeps))
bigUf <- buildBigUnisonFile codebase tuf dependents namesExcludingLibdeps
let tufPped = PPE.fromNamesDecl 8 (Names.NamesWithHistory (UF.typecheckedToNames tuf) mempty)
pure (pped `PPED.addFallback` tufPped, bigUf)
-- - typecheck it
prettyParseTypecheck bigUf pped >>= \case
Left prettyUf -> do
Cli.Env {isTranscript} <- ask
maybePath <- if isTranscript then pure Nothing else Just . fst <$> Cli.expectLatestFile
Cli.respond (Output.DisplayDefinitionsString maybePath prettyUf)
Cli.respond Output.UpdateTypecheckingFailure
Right tuf -> do
Cli.respond Output.UpdateTypecheckingSuccess
saveTuf (findCtorNames namesExcludingLibdeps ctorNames Nothing) tuf
Cli.respond Output.Success
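-- An illustrative walk-through of the pipeline above (hypothetical names, mirroring the
-- scenario sketched at the bottom of this module): suppose the namespace holds
--
--   type Foo = Bar Nat
--   baz = 4
--   qux = baz + 1
--
-- and the scratch file redefines `baz = 5`. The scoped dependents query then returns `qux`,
-- `buildBigUnisonFile` appends `qux`'s current definition to the typechecked file, and the
-- combined file is pretty-printed, reparsed, and typechecked before anything is saved. If
-- that round trip fails, the combined file is written back to the scratch file (or just
-- displayed, when running a transcript) instead of being saved to the codebase.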
prettyParseTypecheck ::
UnisonFile Symbol Ann ->
PrettyPrintEnvDecl ->
Cli (Either (Pretty Pretty.ColorText) (TypecheckedUnisonFile Symbol Ann))
prettyParseTypecheck bigUf pped = do
typecheck <- mkTypecheckFnCli
let prettyUf = Output.prettyUnisonFile pped bigUf
let stringUf = Pretty.toPlain 80 prettyUf
rootBranch <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
let parseNames = Backend.getCurrentParseNames (Backend.Within (Path.unabsolute currentPath)) rootBranch
Cli.Env {generateUniqueName} <- ask
uniqueName <- liftIO generateUniqueName
let parsingEnv =
Parser.ParsingEnv
{ uniqueNames = uniqueName,
uniqueTypeGuid = Cli.loadUniqueTypeGuid currentPath,
names = parseNames
}
Debug.whenDebug Debug.Update do
liftIO do
putStrLn "--- Scratch ---"
putStrLn stringUf
Cli.runTransaction do
Parsers.parseFile "<update>" stringUf parsingEnv >>= \case
Left {} -> pure $ Left prettyUf
Right reparsedUf ->
typecheck reparsedUf <&> \case
Just reparsedTuf -> Right reparsedTuf
Nothing -> Left prettyUf
mkTypecheckFnCli :: Cli (UnisonFile Symbol Ann -> Transaction (Maybe (TypecheckedUnisonFile Symbol Ann)))
mkTypecheckFnCli = do
Cli.Env {codebase, generateUniqueName} <- ask
rootBranch <- Cli.getRootBranch
currentPath <- Cli.getCurrentPath
let parseNames = Backend.getCurrentParseNames (Backend.Within (Path.unabsolute currentPath)) rootBranch
pure (mkTypecheckFn codebase generateUniqueName currentPath parseNames)
mkTypecheckFn ::
Codebase.Codebase IO Symbol Ann ->
IO Parser.UniqueName ->
Path.Absolute ->
NamesWithHistory.NamesWithHistory ->
UnisonFile Symbol Ann ->
Transaction (Maybe (TypecheckedUnisonFile Symbol Ann))
mkTypecheckFn codebase generateUniqueName currentPath parseNames unisonFile = do
uniqueName <- Sqlite.unsafeIO generateUniqueName
let parsingEnv =
Parser.ParsingEnv
{ uniqueNames = uniqueName,
uniqueTypeGuid = Cli.loadUniqueTypeGuid currentPath,
names = parseNames
}
typecheckingEnv <-
computeTypecheckingEnvironment (FileParsers.ShouldUseTndr'Yes parsingEnv) codebase [] unisonFile
let Result.Result _notes maybeTypecheckedUnisonFile = FileParsers.synthesizeFile typecheckingEnv unisonFile
pure maybeTypecheckedUnisonFile
-- save definitions and namespace
saveTuf :: (Name -> [Name]) -> TypecheckedUnisonFile Symbol Ann -> Cli ()
saveTuf getConstructors tuf = do
Cli.Env {codebase} <- ask
currentPath <- Cli.getCurrentPath
Cli.runTransaction $ Codebase.addDefsToCodebase codebase tuf
Cli.stepAt "update" (Path.unabsolute currentPath, Branch.batchUpdates (declUpdates ++ termUpdates))
where
declUpdates :: [(Path, Branch0 m -> Branch0 m)]
declUpdates =
fold
[ foldMap makeDataDeclUpdates (Map.toList $ UF.dataDeclarationsId' tuf),
foldMap makeEffectDeclUpdates (Map.toList $ UF.effectDeclarationsId' tuf)
]
where
makeDataDeclUpdates (symbol, (typeRefId, dataDecl)) = makeDeclUpdates (symbol, (typeRefId, Right dataDecl))
makeEffectDeclUpdates (symbol, (typeRefId, effectDecl)) = makeDeclUpdates (symbol, (typeRefId, Left effectDecl))
makeDeclUpdates (symbol, (typeRefId, decl)) =
let deleteTypeAction = BranchUtil.makeAnnihilateTypeName split
-- some decls will be deleted; we want to delete their
-- constructors as well
deleteConstructorActions =
map
(BranchUtil.makeAnnihilateTermName . Path.splitFromName)
(getConstructors (Name.unsafeFromVar symbol))
split = splitVar symbol
insertTypeAction = BranchUtil.makeAddTypeName split (Reference.fromId typeRefId) Map.empty
insertTypeConstructorActions =
let referentIdsWithNames = zip (Decl.constructorVars (Decl.asDataDecl decl)) (Decl.declConstructorReferents typeRefId decl)
in map
( \(sym, rid) ->
let splitConName = splitVar sym
in BranchUtil.makeAddTermName splitConName (Reference.fromId <$> rid) Map.empty
)
referentIdsWithNames
deleteStuff = deleteTypeAction : deleteConstructorActions
addStuff = insertTypeAction : insertTypeConstructorActions
in deleteStuff ++ addStuff
termUpdates :: [(Path, Branch0 m -> Branch0 m)]
termUpdates =
tuf
& UF.hashTermsId
& Map.toList
& foldMap \(var, (_, ref, _, _, _)) ->
let split = splitVar var
in [ BranchUtil.makeAnnihilateTermName split,
BranchUtil.makeAddTermName split (Referent.fromTermReferenceId ref) Map.empty
]
splitVar :: Symbol -> Path.Split
splitVar = Path.splitFromName . Name.unsafeFromVar
-- | get references from `names` that have the same names as in `defns`
-- For constructors, we get the type reference.
getExistingReferencesNamed :: Defns (Set Name) (Set Name) -> Names -> Set Reference
getExistingReferencesNamed defns names = fromTerms <> fromTypes
where
fromTerms = foldMap (\n -> Set.map Referent.toReference $ Relation.lookupDom n $ Names.terms names) (defns ^. #terms)
fromTypes = foldMap (\n -> Relation.lookupDom n $ Names.types names) (defns ^. #types)
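-- Illustrative sketch (hypothetical names): if `defns` contains the term name `baz` and the
-- constructor name `Foo.Bar`, and `names` maps `baz` to a term reference and `Foo.Bar` to a
-- constructor referent, the result holds `baz`'s term reference plus the type reference
-- underlying `Foo.Bar` (that is what `Referent.toReference` yields for a constructor).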
buildBigUnisonFile :: Codebase IO Symbol Ann -> TypecheckedUnisonFile Symbol Ann -> Map Reference.Id ReferenceType -> Names -> Transaction (UnisonFile Symbol Ann)
buildBigUnisonFile c tuf dependents names =
-- for each dependent, add its definition with all its names to the UnisonFile
foldM addComponent (UF.discardTypes tuf) (Map.toList dependents')
where
dependents' :: Map Hash ReferenceType = Map.mapKeys (\(Reference.Id h _pos) -> h) dependents
addComponent :: UnisonFile Symbol Ann -> (Hash, ReferenceType) -> Transaction (UnisonFile Symbol Ann)
addComponent uf (h, rt) = case rt of
Reference.RtTerm -> addTermComponent h uf
Reference.RtType -> addDeclComponent h uf
ctorNames = forwardCtorNames names
addTermComponent :: Hash -> UnisonFile Symbol Ann -> Transaction (UnisonFile Symbol Ann)
addTermComponent h uf = do
termComponent <- Codebase.unsafeGetTermComponent c h
pure $ foldl' addTermElement uf (zip termComponent [0 ..])
where
addTermElement :: UnisonFile Symbol Ann -> ((Term Symbol Ann, Type Symbol Ann), Reference.Pos) -> UnisonFile Symbol Ann
addTermElement uf ((tm, _tp), i) = do
let r :: Referent = Referent.Ref $ Reference.Derived h i
termNames = Relation.lookupRan r names.terms
foldl' (addDefinition tm) uf termNames
addDefinition :: Term Symbol Ann -> UnisonFile Symbol Ann -> Name -> UnisonFile Symbol Ann
addDefinition tm uf (Name.toVar -> v) =
if Set.member v termNames
then uf
else uf {UF.terms = (v, Ann.External, tm) : uf.terms}
termNames = Set.fromList [v | (v, _, _) <- uf.terms]
-- given a dependent hash, include that component in the scratch file
-- todo: wundefined: cut off constructor name prefixes
addDeclComponent :: Hash -> UnisonFile Symbol Ann -> Transaction (UnisonFile Symbol Ann)
addDeclComponent h uf = do
declComponent <- fromJust <$> Codebase.getDeclComponent h
pure $ foldl' addDeclElement uf (zip declComponent [0 ..])
where
-- for each name a decl has, update its constructor names according to what exists in the namespace
addDeclElement :: UnisonFile Symbol Ann -> (Decl Symbol Ann, Reference.Pos) -> UnisonFile Symbol Ann
addDeclElement uf (decl, i) = do
let declNames = Relation.lookupRan (Reference.Derived h i) (names.types)
-- look up names for this decl's constructors based on the decl's name, and embed them in the decl definition.
foldl' (addRebuiltDefinition decl) uf declNames
where
-- skip any definitions that already have names; we don't want to overwrite what the user has supplied
addRebuiltDefinition decl uf name = case decl of
Left ed -> uf {UF.effectDeclarationsId = Map.insertWith (\_new old -> old) (Name.toVar name) (Reference.Id h i, Decl.EffectDeclaration $ overwriteConstructorNames name ed.toDataDecl) uf.effectDeclarationsId}
Right dd -> uf {UF.dataDeclarationsId = Map.insertWith (\_new old -> old) (Name.toVar name) (Reference.Id h i, overwriteConstructorNames name dd) uf.dataDeclarationsId}
overwriteConstructorNames :: Name -> DataDeclaration Symbol Ann -> DataDeclaration Symbol Ann
overwriteConstructorNames name dd =
let constructorNames :: [Symbol]
constructorNames =
Name.toVar . fromJust . Name.stripNamePrefix name
<$> findCtorNames names ctorNames (Just $ Decl.constructorCount dd) name
swapConstructorNames oldCtors =
let (annotations, _vars, types) = unzip3 oldCtors
in zip3 annotations constructorNames types
in over Decl.constructors_ swapConstructorNames dd
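-- Illustrative sketch (hypothetical names): if the namespace calls this decl `Foo` and its
-- only constructor `Foo.Bar`, the rebuilt decl embeds the constructor with the `Foo.` prefix
-- stripped, so it prints in the scratch file as
--
--   type Foo = Bar Nat
--
-- The `fromJust` relies on every name returned by `findCtorNames` living under the decl's
-- own name, so that `stripNamePrefix` succeeds.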
-- | O(r + c * d) touches all the referents (r), and all the NameSegments (d) of all of the Con referents (c)
forwardCtorNames :: Names -> Map ForwardName (Referent, Name)
forwardCtorNames names =
Map.fromList $
[ (ForwardName.fromName name, (r, name))
| (r@Referent.Con {}, rNames) <- Map.toList $ Relation.range names.terms,
name <- Foldable.toList rNames
]
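-- A note on why the keys are ForwardNames rather than Names (which keep their segments
-- reversed): in forward segment order, every constructor name under a given decl name sits
-- in one contiguous key range (hypothetically, `Foo.Bar` and `Foo.Baz` for a decl `Foo`),
-- which lets `findCtorNames` below slice that range out with two `Map.split` calls instead
-- of scanning the whole map.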
-- | given a decl name, find names for all of its constructors, in order.
findCtorNames :: Names -> Map ForwardName (Referent, Name) -> Maybe Int -> Name -> [Name]
findCtorNames names forwardCtorNames ctorCount n =
let declRef = Set.findMin $ Relation.lookupDom n names.types
f = ForwardName.fromName n
(_, centerRight) = Map.split f forwardCtorNames
(center, _) = Map.split (incrementLastSegmentChar f) centerRight
insertShortest :: Map ConstructorId Name -> (Referent, Name) -> Map ConstructorId Name
insertShortest m (Referent.Con (ConstructorReference r cid) _ct, newName) | r == declRef =
case Map.lookup cid m of
Just existingName
| length (Name.segments existingName) > length (Name.segments newName) ->
Map.insert cid newName m
Just {} -> m
Nothing -> Map.insert cid newName m
insertShortest m _ = m
m = foldl' insertShortest mempty (Foldable.toList center)
ctorCountGuess = fromMaybe (Map.size m) ctorCount
in if Map.size m == ctorCountGuess && all (isJust . flip Map.lookup m) [0 .. fromIntegral ctorCountGuess - 1]
then Map.elems m
else error $ "incomplete constructor mapping for " ++ show n ++ ": " ++ show (Map.keys m) ++ " out of " ++ show ctorCountGuess
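-- Worked example (hypothetical names, not a checked doctest): for a decl named `Foo` whose
-- constructors are named `Foo.Bar` (constructor id 0) and `Foo.Baz` (constructor id 1),
--
--   f                          = ForwardName ("Foo" :| [])
--   incrementLastSegmentChar f = ForwardName ("Fop" :| [])
--
-- so the two `Map.split` calls isolate every key strictly between "Foo" and "Fop";
-- `insertShortest` keeps the fewest-segment name per constructor id, and the result is
-- [Foo.Bar, Foo.Baz], ordered by constructor id.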
-- >>> incrementLastSegmentChar $ ForwardName.fromName $ Name.unsafeFromText "foo.bar.quux"
-- ForwardName {toList = "foo" :| ["bar","quuy"]}
incrementLastSegmentChar :: ForwardName -> ForwardName
incrementLastSegmentChar (ForwardName segments) =
let (initSegments, lastSegment) = (NonEmpty.init segments, NonEmpty.last segments)
incrementedLastSegment = incrementLastCharInSegment lastSegment
in ForwardName $ maybe (NonEmpty.singleton incrementedLastSegment) (|> incrementedLastSegment) (NonEmpty.nonEmpty initSegments)
where
incrementLastCharInSegment :: NameSegment -> NameSegment
incrementLastCharInSegment (NameSegment text) =
let incrementedText =
if Text.null text
then text
else Text.init text `Text.append` Text.singleton (succ $ Text.last text)
in NameSegment incrementedText
namespaceReferences :: Names -> Set Reference.Id
namespaceReferences names = fromTerms <> fromTypes
where
fromTerms = Set.mapMaybe Referent.toReferenceId (Relation.ran $ Names.terms names)
fromTypes = Set.mapMaybe Reference.toId (Relation.ran $ Names.types names)
getTermAndDeclNames :: Var v => TypecheckedUnisonFile v a -> Defns (Set Name) (Set Name)
getTermAndDeclNames tuf = Defns (terms <> effectCtors <> dataCtors) (effects <> datas)
where
terms = keysToNames $ UF.hashTermsId tuf
effects = keysToNames $ UF.effectDeclarationsId' tuf
datas = keysToNames $ UF.dataDeclarationsId' tuf
effectCtors = foldMap ctorsToNames $ fmap (Decl.toDataDecl . snd) $ UF.effectDeclarationsId' tuf
dataCtors = foldMap ctorsToNames $ fmap snd $ UF.dataDeclarationsId' tuf
keysToNames = Set.map Name.unsafeFromVar . Map.keysSet
ctorsToNames = Set.fromList . map Name.unsafeFromVar . Decl.constructorVars
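-- Illustrative example (hypothetical scratch file, not a checked doctest): for a typechecked
-- file containing
--
--   type Foo = Bar Nat
--   baz = 5
--
-- this returns roughly Defns {terms = {baz, Foo.Bar}, types = {Foo}}, assuming constructor
-- variables carry their type-name prefix as they do elsewhere in this module. Constructor
-- names are folded into `terms` so that `getExistingReferencesNamed` later resolves the type
-- references behind any constructors the file touches.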
-- namespace:
-- type Foo = Bar Nat
-- baz = 4
-- qux = baz + 1
-- unison file:
-- Foo.Bar = 3
-- baz = 5

View File

@ -150,6 +150,7 @@ data Input
| AddI (Set Name)
| PreviewAddI (Set Name)
| UpdateI OptionalPatch (Set Name)
| Update2I
| PreviewUpdateI (Set Name)
| TodoI (Maybe PatchPath) Path'
| PropagatePatchI PatchPath Path'

View File

@ -251,6 +251,8 @@ data Output
| DisplayRendered (Maybe FilePath) (P.Pretty P.ColorText)
| -- "display" definitions, possibly to a FilePath on disk (e.g. editing)
DisplayDefinitions DisplayDefinitionsOutput
| -- Like `DisplayDefinitions`, but the definitions are already rendered. `Nothing` means put to the terminal.
DisplayDefinitionsString !(Maybe FilePath) !(P.Pretty P.ColorText) {- rendered definitions -}
| TestIncrementalOutputStart PPE.PrettyPrintEnv (Int, Int) Reference (Term Symbol Ann)
| TestIncrementalOutputEnd PPE.PrettyPrintEnv (Int, Int) Reference (Term Symbol Ann)
| TestResults
@ -383,6 +385,8 @@ data Output
| FailedToFetchLatestReleaseOfBase
| HappyCoding
| ProjectHasNoReleases ProjectName
| UpdateTypecheckingFailure
| UpdateTypecheckingSuccess
-- | What did we create a project branch from?
--
@ -444,6 +448,8 @@ type SourceFileContents = Text
isFailure :: Output -> Bool
isFailure o = case o of
UpdateTypecheckingFailure{} -> True
UpdateTypecheckingSuccess{} -> False
AmbiguousCloneLocal {} -> True
AmbiguousCloneRemote {} -> True
ClonedProjectBranch {} -> False
@ -509,6 +515,7 @@ isFailure o = case o of
Evaluated {} -> False
Typechecked {} -> False
DisplayDefinitions DisplayDefinitionsOutput {terms, types} -> null terms && null types
DisplayDefinitionsString {} -> False -- somewhat arbitrary :shrug:
DisplayRendered {} -> False
TestIncrementalOutputStart {} -> False
TestIncrementalOutputEnd {} -> False

View File

@ -158,23 +158,16 @@ pretty isPast ppe sr =
Just (UpdateAliases oldNames newNames) ->
let oldMessage =
let (shown, rest) = splitAt aliasesToShow $ toList oldNames
sz = length oldNames
in P.indentN
2
( P.wrap $
P.hiBlack
( "(The old definition "
<> (if isPast then "was" else "is")
<> " also named "
)
<> oxfordAliases (P.text . Name.toText <$> shown) (length rest) (P.hiBlack ".")
<> P.hiBlack
( case (sz, isPast) of
(1, True) -> "I updated this name too.)"
(1, False) -> "I'll update this name too.)"
(_, True) -> "I updated these names too.)"
(_, False) -> "I'll update these names too.)"
P.parenthesize $
P.hiBlack
( "The old definition "
<> (if isPast then "was" else "is")
<> " also named "
)
<> oxfordAliases (P.text . Name.toText <$> shown) (length rest) (P.hiBlack ".")
)
newMessage =
let (shown, rest) = splitAt aliasesToShow $ toList newNames

View File

@ -201,9 +201,9 @@ withTranscriptRunner ::
(TranscriptRunner -> m r) ->
m r
withTranscriptRunner verbosity ucmVersion configFile action = do
withRuntimes \runtime sbRuntime -> withConfig $ \config -> do
withRuntimes \runtime sbRuntime -> withConfig \config -> do
action \transcriptName transcriptSrc (codebaseDir, codebase) -> do
Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) Server.defaultCodebaseServerOpts runtime codebase $ \baseUrl -> do
Server.startServer (Backend.BackendEnv {Backend.useNamesIndex = False}) Server.defaultCodebaseServerOpts runtime codebase \baseUrl -> do
let parsed = parse transcriptName transcriptSrc
result <- for parsed \stanzas -> do
liftIO $ run verbosity codebaseDir stanzas codebase runtime sbRuntime config ucmVersion (tShow baseUrl)
@ -496,6 +496,7 @@ run verbosity dir stanzas codebase runtime sbRuntime config ucmVersion baseURL =
generateUniqueName = do
i <- atomicModifyIORef' seedRef \i -> let !i' = i + 1 in (i', i)
pure (Parser.uniqueBase32Namegen (Random.drgNewSeed (Random.seedFromInteger (fromIntegral i)))),
isTranscript = True, -- we are running a transcript
loadSource = loadPreviousUnisonBlock,
notify = print,
notifyNumbered = printNumbered,

View File

@ -221,17 +221,36 @@ previewAdd =
)
$ \ws -> pure $ Input.PreviewAddI (Set.fromList $ map Name.unsafeFromString ws)
updateNoPatch :: InputPattern
updateNoPatch =
update :: InputPattern
update =
InputPattern
"update.nopatch"
["un"]
{ patternName = "update",
aliases = [],
visibility = I.Visible,
argTypes = [],
help =
P.wrap $
"Adds everything in the most recently typechecked file to the namespace,"
<> "replacing existing definitions having the same name, and attempts to update all the existing dependents accordingly. If the process"
<> "can't be completed automatically, the dependents will be added back to the scratch file"
<> "for your review.",
parse =
maybeToEither (I.help update) . \case
[] -> Just Input.Update2I
_ -> Nothing
}
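-- Illustrative parse behaviour (sketch, not a doctest): the new `update` takes no arguments,
-- so an empty argument list maps to Input.Update2I, while anything like `update foo` falls
-- through to Nothing and is reported with this pattern's help text via `maybeToEither`.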
updateOldNoPatch :: InputPattern
updateOldNoPatch =
InputPattern
"update.old.nopatch"
[]
I.Visible
[(ZeroPlus, noCompletionsArg)]
( P.wrap
( makeExample' updateNoPatch
( makeExample' updateOldNoPatch
<> "works like"
<> P.group (makeExample' update <> ",")
<> P.group (makeExample' updateOld <> ",")
<> "except it doesn't add a patch entry for any updates. "
<> "Use this when you want to make changes to definitions without "
<> "pushing those changes to dependents beyond your codebase. "
@ -239,10 +258,10 @@ updateNoPatch =
<> "just added."
)
<> P.wrapColumn2
[ ( makeExample' updateNoPatch,
[ ( makeExample' updateOldNoPatch,
"updates all definitions in the .u file."
),
( makeExample updateNoPatch ["foo", "bar"],
( makeExample updateOldNoPatch ["foo", "bar"],
"updates `foo`, `bar`, and their dependents from the .u file."
)
]
@ -255,15 +274,15 @@ updateNoPatch =
(Set.fromList $ map Name.unsafeFromString ws)
)
update :: InputPattern
update =
updateOld :: InputPattern
updateOld =
InputPattern
"update"
"update.old"
[]
I.Visible
[(Optional, patchArg), (ZeroPlus, noCompletionsArg)]
( P.wrap
( makeExample' update
( makeExample' updateOld
<> "works like"
<> P.group (makeExample' add <> ",")
<> "except that if a definition in the file has the same name as an"
@ -273,45 +292,44 @@ update =
<> "optional patch."
)
<> P.wrapColumn2
[ ( makeExample' update,
[ ( makeExample' updateOld,
"adds all definitions in the .u file, noting replacements in the"
<> "default patch for the current namespace."
),
( makeExample update ["<patch>"],
( makeExample updateOld ["<patch>"],
"adds all definitions in the .u file, noting replacements in the"
<> "specified patch."
),
( makeExample update ["<patch>", "foo", "bar"],
( makeExample updateOld ["<patch>", "foo", "bar"],
"adds `foo`, `bar`, and their dependents from the .u file, noting"
<> "any replacements into the specified patch."
)
]
)
( \case
patchStr : ws -> do
patch <-
first fromString $
Path.parseSplit' Path.definitionNameSegment patchStr
pure $
Input.UpdateI
(Input.UsePatch patch)
(Set.fromList $ map Name.unsafeFromString ws)
[] -> Right $ Input.UpdateI Input.DefaultPatch mempty
)
\case
patchStr : ws -> do
patch <-
first fromString $
Path.parseSplit' Path.definitionNameSegment patchStr
pure $
Input.UpdateI
(Input.UsePatch patch)
(Set.fromList $ map Name.unsafeFromString ws)
[] -> Right $ Input.UpdateI Input.DefaultPatch mempty
previewUpdate :: InputPattern
previewUpdate =
InputPattern
"update.preview"
"update.old.preview"
[]
I.Visible
[(ZeroPlus, noCompletionsArg)]
( "`update.preview` previews updates to the codebase from the most "
( "`update.old.preview` previews updates to the codebase from the most "
<> "recently typechecked file. This command only displays cached "
<> "typechecking results. Use `load` to reparse & typecheck the file if "
<> "the context has changed."
)
$ \ws -> pure $ Input.PreviewUpdateI (Set.fromList $ map Name.unsafeFromString ws)
\ws -> pure $ Input.PreviewUpdateI (Set.fromList $ map Name.unsafeFromString ws)
patch :: InputPattern
patch =
@ -339,19 +357,18 @@ patch =
]
]
)
( \case
patchStr : ws -> first fromString $ do
patch <- Path.parseSplit' Path.definitionNameSegment patchStr
branch <- case ws of
[pathStr] -> Path.parsePath' pathStr
_ -> pure Path.relativeEmpty'
pure $ Input.PropagatePatchI patch branch
[] ->
Left $
warn $
makeExample' patch
<> "takes a patch and an optional namespace."
)
\case
patchStr : ws -> first fromString $ do
patch <- Path.parseSplit' Path.definitionNameSegment patchStr
branch <- case ws of
[pathStr] -> Path.parsePath' pathStr
_ -> pure Path.relativeEmpty'
pure $ Input.PropagatePatchI patch branch
[] ->
Left $
warn $
makeExample' patch
<> "takes a patch and an optional namespace."
view :: InputPattern
view =
@ -403,8 +420,7 @@ display =
"`display` without arguments invokes a search to select a definition to display, which requires that `fzf` can be found within your PATH."
]
)
( \xs -> Input.DisplayI Input.ConsoleLocation <$> (traverse parseHashQualifiedName xs)
)
\xs -> Input.DisplayI Input.ConsoleLocation <$> (traverse parseHashQualifiedName xs)
displayTo :: InputPattern
displayTo =
@ -417,11 +433,10 @@ displayTo =
makeExample displayTo ["<filename>", "foo"]
<> "prints a rendered version of the term `foo` to the given file."
)
( \case
(file : xs) ->
Input.DisplayI (Input.FileLocation file) <$> traverse parseHashQualifiedName xs
_ -> Left (I.help displayTo)
)
\case
file : xs ->
Input.DisplayI (Input.FileLocation file) <$> traverse parseHashQualifiedName xs
_ -> Left (I.help displayTo)
docs :: InputPattern
docs =
@ -1210,11 +1225,11 @@ pullImpl name aliases verbosity pullMode addendum = do
argTypes = [(Optional, remoteNamespaceArg), (Optional, namespaceArg)],
help =
P.lines
[ P.wrap
[ P.wrap $
"The"
<> makeExample' self
<> "command merges a remote namespace into a local namespace"
<> addendum,
<> makeExample' self
<> "command merges a remote namespace into a local namespace"
<> addendum,
"",
P.wrapColumn2
[ ( makeExample self ["@unison/base/main"],
@ -2899,7 +2914,8 @@ validInputs =
up,
update,
updateBuiltins,
updateNoPatch,
updateOld,
updateOldNoPatch,
view,
viewGlobal,
viewPatch,

View File

@ -223,6 +223,7 @@ main dir welcome initialPath config initialInputs runtime sbRuntime codebase ser
codebase,
config,
credentialManager,
isTranscript = False, -- we are not running a transcript
loadSource = loadSourceFile,
generateUniqueName = Parser.uniqueBase32Namegen <$> Random.getSystemDRG,
notify,

View File

@ -5,10 +5,11 @@
module Unison.CommandLine.OutputMessages where
import Control.Exception (mask, onException)
import Control.Lens hiding (at)
import Control.Monad.State
import Control.Monad.State.Strict qualified as State
import Control.Monad.Trans.Writer.CPS
import Control.Monad.Writer (Writer, mapWriter, runWriter, tell)
import Data.ByteString.Lazy qualified as LazyByteString
import Data.Foldable qualified as Foldable
import Data.List (stripPrefix)
@ -21,6 +22,7 @@ import Data.Set qualified as Set
import Data.Set.NonEmpty (NESet)
import Data.Text qualified as Text
import Data.Text.Encoding qualified as Text
import Data.Text.IO qualified as Text
import Data.Time (UTCTime, getCurrentTime)
import Data.Tuple (swap)
import Data.Tuple.Extra (dupe)
@ -29,7 +31,8 @@ import Network.HTTP.Types qualified as Http
import Servant.Client qualified as Servant
import System.Console.ANSI qualified as ANSI
import System.Console.Haskeline.Completion qualified as Completion
import System.Directory (canonicalizePath, doesFileExist, getHomeDirectory)
import System.Directory (canonicalizePath, doesFileExist, getHomeDirectory, getTemporaryDirectory, removeFile, renameFile)
import System.IO qualified as IO
import U.Codebase.Branch (NamespaceStats (..))
import U.Codebase.Branch.Diff (NameChanges (..))
import U.Codebase.HashTags (CausalHash (..))
@ -83,6 +86,7 @@ import Unison.ConstructorReference (GConstructorReference (..))
import Unison.ConstructorType qualified as CT
import Unison.Core.Project (ProjectBranchName (UnsafeProjectBranchName))
import Unison.DataDeclaration qualified as DD
import Unison.Debug qualified as Debug
import Unison.Hash qualified as Hash
import Unison.Hash32 (Hash32)
import Unison.HashQualified qualified as HQ
@ -126,6 +130,7 @@ import Unison.Share.Sync.Types (CodeserverTransportError (..))
import Unison.ShortHash qualified as ShortHash
import Unison.Symbol (Symbol)
import Unison.Sync.Types qualified as Share
import Unison.Syntax.DeclPrinter (AccessorName)
import Unison.Syntax.DeclPrinter qualified as DeclPrinter
import Unison.Syntax.HashQualified qualified as HQ (toString, toText, unsafeFromVar)
import Unison.Syntax.Name qualified as Name (toString, toText)
@ -759,6 +764,7 @@ notifyUser dir = \case
<> "to push the changes."
]
DisplayDefinitions output -> displayDefinitions output
DisplayDefinitionsString isTranscript definitions -> displayDefinitionsString isTranscript definitions
OutputRewrittenFile ppe dest msg uf -> displayOutputRewrittenFile ppe dest msg uf
DisplayRendered outputLoc pp ->
displayRendered outputLoc pp
@ -2168,6 +2174,8 @@ notifyUser dir = \case
<> P.wrap "🎉 🥳 Happy coding!"
ProjectHasNoReleases projectName ->
pure . P.wrap $ prettyProjectName projectName <> "has no releases."
UpdateTypecheckingFailure -> pure "Typechecking failed when propagating the update to all the dependents."
UpdateTypecheckingSuccess -> pure "I propagated the update and am now saving the results."
where
_nameChange _cmd _pastTenseCmd _oldName _newName _r = error "todo"
@ -2479,20 +2487,31 @@ foldLine = "\n\n---- Anything below this line is ignored by Unison.\n\n"
prettyUnisonFile :: forall v a. (Var v, Ord a) => PPED.PrettyPrintEnvDecl -> UF.UnisonFile v a -> Pretty
prettyUnisonFile ppe uf@(UF.UnisonFileId datas effects terms watches) =
P.sep "\n\n" (map snd . sortOn fst $ pretty <$> things)
P.sep "\n\n" (map snd . sortOn fst $ prettyEffects <> prettyDatas <> catMaybes prettyTerms <> prettyWatches)
where
things =
map Left (map Left (Map.toList effects) <> map Right (Map.toList datas))
<> map (Right . (Nothing,)) terms
<> (Map.toList watches >>= \(wk, tms) -> map (\a -> Right (Just wk, a)) tms)
pretty (Left (Left (n, (r, et)))) =
prettyEffects = map prettyEffectDecl (Map.toList effects)
(prettyDatas, accessorNames) = runWriter $ traverse prettyDataDecl (Map.toList datas)
prettyTerms = map (prettyTerm accessorNames) terms
prettyWatches = Map.toList watches >>= \(wk, tms) -> map (prettyWatch . (wk,)) tms
prettyEffectDecl :: (v, (Reference.Id, DD.EffectDeclaration v a)) -> (a, Pretty)
prettyEffectDecl (n, (r, et)) =
(DD.annotation . DD.toDataDecl $ et, st $ DeclPrinter.prettyDecl ppe' (rd r) (hqv n) (Left et))
pretty (Left (Right (n, (r, dt)))) =
(DD.annotation dt, st $ DeclPrinter.prettyDecl ppe' (rd r) (hqv n) (Right dt))
pretty (Right (Nothing, (n, a, tm))) =
(a, pb (hqv n) tm)
pretty (Right (Just wk, (n, a, tm))) =
(a, go wk n tm)
prettyDataDecl :: (v, (Reference.Id, DD.DataDeclaration v a)) -> Writer (Set AccessorName) (a, Pretty)
prettyDataDecl (n, (r, dt)) =
(DD.annotation dt,) . st <$> (mapWriter (second Set.fromList) $ DeclPrinter.prettyDeclW ppe' (rd r) (hqv n) (Right dt))
prettyTerm :: Set (AccessorName) -> (v, a, Term v a) -> Maybe (a, Pretty)
prettyTerm skip (n, a, tm) =
if traceMember isMember then Nothing else Just (a, pb hq tm)
where
traceMember =
if Debug.shouldDebug Debug.Update
then trace (show hq ++ " -> " ++ if isMember then "skip" else "print")
else id
isMember = Set.member hq skip
hq = hqv n
prettyWatch :: (String, (v, a, Term v a)) -> (a, Pretty)
prettyWatch (wk, (n, a, tm)) = (a, go wk n tm)
where
go wk v tm = case wk of
WK.RegularWatch
@ -2501,7 +2520,7 @@ prettyUnisonFile ppe uf@(UF.UnisonFileId datas effects terms watches) =
WK.RegularWatch -> "> " <> pb (hqv v) tm
w -> P.string w <> "> " <> pb (hqv v) tm
st = P.syntaxToColor
sppe = PPED.suffixifiedPPE ppe
sppe = PPED.suffixifiedPPE ppe'
pb v tm = st $ TermPrinter.prettyBinding sppe v tm
ppe' = PPED.PrettyPrintEnvDecl dppe dppe `PPED.addFallback` ppe
dppe = PPE.fromNames 8 (Names.NamesWithHistory (UF.toNames uf) mempty)
@ -2678,6 +2697,33 @@ displayDefinitions DisplayDefinitionsOutput {isTest, outputFile, prettyPrintEnv
<> P.newline
<> tip "You might need to repair the codebase manually."
displayDefinitionsString :: Maybe FilePath -> Pretty -> IO Pretty
displayDefinitionsString maybePath definitions =
case maybePath of
Nothing -> pure definitions
Just path -> do
let withTempFile tmpFilePath tmpHandle = do
Text.hPutStrLn tmpHandle (Text.pack (P.toPlain 80 definitions))
Text.hPutStrLn tmpHandle "\n---\n"
IO.withFile path IO.ReadMode \currentScratchFile -> do
let copyLoop = do
chunk <- Text.hGetChunk currentScratchFile
case Text.length chunk == 0 of
True -> pure ()
False -> do
Text.hPutStr tmpHandle chunk
copyLoop
copyLoop
IO.hClose tmpHandle
renameFile tmpFilePath path
tmpDir <- getTemporaryDirectory
mask \unmask -> do
(tmpFilePath, tmpHandle) <- IO.openTempFile tmpDir "unison-scratch"
unmask (withTempFile tmpFilePath tmpHandle) `onException` do
IO.hClose tmpHandle
removeFile tmpFilePath
pure mempty
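-- Sketch of the resulting file layout (hypothetical contents): if `path` previously held
-- `oldDef = 1` and `definitions` renders as `newDef = 2`, the temp file is written as
--
--   newDef = 2
--
--   ---
--
--   oldDef = 1
--
-- and then renamed over `path`; if anything throws before the rename, the temp file is
-- closed and removed by the `onException` handler above.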
displayTestResults ::
Bool -> -- whether to show the tip
PPE.PrettyPrintEnv ->
@ -2825,7 +2871,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatTermEdits ::
(Reference.TermReference, Set TermEdit.TermEdit) ->
Numbered Pretty
@ -2840,7 +2886,7 @@ renderEditConflicts ppe Patch {..} = do
then "deprecated and also replaced with"
else "replaced with"
)
`P.hang` P.lines replacements
`P.hang` P.lines replacements
formatConflict ::
Either
(Reference, Set TypeEdit.TypeEdit)

View File

@ -37,12 +37,12 @@ testBuilder ::
Bool -> ((FilePath, Text) -> IO ()) -> FilePath -> [String] -> String -> Test ()
testBuilder expectFailure recordFailure dir prelude transcript = scope transcript $ do
outputs <- io . withTemporaryUcmCodebase SC.init Verbosity.Silent "transcript" SC.DoLock $ \(codebasePath, codebase) -> do
withTranscriptRunner Verbosity.Silent "TODO: pass version here" Nothing $ \runTranscript -> do
for files $ \filePath -> do
withTranscriptRunner Verbosity.Silent "TODO: pass version here" Nothing \runTranscript -> do
for files \filePath -> do
transcriptSrc <- readUtf8 filePath
out <- silence $ runTranscript filePath transcriptSrc (codebasePath, codebase)
pure (filePath, out)
for_ outputs $ \case
for_ outputs \case
(filePath, Left err) -> do
let outputFile = outputFileForTranscript filePath
case err of

View File

@ -66,6 +66,7 @@ library
Unison.Codebase.Editor.HandleInput.TermResolution
Unison.Codebase.Editor.HandleInput.UI
Unison.Codebase.Editor.HandleInput.Update
Unison.Codebase.Editor.HandleInput.Update2
Unison.Codebase.Editor.Input
Unison.Codebase.Editor.Output
Unison.Codebase.Editor.Output.BranchDiff
@ -202,6 +203,7 @@ library
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, stm
@ -338,6 +340,7 @@ executable cli-integration-tests
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
@ -468,6 +471,7 @@ executable transcripts
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
@ -605,6 +609,7 @@ executable unison
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet
@ -747,6 +752,7 @@ test-suite cli-tests
, recover-rtti
, regex-tdfa
, semialign
, semigroups
, servant
, servant-client
, shellmet

View File

@ -9,6 +9,7 @@ module Unison.DataDeclaration
allVars,
asDataDecl,
bindReferences,
constructorCount,
constructorNames,
constructors,
constructorType,
@ -32,6 +33,7 @@ module Unison.DataDeclaration
updateDependencies,
constructors_,
asDataDecl_,
declAsDataDecl_,
)
where
@ -111,6 +113,9 @@ data DataDeclaration v a = DataDeclaration
}
deriving (Eq, Ord, Show, Functor)
constructorCount :: DataDeclaration v a -> Int
constructorCount DataDeclaration {constructors'} = length constructors'
constructors_ :: Lens' (DataDeclaration v a) [(a, v, Type v a)]
constructors_ = lens getter setter
where
@ -122,6 +127,13 @@ newtype EffectDeclaration v a = EffectDeclaration
}
deriving (Eq, Ord, Show, Functor)
declAsDataDecl_ :: Lens' (Decl v a) (DataDeclaration v a)
declAsDataDecl_ = lens get set
where
get (Left ed) = toDataDecl ed
get (Right dd) = dd
set decl dd = bimap (EffectDeclaration . const dd) (const dd) decl
asDataDecl_ :: Iso' (EffectDeclaration v a) (DataDeclaration v a)
asDataDecl_ = iso toDataDecl EffectDeclaration
@ -242,7 +254,7 @@ constructorNames dd = Var.name <$> constructorVars dd
-- This function is unsound, since the `rid` and the `decl` have to match.
-- It should probably be hashed directly from the Decl, once we have a
-- reliable way of doing that. —AI
declConstructorReferents :: Reference.Id -> Decl v a -> [Referent.Id]
declConstructorReferents :: Reference.TypeReferenceId -> Decl v a -> [Referent.Id]
declConstructorReferents rid decl =
[Referent'.Con' (ConstructorReference rid i) ct | i <- constructorIds (asDataDecl decl)]
where

View File

@ -0,0 +1,19 @@
module Unison.Name.Forward where
import Data.List qualified as List
import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty)
import Unison.Name qualified as Name
import Unison.Name.Internal (Name)
import Unison.NameSegment (NameSegment)
newtype ForwardName = ForwardName { toList :: NonEmpty NameSegment } deriving (Eq, Ord, Show)
-- | O(d)
fromName :: Name -> ForwardName
fromName n = ForwardName $ Name.segments n
stripNamePrefix :: ForwardName -> ForwardName -> Maybe ForwardName
stripNamePrefix (ForwardName (p :| ps)) (ForwardName (n :| ns)) =
if p /= n
then Nothing
else ForwardName <$> maybe Nothing nonEmpty (List.stripPrefix ps ns)
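-- Illustrative behaviour (hypothetical, not a checked doctest):
--
--   stripNamePrefix (fromName (Name.unsafeFromText "base")) (fromName (Name.unsafeFromText "base.List.map"))
--     == Just (fromName (Name.unsafeFromText "List.map"))
--   stripNamePrefix (fromName (Name.unsafeFromText "data")) (fromName (Name.unsafeFromText "base.List.map"))
--     == Nothing
--
-- Stripping a prefix equal to the whole name also yields Nothing, because the remaining
-- segment list must be non-empty.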

View File

@ -1,6 +1,6 @@
cabal-version: 1.12
-- This file has been generated from package.yaml by hpack version 0.35.2.
-- This file has been generated from package.yaml by hpack version 0.35.1.
--
-- see: https://github.com/sol/hpack
@ -37,6 +37,7 @@ library
Unison.Kind
Unison.LabeledDependency
Unison.Name
Unison.Name.Forward
Unison.Name.Internal
Unison.Names
Unison.Names.ResolutionResult

View File

@ -13,6 +13,6 @@ foo = 1
```
```ucm
.> update
.> update.old
.> links foo
```

View File

@ -46,7 +46,7 @@ foo = 1
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -36,8 +36,8 @@ thing _ = send 1
These should fail with a term/ctor conflict since we exclude the ability from the update.
```ucm:error
.ns> update patch Channels.send
.ns> update patch thing
.ns> update.old patch Channels.send
.ns> update.old patch thing
```
If, however, `Channels.send` and `thing` _depend_ on `Channels`, updating them should succeed since it pulls in the ability as a dependency.
@ -56,14 +56,14 @@ thing _ = send 1
These updates should succeed since `Channels` is a dependency.
```ucm
.ns> update.preview patch Channels.send
.ns> update.preview patch thing
.ns> update.old.preview patch Channels.send
.ns> update.old.preview patch thing
```
We should also be able to successfully update the whole thing.
```ucm
.ns> update
.ns> update.old
```
# Constructor-term conflict

View File

@ -66,7 +66,7 @@ thing _ = send 1
These should fail with a term/ctor conflict since we exclude the ability from the update.
```ucm
.ns> update patch Channels.send
.ns> update.old patch Channels.send
x These definitions failed:
@ -75,7 +75,7 @@ These should fail with a term/ctor conflict since we exclude the ability from th
Tip: Use `help filestatus` to learn more.
.ns> update patch thing
.ns> update.old patch thing
⍟ I've added these definitions:
@ -118,7 +118,7 @@ thing _ = send 1
These updates should succeed since `Channels` is a dependency.
```ucm
.ns> update.preview patch Channels.send
.ns> update.old.preview patch Channels.send
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
@ -131,7 +131,7 @@ These updates should succeed since `Channels` is a dependency.
Channels.send : a ->{Channels} ()
.ns> update.preview patch thing
.ns> update.old.preview patch thing
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
@ -149,7 +149,7 @@ These updates should succeed since `Channels` is a dependency.
We should also be able to successfully update the whole thing.
```ucm
.ns> update
.ns> update.old
⊡ Ignored previously added definitions: Channels

View File

@ -90,11 +90,9 @@ structural type X = Three Nat Nat Nat
new definition:
structural type X
(The old definition is also named Z. I'll update this
name too.)
(The old definition is also named Z.)
x : Nat
(The old definition is also named z. I'll update this
name too.)
(The old definition is also named z.)
```
Expected: `x` is now `3` and `X` has constructor `Three`. UCM tells you the old definitions were also called `z` and `Z` and these names have also been updated.
@ -102,14 +100,9 @@ Expected: `x` is now `3` and `X` has constructor `Three`. UCM tells you the old
```ucm
.> update
⍟ I've updated these names to your new definition:
structural type X
(The old definition was also named Z. I updated this name
too.)
x : Nat
(The old definition was also named z. I updated this name
too.)
I propagated the update and am now saving the results.
Done.
```
Update it to something that already exists with a different name:
@ -129,13 +122,9 @@ structural type X = Two Nat Nat
new definition:
structural type X
(The old definition is also named Z. I'll update this
name too.)
(The new definition is already named Y as well.)
(also named Y)
x : Nat
(The old definition is also named z. I'll update this
name too.)
(The new definition is already named y as well.)
(also named y)
```
Expected: `x` is now `2` and `X` is `Two`. UCM says the old definition was also named `z/Z`, and was also updated. And it says the new definition is also named `y/Y`.
@ -143,15 +132,8 @@ Expected: `x` is now `2` and `X` is `Two`. UCM says the old definition was also
```ucm
.> update
⍟ I've updated these names to your new definition:
structural type X
(The old definition was also named Z. I updated this name
too.)
(The new definition is already named Y as well.)
x : Nat
(The old definition was also named z. I updated this name
too.)
(The new definition is already named y as well.)
I propagated the update and am now saving the results.
Done.
```

View File

@ -15,7 +15,7 @@ x = 2
```
```ucm
.> update foo.patch
.> update.old foo.patch
```
Copy the patch and make sure it's still there.

View File

@ -19,7 +19,7 @@ x = 2
```
```ucm
.> update foo.patch
.> update.old foo.patch
⍟ I've updated these names to your new definition:

View File

@ -49,10 +49,9 @@ ping _ = !pong + 3
```ucm
.> update
⍟ I've updated these names to your new definition:
ping : 'Nat
pong : 'Nat
I propagated the update and am now saving the results.
Done.
.> view ping pong

View File

@ -49,10 +49,9 @@ ping _ = 3
```ucm
.> update
⍟ I've updated these names to your new definition:
ping : 'Nat
pong : 'Nat
I propagated the update and am now saving the results.
Done.
.> view ping pong

View File

@ -22,6 +22,6 @@ ping = 3
```
```ucm
.> update
.> update.old
.> view ping pong
```

View File

@ -47,7 +47,7 @@ ping = 3
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -25,6 +25,6 @@ clang _ = !pong + 3
```
```ucm
.> update ping
.> update.old ping
.> view ping pong clang
```

View File

@ -54,7 +54,7 @@ clang _ = !pong + 3
```
```ucm
.> update ping
.> update.old ping
⍟ I've added these definitions:

View File

@ -25,7 +25,7 @@ inner.ping _ = !pong + 3
```
```ucm
.inner> update
.inner> update.old
.> view inner.ping
```

View File

@ -50,7 +50,7 @@ inner.ping _ = !pong + 3
```
```ucm
.inner> update
.inner> update.old
⍟ I've added these definitions:

View File

@ -96,26 +96,21 @@ ping _ = ! #4t465jk908dsue9fgdfi06fihppsme16cvaua29hjm1585de1mvt11dftqrab5chhla3
```ucm
.> update
⍟ I've updated these names to your new definition:
ping : 'Nat
I propagated the update and am now saving the results.
Done.
.> view ping pong
ping : 'Nat
ping _ =
use Nat +
!pong#4t465jk908 + 4
!#4t465jk908 + 4
pong#4t465jk908 : 'Nat
pong#4t465jk908 _ =
pong : 'Nat
pong _ =
use Nat +
!#4t465jk908.1 + 2
pong#hrsm7vhrcr : 'Nat
pong#hrsm7vhrcr _ =
use Nat +
!ping + 3
!ping + 2
```
Here we see that we didn't properly update `pong` to point to the new `ping` because it was conflicted.

View File

@ -13,7 +13,7 @@ x = 2
```
```ucm
.> update
.> update.old
.> view.patch
```
@ -35,7 +35,7 @@ unique[b] type Foo = Foo | Bar
```
```ucm
.> update
.> update.old
.> view.patch
```
@ -58,7 +58,7 @@ unique[bb] type bar = Foo | Bar
```
```ucm
.> update
.> update.old
.> view.patch
.> delete.type-replacement 1
.> view.patch

View File

@ -40,7 +40,7 @@ x = 2
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:
@ -105,7 +105,7 @@ unique[b] type Foo = Foo | Bar
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:
@ -173,7 +173,7 @@ unique[bb] type bar = Foo | Bar
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -82,7 +82,7 @@ unique type Y a b = Y a b
```
```ucm
.ns2> update
.ns2> update.old
.ns2> links fromJust
.> diff.namespace ns1 ns2
.> alias.term ns2.d ns2.d'
@ -106,7 +106,7 @@ unique type Y a b = Y a b
bdependent = "banana"
```
```ucm
.ns3> update
.ns3> update.old
.> diff.namespace ns2 ns3
```
@ -127,13 +127,13 @@ b = a + 1
a = 444
```
```ucm
.nsy> update
.nsy> update.old
```
```unison:hide
a = 555
```
```ucm
.nsz> update
.nsz> update.old
.> merge nsy nsw
```
```ucm:error

View File

@ -209,7 +209,7 @@ unique type Y a b = Y a b
```
```ucm
.ns2> update
.ns2> update.old
⍟ I've added these definitions:
@ -222,8 +222,7 @@ unique type Y a b = Y a b
b : Text
fromJust : Nat
(The old definition was also named fromJust'. I updated
this name too.)
(The old definition was also named fromJust'.)
.ns2> links fromJust
@ -477,7 +476,7 @@ bdependent = "banana"
```
```ucm
.ns3> update
.ns3> update.old
⍟ I've updated these names to your new definition:
@ -532,7 +531,7 @@ a = 444
```
```ucm
.nsy> update
.nsy> update.old
⍟ I've updated these names to your new definition:
@ -544,7 +543,7 @@ a = 555
```
```ucm
.nsz> update
.nsz> update.old
⍟ I've updated these names to your new definition:

View File

@ -21,7 +21,7 @@ hey = "hello"
Update
```ucm
.> update
.> update.old
.> find.patch
.> view.patch 1
```

View File

@ -52,7 +52,7 @@ hey = "hello"
Update
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -23,9 +23,9 @@ a = "an update"
```ucm
.> update
⍟ I've updated these names to your new definition:
a : ##Text
I propagated the update and am now saving the results.
Done.
```
As of the time of this writing, the history for `X` should be a single node, `#4eeuo5bsfr`;

View File

@ -69,9 +69,9 @@ Step 4: I add it and expect to see it
```ucm
.trunk> update
⍟ I've updated these names to your new definition:
x.doc : Doc
I propagated the update and am now saving the results.
Done.
.trunk> docs x
@ -83,10 +83,7 @@ That works great. Let's relink the old doc too.
```ucm
.trunk> link .backup.x.doc x
Updates:
1. trunk.x : Nat
+ 2. backup.x.doc : Doc
I didn't make any changes.
```
Let's check that we see both docs:

View File

@ -56,7 +56,7 @@ unique type A a b c d
Let's do the update now, and verify that the definitions all look good and there's nothing `todo`:
```ucm
.a2> update
.a2> update.old
.a2> view A NeedsA f f2 f3 g
.a2> todo
```
@ -91,6 +91,6 @@ And checking that after updating this record, there's nothing `todo`:
```ucm
.> fork a3 a4
.a4> update
.a4> update.old
.a4> todo
```

View File

@ -66,7 +66,7 @@ unique type A a b c d
Let's do the update now, and verify that the definitions all look good and there's nothing `todo`:
```ucm
.a2> update
.a2> update.old
⍟ I've updated these names to your new definition:
@ -193,7 +193,7 @@ And checking that after updating this record, there's nothing `todo`:
Done.
.a4> update
.a4> update.old
⍟ I've added these definitions:

View File

@ -26,6 +26,6 @@ bool = false
```
```ucm:error
.> update
.> update.old
.> test
```

View File

@ -71,7 +71,7 @@ bool = false
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -50,9 +50,9 @@ x = 7
```ucm
.> update
⍟ I've updated these names to your new definition:
x : Nat
I propagated the update and am now saving the results.
Done.
.> view x y z

View File

@ -24,7 +24,7 @@ x = 4
```
```ucm
.> update
.> update.old
.> links y
.> view 1
```

View File

@ -24,7 +24,7 @@ x = 4
```
```ucm
.> update
.> update.old
⍟ I've updated these names to your new definition:

View File

@ -136,7 +136,7 @@ a = "hello world!"
```
```ucm
.c1b> update
.c1b> update.old
```
Now merging `c1b` into `c1a` should result in the updated version of `a` and `f`, and the new definitions `b` and `c`:

View File

@ -275,7 +275,7 @@ a = "hello world!"
```
```ucm
.c1b> update
.c1b> update.old
⍟ I've updated these names to your new definition:

View File

@ -227,15 +227,9 @@ master.frobnicate n = n + 1
```ucm
.> update
⍟ I've added these definitions:
master.frobnicate : Nat -> Nat
⍟ I've updated these names to your new definition:
master.y : Text
(The old definition was also named feature2.y. I updated
this name too.)
I propagated the update and am now saving the results.
Done.
.> view master.y

View File

@ -53,10 +53,9 @@ unique type a.T = T1 | T2
```ucm
.happy> update
⍟ I've updated these names to your new definition:
unique type a.T
a.termInA : Nat
I propagated the update and am now saving the results.
Done.
```
Should be able to move the namespace, including its types, terms, and sub-namespaces.
@ -144,10 +143,9 @@ b.termInB = 11
```ucm
.history> update
⍟ I've updated these names to your new definition:
a.termInA : Nat
b.termInB : Nat
I propagated the update and am now saving the results.
Done.
```
Deleting a namespace should not leave behind any history,
@ -236,10 +234,9 @@ b.termInB = 11
```ucm
.existing> update
⍟ I've updated these names to your new definition:
a.termInA : Nat
b.termInB : Nat
I propagated the update and am now saving the results.
Done.
.existing> move.namespace a b
@ -278,7 +275,7 @@ I should be able to move the root into a sub-namespace
□ 1. #bo3npg6e0m (start of history)
□ 1. #0la3iepnak (start of history)
```
```ucm
@ -294,7 +291,7 @@ I should be able to move the root into a sub-namespace
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #b229p94nrj
⊙ 1. #55ghe8sfc0
- Deletes:
@ -305,7 +302,7 @@ I should be able to move the root into a sub-namespace
Original name New name
existing.a.termInA existing.b.termInA
⊙ 2. #2cn8souuq6
⊙ 2. #addivb5ls2
+ Adds / updates:
@ -317,26 +314,26 @@ I should be able to move the root into a sub-namespace
happy.b.termInA existing.a.termInA
history.b.termInA existing.a.termInA
⊙ 3. #i3loit00cj
⊙ 3. #fn8r1jonbk
+ Adds / updates:
existing.a.termInA existing.b.termInB
⊙ 4. #djomgabadc
⊙ 4. #cev4cnh02n
> Moves:
Original name New name
history.a.termInA history.b.termInA
⊙ 5. #u6l5c2fe7n
⊙ 5. #sjqnqbgls9
- Deletes:
history.b.termInB
⊙ 6. #5npm2fkq5r
⊙ 6. #u2ah32c5ug
+ Adds / updates:
@ -347,13 +344,13 @@ I should be able to move the root into a sub-namespace
Original name New name(s)
happy.b.termInA history.a.termInA
⊙ 7. #9b6rfi8qbl
⊙ 7. #cv8aq0amp2
+ Adds / updates:
history.a.termInA history.b.termInB
⊙ 8. #nltcfbjm3i
⊙ 8. #o9j7cnkr8n
> Moves:
@ -363,7 +360,7 @@ I should be able to move the root into a sub-namespace
happy.a.T.T2 happy.b.T.T2
happy.a.termInA happy.b.termInA
⊙ 9. #4r8uic569d
⊙ 9. #or64kicr0a
+ Adds / updates:
@ -406,27 +403,26 @@ I should be able to move a sub namespace _over_ the root.
.> ls
1. b/ (3 terms, 1 type)
2. patch (patch)
1. b/ (3 terms, 1 type)
.> history
Note: The most recent namespace hash is immediately below this
message.
⊙ 1. #7jqrm9050c
⊙ 1. #ur0jj0uuhu
+ Adds / updates:
b.T b.T.T1 b.T.T2 b.termInA
⊙ 2. #d2snuagr7q
⊙ 2. #8a0jmdflfd
- Deletes:
a.T a.T.T1 a.T.T2 a.termInA
⊙ 3. #pnls3frrp8
⊙ 3. #3sbe4c0ql9
+ Adds / updates:

View File

@ -32,7 +32,7 @@ unique type Foo = Foo | Bar
and update the codebase to use the new type `Foo`...
```ucm
.subpath> update
.subpath> update.old
```
... it should automatically propagate the type to `fooToInt`.
@ -76,7 +76,7 @@ someTerm _ = None
Update...
```ucm
.subpath.preserve> update
.subpath.preserve> update.old
.> cd .
```
@ -128,7 +128,7 @@ someTerm _ = None
... in one of the namespaces...
```ucm
.subpath.one> update
.subpath.one> update.old
```
The other namespace should be left alone.

View File

@ -75,7 +75,7 @@ unique type Foo = Foo | Bar
and update the codebase to use the new type `Foo`...
```ucm
.subpath> update
.subpath> update.old
⍟ I've updated these names to your new definition:
@ -158,7 +158,7 @@ someTerm _ = None
Update...
```ucm
.subpath.preserve> update
.subpath.preserve> update.old
⍟ I've updated these names to your new definition:
@ -258,7 +258,7 @@ someTerm _ = None
... in one of the namespaces...
```ucm
.subpath.one> update
.subpath.one> update.old
⍟ I've updated these names to your new definition:

View File

@ -178,9 +178,9 @@ a = 3
```ucm
foo/main> update
⍟ I've updated these names to your new definition:
a : Nat
I propagated the update and am now saving the results.
Done.
foo/main> reset /topic

View File

@ -45,7 +45,7 @@ foo = 43
```
```ucm
.example.resolve.a> update
.example.resolve.a> update.old
```
And make a different change in the `b` namespace:
@ -59,7 +59,7 @@ foo = 44
```
```ucm
.example.resolve.b> update
.example.resolve.b> update.old
```
The `a` and `b` namespaces now each contain a patch named `patch`. We can view these:

View File

@ -74,7 +74,7 @@ foo = 43
```
```ucm
.example.resolve.a> update
.example.resolve.a> update.old
⍟ I've updated these names to your new definition:
@ -104,7 +104,7 @@ foo = 44
```
```ucm
.example.resolve.b> update
.example.resolve.b> update.old
⍟ I've updated these names to your new definition:

View File

@ -33,5 +33,5 @@ This update should succeed since the conflicted constructor
is removed in the same update that the new term is being added.
```ucm
.ns> update
.ns> update.old
```

View File

@ -61,7 +61,7 @@ This update should succeed since the conflicted constructor
is removed in the same update that the new term is being added.
```ucm
.ns> update
.ns> update.old
⍟ I've added these definitions:

View File

@ -61,7 +61,7 @@ add b = b
```
```ucm
.> update
.> update.old
.> debug.tab-complete delete.type Foo
.> debug.tab-complete delete.term add
```

View File

@ -154,7 +154,7 @@ add b = b
```
```ucm
.> update
.> update.old
⍟ I've added these definitions:

View File

@ -29,7 +29,7 @@ structural type MyType = MyType Text
```
```ucm:error
.simple> update
.simple> update.old
.simple> todo
.> cd .
```
@ -56,7 +56,7 @@ structural type MyType = MyType Nat
```
```ucm:hide
.mergeA> update
.mergeA> update.old
.> cd .
```
@ -66,7 +66,7 @@ structural type MyType = MyType Int
```
```ucm:hide
.mergeB> update
.mergeB> update.old
```
```ucm:error
@ -93,7 +93,7 @@ foo = 802
```
```ucm
.lhs> update
.lhs> update.old
```
```unison
@ -131,7 +131,7 @@ even = 17
```
```ucm
.cycle2> update
.cycle2> update.old
```
```ucm:error

View File

@ -20,7 +20,7 @@ structural type MyType = MyType Text
```
```ucm
.simple> update
.simple> update.old
⍟ I've updated these names to your new definition:
@ -175,7 +175,7 @@ foo = 802
```
```ucm
.lhs> update
.lhs> update.old
⍟ I've updated these names to your new definition:
@ -269,7 +269,7 @@ even = 17
```
```ucm
.cycle2> update
.cycle2> update.old
⍟ I've updated these names to your new definition:

View File

@ -29,8 +29,7 @@ structural type Y = Y Nat
new definition:
structural type Y
(The old definition is also named builtin.Unit. I'll
update this name too.)
(The old definition is also named builtin.Unit.)
```
Adding should fail for BOTH definitions: `Y` needs an update and `Z` is blocked by `Y`.

View File

@ -42,18 +42,15 @@ foo = 200
new definition:
foo : Nat
(The old definition is also named lib.foo. I'll update
this name too.)
(The old definition is also named lib.foo.)
```
```ucm
.> update
⍟ I've updated these names to your new definition:
foo : Nat
(The old definition was also named lib.foo. I updated this
name too.)
I propagated the update and am now saving the results.
Done.
.> names foo

View File

@ -85,18 +85,12 @@ x = 3
```ucm
.merged> update
⍟ I've updated these names to your new definition:
x : Nat
I propagated the update and am now saving the results.
Done.
.merged> view.patch
Edited Terms:
1. b.x -> 3. x
2. a.x -> 4. x
Tip: To remove entries from a patch, use
delete.term-replacement or delete.type-replacement, as
appropriate.
This patch is empty.
```

View File

@ -0,0 +1,28 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = 5
```
```ucm
.> add
```
```unison
foo : Nat
foo = 6
bar : Nat
bar = 7
```
```ucm
.> update
.> view foo bar
```

View File

@ -0,0 +1,74 @@
```ucm
.> builtins.merge
Done.
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = 5
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
bar : Nat
foo : Nat
```
```ucm
.> add
⍟ I've added these definitions:
bar : Nat
foo : Nat
```
```unison
foo : Nat
foo = 6
bar : Nat
bar = 7
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These names already exist. You can `update` them to your
new definition:
bar : Nat
(The old definition is also named foo.)
foo : Nat
(The old definition is also named bar.)
```
```ucm
.> update
I propagated the update and am now saving the results.
Done.
.> view foo bar
bar : Nat
bar = 7
foo : Nat
foo = 6
```

View File

@ -0,0 +1,22 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
```
```ucm
.> add
```
```unison
foo : Int
foo = +5
```
```ucm
.> update
.> view foo
```

View File

@ -0,0 +1,60 @@
```ucm
.> builtins.merge
Done.
```
```unison
foo : Nat
foo = 5
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
foo : Nat
```
```ucm
.> add
⍟ I've added these definitions:
foo : Nat
```
```unison
foo : Int
foo = +5
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These names already exist. You can `update` them to your
new definition:
foo : Int
```
```ucm
.> update
I propagated the update and am now saving the results.
Done.
.> view foo
foo : Int
foo = +5
```

View File

@ -0,0 +1,25 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = 5
```
```ucm
.> add
```
```unison
foo : Nat
foo = 6
```
```ucm
.> update
.> view foo bar
```

View File

@ -0,0 +1,69 @@
```ucm
.> builtins.merge
Done.
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = 5
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
bar : Nat
foo : Nat
```
```ucm
.> add
⍟ I've added these definitions:
bar : Nat
foo : Nat
```
```unison
foo : Nat
foo = 6
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These names already exist. You can `update` them to your
new definition:
foo : Nat
(The old definition is also named bar.)
```
```ucm
.> update
I propagated the update and am now saving the results.
Done.
.> view foo bar
bar : Nat
bar = 5
foo : Nat
foo = 6
```

View File

@ -0,0 +1,24 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = foo + 10
```
```ucm
.> add
```
```unison
foo : Int
foo = +5
```
```ucm:error
.> update
```

View File

@ -0,0 +1,66 @@
```ucm
.> builtins.merge
Done.
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = foo + 10
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
bar : Nat
foo : Nat
```
```ucm
.> add
⍟ I've added these definitions:
bar : Nat
foo : Nat
```
```unison
foo : Int
foo = +5
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These names already exist. You can `update` them to your
new definition:
foo : Int
```
```ucm
.> update
bar : Nat
bar =
use Nat +
foo + 10
foo : Int
foo = +5
Typechecking failed when propagating the update to all the dependents.
```

View File

@ -0,0 +1,25 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = foo + 10
```
```ucm
.> add
```
```unison
foo : Nat
foo = 6
```
```ucm
.> update
.> view bar
```

View File

@ -0,0 +1,67 @@
```ucm
.> builtins.merge
Done.
```
```unison
foo : Nat
foo = 5
bar : Nat
bar = foo + 10
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These new definitions are ok to `add`:
bar : Nat
foo : Nat
```
```ucm
.> add
⍟ I've added these definitions:
bar : Nat
foo : Nat
```
```unison
foo : Nat
foo = 6
```
```ucm
I found and typechecked these definitions in scratch.u. If you
do an `add` or `update`, here's how your codebase would
change:
⍟ These names already exist. You can `update` them to your
new definition:
foo : Nat
```
```ucm
.> update
I propagated the update and am now saving the results.
Done.
.> view bar
bar : Nat
bar =
use Nat +
foo + 10
```

View File

@ -0,0 +1,22 @@
```ucm
.> builtins.merge
```
```unison
foo : Nat
foo = 5
```
```ucm
.> add
```
```unison
foo : Nat
foo = 6
```
```ucm
.> update
.> view foo
```

Some files were not shown because too many files have changed in this diff.