Mirror of https://github.com/unisonweb/unison.git (synced 2024-11-10 20:00:27 +03:00)
Generalize duplicate checking
commit 73d784763b (parent 6c8eb09b7e)
@@ -38,7 +38,6 @@ when:
 library:
   source-dirs: src
   dependencies:
-    - pretty-simple
     - aeson
     - ansi-terminal
     - async
@@ -37,8 +37,6 @@ import qualified Unison.NamesWithHistory as NamesWithHistory
 import qualified Unison.Names as Names
 import qualified Unison.Names.ResolutionResult as Names
 import qualified Unison.Name as Name
-import Debug.Pretty.Simple (pTraceShowM)
-import qualified Data.Foldable as Foldable
 import qualified Unison.UnisonFile as UF

 resolutionFailures :: Ord v => [Names.ResolutionFailure v Ann] -> P v x
@@ -126,31 +124,51 @@ file = do
         ]
       uf = UnisonFileId (UF.datasId env) (UF.effectsId env) (terms <> join accessors)
              (List.multimap watches)
-  let dupes = findDuplicateTermsAndConstructors uf
-  when (not . null $ dupes) $ do
-    let dupeList :: [(v, [Ann])]
-        dupeList = Map.toList $ fmap Set.toList dupes
-    P.customFailure (DuplicateTermNames dupeList)
-  pTraceShowM uf
+  validateUnisonFile uf
   pure uf

-findDuplicateTermsAndConstructors :: forall v a. (Ord v, Ord a) => UnisonFile v a -> Map v (Set a)
-findDuplicateTermsAndConstructors uf = duplicates
-  where
-    allConstructors :: [(v, a)]
-    allConstructors = Map.elems (dataDeclarationsId uf) <> (Map.elems . fmap (fmap DD.toDataDecl) $ (effectDeclarationsId uf))
-      & Foldable.toList
-      & fmap snd
-      & foldMap DD.constructors'
-      & fmap (\(ann, v, _typ) -> (v, ann))
-    allTerms :: [(v, a)]
-    allTerms = UF.terms uf
-      <&> (\(v, t) -> (v, ABT.annotation t))
-    merged :: Map v (Set a)
-    merged = Map.fromListWith (<>) . (fmap . fmap) Set.singleton $ allConstructors <> allTerms
-    duplicates :: Map v (Set a)
-    duplicates = Map.filter ((> 1) . Set.size) merged
+-- | Final validations and sanity checks to perform before finishing parsing.
+validateUnisonFile :: forall v. Var v => UnisonFile v Ann -> P v ()
+validateUnisonFile uf =
+  checkForDuplicateTermsAndConstructors uf
+
+-- | Because types and abilities can introduce their own constructors and fields it's difficult
+-- to detect all duplicate terms during parsing itself. Here we collect all terms and
+-- constructors and verify that no duplicates exist in the file, triggering an error if needed.
+checkForDuplicateTermsAndConstructors ::
+  forall v.
+  (Ord v) =>
+  UnisonFile v Ann ->
+  P v ()
+checkForDuplicateTermsAndConstructors uf = do
+  when (not . null $ duplicates) $ do
+    let dupeList :: [(v, [Ann])]
+        dupeList = duplicates
+          & fmap Set.toList
+          & Map.toList
+    P.customFailure (DuplicateTermNames dupeList)
+  where
+    effectDecls :: [DataDeclaration v Ann]
+    effectDecls = (Map.elems . fmap (DD.toDataDecl . snd) $ (effectDeclarationsId uf))
+    dataDecls :: [DataDeclaration v Ann]
+    dataDecls = fmap snd $ Map.elems (dataDeclarationsId uf)
+    allConstructors :: [(v, Ann)]
+    allConstructors =
+      (dataDecls <> effectDecls)
+        & foldMap DD.constructors'
+        & fmap (\(ann, v, _typ) -> (v, ann))
+    allTerms :: [(v, Ann)]
+    allTerms =
+      UF.terms uf
+        <&> (\(v, t) -> (v, ABT.annotation t))
+    mergedTerms :: Map v (Set Ann)
+    mergedTerms =
+      (allConstructors <> allTerms)
+        & (fmap . fmap) Set.singleton
+        & Map.fromListWith (<>)
+    duplicates :: Map v (Set Ann)
+    duplicates =
+      -- Any vars with multiple annotations are duplicates.
+      Map.filter ((> 1) . Set.size) mergedTerms

 -- A stanza is either a watch expression like:
 -- > 1 + x
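Note on the approach: the new check reduces duplicate detection to a single pattern. Gather every (name, source annotation) pair the file binds, whether it comes from a term or from a data/ability constructor, group the annotations by name, and report any name bound at more than one annotation. The sketch below illustrates that pattern in isolation, using plain String names and Int positions in place of Unison's Var and Ann types; it is a minimal illustration of the technique, not code from this commit.

-- Minimal, self-contained sketch (assumed simplified types, not the project's code).
module Main where

import Data.Function ((&))
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Set (Set)
import qualified Data.Set as Set

-- | Group every (name, position) occurrence by name, then keep only the
-- names that occur at more than one position.
findDuplicates :: (Ord name, Ord pos) => [(name, pos)] -> Map name (Set pos)
findDuplicates occurrences =
  occurrences
    & (fmap . fmap) Set.singleton   -- [(name, Set pos)]
    & Map.fromListWith (<>)         -- merge the singleton sets per name
    & Map.filter ((> 1) . Set.size) -- 2+ distinct positions means a duplicate

main :: IO ()
main = do
  -- "x" is bound twice (positions 1 and 7); "Some" only once.
  let occurrences = [("x", 1), ("Some", 3), ("x", 7)]
  print (findDuplicates occurrences)
  -- prints: fromList [("x",fromList [1,7])]

In the commit itself the same shape appears as mergedTerms and duplicates, with DD.constructors' and ABT.annotation supplying the (name, annotation) pairs, and P.customFailure (DuplicateTermNames ...) raising the parse error when the filtered map is non-empty.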
@@ -248,7 +248,6 @@ library
     , optparse-applicative >=0.16.1.0
     , pem
     , prelude-extras
-    , pretty-simple
     , primitive
     , process
     , random >=1.2.0