mirror of https://github.com/github/semantic.git synced 2024-11-24 08:54:07 +03:00

Merge remote-tracking branch 'origin/master' into past-is-prologue

Patrick Thomson 2020-01-29 12:31:02 -05:00
commit ce0f4335e1
96 changed files with 737 additions and 603 deletions

View File

@ -37,7 +37,7 @@ jobs:
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-v3-cabal-store
key: ${{ runner.os }}-${{ matrix.ghc }}-v5-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle

View File

@ -1,13 +1,17 @@
{-# LANGUAGE DataKinds, FlexibleContexts, PackageImports, PartialTypeSignatures, TypeApplications, TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
module Evaluation (benchmarks) where
import Control.Carrier.Parse.Simple
import qualified Data.Duration as Duration
import Data.Abstract.Evaluatable
import Data.Blob
import Data.Blob.IO (readBlobFromFile')
import Data.Bifunctor
import Data.Blob.IO (readBlobFromPath)
import qualified Data.Duration as Duration
import "semantic" Data.Graph (topologicalSort)
import qualified Data.Language as Language
import Data.Project
@ -18,21 +22,23 @@ import Semantic.Config (defaultOptions)
import Semantic.Graph
import Semantic.Task (TaskSession (..), runTask, withOptions)
import Semantic.Util
import qualified System.Path as Path
import System.Path ((</>))
import qualified System.Path as Path
import qualified System.Path.PartClass as Path.PartClass
-- Duplicating this stuff from Util to shut off the logging
callGraphProject' :: ( Language.SLanguage lang
, HasPrelude lang
, Path.PartClass.AbsRel ar
)
=> TaskSession
-> Proxy lang
-> Path.RelFile
-> Path.File ar
-> IO (Either String ())
callGraphProject' session proxy path
| Just (SomeParser parser) <- parserForLanguage analysisParsers lang = fmap (bimap show (const ())) . runTask session $ do
blob <- readBlobFromFile' (fileForTypedPath path)
blob <- readBlobFromPath (Path.toAbsRel path)
package <- fmap snd <$> runParse (Duration.fromSeconds 10) (parsePackage parser (Project (Path.toString (Path.takeDirectory path)) [blob] lang []))
modules <- topologicalSort <$> runImportGraphToModules proxy package
runCallGraph proxy False modules package

View File

@ -1,6 +1,6 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
module Tagging (benchmarks) where
@ -8,19 +8,19 @@ import Control.Carrier.Parse.Measured
import Control.Carrier.Reader
import Control.Exception (throwIO)
import Control.Monad
import Data.Blob
import Data.Foldable
import Data.Language (LanguageMode (..), PerLanguageModes (..))
import Gauge
import System.FilePath.Glob
import qualified System.Path as Path
import Data.Flag
import Proto.Semantic as P hiding (Blob, BlobPair)
import Semantic.Api.Symbols (parseSymbols)
import Semantic.Config as Config
import Semantic.Task
import Semantic.Task.Files
import qualified Analysis.File as File
import Data.Flag
import Proto.Semantic as P hiding (Blob, BlobPair)
import Semantic.Api.Symbols (parseSymbols)
import Semantic.Config as Config
import Semantic.Task
import Semantic.Task.Files
benchmarks :: Benchmark
benchmarks = bgroup "tagging"
@ -66,7 +66,7 @@ parseSymbolsFilePath ::
=> PerLanguageModes
-> Path.RelFile
-> m ParseTreeSymbolResponse
parseSymbolsFilePath languageModes path = readBlob (fileForTypedPath path) >>= runReader languageModes . parseSymbols . pure @[]
parseSymbolsFilePath languageModes path = readBlob (File.fromPath path) >>= runReader languageModes . parseSymbols . pure @[]
aLaCarteLanguageModes :: PerLanguageModes
aLaCarteLanguageModes = PerLanguageModes

View File

@ -50,23 +50,30 @@ library
Analysis.Effect.Heap
Analysis.File
Analysis.FlowInsensitive
Analysis.Functor.Named
Analysis.ImportGraph
Analysis.Intro
Analysis.Name
Analysis.Typecheck
Control.Carrier.Fail.WithLoc
build-depends:
algebraic-graphs ^>= 0.3
, aeson ^>= 1.4
, algebraic-graphs ^>= 0.3
, base >= 4.13 && < 5
, containers ^>= 0.6
, filepath
, fused-effects ^>= 1.0
, fused-effects-readline
, fused-syntax
, hashable
, haskeline ^>= 0.7.5
, hashable
, lingo ^>= 0.3
, pathtype ^>= 0.8.1
, prettyprinter >= 1.2 && < 2
, prettyprinter-ansi-terminal ^>= 1.1.1
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semilattices
, terminal-size ^>= 0.3
, text ^>= 1.2.3.1
, transformers ^>= 0.5

View File

@ -29,7 +29,7 @@ import qualified Analysis.Carrier.Env.Precise as A
import qualified Analysis.Carrier.Heap.Precise as A
import qualified Analysis.Effect.Domain as A
import Analysis.File
import Analysis.Name
import Analysis.Functor.Named
import Control.Algebra
import Control.Carrier.Fail.WithLoc
import Control.Carrier.Fresh.Strict
@ -178,7 +178,7 @@ heapAddressGraph = heapGraph (\ addr v -> (Value v, addr)) (fmap G.vertex . (,)
addressStyle :: Heap (Concrete term) -> G.Style (EdgeType (Concrete term), Addr) Text
addressStyle heap = (G.defaultStyle vertex) { G.edgeAttributes }
where vertex (_, addr) = pack (show addr) <> " = " <> maybe "?" fromConcrete (IntMap.lookup addr heap)
edgeAttributes _ (Slot name, _) = ["label" G.:= unName name]
edgeAttributes _ (Slot name, _) = ["label" G.:= formatName name]
edgeAttributes _ (Edge Import, _) = ["color" G.:= "blue"]
edgeAttributes _ (Edge Lexical, _) = ["color" G.:= "green"]
edgeAttributes _ _ = []
@ -186,7 +186,7 @@ addressStyle heap = (G.defaultStyle vertex) { G.edgeAttributes }
Unit -> "()"
Bool b -> pack $ show b
String s -> pack $ show s
Closure p (Span s e) (Named n _) -> "\\\\ " <> unName n <> " [" <> pack (Path.toString p) <> ":" <> showPos s <> "-" <> showPos e <> "]"
Closure p (Span s e) (Named n _) -> "\\\\ " <> formatName n <> " [" <> pack (Path.toString p) <> ":" <> showPos s <> "-" <> showPos e <> "]"
Record _ -> "{}"
showPos (Pos l c) = pack (show l) <> ":" <> pack (show c)

View File

@ -24,7 +24,7 @@ module Analysis.Effect.Domain
, run
) where
import Analysis.Name
import Analysis.Functor.Named
import Control.Algebra
import Data.Text (Text)
import GHC.Generics (Generic1)

View File

@ -1,13 +1,18 @@
{-# LANGUAGE DeriveTraversable #-}
module Analysis.File
( File(..)
, fileLanguage
, fromBody
, fromPath
) where
import Data.Maybe (fromJust, listToMaybe)
import GHC.Stack
import Source.Span
import Data.Maybe (fromJust, listToMaybe)
import Data.Semilattice.Lower
import GHC.Stack
import Source.Language as Language
import Source.Span
import qualified System.Path as Path
import qualified System.Path.PartClass as Path.PartClass
data File a = File
{ filePath :: !Path.AbsRelFile
@ -19,3 +24,10 @@ data File a = File
fromBody :: HasCallStack => a -> File a
fromBody body = File (Path.absRel (srcLocFile srcLoc)) (spanFromSrcLoc srcLoc) body where
srcLoc = snd (fromJust (listToMaybe (getCallStack callStack)))
-- | The language of the provided file, as inferred by 'Language.forPath'.
fileLanguage :: File a -> Language
fileLanguage = Language.forPath . filePath
fromPath :: Path.PartClass.AbsRel ar => Path.File ar -> File Language
fromPath p = File (Path.toAbsRel p) lowerBound (Language.forPath p)
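A doctest-style sketch (not in the commit itself) of the two new helpers; the path literal is hypothetical, and the expected output assumes Lingo resolves the ".py" extension to Python.
-- >>> fileLanguage (fromPath (Path.relFile "src/app.py"))
-- Python
--
-- fromPath fills the span with lowerBound and stores the inferred Language as
-- the File's body.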

View File

@ -0,0 +1,37 @@
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveTraversable #-}
module Analysis.Functor.Named
( module Analysis.Name
, Named (..)
, named
, named'
, namedName
, namedValue
) where
import Analysis.Name
import Data.Function (on)
-- | Annotates an @a@ with a 'Name'-provided name, which is ignored for '==' and 'compare'.
data Named a = Named Name a
deriving (Foldable, Functor, Show, Traversable)
named :: Name -> a -> Named a
named = Named
named' :: Name -> Named Name
named' u = Named u u
namedName :: Named a -> Name
namedName (Named n _) = n
namedValue :: Named a -> a
namedValue (Named _ a) = a
instance Eq a => Eq (Named a) where
(==) = (==) `on` namedValue
instance Ord a => Ord (Named a) where
compare = compare `on` namedValue
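A doctest-style sketch (not in the commit itself) of the behaviour documented above; the names "x" and "y" are illustrative.
-- >>> named (name "x") (1 :: Int) == named (name "y") 1
-- True
-- >>> compare (named (name "x") (2 :: Int)) (named (name "y") 1)
-- GT
-- Both hold because Eq and Ord consult namedValue only, ignoring the attached Name.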

View File

@ -21,7 +21,7 @@ import qualified Analysis.Carrier.Heap.Monovariant as A
import qualified Analysis.Effect.Domain as A
import Analysis.File
import Analysis.FlowInsensitive
import Analysis.Name
import Analysis.Functor.Named
import Control.Algebra
import Control.Applicative (Alternative (..))
import Control.Carrier.Fail.WithLoc

View File

@ -11,7 +11,7 @@ module Analysis.Intro
, record
) where
import Analysis.Name
import Analysis.Functor.Named
import Control.Algebra
import Data.Text (Text)
import GHC.Generics (Generic1)

View File

@ -1,40 +1,68 @@
{-# LANGUAGE DeriveTraversable, GeneralizedNewtypeDeriving #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Analysis.Name
( Name(..)
, Named(..)
, named
, named'
, namedName
, namedValue
( Name
-- * Constructors
, gensym
, name
, nameI
, formatName
, isGenerated
) where
import Data.Function (on)
import Data.String (IsString)
import Data.Text (Text)
import Control.Effect.Fresh
import Data.Aeson
import qualified Data.Char as Char
import Data.Hashable
import Data.String
import Data.Text (Text)
import qualified Data.Text as Text
-- | User-specified and -relevant names.
newtype Name = Name { unName :: Text }
deriving (Eq, IsString, Ord, Show)
-- | The type of variable names.
data Name
= Name Text
| I Int
deriving (Eq, Ord)
instance IsString Name where
fromString = Name . fromString
-- | Annotates an @a@ with a 'Name'-provided name, which is ignored for '==' and 'compare'.
data Named a = Named Name a
deriving (Foldable, Functor, Show, Traversable)
-- | Generate a fresh (unused) name for use in synthesized variables/closures/etc.
gensym :: Has Fresh sig m => m Name
gensym = I <$> fresh
named :: Name -> a -> Named a
named = Named
-- | Construct a 'Name' from a 'Text'.
name :: Text -> Name
name = Name
named' :: Name -> Named Name
named' u = Named u u
isGenerated :: Name -> Bool
isGenerated (I _) = True
isGenerated _ = False
namedName :: Named a -> Name
namedName (Named n _) = n
-- | Construct a 'Name' from an 'Int'. This is suitable for automatic generation, e.g. using a Fresh effect, but should not be used for human-generated names.
nameI :: Int -> Name
nameI = I
namedValue :: Named a -> a
namedValue (Named _ a) = a
-- | Extract a human-readable 'Text' from a 'Name'.
-- Sample outputs can be found in @Data.Abstract.Name.Spec@.
formatName :: Name -> Text
formatName (Name name) = name
formatName (I i) = Text.pack $ '_' : (alphabet !! a) : replicate n 'ʹ'
where alphabet = ['a'..'z']
(n, a) = i `divMod` length alphabet
instance Eq a => Eq (Named a) where
(==) = (==) `on` namedValue
instance Show Name where
showsPrec _ = prettyShowString . Text.unpack . formatName
where prettyShowString str = showChar '"' . foldr ((.) . prettyChar) id str . showChar '"'
prettyChar c
| c `elem` ['\\', '\"'] = Char.showLitChar c
| Char.isPrint c = showChar c
| otherwise = Char.showLitChar c
instance Ord a => Ord (Named a) where
compare = compare `on` namedValue
instance Hashable Name where
hashWithSalt salt (Name name) = hashWithSalt salt name
hashWithSalt salt (I i) = salt `hashWithSalt` (1 :: Int) `hashWithSalt` i
instance ToJSON Name where
toJSON = toJSON . formatName
toEncoding = toEncoding . formatName
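Since the haddock above defers sample outputs to a spec module, here is a doctest-style sketch (not in the commit itself) derived from the formatName definition; the indices are illustrative.
-- >>> formatName (name "foo")
-- "foo"
-- >>> formatName (nameI 0)
-- "_a"
-- >>> formatName (nameI 25)
-- "_z"
-- >>> formatName (nameI 26)
-- "_aʹ"
-- >>> (isGenerated (nameI 0), isGenerated (name "foo"))
-- (True,False)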

View File

@ -26,8 +26,8 @@ import qualified Analysis.Carrier.Heap.Monovariant as A
import qualified Analysis.Effect.Domain as A
import Analysis.File
import Analysis.FlowInsensitive
import Analysis.Functor.Named
import qualified Analysis.Intro as Intro
import Analysis.Name
import Control.Algebra
import Control.Applicative (Alternative (..))
import Control.Carrier.Fail.WithLoc
@ -251,12 +251,12 @@ instance ( Alternative m
ret <- meta
unify t (Alg (arg :-> ret))
b <- concretize ret
k (Named (Name mempty) (lift b)) where
k (Named (name mempty) (lift b)) where
concretize = \case
Alg Unit -> pure Intro.unit
Alg Bool -> pure (Intro.bool True) <|> pure (Intro.bool False)
Alg String -> pure (Intro.string mempty)
Alg (_ :-> b) -> send . Intro.Lam . Named (Name mempty) . lift <$> concretize b
Alg (_ :-> b) -> send . Intro.Lam . Named (name mempty) . lift <$> concretize b
Alg (Record t) -> Intro.record <$> traverse (traverse concretize) (Map.toList t)
t -> fail $ "cant concretize " <> show t -- FIXME: concretize type variables by incrementally solving constraints
L (R (R (R (R (A.Record fields k))))) -> do

View File

@ -19,9 +19,9 @@ import Data.ByteString.Lazy.Char8 (putStrLn)
import Data.Aeson.Encode.Pretty (encodePretty)
data SemanticAST = SemanticAST
{ format :: Format
, noColor :: Bool
, source :: Either [FilePath] String
{ _format :: Format
, _noColor :: Bool
, _source :: Either [FilePath] String
}
-- Usage: semantic-ast --format ARG [--no-color] (--sourceString STRING | FILEPATHS…)

View File

@ -43,7 +43,7 @@ library
-- other-extensions:
build-depends: base ^>= 4.13
, tree-sitter ^>= 0.8
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, tree-sitter-python ^>= 0.8.1
, bytestring ^>= 0.10.8.2
, optparse-applicative >= 0.14.3 && < 0.16

View File

@ -1,27 +1,26 @@
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE UndecidableInstances #-}
module Marshal.JSON
( MarshalJSON(..)
) where
import Data.Aeson as Aeson
import Data.List.NonEmpty (NonEmpty)
import GHC.Generics
import Data.Text (Text)
import Data.Aeson as Aeson
import Data.List.NonEmpty (NonEmpty)
import Data.Text (Text)
import qualified Data.Text as Text
import GHC.Generics
-- TODO: range and span will require a new release of semantic-source
-- TODO: use toEncoding -- direct serialization to ByteString
-- Serialize unmarshaled ASTs into JSON representation by auto-deriving Aeson instances generically
@ -33,7 +32,7 @@ class MarshalJSON t where
fields acc = gfields acc . from1
-- Implement the sum case
instance {-# OVERLAPPING #-} (MarshalJSON f, MarshalJSON g) => MarshalJSON (f :+: g) where
fields acc (L1 f) = fields acc f
fields acc (R1 g) = fields acc g
@ -71,7 +70,7 @@ instance (MarshalJSON t) => GValue (Rec1 t) where
instance (GValue t) => GValue (Maybe :.: t) where
gvalue (Comp1 (Just t)) = gvalue t
gvalue (Comp1 Nothing) = Null
instance (GValue t) => GValue ([] :.: t) where
gvalue (Comp1 ts) = toJSON $ map gvalue ts
@ -85,4 +84,4 @@ class GFields f where
-- gvalue is a wrapper that calls to @toJSON@ (for leaf nodes such as @Text@) or recurses via @marshal@
class GValue f where
gvalue :: (ToJSON a) => f a -> Value

View File

@ -49,12 +49,13 @@ library
base >= 4.13 && < 5
, fused-effects ^>= 1.0
, fused-syntax
, hashable
, parsers ^>= 0.12.10
, pathtype ^>= 0.8.1
, prettyprinter >= 1.2.1 && < 2
, prettyprinter-ansi-terminal ^>= 1.1.1
, semantic-analysis ^>= 0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, text ^>= 1.2.3.1
, trifecta >= 2 && < 2.2
, unordered-containers ^>= 0.2.10
@ -69,7 +70,7 @@ test-suite test
base
, semantic-analysis
, semantic-core
, semantic-source ^>= 0.0.1
, semantic-source
, fused-effects
, fused-syntax
, hedgehog ^>= 1

View File

@ -1,25 +1,34 @@
{-# LANGUAGE DeriveGeneric, DeriveTraversable, GeneralizedNewtypeDeriving, LambdaCase, OverloadedLists #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}
module Core.Name
( module Analysis.Name
( module Analysis.Functor.Named
, reservedNames
, isSimpleCharacter
, needsQuotation
) where
import Analysis.Name
import Analysis.Functor.Named
import qualified Data.Char as Char
import Data.Hashable
import Data.HashSet (HashSet)
import qualified Data.HashSet as HashSet
import Data.Text as Text (any, unpack)
import Data.String
import Data.Text as Text (any)
reservedNames :: HashSet String
reservedNames :: (Eq s, IsString s, Hashable s) => HashSet s
reservedNames = [ "#true", "#false", "if", "then", "else"
, "#unit", "load", "rec", "#record"]
-- | Returns true if any character would require quotation or if the
-- name conflicts with a Core primitive.
needsQuotation :: Name -> Bool
needsQuotation (Name u) = HashSet.member (unpack u) reservedNames || Text.any (not . isSimpleCharacter) u
needsQuotation n
| isGenerated n = False
| otherwise = HashSet.member n reservedNames || Text.any (not . isSimpleCharacter) (formatName n)
-- | A simple character is, loosely defined, a character that is compatible
-- with identifiers in most ASCII-oriented programming languages. This is defined
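A doctest-style sketch (not in the commit itself) of the revised needsQuotation; the example names are illustrative.
-- >>> needsQuotation (name "if")
-- True
-- >>> needsQuotation (name "foo")
-- False
-- >>> needsQuotation (nameI 0)
-- False
-- "if" collides with reservedNames; generated names short-circuit to False
-- before any character or membership check.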

View File

@ -1,4 +1,6 @@
{-# LANGUAGE FlexibleContexts, GeneralizedNewtypeDeriving, TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeOperators #-}
module Core.Parser
( core
, lit
@ -14,7 +16,7 @@ import Control.Applicative
import Control.Monad
import Core.Core ((:<-) (..), Core)
import qualified Core.Core as Core
import Core.Name
import Core.Name hiding (name)
import qualified Data.Char as Char
import Data.Foldable (foldl')
import Data.Function

View File

@ -10,7 +10,7 @@ module Core.Pretty
import Analysis.File
import Core.Core
import Core.Name
import Core.Name hiding (name)
import Data.Foldable (toList)
import Data.Text.Prettyprint.Doc
import qualified Data.Text.Prettyprint.Doc.Render.String as Pretty
@ -43,7 +43,9 @@ primitive = keyword . mappend "#"
data Style = Unicode | Ascii
name :: Name -> AnsiDoc
name (Name n) = if needsQuotation (Name n) then enclose (symbol "#{") (symbol "}") (pretty n) else pretty n
name n
| needsQuotation n = enclose (symbol "#{") (symbol "}") (pretty (formatName n))
| otherwise = pretty (formatName n)
prettyCore :: Style -> Term Core Name -> AnsiDoc
prettyCore style = unPrec . go . fmap name

View File

@ -1,4 +1,5 @@
{-# LANGUAGE FlexibleContexts, ScopedTypeVariables #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Generators
( literal
@ -18,20 +19,21 @@ import qualified Hedgehog.Range as Range
import Control.Algebra
import qualified Core.Core as Core
import Core.Name
import Core.Name (Name, Named)
import qualified Core.Name as Name
-- The 'prune' call here ensures that we don't spend all our time just generating
-- fresh names for variables, since the length of variable names is not an
-- interesting property as they parse regardless.
name :: MonadGen m => m (Named Name)
name = Gen.prune (named' <$> names) where
names = Name <$> Gen.text (Range.linear 1 10) Gen.lower
name = Gen.prune (Name.named' <$> names) where
names = Name.name <$> Gen.text (Range.linear 1 10) Gen.lower
boolean :: (Has Core.Core sig t, MonadGen m) => m (t Name)
boolean = Core.bool <$> Gen.bool
variable :: (Applicative t, MonadGen m) => m (t Name)
variable = pure . namedValue <$> name
variable = pure . Name.namedValue <$> name
ifthenelse :: (Has Core.Core sig t, MonadGen m) => m (t Name) -> m (t Name)
ifthenelse bod = Gen.subterm3 boolean bod bod Core.if'
@ -51,7 +53,7 @@ lambda bod = do
Gen.subterm bod (Core.lam arg)
record :: (Has Core.Core sig t, MonadGen m) => m (t Name) -> m (t Name)
record bod = Core.record <$> Gen.list (Range.linear 0 5) ((,) . namedValue <$> name <*> bod)
record bod = Core.record <$> Gen.list (Range.linear 0 5) ((,) . Name.namedValue <$> name <*> bod)
atoms :: (Has Core.Core sig t, MonadGen m) => [m (t Name)]
atoms = [variable, pure Core.unit, boolean, Core.string <$> Gen.text (Range.linear 1 10) Gen.lower]
@ -69,7 +71,7 @@ expr = Gen.recursive Gen.choice atoms
, Gen.subterm3 expr expr expr Core.if'
, Gen.subterm expr Core.load
, record expr
, Gen.subtermM expr (\ x -> (x Core....) . namedValue <$> name)
, Gen.subtermM expr (\ x -> (x Core..?) . namedValue <$> name)
, Gen.subtermM expr (\ x -> (x Core....) . Name.namedValue <$> name)
, Gen.subtermM expr (\ x -> (x Core..?) . Name.namedValue <$> name)
, Gen.subterm2 expr expr (Core..=)
]

View File

@ -25,7 +25,7 @@ common haskell
, fused-syntax
, parsers ^>= 0.12.10
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, text ^>= 1.2.3
, tree-sitter ^>= 0.8

View File

@ -25,7 +25,7 @@ library
build-depends:
base >= 4.13 && < 5
, fused-effects ^>= 1.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, tree-sitter ^>= 0.8
, tree-sitter-java ^>= 0.6.1

View File

@ -24,8 +24,9 @@ common haskell
, fused-effects ^>= 1.0
, fused-syntax
, parsers ^>= 0.12.10
, semantic-analysis ^>= 0
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, semantic-scope-graph ^>= 0.0
, semilattices ^>= 0

View File

@ -197,7 +197,7 @@ instance Compile Py.Call where
compile it _ _ = pure . invariantViolated $ "can't compile Call node with generator expression: " <> show it
instance Compile Py.ClassDefinition where
compile it@Py.ClassDefinition { body = pybody, name = Py.Identifier _ann (Name -> n) } cc next = do
compile it@Py.ClassDefinition { body = pybody, name = Py.Identifier _ann (Name.name -> n) } cc next = do
let buildTypeCall _ = do
bindings <- asks @Bindings (toList . unBindings)
let buildName n = (n, pure n)
@ -205,7 +205,7 @@ instance Compile Py.ClassDefinition where
typefn = prelude ["type"]
object = prelude ["object"]
pure (typefn $$ Core.string (coerce n) $$ object $$ contents)
pure (typefn $$ Core.string (formatName n) $$ object $$ contents)
body <- compile pybody buildTypeCall next
let coreName = Name.named' n
@ -244,8 +244,8 @@ instance Compile Py.DottedName where
compile it@Py.DottedName
{ extraChildren = Py.Identifier { text } :| rest
} cc _next = do
let aggregate Py.Identifier { text = inner } x = x ... Name inner
composite = foldr aggregate (pure (Name text)) rest
let aggregate Py.Identifier { text = inner } x = x ... Name.name inner
composite = foldr aggregate (pure (Name.name text)) rest
locate it composite & cc
@ -287,21 +287,21 @@ instance Compile Py.FunctionDefinition where
let parameters' = catMaybes parameterMs
body' <- compile body pure next
-- Build a lambda.
let located = locate it (rec (Name.named' (Name name)) (lams parameters' body'))
let located = locate it (rec (Name.named' (Name.name name)) (lams parameters' body'))
-- Give it a name (below), then augment the current continuation
-- with the new name (with 'def'), so that calling contexts know
-- that we have built an exportable definition.
assigning located <$> local (def (Name name)) (cc next)
where param (Py.Parameter (Prj (Py.Identifier _pann pname))) = Just . named' . Name $ pname
assigning located <$> local (def (Name.name name)) (cc next)
where param (Py.Parameter (Prj (Py.Identifier _pann pname))) = Just . named' . Name.name $ pname
param _ = Nothing
assigning item f = (Name.named' (Name name) :<- item) >>>= f
assigning item f = (Name.named' (Name.name name) :<- item) >>>= f
instance Compile Py.FutureImportStatement
instance Compile Py.GeneratorExpression
instance Compile Py.GlobalStatement
instance Compile Py.Identifier where
compile Py.Identifier { text } cc _ = cc . pure . Name $ text
compile Py.Identifier { text } cc _ = cc . pure . Name.name $ text
instance Compile Py.IfStatement where
compile it@Py.IfStatement{ condition, consequence, alternative} cc next =
@ -323,7 +323,7 @@ instance Compile Py.Lambda where
, parameters
} cc next = do
let unparams (Py.LambdaParameters _ ps) = toList ps
unparam (Py.Parameter (Prj (Py.Identifier _pann pname))) = Just . named' . Name $ pname
unparam (Py.Parameter (Prj (Py.Identifier _pann pname))) = Just . named' . Name.name $ pname
unparam _ = Nothing
body' <- compile body cc next
let params = maybe [] unparams parameters

View File

@ -9,17 +9,16 @@ module Language.Python.Patterns
) where
import AST.Element
import Data.Coerce
import Data.Text (Text)
import qualified Analysis.Name
import qualified TreeSitter.Python.AST as Py
-- | Useful pattern synonym for extracting a single identifier from
-- a Python ExpressionList. Easier than pattern-matching every time.
-- TODO: when this is finished, we won't need this pattern, as we'll
-- handle ExpressionLists the smart way every time.
pattern SingleIdentifier :: Coercible t Text => t -> Py.ExpressionList a
pattern SingleIdentifier name <- Py.ExpressionList
pattern SingleIdentifier :: Analysis.Name.Name -> Py.ExpressionList a
pattern SingleIdentifier n <- Py.ExpressionList
{ Py.extraChildren =
[ Py.Expression (Prj (Py.PrimaryExpression (Prj Py.Identifier { text = coerce -> name })))
[ Py.Expression (Prj (Py.PrimaryExpression (Prj Py.Identifier { text = Analysis.Name.name -> n })))
]
}
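A minimal sketch (not in the commit itself) of consuming the retyped pattern; singleIdentifierOf is a hypothetical helper.
singleIdentifierOf :: Py.ExpressionList a -> Maybe Analysis.Name.Name
singleIdentifierOf (SingleIdentifier n) = Just n  -- n is already an Analysis.Name.Name
singleIdentifierOf _                    = Nothing -- anything other than a lone identifier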

View File

@ -21,11 +21,11 @@ module Language.Python.ScopeGraph
( scopeGraphModule
) where
import Analysis.Name
import Control.Algebra (Algebra (..), handleCoercible)
import Control.Effect.Sketch
import Data.Foldable
import Data.Monoid
import Data.Name
import GHC.Generics
import GHC.Records
import GHC.TypeLits
@ -87,7 +87,7 @@ scopeGraphModule = getAp . scopeGraph
instance ToScopeGraph Py.AssertStatement where scopeGraph = onChildren
instance ToScopeGraph Py.Assignment where
scopeGraph (Py.Assignment _ (SingleIdentifier t) _val _typ) = complete <* declare @Name t DeclProperties
scopeGraph (Py.Assignment _ (SingleIdentifier t) _val _typ) = complete <* declare @Name (formatName t) DeclProperties
scopeGraph x = todo x
instance ToScopeGraph Py.Await where

View File

@ -5,12 +5,12 @@
{-# LANGUAGE TypeOperators #-}
module Main (main) where
import Analysis.Name (Name)
import Control.Algebra
import Control.Carrier.Lift
import Control.Carrier.Sketch.Fresh
import Control.Monad
import qualified Data.ByteString as ByteString
import Data.Name (Name)
import qualified Data.ScopeGraph as ScopeGraph
import qualified Language.Python ()
import qualified Language.Python as Py (Term)

View File

@ -1,4 +1,10 @@
{-# LANGUAGE DerivingStrategies, GeneralizedNewtypeDeriving, StandaloneDeriving, FlexibleInstances, NamedFieldPuns, OverloadedStrings, QuantifiedConstraints, TypeOperators, UndecidableInstances #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuantifiedConstraints #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Instances () where
@ -8,14 +14,10 @@ module Instances () where
-- we should keep track of them in a dedicated file.
import Analysis.File
import Core.Name (Name (..))
import Data.Aeson
import Data.Text (pack)
import qualified System.Path as Path
deriving newtype instance ToJSON Name
deriving newtype instance ToJSONKey Name
instance ToJSON a => ToJSON (File a) where
toJSON File{filePath, fileSpan, fileBody} = object
[ "path" .= filePath

View File

@ -1,4 +1,7 @@
{-# LANGUAGE OverloadedStrings, ScopedTypeVariables, TypeApplications, TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wno-unrecognised-pragmas #-}
module Main (main) where
@ -69,7 +72,7 @@ assertEvaluatesTo core k val = do
(_, files) ->
HUnit.assertFailure ("Unexpected number of files: " <> show (length files))
let found = Map.lookup (Name k) env >>= flip IntMap.lookup heap
let found = Map.lookup (name k) env >>= flip IntMap.lookup heap
found HUnit.@?= Just val
{-# HLINT ignore assertEvaluatesTo #-}

View File

@ -25,7 +25,7 @@ common haskell
, fused-syntax
, parsers ^>= 0.12.10
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, text ^>= 1.2.3
, tree-sitter ^>= 0.8

View File

@ -25,7 +25,6 @@ library
ScopeGraph.Convert
Data.Hole
Data.Module
Data.Name
Data.ScopeGraph
build-depends:
base >= 4.13 && < 5
@ -33,12 +32,13 @@ library
, algebraic-graphs >= 0.3 && < 0.5
, containers
, fused-effects ^>= 1.0
, generic-monoid
, hashable
, lens
, semilattices
, generic-monoid
, pathtype
, semantic-source ^>= 0.0
, semantic-analysis
, semantic-source ^>= 0.0.2
, semilattices
, text ^>= 1.2.3.1
hs-source-dirs: src
default-language: Haskell2010

View File

@ -18,6 +18,8 @@ module Control.Carrier.Sketch.Fresh
, module Control.Effect.Sketch
) where
import Analysis.Name (Name)
import qualified Analysis.Name
import Control.Algebra
import Control.Carrier.Fresh.Strict
import Control.Carrier.State.Strict
@ -25,8 +27,6 @@ import Control.Effect.Sketch
import Control.Monad.IO.Class
import Data.Bifunctor
import Data.Module
import Data.Name (Name)
import qualified Data.Name
import Data.ScopeGraph (ScopeGraph)
import qualified Data.ScopeGraph as ScopeGraph
import Data.Semilattice.Lower
@ -45,9 +45,8 @@ data Sketchbook address = Sketchbook
instance Lower (Sketchbook Name) where
lowerBound =
let
initialGraph = ScopeGraph.insertScope n initialScope lowerBound
initialScope = lowerBound
n = Data.Name.nameI 0
initialGraph = ScopeGraph.insertScope n lowerBound lowerBound
n = Analysis.Name.nameI 0
in
Sketchbook initialGraph n
@ -59,7 +58,7 @@ instance (Effect sig, Algebra sig m) => Algebra (Sketch Name :+: sig) (SketchC N
Sketchbook old current <- SketchC (get @(Sketchbook Name))
let (new, _pos) =
ScopeGraph.declare
(ScopeGraph.Declaration (Data.Name.name n))
(ScopeGraph.Declaration (Analysis.Name.name n))
(lowerBound @ModuleInfo)
ScopeGraph.Default
ScopeGraph.Public
@ -74,11 +73,11 @@ instance (Effect sig, Algebra sig m) => Algebra (Sketch Name :+: sig) (SketchC N
Sketchbook old current <- SketchC (get @(Sketchbook Name))
let new =
ScopeGraph.reference
(ScopeGraph.Reference (Data.Name.name n))
(ScopeGraph.Reference (Analysis.Name.name n))
(lowerBound @ModuleInfo)
(lowerBound @Span)
ScopeGraph.Identifier
(ScopeGraph.Declaration (Data.Name.name decl))
(ScopeGraph.Declaration (Analysis.Name.name decl))
current
old
SketchC (put @(Sketchbook Name) (Sketchbook new current))

View File

@ -1,63 +0,0 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Name
( Name
-- * Constructors
, gensym
, name
, nameI
, formatName
, __self
) where
import Control.Effect.Fresh
import Data.Aeson
import qualified Data.Char as Char
import Data.Hashable
import Data.Text (Text)
import qualified Data.Text as Text
-- | The type of variable names.
data Name
= Name Text
| I Int
deriving (Eq, Ord)
-- | Generate a fresh (unused) name for use in synthesized variables/closures/etc.
gensym :: Has Fresh sig m => m Name
gensym = I <$> fresh
-- | Construct a 'Name' from a 'Text'.
name :: Text -> Name
name = Name
-- | Construct a 'Name' from an 'Int'. This is suitable for automatic generation, e.g. using a Fresh effect, but should not be used for human-generated names.
nameI :: Int -> Name
nameI = I
-- | Extract a human-readable 'Text' from a 'Name'.
-- Sample outputs can be found in @Data.Abstract.Name.Spec@.
formatName :: Name -> Text
formatName (Name name) = name
formatName (I i) = Text.pack $ '_' : (alphabet !! a) : replicate n 'ʹ'
where alphabet = ['a'..'z']
(n, a) = i `divMod` length alphabet
instance Show Name where
showsPrec _ = prettyShowString . Text.unpack . formatName
where prettyShowString str = showChar '"' . foldr ((.) . prettyChar) id str . showChar '"'
prettyChar c
| c `elem` ['\\', '\"'] = Char.showLitChar c
| Char.isPrint c = showChar c
| otherwise = Char.showLitChar c
instance Hashable Name where
hashWithSalt salt (Name name) = hashWithSalt salt name
hashWithSalt salt (I i) = salt `hashWithSalt` (1 :: Int) `hashWithSalt` i
instance ToJSON Name where
toJSON = toJSON . formatName
toEncoding = toEncoding . formatName
__self :: Name
__self = name "__semantic_self"

View File

@ -48,6 +48,7 @@ module Data.ScopeGraph
import Prelude hiding (lookup)
import Analysis.Name
import Control.Applicative
import Control.Lens.Lens
import Control.Monad
@ -61,7 +62,6 @@ import qualified Data.Map.Strict as Map
import Data.Maybe
import Data.Module
import Data.Monoid
import Data.Name
import Data.Semilattice.Lower
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq

View File

@ -13,9 +13,9 @@ module ScopeGraph.Convert
, complete
) where
import Analysis.Name (Name)
import Control.Effect.Sketch
import Data.List.NonEmpty
import Data.Name (Name)
import Data.Typeable
import Source.Loc

View File

@ -1,3 +1,8 @@
# 0.0.2.0
- Adds `Source.Language`.
- Adds `ToJSON` instances for `Range` and `Loc`.
# 0.0.1.0
- Adds an `NFData` instance for `Source`.

View File

@ -1,7 +1,7 @@
cabal-version: 2.4
name: semantic-source
version: 0.0.1.0
version: 0.0.2.0
synopsis: Types and functionality for working with source code
description: Types and functionality for working with source code (program text).
homepage: https://github.com/github/semantic/tree/master/semantic-source#readme
@ -42,6 +42,7 @@ common haskell
library
import: haskell
exposed-modules:
Source.Language
Source.Loc
Source.Range
Source.Source
@ -51,8 +52,11 @@ library
, base >= 4.12 && < 5
, bytestring ^>= 0.10.8.2
, deepseq ^>= 1.4.4.0
, containers ^>= 0.6.2
, generic-monoid ^>= 0.1.0.0
, hashable >= 1.2.7 && < 1.4
, lingo ^>= 0.3
, pathtype ^>= 0.8.1
, semilattices ^>= 0.0.0.3
, text ^>= 1.2.3.1
hs-source-dirs: src

View File

@ -0,0 +1,136 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Source.Language
( Language (..)
, SLanguage (..)
, extensionsForLanguage
, knownLanguage
, forPath
, textToLanguage
, languageToText
) where
import Data.Aeson
import Data.Hashable (Hashable)
import qualified Data.Languages as Lingo
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import GHC.Generics (Generic)
import qualified System.Path as Path
import qualified System.Path.PartClass as Path.PartClass
-- | The various languages we support.
data Language
= Unknown
| Go
| Haskell
| Java
| JavaScript
| JSON
| JSX
| Markdown
| Python
| Ruby
| TypeScript
| PHP
| TSX
deriving (Eq, Generic, Ord, Read, Show, Bounded, Hashable, ToJSON, Enum)
-- | Reifies a proxied type-level 'Language' to a value.
class SLanguage (lang :: Language) where
reflect :: proxy lang -> Language
instance SLanguage 'Unknown where
reflect _ = Unknown
instance SLanguage 'Go where
reflect _ = Go
instance SLanguage 'Haskell where
reflect _ = Haskell
instance SLanguage 'Java where
reflect _ = Java
instance SLanguage 'JavaScript where
reflect _ = JavaScript
instance SLanguage 'JSON where
reflect _ = JSON
instance SLanguage 'JSX where
reflect _ = JSX
instance SLanguage 'Markdown where
reflect _ = Markdown
instance SLanguage 'Python where
reflect _ = Python
instance SLanguage 'Ruby where
reflect _ = Ruby
instance SLanguage 'TypeScript where
reflect _ = TypeScript
instance SLanguage 'PHP where
reflect _ = PHP
instance FromJSON Language where
parseJSON = withText "Language" $ \l ->
pure $ textToLanguage l
-- | Predicate failing on 'Unknown' and passing in all other cases.
knownLanguage :: Language -> Bool
knownLanguage = (/= Unknown)
extensionsForLanguage :: Language -> [String]
extensionsForLanguage language = T.unpack <$> maybe mempty Lingo.languageExtensions (Map.lookup (languageToText language) Lingo.languages)
forPath :: Path.PartClass.AbsRel ar => Path.File ar -> Language
forPath path =
let spurious lang = lang `elem` [ "Hack" -- .php files
, "GCC Machine Description" -- .md files
, "XML" -- .tsx files
]
allResults = Lingo.languageName <$> Lingo.languagesForPath (Path.toString path)
in case filter (not . spurious) allResults of
[result] -> textToLanguage result
_ -> Unknown
languageToText :: Language -> T.Text
languageToText = \case
Unknown -> "Unknown"
Go -> "Go"
Haskell -> "Haskell"
Java -> "Java"
JavaScript -> "JavaScript"
JSON -> "JSON"
JSX -> "JSX"
Markdown -> "Markdown"
Python -> "Python"
Ruby -> "Ruby"
TypeScript -> "TypeScript"
TSX -> "TSX"
PHP -> "PHP"
textToLanguage :: T.Text -> Language
textToLanguage = \case
"Go" -> Go
"Haskell" -> Haskell
"Java" -> Java
"JavaScript" -> JavaScript
"JSON" -> JSON
"JSX" -> JSX
"Markdown" -> Markdown
"Python" -> Python
"Ruby" -> Ruby
"TypeScript" -> TypeScript
"TSX" -> TSX
"PHP" -> PHP
_ -> Unknown
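A doctest-style sketch (not in the commit itself) of forPath under the spurious-language filter; the paths are hypothetical, and the expectations assume Lingo also reports the listed spurious languages for those extensions.
-- >>> forPath (Path.relFile "README.md")
-- Markdown
-- >>> forPath (Path.relFile "component.tsx")
-- TSX
-- >>> forPath (Path.relFile "data.unknown-ext")
-- Unknown
-- The first two resolve because the spurious matches ("GCC Machine Description",
-- "XML") are filtered out, leaving a single unambiguous result.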

View File

@ -1,4 +1,4 @@
{-# LANGUAGE DeriveGeneric, DerivingVia, RankNTypes #-}
{-# LANGUAGE DeriveGeneric, DerivingVia, RankNTypes, NamedFieldPuns, OverloadedStrings #-}
module Source.Loc
( Loc(..)
, byteRange_
@ -7,6 +7,7 @@ module Source.Loc
) where
import Control.DeepSeq (NFData)
import Data.Aeson (ToJSON(..), object, (.=))
import Data.Hashable (Hashable)
import Data.Monoid.Generic
import GHC.Generics (Generic)
@ -28,6 +29,9 @@ instance HasSpan Loc where
span_ = lens span (\l s -> l { span = s })
{-# INLINE span_ #-}
instance ToJSON Loc where
toJSON Loc{byteRange, span} = object ["sourceRange" .= byteRange
, "sourceSpan" .= span]
byteRange_ :: Lens' Loc Range
byteRange_ = lens byteRange (\l r -> l { byteRange = r })
@ -38,3 +42,4 @@ type Lens' s a = forall f . Functor f => (a -> f a) -> (s -> f s)
lens :: (s -> a) -> (s -> a -> s) -> Lens' s a
lens get put afa s = fmap (put s) (afa (get s))
{-# INLINE lens #-}

View File

@ -1,4 +1,4 @@
{-# LANGUAGE DeriveAnyClass, DeriveGeneric, RankNTypes #-}
{-# LANGUAGE DeriveAnyClass, DeriveGeneric, RankNTypes, NamedFieldPuns #-}
module Source.Range
( Range(..)
, point
@ -10,7 +10,7 @@ module Source.Range
) where
import Control.DeepSeq (NFData)
import Data.Aeson (ToJSON)
import Data.Aeson (ToJSON(..))
import Data.Hashable (Hashable)
import Data.Semilattice.Lower (Lower(..))
import GHC.Generics (Generic)
@ -20,11 +20,13 @@ data Range = Range
{ start :: {-# UNPACK #-} !Int
, end :: {-# UNPACK #-} !Int
}
deriving (Eq, Generic, Ord, Show, ToJSON)
deriving (Eq, Generic, Ord, Show)
instance Hashable Range
instance NFData Range
-- $
-- prop> a <> (b <> c) === (a <> b) <> (c :: Range)
instance Semigroup Range where
@ -33,6 +35,8 @@ instance Semigroup Range where
instance Lower Range where
lowerBound = Range 0 0
instance ToJSON Range where
toJSON Range { start, end } = toJSON [ start, end ]
-- | Construct a 'Range' with a given value for both its start and end indices.
point :: Int -> Range
@ -61,3 +65,4 @@ lens get put afa s = fmap (put s) (afa (get s))
-- $setup
-- >>> import Test.QuickCheck
-- >>> instance Arbitrary Range where arbitrary = Range <$> arbitrary <*> arbitrary ; shrink (Range s e) = Range <$> shrink s <*> shrink e
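A doctest-style sketch (not in the commit itself) of the new encodings; the values are illustrative, and the span portion is whatever Source.Span's existing ToJSON instance produces.
-- >>> Data.Aeson.encode (Range 1 5)
-- "[1,5]"
-- Loc now encodes as an object pairing that array with the span, of the form
-- {"sourceRange":[1,5],"sourceSpan":...}.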

View File

@ -26,7 +26,7 @@ library
build-depends:
base >= 4.13 && < 5
, fused-effects ^>= 1.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, text ^>= 1.2.3.1
hs-source-dirs: src
default-language: Haskell2010

View File

@ -25,11 +25,11 @@ common haskell
, fused-syntax
, parsers ^>= 0.12.10
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, text ^>= 1.2.3
, tree-sitter ^>= 0.8
, tree-sitter-tsx ^>= 0.4.1
, tree-sitter-tsx ^>= 0.4.2
ghc-options:
-Weverything

View File

@ -100,6 +100,19 @@ instance ToTags Tsx.Class where
} = yieldTag text Class loc byteRange >> gtags t
tags t = gtags t
instance ToTags Tsx.Module where
tags t@Tsx.Module
{ ann = loc@Loc { byteRange }
, name
} = match name
where
match expr = case expr of
Prj Tsx.Identifier { text } -> yield text
-- TODO: Handle NestedIdentifiers and Strings
-- Prj Tsx.NestedIdentifier { extraChildren } -> match
_ -> gtags t
yield text = yieldTag text Module loc byteRange >> gtags t
instance (ToTags l, ToTags r) => ToTags (l :+: r) where
tags (L1 l) = tags l
tags (R1 r) = tags r
@ -222,7 +235,7 @@ instance ToTags Tsx.MemberExpression
instance ToTags Tsx.MetaProperty
-- instance ToTags Tsx.MethodDefinition
instance ToTags Tsx.MethodSignature
instance ToTags Tsx.Module
-- instance ToTags Tsx.Module
instance ToTags Tsx.NamedImports
instance ToTags Tsx.NamespaceImport
instance ToTags Tsx.NestedIdentifier

View File

@ -25,11 +25,11 @@ common haskell
, fused-syntax
, parsers ^>= 0.12.10
, semantic-core ^>= 0.0
, semantic-source ^>= 0.0.1
, semantic-source ^>= 0.0.2
, semantic-tags ^>= 0.0
, text ^>= 1.2.3
, tree-sitter ^>= 0.8
, tree-sitter-typescript ^>= 0.4.1
, tree-sitter-typescript ^>= 0.4.2
ghc-options:
-Weverything

View File

@ -93,6 +93,19 @@ instance ToTags Ts.CallExpression where
_ -> gtags t
yield name = yieldTag name Call loc byteRange >> gtags t
instance ToTags Ts.Module where
tags t@Ts.Module
{ ann = loc@Loc { byteRange }
, name
} = match name
where
match expr = case expr of
Prj Ts.Identifier { text } -> yield text
-- TODO: Handle NestedIdentifiers and Strings
-- Prj Tsx.NestedIdentifier { extraChildren } -> match
_ -> gtags t
yield text = yieldTag text Module loc byteRange >> gtags t
instance (ToTags l, ToTags r) => ToTags (l :+: r) where
tags (L1 l) = tags l
tags (R1 r) = tags r
@ -215,7 +228,7 @@ instance ToTags Ts.MemberExpression
instance ToTags Ts.MetaProperty
-- instance ToTags Ts.MethodDefinition
instance ToTags Ts.MethodSignature
instance ToTags Ts.Module
-- instance ToTags Ts.Module
instance ToTags Ts.NamedImports
instance ToTags Ts.NamespaceImport
instance ToTags Ts.NestedIdentifier

View File

@ -67,7 +67,8 @@ common dependencies
, recursion-schemes ^>= 5.1
, scientific ^>= 0.3.6.2
, safe-exceptions ^>= 0.1.7.0
, semantic-source ^>= 0.0.1
, semantic-analysis ^>= 0
, semantic-source ^>= 0.0.2
, semilattices ^>= 0.0.0.3
, streaming ^>= 0.2.2.0
, text ^>= 1.2.3.1
@ -131,7 +132,6 @@ library
, Data.Abstract.Live
, Data.Abstract.Module
, Data.Abstract.ModuleTable
, Data.Abstract.Name
, Data.Abstract.Number
, Data.Abstract.Package
, Data.Abstract.Path
@ -308,8 +308,8 @@ library
, tree-sitter-php ^>= 0.2
, tree-sitter-python ^>= 0.8.1
, tree-sitter-ruby ^>= 0.4.1
, tree-sitter-typescript ^>= 0.4.1
, tree-sitter-tsx ^>= 0.4.1
, tree-sitter-typescript ^>= 0.4.2
, tree-sitter-tsx ^>= 0.4.2
executable semantic
import: haskell, dependencies, executable-flags

View File

@ -53,6 +53,7 @@ module Control.Abstract.Heap
, scopeLookup
) where
import Analysis.Name
import Control.Abstract.Context (withCurrentCallStack)
import Control.Abstract.Evaluator
import Control.Abstract.Roots
@ -67,7 +68,6 @@ import Data.Abstract.Heap (Heap, Position (..))
import qualified Data.Abstract.Heap as Heap
import Data.Abstract.Live
import Data.Abstract.Module (ModuleInfo)
import Data.Abstract.Name
import Data.Abstract.ScopeGraph (Kind (..), Path (..), putDeclarationScopeAtPosition)
import Data.Functor.Classes
import Data.Map.Strict (Map)

View File

@ -7,6 +7,7 @@ module Control.Abstract.Primitive
, defineBuiltIn
) where
import Analysis.Name
import Control.Abstract.Context
import Control.Abstract.Evaluator
import Control.Abstract.Heap
@ -14,7 +15,6 @@ import Control.Abstract.ScopeGraph
import Control.Abstract.Value
import Control.Monad
import Data.Abstract.BaseError
import Data.Abstract.Name
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Map.Strict as Map
import Data.Maybe

View File

@ -6,11 +6,11 @@
module Control.Abstract.PythonPackage
( runPythonPackaging, Strategy(..) ) where
import Analysis.Name (name)
import Control.Abstract as Abstract
import Control.Algebra
import Control.Effect.Sum.Project
import Control.Monad
import Data.Abstract.Name (name)
import Data.Abstract.Path (stripQuotes)
import Data.Abstract.Value.Concrete (Value (..))
import qualified Data.Map as Map

View File

@ -55,13 +55,13 @@ module Control.Abstract.ScopeGraph
, ScopeGraph.Path
) where
import Analysis.Name hiding (name)
import Control.Abstract.Evaluator hiding (Local)
import Control.Algebra
import qualified Control.Carrier.Resumable.Either as Either
import qualified Control.Carrier.Resumable.Resume as With
import Data.Abstract.BaseError
import Data.Abstract.Module
import Data.Abstract.Name hiding (name)
import Data.Abstract.ScopeGraph
( AccessControl (..)
, Declaration (..)

View File

@ -80,6 +80,7 @@ module Control.Abstract.Value
, HashC(..)
) where
import Analysis.Name
import Control.Abstract.Evaluator
import Control.Abstract.Heap
import Control.Abstract.ScopeGraph (CurrentScope, Declaration, ScopeGraph)
@ -87,7 +88,6 @@ import Control.Algebra
import Control.Carrier.Reader
import Data.Abstract.BaseError
import Data.Abstract.Module
import Data.Abstract.Name
import Data.Abstract.Number (Number, SomeNumber)
import Data.Bits
import Data.Scientific (Scientific)

View File

@ -5,9 +5,9 @@ module Data.Abstract.Address.Monovariant
import Prologue
import Analysis.Name
import Control.Abstract
import Control.Algebra
import Data.Abstract.Name
import qualified Data.Set as Set
-- | 'Monovariant' models using one address for a particular name. It tracks the set of values that a particular address takes, using its name to look up in the store and allocating only if new.

View File

@ -4,7 +4,7 @@ module Data.Abstract.Declarations
, Declarations1 (..)
) where
import Data.Abstract.Name
import Analysis.Name
import Data.Sum
import Data.Term

View File

@ -24,6 +24,7 @@ module Data.Abstract.Evaluatable
, runUnspecialized
, runUnspecializedWith
, throwUnspecializedError
, __self
) where
import Control.Algebra
@ -40,6 +41,7 @@ import Data.Text
import GHC.Stack
import Source.Span (HasSpan (..))
import Analysis.Name as X
import Control.Abstract hiding (Load, String)
import qualified Control.Abstract as Abstract
import Control.Abstract.Context as X
@ -73,7 +75,6 @@ import Data.Abstract.BaseError as X
import Data.Abstract.Declarations as X
import Data.Abstract.FreeVariables as X
import Data.Abstract.Module
import Data.Abstract.Name as X
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Language
import Data.Semigroup.App
@ -152,6 +153,8 @@ class (Show1 constr, Foldable constr) => Evaluatable constr where
traceResolve :: (Show a, Show b, Has Trace sig m) => a -> b -> Evaluator term address value m ()
traceResolve name path = trace ("resolved " <> show name <> " -> " <> show path)
__self :: Name
__self = name "__semantic_self"
-- Preludes
@ -225,7 +228,7 @@ defineSelf :: ( Has (State (ScopeGraph address)) sig m
)
=> Evaluator term address value m ()
defineSelf = do
let self = Declaration X.__self
let self = Declaration __self
declare self ScopeGraph.Prelude Public lowerBound ScopeGraph.Unknown Nothing
slot <- lookupSlot self
assign slot =<< object =<< currentFrame

View File

@ -8,7 +8,7 @@ module Data.Abstract.FreeVariables
, FreeVariables1 (..)
) where
import Data.Abstract.Name
import Analysis.Name
import Data.Set (Set)
import Data.Sum
import Data.Term

View File

@ -1,5 +0,0 @@
module Data.Abstract.Name
( module X
) where
import Data.Name as X

View File

@ -6,10 +6,10 @@ module Data.Abstract.Package
, Data.Abstract.Package.fromModules
) where
import Analysis.Name
import Data.Abstract.Module
import Data.Abstract.ModuleTable as ModuleTable
import qualified Data.Map as Map
import Data.Abstract.Name
type PackageName = Name

View File

@ -17,15 +17,15 @@ import qualified Data.Map.Strict as Map
import Data.Scientific.Exts
import Data.Text (pack)
import Analysis.Name
import Control.Abstract hiding
(Array (..), Boolean (..), Function (..), Hash (..), Numeric (..), Object (..), String (..), Unit (..), While (..))
import qualified Control.Abstract as Abstract
import Control.Algebra
import Control.Effect.Interpose
import Data.Abstract.BaseError
import Data.Abstract.Evaluatable (Declarations, EvalError (..), UnspecializedError (..))
import Data.Abstract.Evaluatable (Declarations, EvalError (..), UnspecializedError (..), __self)
import Data.Abstract.FreeVariables
import Data.Abstract.Name
import qualified Data.Abstract.Number as Number

View File

@ -6,18 +6,14 @@
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
module Data.Blob
( File(..)
, fileForPath
, fileForTypedPath
, Blob(..)
( Blob(..)
, Blobs(..)
, blobLanguage
, NoLanguageForBlob (..)
, blobPath
, makeBlob
, decodeBlobs
, nullBlob
, sourceBlob
, fromSource
, moduleForBlob
, noLanguageForBlob
, BlobPair
@ -29,6 +25,8 @@ module Data.Blob
, pathKeyForBlobPair
) where
import Analysis.File (File (..))
import Control.Effect.Error
import Control.Exception
import Data.Aeson
@ -36,68 +34,48 @@ import Data.Bifunctor
import qualified Data.ByteString.Lazy as BL
import Data.Edit
import Data.JSON.Fields
import Data.Language
import Data.Maybe
import Data.Maybe.Exts
import Data.Module
import Data.Text (Text)
import GHC.Generics (Generic)
import Source.Source (Source)
import Source.Language as Language
import Source.Source (Source, totalSpan)
import qualified Source.Source as Source
import qualified System.FilePath as FP
import qualified System.Path as Path
import qualified System.Path.PartClass as Path.PartClass
-- | A 'FilePath' paired with its corresponding 'Language'.
-- Unpacked to have the same size overhead as (FilePath, Language).
data File = File
{ filePath :: FilePath
, fileLanguage :: Language
} deriving (Show, Eq)
-- | Prefer 'fileForTypedPath' if at all possible.
fileForPath :: FilePath -> File
fileForPath p = File p (languageForFilePath p)
fileForTypedPath :: Path.PartClass.AbsRel ar => Path.File ar -> File
fileForTypedPath = fileForPath . Path.toString
-- | The source, path information, and language of a file read from disk.
data Blob = Blob
{ blobSource :: Source -- ^ The UTF-8 encoded source text of the blob.
, blobFile :: File -- ^ Path/language information for this blob.
, blobOid :: Text -- ^ Git OID for this blob, mempty if blob is not from a git db.
{ blobSource :: Source -- ^ The UTF-8 encoded source text of the blob.
, blobFile :: File Language -- ^ Path/language information for this blob.
} deriving (Show, Eq)
blobLanguage :: Blob -> Language
blobLanguage = fileLanguage . blobFile
blobLanguage = Analysis.File.fileBody . blobFile
blobPath :: Blob -> FilePath
blobPath = filePath . blobFile
makeBlob :: Source -> FilePath -> Language -> Text -> Blob
makeBlob s p l = Blob s (File p l)
{-# INLINE makeBlob #-}
blobPath = Path.toString . Analysis.File.filePath . blobFile
newtype Blobs a = Blobs { blobs :: [a] }
deriving (Generic, FromJSON)
instance FromJSON Blob where
parseJSON = withObject "Blob" $ \b -> inferringLanguage
<$> b .: "content"
<*> b .: "path"
<*> b .: "language"
parseJSON = withObject "Blob" $ \b -> do
src <- b .: "content"
Right pth <- fmap Path.parse (b .: "path")
lang <- b .: "language"
let lang' = if knownLanguage lang then lang else Language.forPath pth
pure (fromSource (pth :: Path.AbsRelFile) lang' src)
nullBlob :: Blob -> Bool
nullBlob Blob{..} = Source.null blobSource
sourceBlob :: FilePath -> Language -> Source -> Blob
sourceBlob filepath language source = makeBlob source filepath language mempty
inferringLanguage :: Source -> FilePath -> Language -> Blob
inferringLanguage src pth lang
| knownLanguage lang = makeBlob src pth lang mempty
| otherwise = makeBlob src pth (languageForFilePath pth) mempty
-- | Create a Blob from a provided path, language, and UTF-8 source.
-- The resulting Blob's span is taken from the 'totalSpan' of the source.
fromSource :: Path.PartClass.AbsRel ar => Path.File ar -> Language -> Source -> Blob
fromSource filepath language source
= Blob source (Analysis.File.File (Path.toAbsRel filepath) (totalSpan source) language)
decodeBlobs :: BL.ByteString -> Either String [Blob]
decodeBlobs = fmap blobs <$> eitherDecode
@ -116,7 +94,7 @@ moduleForBlob :: Maybe FilePath -- ^ The root directory relative to which the mo
-> Module term -- ^ A 'Module' named appropriate for the 'Blob', holding the @term@, and constructed relative to the root 'FilePath', if any.
moduleForBlob rootDir b = Module info
where root = fromMaybe (FP.takeDirectory (blobPath b)) rootDir
info = ModuleInfo (FP.makeRelative root (blobPath b)) (languageToText (blobLanguage b)) (blobOid b)
info = ModuleInfo (FP.makeRelative root (blobPath b)) (languageToText (blobLanguage b)) mempty
-- | Represents a blobs suitable for diffing which can be either a blob to
-- delete, a blob to insert, or a pair of blobs to diff.

View File

@ -1,14 +1,17 @@
{-# LANGUAGE ViewPatterns #-}
-- | These are primitive file IO methods for use in ghci and as internal functions.
-- Instead of using these, consider if you can use the Files DSL instead.
module Data.Blob.IO
( readBlobFromFile
, readBlobFromFile'
, readBlobFromPath
, readBlobsFromDir
, readFilePair
) where
import Prologue
import Analysis.File as File
import qualified Control.Concurrent.Async as Async
import Data.Blob
import qualified Data.ByteString as B
@ -18,24 +21,29 @@ import qualified Source.Source as Source
import qualified System.Path as Path
-- | Read a utf8-encoded file to a 'Blob'.
readBlobFromFile :: MonadIO m => File -> m (Maybe Blob)
readBlobFromFile (File "/dev/null" _) = pure Nothing
readBlobFromFile (File path language) = do
raw <- liftIO $ B.readFile path
pure . Just . sourceBlob path language . Source.fromUTF8 $ raw
readBlobFromFile :: MonadIO m => File Language -> m (Maybe Blob)
readBlobFromFile (File (Path.toString -> "/dev/null") _ _) = pure Nothing
readBlobFromFile file@(File path _ _language) = do
raw <- liftIO $ B.readFile (Path.toString path)
let newblob = Blob (Source.fromUTF8 raw) file
pure . Just $ newblob
-- | Read a utf8-encoded file to a 'Blob', raising an IOError if it can't be found.
readBlobFromFile' :: (MonadFail m, MonadIO m) => File -> m Blob
-- | Read a utf8-encoded file to a 'Blob', failing if it can't be found.
readBlobFromFile' :: (MonadFail m, MonadIO m) => File Language -> m Blob
readBlobFromFile' file = do
maybeFile <- readBlobFromFile file
maybeM (fail ("cannot read '" <> show file <> "', file not found or language not supported.")) maybeFile
-- | Read a blob from the provided absolute or relative path, failing if it can't be found.
readBlobFromPath :: (MonadFail m, MonadIO m) => Path.AbsRelFile -> m Blob
readBlobFromPath = readBlobFromFile' . File.fromPath
-- | Read all blobs in the directory with Language.supportedExts.
readBlobsFromDir :: MonadIO m => Path.AbsRelDir -> m [Blob]
readBlobsFromDir path = liftIO . fmap catMaybes $
findFilesInDir path supportedExts mempty >>= Async.mapConcurrently (readBlobFromFile . fileForTypedPath)
findFilesInDir path supportedExts mempty >>= Async.mapConcurrently (readBlobFromFile . File.fromPath)
readFilePair :: MonadIO m => File -> File -> m BlobPair
readFilePair :: MonadIO m => File Language -> File Language -> m BlobPair
readFilePair a b = do
before <- readBlobFromFile a
after <- readBlobFromFile b
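A ghci-style sketch (not in the commit itself) of the new path-based reader; the path is hypothetical.
-- >>> blob <- readBlobFromPath (Path.absRel "/tmp/example.py")
-- Equivalent to readBlobFromFile' . File.fromPath: the language is inferred from the
-- extension, and a missing file fails via MonadFail instead of returning Nothing.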

View File

@ -24,9 +24,9 @@ module Data.Graph.ControlFlowVertex
, VertexDeclaration1 (..)
) where
import Analysis.Name
import Data.Abstract.Declarations
import Data.Abstract.Module (ModuleInfo (..))
import Data.Abstract.Name
import Data.Abstract.Package (PackageInfo (..))
import Data.Aeson
import Data.Graph (VertexTag (..))

View File

@ -2,7 +2,7 @@
{-# LANGUAGE DeriveGeneric #-}
module Data.ImportPath (IsRelative(..), ImportPath(..), importPath, toName, defaultAlias) where
import Data.Abstract.Name
import Analysis.Name
import Data.Abstract.Path (stripQuotes)
import Data.Aeson
import Data.Hashable

View File

@ -1,113 +1,19 @@
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.Language
( Language (..)
, SLanguage (..)
, extensionsForLanguage
, knownLanguage
, languageForFilePath
, pathIsMinified
, supportedExts
, codeNavLanguages
, textToLanguage
, languageToText
( module Source.Language
, LanguageMode(..)
, PerLanguageModes(..)
, defaultLanguageModes
, LanguageMode(..)
, codeNavLanguages
, supportedExts
) where
import Data.Aeson
import Data.Hashable
import qualified Data.Languages as Lingo
import qualified Data.Map.Strict as Map
import qualified Data.Text as T
import GHC.Generics (Generic)
import System.FilePath.Posix
import Source.Language
-- | The various languages we support.
-- Please do not reorder any of the field names: the current implementation of 'Primitive'
-- delegates to the auto-generated 'Enum' instance.
data Language
= Unknown
| Go
| Haskell
| Java
| JavaScript
| JSON
| JSX
| Markdown
| Python
| Ruby
| TypeScript
| PHP
| TSX
deriving (Eq, Generic, Ord, Read, Show, Bounded, Hashable, ToJSON, Enum)
class SLanguage (lang :: Language) where
reflect :: proxy lang -> Language
instance SLanguage 'Unknown where
reflect _ = Unknown
instance SLanguage 'Go where
reflect _ = Go
instance SLanguage 'Haskell where
reflect _ = Haskell
instance SLanguage 'Java where
reflect _ = Java
instance SLanguage 'JavaScript where
reflect _ = JavaScript
instance SLanguage 'JSON where
reflect _ = JSON
instance SLanguage 'JSX where
reflect _ = JSX
instance SLanguage 'Markdown where
reflect _ = Markdown
instance SLanguage 'Python where
reflect _ = Python
instance SLanguage 'Ruby where
reflect _ = Ruby
instance SLanguage 'TypeScript where
reflect _ = TypeScript
instance SLanguage 'PHP where
reflect _ = PHP
instance FromJSON Language where
parseJSON = withText "Language" $ \l ->
pure $ textToLanguage l
-- | Predicate failing on 'Unknown' and passing in all other cases.
knownLanguage :: Language -> Bool
knownLanguage = (/= Unknown)
extensionsForLanguage :: Language -> [String]
extensionsForLanguage language = T.unpack <$> maybe mempty Lingo.languageExtensions (Map.lookup (languageToText language) Lingo.languages)
-- | Return a language based on a FilePath's extension.
languageForFilePath :: FilePath -> Language
languageForFilePath path =
let spurious lang = lang `elem` [ "Hack" -- .php files
, "GCC Machine Description" -- .md files
, "XML" -- .tsx files
]
allResults = Lingo.languageName <$> Lingo.languagesForPath path
in case filter (not . spurious) allResults of
[result] -> textToLanguage result
_ -> Unknown
codeNavLanguages :: [Language]
codeNavLanguages = [Go, Java, Ruby, Python, JavaScript, TypeScript, PHP]
supportedExts :: [String]
supportedExts = foldr append mempty supportedLanguages
@ -117,44 +23,6 @@ supportedExts = foldr append mempty supportedLanguages
supportedLanguages = fmap lookup (languageToText <$> codeNavLanguages)
lookup k = Map.lookup k Lingo.languages
codeNavLanguages :: [Language]
codeNavLanguages = [Go, Java, Ruby, Python, JavaScript, TypeScript, PHP]
pathIsMinified :: FilePath -> Bool
pathIsMinified = isExtensionOf ".min.js"
languageToText :: Language -> T.Text
languageToText = \case
Unknown -> "Unknown"
Go -> "Go"
Haskell -> "Haskell"
Java -> "Java"
JavaScript -> "JavaScript"
JSON -> "JSON"
JSX -> "JSX"
Markdown -> "Markdown"
Python -> "Python"
Ruby -> "Ruby"
TypeScript -> "TypeScript"
TSX -> "TSX"
PHP -> "PHP"
textToLanguage :: T.Text -> Language
textToLanguage = \case
"Go" -> Go
"Haskell" -> Haskell
"Java" -> Java
"JavaScript" -> JavaScript
"JSON" -> JSON
"JSX" -> JSX
"Markdown" -> Markdown
"Python" -> Python
"Ruby" -> Ruby
"TypeScript" -> TypeScript
"TSX" -> TSX
"PHP" -> PHP
_ -> Unknown
data PerLanguageModes = PerLanguageModes
{ pythonMode :: LanguageMode
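After this rewrite Data.Language is essentially a re-export of Source.Language plus the per-language mode types, codeNavLanguages, and supportedExts; the Language enum, the SLanguage reflection class, and the text conversions now come from Source.Language, and the FilePath-based languageForFilePath gives way to the typed Language.forPath used elsewhere in this diff. A small sketch of the surviving surface, assuming the re-export covers the Language constructors as the specs below rely on (the printed values depend on the lingo data, so treat them as illustrative):

import Data.Language (Language (..), codeNavLanguages, supportedExts)

main :: IO ()
main = do
  print (Ruby `elem` codeNavLanguages)   -- True: Ruby is a code-nav language
  print (length supportedExts)           -- extensions drawn from codeNavLanguages via lingo
  mapM_ putStrLn (take 5 supportedExts)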

View File

@ -8,6 +8,7 @@ module Data.Project
import Prelude hiding (readFile)
import Analysis.File
import Control.Monad.IO.Class
import Data.Blob
import Data.Blob.IO
@ -33,7 +34,7 @@ projectName = T.pack . dropExtensions . takeFileName . projectRootDir
projectExtensions :: Project -> [String]
projectExtensions = extensionsForLanguage . projectLanguage
projectFiles :: Project -> [File]
projectFiles :: Project -> [File Language]
projectFiles = fmap blobFile . projectBlobs
readProjectFromPaths :: MonadIO m
@ -57,5 +58,5 @@ readProjectFromPaths maybeRoot path lang excludeDirs = do
blobs <- liftIO $ traverse (readBlobFromFile' . toFile) paths
pure $ Project (Path.toString rootDir) blobs lang (fmap Path.toString excludeDirs)
where
toFile path = File (Path.toString path) lang
toFile path = File path lowerBound lang
exts = extensionsForLanguage lang

View File

@ -4,9 +4,9 @@ module Data.Syntax.Expression (module Data.Syntax.Expression) where
import Prelude hiding (null)
import Prologue hiding (index, null)
import Analysis.Name as Name
import Control.Abstract hiding (Bitwise (..), Call)
import Data.Abstract.Evaluatable as Abstract
import Data.Abstract.Name as Name
import Data.Abstract.Number (liftIntegralFrac, liftReal, liftedExponent, liftedFloorDiv)
import Data.Fixed
import Data.JSON.Fields

View File

@ -10,10 +10,10 @@ module Language.Go.Assignment
, Go.Term(..)
) where
import Analysis.Name (name)
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import Control.Monad
import Data.Abstract.Name (name)
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.ImportPath (defaultAlias, importPath)
import Data.List.NonEmpty (NonEmpty (..), some1)

View File

@ -10,9 +10,9 @@ module Language.PHP.Assignment
, PHP.Term(..)
) where
import qualified Analysis.Name as Name
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import qualified Data.Abstract.Name as Name
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.Foldable
import Data.List.NonEmpty (NonEmpty (..), some1)

View File

@ -10,10 +10,10 @@ module Language.Python.Assignment
, Python.Term(..)
) where
import Analysis.Name (name)
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import Control.Monad
import Data.Abstract.Name (name)
import Data.Functor
import Data.List.NonEmpty (some1)
import qualified Data.List.NonEmpty as NonEmpty

View File

@ -11,10 +11,10 @@ module Language.Ruby.Assignment
, Ruby.Term(..)
) where
import Analysis.Name (name)
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import Control.Monad hiding (unless)
import Data.Abstract.Name (name)
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.List.NonEmpty (some1)
import qualified Data.List.NonEmpty as NonEmpty

View File

@ -9,12 +9,12 @@
{-# LANGUAGE TypeApplications #-}
module Language.Ruby.Syntax (module Language.Ruby.Syntax) where
import Analysis.Name as Name
import Control.Abstract as Abstract hiding (Load, String)
import Control.Monad
import Data.Abstract.BaseError
import Data.Abstract.Evaluatable
import qualified Data.Abstract.Module as M
import Data.Abstract.Name as Name
import Data.Abstract.Path
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Functor.Classes

View File

@ -11,10 +11,10 @@ module Language.TSX.Assignment
, TSX.Term(..)
) where
import Analysis.Name (name)
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import Control.Monad
import Data.Abstract.Name (name)
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.Foldable
import Data.Function

View File

@ -11,10 +11,10 @@ module Language.TypeScript.Assignment
, TypeScript.Term(..)
) where
import Analysis.Name (name)
import Assigning.Assignment hiding (Assignment, Error)
import qualified Assigning.Assignment as Assignment
import Control.Monad
import Data.Abstract.Name (name)
import qualified Data.Abstract.ScopeGraph as ScopeGraph (AccessControl (..))
import Data.Foldable
import Data.Function

View File

@ -3,9 +3,9 @@ module Language.TypeScript.Syntax.Import (module Language.TypeScript.Syntax.Impo
import Prologue
import qualified Analysis.Name as Name
import Control.Abstract hiding (Import)
import Data.Abstract.Evaluatable as Evaluatable
import qualified Data.Abstract.Name as Name
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.JSON.Fields
import Diffing.Algorithm

View File

@ -19,7 +19,6 @@ import Control.Monad
import Control.Monad.IO.Class
import Data.Functor.Foldable
import Foreign
import Foreign.C.Types (CBool (..))
import GHC.Generics
import Data.AST (AST, Node (Node))
@ -94,7 +93,6 @@ runParse parseTimeout language Blob{..} action =
liftIO . Exc.tryJust fromException . TS.withParser language $ \ parser -> do
let timeoutMicros = fromIntegral $ toMicroseconds parseTimeout
TS.ts_parser_set_timeout_micros parser timeoutMicros
TS.ts_parser_halt_on_error parser (CBool 1)
compatible <- TS.ts_parser_set_language parser language
if compatible then
TS.withParseTree parser (Source.bytes blobSource) $ \ treePtr -> do

View File

@ -1,21 +1,28 @@
{-# LANGUAGE FlexibleContexts, FlexibleInstances, FunctionalDependencies, RecordWildCards #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE RecordWildCards #-}
module Semantic.Api.Bridge
( APIBridge (..)
, APIConvert (..)
, (#?)
) where
import Analysis.File
import Control.Lens
import qualified Data.Blob as Data
import qualified Data.Edit as Data
import Data.Either
import qualified Data.Language as Data
import Data.ProtoLens (defMessage)
import qualified Data.Text as T
import qualified Semantic.Api.LegacyTypes as Legacy
import Data.Text.Lens
import qualified Proto.Semantic as API
import Proto.Semantic_Fields as P
import Source.Source (fromText, toText)
import qualified Semantic.Api.LegacyTypes as Legacy
import qualified Source.Source as Source (fromText, toText, totalSpan)
import qualified Source.Span as Source
import qualified System.Path as Path
-- | An @APIBridge x y@ instance describes an isomorphism between @x@ and @y@.
-- This is suitable for types such as 'Pos' which are representationally equivalent
@ -71,8 +78,18 @@ instance APIBridge T.Text Data.Language where
instance APIBridge API.Blob Data.Blob where
bridging = iso apiBlobToBlob blobToApiBlob where
blobToApiBlob b = defMessage & P.content .~ toText (Data.blobSource b) & P.path .~ T.pack (Data.blobPath b) & P.language .~ (bridging # Data.blobLanguage b)
apiBlobToBlob blob = Data.makeBlob (fromText (blob^.content)) (T.unpack (blob^.path)) (blob^.(language . bridging)) mempty
blobToApiBlob b
= defMessage
& P.content .~ Source.toText (Data.blobSource b)
& P.path .~ T.pack (Data.blobPath b)
& P.language .~ (bridging # Data.blobLanguage b)
apiBlobToBlob blob =
let src = blob^.content.to Source.fromText
pth = fromRight (Path.toAbsRel Path.emptyFile) (blob^.path._Text.to Path.parse)
in Data.Blob
{ blobSource = src
, blobFile = File pth (Source.totalSpan src) (blob^.language.bridging)
}
instance APIConvert API.BlobPair Data.BlobPair where
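The rewritten apiBlobToBlob above parses the wire path with Path.parse, falls back to an empty path when parsing fails, and stores the blob's total span in the File it builds. The fallback pattern in isolation looks roughly like this, assuming the System.Path functions used in the hunk (the helper name is illustrative):

import           Data.Either (fromRight)
import qualified System.Path as Path

-- Parse a textual path received over the API, defaulting to the empty file
-- path when the string is not a well-formed path.
pathOrEmpty :: String -> Path.AbsRelFile
pathOrEmpty = fromRight (Path.toAbsRel Path.emptyFile) . Path.parse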

View File

@ -15,7 +15,7 @@ module Semantic.Api.LegacyTypes
) where
import Data.Aeson
import Data.Blob hiding (File (..))
import Data.Blob
import Data.Text (Text)
import GHC.Generics (Generic)

View File

@ -1,4 +1,14 @@
{-# LANGUAGE AllowAmbiguousTypes, DataKinds, FlexibleContexts, FlexibleInstances, MultiParamTypeClasses, OverloadedStrings, RecordWildCards, ScopedTypeVariables, TypeApplications, TypeFamilies, UndecidableInstances #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
module Semantic.Api.Symbols
( legacyParseSymbols
, parseSymbols
@ -12,11 +22,11 @@ import Control.Effect.Reader
import Control.Exception
import Control.Lens
import Data.Abstract.Declarations
import Data.Blob hiding (File (..))
import Data.Blob
import Data.ByteString.Builder
import Data.Language
import Data.ProtoLens (defMessage)
import Data.Term (IsTerm(..), TermF)
import Data.Term (IsTerm (..), TermF)
import Data.Text (pack)
import qualified Parsing.Parser as Parser
import Prologue
@ -78,7 +88,6 @@ parseSymbols blobs = do
& P.language .~ (bridging # blobLanguage')
& P.symbols .~ mempty
& P.errors .~ [defMessage & P.error .~ pack e]
& P.blobOid .~ blobOid
tagsToFile :: [Tag] -> File
tagsToFile tags = defMessage
@ -86,7 +95,6 @@ parseSymbols blobs = do
& P.language .~ (bridging # blobLanguage')
& P.symbols .~ fmap tagToSymbol tags
& P.errors .~ mempty
& P.blobOid .~ blobOid
tagToSymbol :: Tag -> Symbol
tagToSymbol Tag{..} = defMessage

View File

@ -2,12 +2,14 @@
{-# LANGUAGE FlexibleContexts #-}
module Semantic.CLI (main) where
import qualified Analysis.File as File
import qualified Control.Carrier.Parse.Measured as Parse
import Control.Carrier.Reader
import Control.Exception
import Control.Monad.IO.Class
import Data.Blob
import Data.Blob.IO
import Data.Either
import qualified Data.Flag as Flag
import Data.Foldable
import Data.Handle
@ -26,7 +28,6 @@ import qualified Semantic.Telemetry.Log as Log
import Semantic.Version
import Serializing.Format hiding (Options)
import System.Exit (die)
import System.FilePath
import qualified System.Path as Path
import qualified System.Path.PartClass as Path.PartClass
@ -154,10 +155,11 @@ graphCommand = command "graph" (info graphArgumentsParser (progDesc "Compute a g
<$> ( Just <$> some (strArgument (metavar "FILES..."))
<|> flag' Nothing (long "stdin" <> help "Read a list of newline-separated paths to analyze from stdin."))
makeReadProjectFromPathsTask maybePaths = do
paths <- maybeM (liftIO (many getLine)) maybePaths
blobs <- traverse readBlobFromFile' (fileForPath <$> paths)
strPaths <- maybeM (liftIO (many getLine)) maybePaths
let paths = rights (Path.parse <$> strPaths)
blobs <- traverse readBlobFromPath paths
case paths of
(x:_) -> pure $! Project (takeDirectory x) blobs (Language.languageForFilePath x) mempty
(x:_) -> pure $! Project (Path.toString (Path.takeDirectory x)) blobs (Language.forPath x) mempty
_ -> pure $! Project "/" mempty Language.Unknown mempty
allLanguages = intercalate "|" . fmap show $ [Language.Go .. maxBound]
@ -186,8 +188,8 @@ languageModes = Language.PerLanguageModes
<> value Language.ALaCarte
<> showDefault)
filePathReader :: ReadM File
filePathReader = fileForPath <$> str
filePathReader :: ReadM (File.File Language.Language)
filePathReader = File.fromPath <$> path
path :: (Path.PartClass.FileDir fd) => ReadM (Path.AbsRel fd)
path = eitherReader Path.parse
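makeReadProjectFromPathsTask now parses every incoming path (whether from arguments or stdin), quietly drops anything that fails to parse via rights, and derives the project language from the first surviving path with Language.forPath. The language-inference step on its own, as a hedged sketch reusing only the functions named in the hunk (the helper name is illustrative):

import           Data.Either (rights)
import qualified Data.Language as Language
import qualified System.Path as Path

-- Infer a project language from raw path strings, mirroring the CLI:
-- unparseable paths are discarded, and no usable path means Unknown.
projectLanguageFor :: [String] -> Language.Language
projectLanguageFor strPaths =
  case rights (map Path.parse strPaths) :: [Path.AbsRelFile] of
    (p:_) -> Language.forPath p
    _     -> Language.Unknown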

View File

@ -43,6 +43,7 @@ import Prelude hiding (readFile)
import Analysis.Abstract.Caching.FlowInsensitive
import Analysis.Abstract.Collecting
import Analysis.Abstract.Graph as Graph
import Analysis.File
import Control.Abstract hiding (String)
import Control.Abstract.PythonPackage as PythonPackage
import Control.Algebra
@ -70,8 +71,12 @@ import Data.Functor.Foldable
import Data.Graph
import Data.Graph.ControlFlowVertex (VertexDeclaration)
import Data.Language as Language
<<<<<<< HEAD
import Data.List (find, isPrefixOf, isSuffixOf)
import Data.Map (Map)
=======
import Data.List (isPrefixOf)
>>>>>>> origin/master
import qualified Data.Map as Map
import Data.Project
import Data.Proxy
@ -84,6 +89,7 @@ import Semantic.Task as Task
import Source.Loc as Loc
import Source.Span
import System.FilePath.Posix (takeDirectory, (</>))
import qualified System.Path as Path
import Text.Show.Pretty (ppShow)
data GraphType = ImportGraph | CallGraph
@ -337,8 +343,9 @@ parsePythonPackage parser project = do
]
PythonPackage.FindPackages excludeDirs -> do
trace "In Graph.FindPackages"
let initFiles = filter (("__init__.py" `isSuffixOf`) . filePath) (projectFiles project)
let packageDirs = filter (`notElem` ((projectRootDir project </>) . unpack <$> excludeDirs)) (takeDirectory . filePath <$> initFiles)
let initFiles = filter (isInit . filePath) (projectFiles project)
isInit = (== Path.relFile "__init__.py") . Path.takeFileName
packageDirs = filter (`notElem` ((projectRootDir project </>) . unpack <$> excludeDirs)) (takeDirectory . Path.toString . filePath <$> initFiles)
packageFromProject project [ blob | dir <- packageDirs
, blob <- projectBlobs project
, dir `isPrefixOf` blobPath blob
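The FindPackages branch above stops suffix-matching on raw FilePaths: an __init__.py module is recognised by comparing the typed file-name component, and the paths are only rendered back to strings afterwards for the directory-prefix check. The predicate on its own, assuming filePath yields the typed path carried by Analysis.File:

import qualified System.Path as Path

-- A file marks a Python package exactly when its final component is
-- __init__.py, regardless of which directory it lives in.
isInitModule :: Path.AbsRelFile -> Bool
isInitModule = (== Path.relFile "__init__.py") . Path.takeFileName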

View File

@ -11,7 +11,6 @@ module Semantic.IO
import Prelude hiding (readFile)
import Control.Monad.IO.Class
import Data.Language
import System.Directory (doesDirectoryExist)
import System.Directory.Tree (AnchoredDirTree (..))
import qualified System.Directory.Tree as Tree
@ -22,6 +21,9 @@ import qualified System.Path.PartClass as Path.PartClass
isDirectory :: MonadIO m => FilePath -> m Bool
isDirectory path = liftIO (doesDirectoryExist path)
pathIsMinified :: FilePath -> Bool
pathIsMinified = isExtensionOf ".min.js"
-- Recursively find files in a directory.
findFilesInDir :: (Path.PartClass.AbsRel ar, MonadIO m) => Path.Dir ar -> [String] -> [Path.Dir ar] -> m [Path.File ar]
findFilesInDir path exts excludeDirs = do
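pathIsMinified moves from Data.Language into Semantic.IO, alongside the other filesystem helpers, with its definition unchanged. A quick doctest-style check of what it accepts, assuming isExtensionOf still comes from the filepath package as before:

import System.FilePath (isExtensionOf)

pathIsMinified :: FilePath -> Bool
pathIsMinified = isExtensionOf ".min.js"

-- >>> pathIsMinified "jquery.min.js"
-- True
-- >>> pathIsMinified "jquery.js"
-- False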

View File

@ -19,6 +19,7 @@ module Semantic.Resolution
, ResolutionC(..)
) where
import Analysis.File as File
import Control.Algebra
import Control.Monad.IO.Class
import Data.Aeson
@ -38,10 +39,10 @@ import System.FilePath.Posix
import qualified System.Path as Path
nodeJSResolutionMap :: (Has Files sig m, MonadIO m) => FilePath -> Text -> [FilePath] -> m (Map FilePath FilePath)
nodeJSResolutionMap :: Has Files sig m => FilePath -> Text -> [FilePath] -> m (Map FilePath FilePath)
nodeJSResolutionMap rootDir prop excludeDirs = do
files <- findFiles (Path.absRel rootDir) [".json"] (fmap Path.absRel excludeDirs)
let packageFiles = fileForTypedPath <$> filter ((==) (Path.relFile "package.json") . Path.takeFileName) files
let packageFiles = File.fromPath <$> filter ((==) (Path.relFile "package.json") . Path.takeFileName) files
blobs <- readBlobs (FilesFromPaths packageFiles)
pure $ fold (mapMaybe (lookup prop) blobs)
where

View File

@ -27,6 +27,7 @@ module Semantic.Task.Files
, FilesArg(..)
) where
import Analysis.File
import Control.Algebra
import Control.Effect.Error
import Control.Exception
@ -44,10 +45,10 @@ import qualified System.Path as Path
import qualified System.Path.IO as IO (withBinaryFile)
data Source blob where
FromPath :: File -> Source Blob
FromPath :: File Language -> Source Blob
FromHandle :: Handle 'IO.ReadMode -> Source [Blob]
FromDir :: Path.AbsRelDir -> Source [Blob]
FromPathPair :: File -> File -> Source BlobPair
FromPathPair :: File Language -> File Language -> Source BlobPair
FromPairHandle :: Handle 'IO.ReadMode -> Source [BlobPair]
data Destination = ToPath Path.AbsRelFile | ToHandle (Handle 'IO.WriteMode)
@ -93,26 +94,21 @@ instance (Has (Error SomeException) sig m, MonadFail m, MonadIO m) => Algebra (F
Write (ToPath path) builder k -> liftIO (IO.withBinaryFile path IO.WriteMode (`B.hPutBuilder` builder)) >> k
Write (ToHandle (WriteHandle handle)) builder k -> liftIO (B.hPutBuilder handle builder) >> k
readBlob :: Has Files sig m => File -> m Blob
readBlob :: Has Files sig m => File Language -> m Blob
readBlob file = send (Read (FromPath file) pure)
-- Various ways to read in files
data FilesArg
= FilesFromHandle (Handle 'IO.ReadMode)
| FilesFromPaths [File]
| FilesFromPaths [File Language]
-- | A task which reads a list of 'Blob's from a 'Handle' or a list of 'FilePath's optionally paired with 'Language's.
readBlobs :: (Has Files sig m, MonadIO m) => FilesArg -> m [Blob]
readBlobs :: Has Files sig m => FilesArg -> m [Blob]
readBlobs (FilesFromHandle handle) = send (Read (FromHandle handle) pure)
readBlobs (FilesFromPaths [path]) = do
isDir <- isDirectory (filePath path)
if isDir
then send (Read (FromDir (Path.path (filePath path))) pure)
else pure <$> send (Read (FromPath path) pure)
readBlobs (FilesFromPaths paths) = traverse (send . flip Read pure . FromPath) paths
readBlobs (FilesFromPaths paths) = traverse (send . flip Read pure . FromPath) paths
-- | A task which reads a list of pairs of 'Blob's from a 'Handle' or a list of pairs of 'FilePath's optionally paired with 'Language's.
readBlobPairs :: Has Files sig m => Either (Handle 'IO.ReadMode) [(File, File)] -> m [BlobPair]
readBlobPairs :: Has Files sig m => Either (Handle 'IO.ReadMode) [(File Language, File Language)] -> m [BlobPair]
readBlobPairs (Left handle) = send (Read (FromPairHandle handle) pure)
readBlobPairs (Right paths) = traverse (send . flip Read pure . uncurry FromPathPair) paths
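With the single-path directory special case gone, readBlobs is pure effect plumbing and drops its MonadIO constraint: every FilesFromPaths entry is read through the Files effect. Building that argument now goes through Analysis.File.fromPath, roughly as below (the fixture paths are taken from the parse fixtures later in this diff):

import qualified Analysis.File as File
import           Semantic.Task.Files (FilesArg (..))
import qualified System.Path as Path

-- Two typed fixture paths wrapped up for use with readBlobs (FilesFromPaths ...).
fixtures :: FilesArg
fixtures = FilesFromPaths
  [ File.fromPath (Path.relFile "test/fixtures/ruby/corpus/and-or.A.rb")
  , File.fromPath (Path.relFile "test/fixtures/ruby/corpus/and-or.B.rb")
  ]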

View File

@ -4,6 +4,10 @@
{-# LANGUAGE PartialTypeSignatures #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE TypeOperators #-}
<<<<<<< HEAD
=======
{-# LANGUAGE ViewPatterns #-}
>>>>>>> origin/master
{-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-exported-signatures -Wno-partial-type-signatures -O0 #-}
module Semantic.Util
( evaluateProject'
@ -16,6 +20,7 @@ module Semantic.Util
import Prelude hiding (readFile)
import Analysis.File
import Control.Abstract
import Control.Carrier.Fresh.Strict
import Control.Carrier.Lift
@ -24,7 +29,10 @@ import Control.Carrier.Reader
import Control.Carrier.Resumable.Either (SomeError (..))
import Control.Carrier.State.Strict
import Control.Carrier.Trace.Printing
<<<<<<< HEAD
import Control.Exception hiding (evaluate)
=======
>>>>>>> origin/master
import Control.Lens.Getter
import Control.Monad
import Data.Abstract.Address.Precise as Precise
@ -33,7 +41,6 @@ import Data.Abstract.Module
import qualified Data.Abstract.ModuleTable as ModuleTable
import Data.Abstract.Package
import Data.Abstract.Value.Concrete as Concrete
import Data.Blob
import Data.Blob.IO
import Data.Graph (topologicalSort)
import qualified Data.Language as Language
@ -50,6 +57,7 @@ import Semantic.Task
import Source.Span (HasSpan (..))
import System.Exit (die)
import System.FilePath.Posix (takeDirectory)
import qualified System.Path as Path
justEvaluating :: Evaluator term Precise (Value term Precise) _ result
-> IO ( Heap Precise Precise (Value term Precise),
@ -77,7 +85,7 @@ justEvaluating
evaluateProject' session proxy parser paths = do
let lang = Language.reflect proxy
res <- runTask session $ asks configTreeSitterParseTimeout >>= \ timeout -> runParse timeout $ do
blobs <- catMaybes <$> traverse readBlobFromFile (flip File lang <$> paths)
blobs <- catMaybes <$> traverse readBlobFromFile (fileForPath <$> paths)
package <- fmap snd <$> parsePackage parser (Project (takeDirectory (maybe "/" fst (uncons paths))) blobs lang [])
modules <- topologicalSort <$> runImportGraphToModules proxy package
trace $ "evaluating with load order: " <> show (map (modulePath . moduleInfo) modules)
@ -94,6 +102,9 @@ parseFile, parseFileQuiet :: Parser term -> FilePath -> IO term
parseFile parser = runTask' . (parse parser <=< readBlob . fileForPath)
parseFileQuiet parser = runTaskQuiet . (parse parser <=< readBlob . fileForPath)
fileForPath :: FilePath -> File Language.Language
fileForPath (Path.absRel -> p) = File p lowerBound (Language.forPath p)
runTask', runTaskQuiet :: ParseC TaskC a -> IO a
runTask' task = runTaskWithOptions debugOptions (asks configTreeSitterParseTimeout >>= \ timeout -> runParse timeout task) >>= either (die . displayException) pure
runTaskQuiet task = runTaskWithOptions defaultOptions (asks configTreeSitterParseTimeout >>= \ timeout -> runParse timeout task) >>= either (die . displayException) pure

View File

@ -35,8 +35,8 @@ where
import Analysis.ConstructorName
import Analysis.HasTextElement
import Analysis.Name
import Data.Abstract.Declarations
import Data.Abstract.Name
import Data.Algebra
import Data.Foldable
import Data.Functor.Foldable

View File

@ -3,7 +3,7 @@ module Data.Abstract.Name.Spec (spec) where
import SpecHelpers
spec :: Spec
spec = describe "Data.Abstract.Name" $
spec = describe "Analysis.Name" $
it "should format anonymous names correctly" $ do
show (nameI 0) `shouldBe` "\"_a\""
show (nameI 26) `shouldBe` "\"_aʹ\""

View File

@ -16,6 +16,7 @@ module Data.Functor.Listable
, ListableSyntax
) where
import qualified Analysis.Name as Name
import qualified Analysis.TOCSummary as ToC
import Data.Abstract.ScopeGraph (AccessControl(..))
import Data.Bifunctor.Join
@ -27,7 +28,6 @@ import qualified Data.Syntax as Syntax
import qualified Data.Syntax.Comment as Comment
import qualified Data.Syntax.Declaration as Declaration
import qualified Data.Syntax.Statement as Statement
import qualified Data.Abstract.Name as Name
import Data.Term
import Data.Text as T (Text, pack)
import Data.Sum

View File

@ -1,8 +1,9 @@
module Data.Language.Spec (testTree) where
import Data.Language
import Test.Tasty
import Test.Tasty.HUnit
import Data.Language as Language
import qualified System.Path as Path
import Test.Tasty
import Test.Tasty.HUnit
testTree :: TestTree
testTree = testGroup "Data.Language"
@ -13,7 +14,7 @@ testTree = testGroup "Data.Language"
codeNavLanguages @=? [Go, Java, Ruby, Python, JavaScript, TypeScript, PHP]
, testCase "languageForFilePath works for languages with ambiguous lingo extensions" $ do
languageForFilePath "foo.php" @=? PHP
languageForFilePath "foo.md" @=? Markdown
languageForFilePath "foo.tsx" @=? TSX
Language.forPath (Path.relFile "foo.php") @=? PHP
Language.forPath (Path.relFile "foo.md" ) @=? Markdown
Language.forPath (Path.relFile "foo.tsx") @=? TSX
]

View File

@ -1,13 +1,14 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeApplications #-}
{-# OPTIONS_GHC -O1 #-}
{-# OPTIONS_GHC -Wno-unused-top-binds -Wno-unused-imports #-}
module Main (main) where
import qualified Analysis.File as File
import Control.Carrier.Parse.Measured
import Control.Carrier.Reader
import Control.Concurrent.Async (forConcurrently)
@ -16,9 +17,9 @@ import Control.Lens
import Control.Monad
import Data.Blob
import Data.Foldable
import Data.Int
import Data.Language (LanguageMode (..), PerLanguageModes (..))
import Data.List
import Data.Int
import qualified Data.Text as Text
import Data.Traversable
import System.FilePath.Glob
@ -174,7 +175,7 @@ buildExamples session lang tsDir = do
assertOK msg = either (\e -> HUnit.assertFailure (msg <> " failed to parse" <> show e)) (refuteErrors msg)
refuteErrors msg a = case toList (a^.files) of
[x] | (e:_) <- toList (x^.errors) -> HUnit.assertFailure (msg <> " parse errors " <> show e)
_ -> pure ()
_ -> pure ()
assertMatch a b = case (a, b) of
(Right a, Right b) -> case (toList (a^.files), toList (b^.files)) of
@ -307,4 +308,4 @@ parseSymbolsFilePath ::
=> PerLanguageModes
-> Path.RelFile
-> m ParseTreeSymbolResponse
parseSymbolsFilePath languageModes path = readBlob (fileForTypedPath path) >>= runReader languageModes . parseSymbols . pure @[]
parseSymbolsFilePath languageModes path = readBlob (File.fromPath path) >>= runReader languageModes . parseSymbols . pure @[]

View File

@ -1,4 +1,8 @@
{-# LANGUAGE DataKinds, GADTs, OverloadedStrings, PackageImports, TypeApplications #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE PackageImports #-}
{-# LANGUAGE TypeApplications #-}
module Graphing.Calls.Spec ( spec ) where
@ -7,6 +11,7 @@ import SpecHelpers
import Algebra.Graph
import qualified Analysis.File as File
import Control.Effect.Parse
import "semantic" Data.Graph (Graph (..), topologicalSort)
import Data.Graph.ControlFlowVertex
@ -19,7 +24,7 @@ callGraphPythonProject path = runTaskOrDie $ do
let proxy = Proxy @'Language.Python
lang = Language.Python
SomeParser parser <- pure . fromJust $! parserForLanguage analysisParsers Language.Python
blob <- readBlobFromFile' (fileForTypedPath path)
blob <- readBlobFromFile' (File.fromPath path)
package <- fmap snd <$> parsePackage parser (Project (Path.toString (Path.takeDirectory path)) [blob] lang [])
modules <- topologicalSort <$> runImportGraphToModules proxy package
runCallGraph proxy False modules package

View File

@ -1,21 +1,22 @@
{-# LANGUAGE TypeApplications #-}
module Parsing.Spec (spec) where
import Data.Blob
import Data.ByteString.Char8 (pack)
import Data.Duration
import Data.Either
import Data.Language
import Parsing.TreeSitter
import Source.Source
import SpecHelpers
import TreeSitter.JSON (Grammar, tree_sitter_json)
import Data.Blob
import Data.ByteString.Char8 (pack)
import Data.Duration
import Data.Either
import Data.Language
import Parsing.TreeSitter
import Source.Source
import SpecHelpers
import qualified System.Path as Path
import TreeSitter.JSON (Grammar, tree_sitter_json)
spec :: Spec
spec = do
describe "parseToAST" $ do
let source = toJSONSource [1 :: Int .. 10000]
let largeBlob = sourceBlob "large.json" JSON source
let largeBlob = fromSource (Path.relFile "large.json") JSON source
it "returns a result when the timeout does not expire" $ do
let timeout = fromMicroseconds 0 -- Zero microseconds indicates no timeout

View File

@ -141,7 +141,7 @@ spec = do
it "produces JSON output if there are parse errors" $ do
blobs <- blobsForPaths (Path.relFile "ruby/toc/methods.A.rb") (Path.relFile "ruby/toc/methods.X.rb")
output <- runTaskOrDie (runReader defaultLanguageModes (diffSummaryBuilder Format.JSON [blobs]))
runBuilder output `shouldBe` ("{\"files\":[{\"path\":\"test/fixtures/ruby/toc/methods.A.rb -> test/fixtures/ruby/toc/methods.X.rb\",\"language\":\"Ruby\",\"changes\":[{\"category\":\"Method\",\"term\":\"bar\",\"span\":{\"start\":{\"line\":1,\"column\":1},\"end\":{\"line\":2,\"column\":4}},\"changeType\":\"REMOVED\"},{\"category\":\"Method\",\"term\":\"baz\",\"span\":{\"start\":{\"line\":4,\"column\":1},\"end\":{\"line\":5,\"column\":4}},\"changeType\":\"REMOVED\"}],\"errors\":[{\"error\":\"expected end of input nodes, but got ParseError\",\"span\":{\"start\":{\"line\":1,\"column\":1},\"end\":{\"line\":2,\"column\":3}}}]}]}\n" :: ByteString)
runBuilder output `shouldBe` ("{\"files\":[{\"path\":\"test/fixtures/ruby/toc/methods.A.rb -> test/fixtures/ruby/toc/methods.X.rb\",\"language\":\"Ruby\",\"changes\":[{\"category\":\"Method\",\"term\":\"bar\",\"span\":{\"start\":{\"line\":1,\"column\":1},\"end\":{\"line\":2,\"column\":4}},\"changeType\":\"REMOVED\"},{\"category\":\"Method\",\"term\":\"baz\",\"span\":{\"start\":{\"line\":4,\"column\":1},\"end\":{\"line\":5,\"column\":4}},\"changeType\":\"REMOVED\"}],\"errors\":[{\"error\":\"expected end of input nodes, but got ParseError\",\"span\":{\"start\":{\"line\":1,\"column\":1},\"end\":{\"line\":3,\"column\":1}}}]}]}\n" :: ByteString)
it "ignores anonymous functions" $ do
blobs <- blobsForPaths (Path.relFile "ruby/toc/lambda.A.rb") (Path.relFile "ruby/toc/lambda.B.rb")

View File

@ -1,14 +1,16 @@
module Semantic.CLI.Spec (testTree) where
import Analysis.File
import Control.Carrier.Parse.Simple
import Control.Carrier.Reader
import Data.ByteString.Builder
import Data.Language
import Semantic.Api hiding (Blob, BlobPair, File)
import Semantic.Task
import Serializing.Format
import System.IO.Unsafe
import qualified System.Path as Path
import System.Path ((</>))
import qualified System.Path as Path
import qualified System.Path.Directory as Path
import SpecHelpers
@ -34,7 +36,7 @@ renderDiff ref new = unsafePerformIO $ do
else ["git", "diff", ref, new]
{-# NOINLINE renderDiff #-}
testForDiffFixture :: (String, [BlobPair] -> ParseC TaskC Builder, [(File, File)], Path.RelFile) -> TestTree
testForDiffFixture :: (String, [BlobPair] -> ParseC TaskC Builder, [(File Language, File Language)], Path.RelFile) -> TestTree
testForDiffFixture (diffRenderer, runDiff, files, expected) =
goldenVsStringDiff
("diff fixture renders to " <> diffRenderer <> " " <> show files)
@ -42,7 +44,7 @@ testForDiffFixture (diffRenderer, runDiff, files, expected) =
(Path.toString expected)
(fmap toLazyByteString . runTaskOrDie $ readBlobPairs (Right files) >>= runDiff)
testForParseFixture :: (String, [Blob] -> ParseC TaskC Builder, [File], Path.RelFile) -> TestTree
testForParseFixture :: (String, [Blob] -> ParseC TaskC Builder, [File Language], Path.RelFile) -> TestTree
testForParseFixture (format, runParse, files, expected) =
goldenVsStringDiff
("diff fixture renders to " <> format)
@ -50,7 +52,7 @@ testForParseFixture (format, runParse, files, expected) =
(Path.toString expected)
(fmap toLazyByteString . runTaskOrDie $ readBlobs (FilesFromPaths files) >>= runParse)
parseFixtures :: [(String, [Blob] -> ParseC TaskC Builder, [File], Path.RelFile)]
parseFixtures :: [(String, [Blob] -> ParseC TaskC Builder, [File Language], Path.RelFile)]
parseFixtures =
[ ("s-expression", run . parseTermBuilder TermSExpression, path, Path.relFile "test/fixtures/ruby/corpus/and-or.parseA.txt")
, ("json", run . parseTermBuilder TermJSONTree, path, prefix </> Path.file "parse-tree.json")
@ -59,18 +61,18 @@ parseFixtures =
, ("symbols", run . parseSymbolsBuilder Serializing.Format.JSON, path'', prefix </> Path.file "parse-tree.symbols.json")
, ("protobuf symbols", run . parseSymbolsBuilder Serializing.Format.Proto, path'', prefix </> Path.file "parse-tree.symbols.protobuf.bin")
]
where path = [File "test/fixtures/ruby/corpus/and-or.A.rb" Ruby]
path' = [File "test/fixtures/ruby/corpus/and-or.A.rb" Ruby, File "test/fixtures/ruby/corpus/and-or.B.rb" Ruby]
path'' = [File "test/fixtures/ruby/corpus/method-declaration.A.rb" Ruby]
where path = [File (Path.absRel "test/fixtures/ruby/corpus/and-or.A.rb") lowerBound Ruby]
path' = [File (Path.absRel "test/fixtures/ruby/corpus/and-or.A.rb") lowerBound Ruby, File (Path.absRel "test/fixtures/ruby/corpus/and-or.B.rb") lowerBound Ruby]
path'' = [File (Path.absRel "test/fixtures/ruby/corpus/method-declaration.A.rb") lowerBound Ruby]
prefix = Path.relDir "test/fixtures/cli"
run = runReader defaultLanguageModes
diffFixtures :: [(String, [BlobPair] -> ParseC TaskC Builder, [(File, File)], Path.RelFile)]
diffFixtures :: [(String, [BlobPair] -> ParseC TaskC Builder, [(File Language, File Language)], Path.RelFile)]
diffFixtures =
[ ("json diff", parseDiffBuilder DiffJSONTree, pathMode, prefix </> Path.file "diff-tree.json")
, ("s-expression diff", parseDiffBuilder DiffSExpression, pathMode, Path.relFile "test/fixtures/ruby/corpus/method-declaration.diffA-B.txt")
, ("toc summaries diff", runReader defaultLanguageModes . diffSummaryBuilder Serializing.Format.JSON, pathMode, prefix </> Path.file "diff-tree.toc.json")
, ("protobuf diff", runReader defaultLanguageModes . diffSummaryBuilder Serializing.Format.Proto, pathMode, prefix </> Path.file "diff-tree.toc.protobuf.bin")
]
where pathMode = [(File "test/fixtures/ruby/corpus/method-declaration.A.rb" Ruby, File "test/fixtures/ruby/corpus/method-declaration.B.rb" Ruby)]
where pathMode = [(File (Path.absRel "test/fixtures/ruby/corpus/method-declaration.A.rb") lowerBound Ruby, File (Path.absRel "test/fixtures/ruby/corpus/method-declaration.B.rb") lowerBound Ruby)]
prefix = Path.relDir "test/fixtures/cli"

View File

@ -4,23 +4,25 @@ module Semantic.IO.Spec (spec) where
import Prelude hiding (readFile)
import Data.Blob
import Analysis.File as File
import Data.Blob as Blob
import Data.Handle
import SpecHelpers
import qualified System.Path as Path
spec :: Spec
spec = do
describe "readFile" $ do
it "returns a blob for extant files" $ do
Just blob <- readBlobFromFile (File "semantic.cabal" Unknown)
Just blob <- readBlobFromFile (File (Path.absRel "semantic.cabal") lowerBound Unknown)
blobPath blob `shouldBe` "semantic.cabal"
it "throws for absent files" $ do
readBlobFromFile (File "this file should not exist" Unknown) `shouldThrow` anyIOException
readBlobFromFile (File (Path.absRel "/dev/doesnotexist") lowerBound Unknown) `shouldThrow` anyIOException
describe "readBlobPairsFromHandle" $ do
let a = sourceBlob "method.rb" Ruby "def foo; end"
let b = sourceBlob "method.rb" Ruby "def bar(x); end"
let a = Blob.fromSource (Path.relFile "method.rb") Ruby "def foo; end"
let b = Blob.fromSource (Path.relFile "method.rb") Ruby "def bar(x); end"
it "returns blobs for valid JSON encoded diff input" $ do
blobs <- blobsFromFilePath "test/fixtures/cli/diff.json"
blobs `shouldBe` [Compare a b]
@ -45,7 +47,7 @@ spec = do
it "returns blobs for unsupported language" $ do
h <- openFileForReading "test/fixtures/cli/diff-unsupported-language.json"
blobs <- readBlobPairsFromHandle h
let b' = sourceBlob "test.kt" Unknown "fun main(args: Array<String>) {\nprintln(\"hi\")\n}\n"
let b' = Blob.fromSource (Path.relFile "test.kt") Unknown "fun main(args: Array<String>) {\nprintln(\"hi\")\n}\n"
blobs `shouldBe` [Insert b']
it "detects language based on filepath for empty language" $ do
@ -68,7 +70,7 @@ spec = do
it "returns blobs for valid JSON encoded parse input" $ do
h <- openFileForReading "test/fixtures/cli/parse.json"
blobs <- readBlobsFromHandle h
let a = sourceBlob "method.rb" Ruby "def foo; end"
let a = Blob.fromSource (Path.relFile "method.rb") Ruby "def foo; end"
blobs `shouldBe` [a]
it "throws on blank input" $ do
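The spec fixtures above now go through Blob.fromSource, which takes a typed relative path, a language, and the source text, replacing the older sourceBlob and makeBlob helpers. A standalone sketch, assuming the Source text literal works via OverloadedStrings exactly as it does in these specs:

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Blob as Blob
import           Data.Language (Language (Ruby))
import qualified System.Path as Path

-- Same shape as the fixtures in the spec: path, language, source text.
methodBlob :: Blob.Blob
methodBlob = Blob.fromSource (Path.relFile "method.rb") Ruby "def foo; end"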

View File

@ -1,20 +1,23 @@
{-# LANGUAGE OverloadedStrings #-}
module Semantic.Spec (spec) where
import Control.Carrier.Reader
import Control.Exception (fromException)
import SpecHelpers
import Analysis.File
import Control.Carrier.Reader
import Control.Exception (fromException)
import qualified Data.Blob as Blob
import SpecHelpers
import qualified System.Path as Path
import Semantic.Api hiding (Blob)
-- we need some lenses here, oof
setBlobLanguage :: Language -> Blob -> Blob
setBlobLanguage lang b = b { blobFile = (blobFile b) { fileLanguage = lang }}
setBlobLanguage lang b = b { blobFile = (blobFile b) { fileBody = lang }}
spec :: Spec
spec = do
describe "parseBlob" $ do
let methodsBlob = makeBlob "def foo\nend\n" "methods.rb" Ruby mempty
let methodsBlob = Blob.fromSource (Path.relFile "methods.rb") Ruby "def foo\nend\n"
it "returns error if given an unknown language (json)" $ do
output <- fmap runBuilder . runTaskOrDie . runReader defaultLanguageModes $ parseTermBuilder TermJSONTree [ setBlobLanguage Unknown methodsBlob ]
@ -23,8 +26,8 @@ spec = do
it "throws if given an unknown language for sexpression output" $ do
res <- runTaskWithOptions defaultOptions (runReader defaultLanguageModes (runParseWithConfig (parseTermBuilder TermSExpression [setBlobLanguage Unknown methodsBlob])))
case res of
Left exc -> fromException exc `shouldBe` Just (NoLanguageForBlob "methods.rb")
Right _bad -> fail "Expected parseTermBuilder to fail for an unknown language"
Left exc -> fromException exc `shouldBe` Just (NoLanguageForBlob "methods.rb")
Right _bad -> fail "Expected parseTermBuilder to fail for an unknown language"
it "renders with the specified renderer" $ do
output <- fmap runBuilder . runTaskOrDie . runReader defaultLanguageModes $ parseTermBuilder TermSExpression [methodsBlob]

View File

@ -1,4 +1,5 @@
{-# LANGUAGE DataKinds, FlexibleContexts #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module SpecHelpers
@ -23,72 +24,67 @@ module SpecHelpers
, evaluateProject
) where
import Control.Abstract
import Control.Carrier.Fresh.Strict
import Control.Carrier.Parse.Simple
import Control.Carrier.Reader as X
import qualified Analysis.File as File
import Analysis.Name as X
import Control.Abstract
import Control.Carrier.Fresh.Strict
import Control.Carrier.Lift
import Control.Carrier.Parse.Simple
import Control.Carrier.Reader as X
import Control.Carrier.Resumable.Either
import Control.Carrier.State.Strict
import qualified Control.Carrier.Trace.Ignoring as Trace.Ignoring
import Control.Carrier.Resumable.Either
import Control.Carrier.Lift
import Control.Carrier.State.Strict
import Control.Exception (displayException)
import Control.Monad as X
import Data.Abstract.Address.Precise as X
import Data.Abstract.Evaluatable
import Data.Abstract.FreeVariables as X
import Control.Exception (displayException)
import Control.Monad as X
import Data.Abstract.Address.Precise as X
import Data.Abstract.Evaluatable
import Data.Abstract.FreeVariables as X
import qualified Data.Abstract.Heap as Heap
import Data.Abstract.Module as X
import Data.Abstract.ModuleTable as X hiding (lookup)
import Data.Abstract.Name as X
import Data.Abstract.Module as X
import Data.Abstract.ModuleTable as X hiding (lookup)
import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.Abstract.Value.Concrete (Value(..), ValueError, runValueError)
import Data.Blob as X
import Data.Blob.IO as X
import Data.ByteString as X (ByteString)
import Data.ByteString.Builder (Builder, toLazyByteString)
import Data.ByteString.Lazy (toStrict)
import Data.Edit as X
import Data.Foldable (toList)
import Data.Functor.Listable as X
import Data.Language as X hiding (Precise)
import Data.List.NonEmpty as X (NonEmpty(..))
import Data.Maybe as X
import Data.Monoid as X (Monoid(..), First(..), Last(..))
import Data.Project as X
import Data.Proxy as X
import Data.Semigroup as X (Semigroup(..))
import Data.Semilattice.Lower as X
import Data.String
import Data.Sum as Sum
import Data.Term as X
import Data.Traversable as X (for)
import Debug.Trace as X (traceShowM, traceM)
import Parsing.Parser as X
import Semantic.Api hiding (File, Blob, BlobPair)
import Semantic.Config (Config(..), optionsLogLevel)
import Semantic.Graph (analysisParsers, runHeap, runScopeGraph)
import Semantic.Task as X
import Semantic.Telemetry (LogQueue, StatQueue)
import Semantic.Util as X
import Source.Range as X hiding (start, end, point)
import Source.Source as X (Source)
import Source.Span as X hiding (HasSpan(..), start, end, point)
import System.Exit (die)
import Data.Abstract.Value.Concrete (Value (..), ValueError, runValueError)
import Data.Blob as X
import Data.Blob.IO as X
import Data.ByteString as X (ByteString)
import Data.ByteString.Builder (Builder, toLazyByteString)
import Data.ByteString.Lazy (toStrict)
import Data.Edit as X
import Data.Foldable (toList)
import Data.Functor.Listable as X
import Data.Language as X hiding (Precise)
import Data.List.NonEmpty as X (NonEmpty (..))
import Data.Maybe as X
import Data.Monoid as X (First (..), Last (..), Monoid (..))
import Data.Project as X
import Data.Proxy as X
import Data.Semigroup as X (Semigroup (..))
import Data.Semilattice.Lower as X
import Data.Sum as Sum
import Data.Term as X
import Data.Traversable as X (for)
import Debug.Trace as X (traceM, traceShowM)
import Parsing.Parser as X
import Semantic.Api hiding (Blob, BlobPair, File)
import Semantic.Config (Config (..), optionsLogLevel)
import Semantic.Graph (analysisParsers, runHeap, runScopeGraph)
import Semantic.Task as X
import Semantic.Telemetry (LogQueue, StatQueue)
import Semantic.Util as X
import Source.Range as X hiding (end, point, start)
import Source.Source as X (Source)
import Source.Span as X hiding (HasSpan (..), end, point, start)
import System.Exit (die)
import qualified System.Path as Path
import Test.Hspec as X (Spec, SpecWith, context, describe, it, xit, parallel, pendingWith, around, runIO)
import Test.Hspec.Expectations as X
import Test.Hspec.LeanCheck as X
import Test.LeanCheck as X
import Unsafe.Coerce (unsafeCoerce)
import Test.Hspec as X (Spec, SpecWith, around, context, describe, it, parallel, pendingWith, runIO, xit)
import Test.Hspec.Expectations as X
import Test.Hspec.LeanCheck as X
import Test.LeanCheck as X
import Unsafe.Coerce (unsafeCoerce)
runBuilder :: Builder -> ByteString
runBuilder = toStrict . toLazyByteString
-- | This orphan instance is so we don't have to insert @name@ calls
-- in dozens and dozens of environment specs.
instance IsString Name where
fromString = X.name . fromString
-- | Returns an s-expression formatted diff for the specified FilePath pair.
diffFilePaths :: TaskSession -> Path.RelFile -> Path.RelFile -> IO ByteString
diffFilePaths session p1 p2 = do
@ -99,7 +95,7 @@ diffFilePaths session p1 p2 = do
-- | Returns an s-expression parse tree for the specified path.
parseFilePath :: TaskSession -> Path.RelFile -> IO (Either SomeException ByteString)
parseFilePath session path = do
blob <- readBlobFromFile (fileForTypedPath path)
blob <- readBlobFromFile (File.fromPath path)
res <- runTask session . runParse (configTreeSitterParseTimeout (config session)) . runReader defaultLanguageModes $ parseTermBuilder TermSExpression (toList blob)
pure (runBuilder <$> res)
@ -108,7 +104,7 @@ runParseWithConfig task = asks configTreeSitterParseTimeout >>= \ timeout -> run
-- | Read two files to a BlobPair.
readFilePathPair :: Path.RelFile -> Path.RelFile -> IO BlobPair
readFilePathPair p1 p2 = readFilePair (fileForTypedPath p1) (fileForTypedPath p2)
readFilePathPair p1 p2 = readFilePair (File.fromPath p1) (File.fromPath p2)
-- Run a Task and call `die` if it returns an Exception.
runTaskOrDie :: ParseC TaskC a -> IO a

View File

@ -1,12 +1,13 @@
{-# LANGUAGE OverloadedStrings #-}
module Tags.Spec (spec) where
import Control.Carrier.Reader
import Semantic.Api.Symbols
import Source.Loc
import SpecHelpers
import qualified Analysis.File as File
import Control.Carrier.Reader
import Semantic.Api.Symbols
import Source.Loc
import SpecHelpers
import qualified System.Path as Path
import Tags.Tagging as Tags
import Tags.Tagging as Tags
spec :: Spec
spec = do
@ -90,4 +91,4 @@ spec = do
]
parseTestFile :: Foldable t => t Tags.Kind -> Path.RelFile -> IO [Tag]
parseTestFile include path = runTaskOrDie $ readBlob (fileForPath (Path.toString path)) >>= runReader defaultLanguageModes . fmap (filter ((`elem` include) . kind)) . tagsForBlob
parseTestFile include path = runTaskOrDie $ readBlob (File.fromPath path) >>= runReader defaultLanguageModes . fmap (filter ((`elem` include) . kind)) . tagsForBlob