
Merge branch 'master' into strict-equals

Patrick Thomson 2018-05-23 11:33:32 -04:00 committed by GitHub
commit 7da61bd4c9
8 changed files with 152 additions and 65 deletions

View File

@@ -11,6 +11,10 @@ build-type: Simple
-- extra-source-files:
cabal-version: >=1.10
flag release
description: Build with optimizations on (for CI or deployment builds)
default: False
library
hs-source-dirs: src
exposed-modules:
@@ -221,13 +225,19 @@ library
, StandaloneDeriving
, StrictData
, TypeApplications
ghc-options: -Wall -Wcompat -Wincomplete-record-updates -Wincomplete-uni-patterns -Wredundant-constraints -fno-warn-name-shadowing -O -j
if flag(release)
ghc-options: -Wall -Werror -Wcompat -Wincomplete-record-updates -Wincomplete-uni-patterns -Wredundant-constraints -fno-warn-name-shadowing -O1 -j
else
ghc-options: -Wall -Wcompat -Wincomplete-record-updates -Wincomplete-uni-patterns -Wredundant-constraints -fno-warn-name-shadowing -O0 -j
ghc-prof-options: -fprof-auto
executable semantic
hs-source-dirs: app
main-is: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-N -A4m -n2m" -static -j -O
if flag(release)
ghc-options: -threaded -rtsopts "-with-rtsopts=-N -A4m -n2m" -static -j -O1 -j
else
ghc-options: -threaded -rtsopts "-with-rtsopts=-N -A4m -n2m" -static -j -O0 -j
cc-options: -DU_STATIC_IMPLEMENTATION=1
cpp-options: -DU_STATIC_IMPLEMENTATION=1
build-depends: base
@@ -268,6 +278,7 @@ test-suite test
, Test.Hspec.LeanCheck
build-depends: aeson
, array
, async
, base
, bifunctors
, bytestring
@@ -327,7 +338,7 @@ benchmark evaluation
hs-source-dirs: bench
type: exitcode-stdio-1.0
main-is: Main.hs
ghc-options: -threaded -rtsopts "-with-rtsopts=-N -A4m -n2m -T" -static -j -O
ghc-options: -threaded -rtsopts "-with-rtsopts=-N -A4m -n2m -T" -static -j -O1
cc-options: -DU_STATIC_IMPLEMENTATION=1
cpp-options: -DU_STATIC_IMPLEMENTATION=1
default-language: Haskell2010

View File

@@ -1,4 +1,4 @@
{-# LANGUAGE TypeFamilies, TypeOperators, UndecidableInstances #-}
{-# LANGUAGE ScopedTypeVariables, TypeFamilies, TypeOperators #-}
module Analysis.Abstract.Graph
( Graph(..)
, Vertex(..)
@@ -7,9 +7,8 @@ module Analysis.Abstract.Graph
, variableDefinition
, moduleInclusion
, packageInclusion
, packageGraph
, graphingTerms
, graphingLoadErrors
, graphingPackages
, graphingModules
, graphing
) where
@@ -17,7 +16,6 @@ module Analysis.Abstract.Graph
import Algebra.Graph.Export.Dot hiding (vertexName)
import Control.Abstract
import Data.Abstract.Address
import Data.Abstract.Evaluatable (LoadError (..))
import Data.Abstract.FreeVariables
import Data.Abstract.Module (Module(moduleInfo), ModuleInfo(..))
import Data.Abstract.Package (PackageInfo(..))
@@ -56,7 +54,6 @@ style = (defaultStyle (byteString . vertexName))
graphingTerms :: ( Element Syntax.Identifier syntax
, Members '[ Reader (Environment (Located location) value)
, Reader ModuleInfo
, Reader PackageInfo
, State (Environment (Located location) value)
, State (Graph Vertex)
] effects
@@ -72,34 +69,34 @@ graphingTerms recur term@(In _ syntax) = do
_ -> pure ()
recur term
-- | Add vertices to the graph for 'LoadError's.
graphingLoadErrors :: Members '[ Reader ModuleInfo
, Resumable (LoadError location value)
, State (Graph Vertex)
] effects
=> SubtermAlgebra (Base term) term (TermEvaluator term location value effects a)
-> SubtermAlgebra (Base term) term (TermEvaluator term location value effects a)
graphingLoadErrors recur term = TermEvaluator (runTermEvaluator (recur term) `resumeLoadError` (\ (ModuleNotFound name) -> moduleInclusion (Module (BC.pack name)) *> moduleNotFound name))
graphingPackages :: Members '[ Reader ModuleInfo
, Reader PackageInfo
, State (Graph Vertex)
] effects
=> SubtermAlgebra Module term (TermEvaluator term location value effects a)
-> SubtermAlgebra Module term (TermEvaluator term location value effects a)
graphingPackages recur m = packageInclusion (moduleVertex (moduleInfo m)) *> recur m
-- | Add vertices to the graph for evaluated modules and the packages containing them.
graphingModules :: Members '[ Reader ModuleInfo
, Reader PackageInfo
graphingModules :: forall term location value effects a
. Members '[ Modules location value
, Reader ModuleInfo
, State (Graph Vertex)
] effects
=> SubtermAlgebra Module term (TermEvaluator term location value effects a)
-> SubtermAlgebra Module term (TermEvaluator term location value effects a)
graphingModules recur m = do
let name = BC.pack (modulePath (moduleInfo m))
packageInclusion (Module name)
moduleInclusion (Module name)
recur m
graphingModules recur m = interpose @(Modules location value) pure (\ m yield -> case m of
Load path -> moduleInclusion (moduleVertex (ModuleInfo path)) >> send m >>= yield
Lookup path -> moduleInclusion (moduleVertex (ModuleInfo path)) >> send m >>= yield
_ -> send m >>= yield)
(recur m)
packageGraph :: PackageInfo -> Graph Vertex
packageGraph = vertex . Package . unName . packageName
packageVertex :: PackageInfo -> Vertex
packageVertex = Package . unName . packageName
moduleGraph :: ModuleInfo -> Graph Vertex
moduleGraph = vertex . Module . BC.pack . modulePath
moduleVertex :: ModuleInfo -> Vertex
moduleVertex = Module . BC.pack . modulePath
-- | Add an edge from the current package to the passed vertex.
packageInclusion :: ( Effectful m
@@ -112,7 +109,7 @@ packageInclusion :: ( Effectful m
-> m effects ()
packageInclusion v = do
p <- currentPackage
appendGraph (packageGraph p `connect` vertex v)
appendGraph (vertex (packageVertex p) `connect` vertex v)
-- | Add an edge from the current module to the passed vertex.
moduleInclusion :: ( Effectful m
@@ -125,7 +122,7 @@ moduleInclusion :: ( Effectful m
-> m effects ()
moduleInclusion v = do
m <- currentModule
appendGraph (moduleGraph m `connect` vertex v)
appendGraph (vertex (moduleVertex m) `connect` vertex v)
-- | Add an edge from the passed variable name to the module it originated within.
variableDefinition :: ( Member (Reader (Environment (Located location) value)) effects
@@ -135,7 +132,7 @@ variableDefinition :: ( Member (Reader (Environment (Located location) value)) e
=> Name
-> TermEvaluator term (Located location) value effects ()
variableDefinition name = do
graph <- maybe lowerBound (moduleGraph . locationModule . unAddress) <$> TermEvaluator (lookupEnv name)
graph <- maybe lowerBound (vertex . moduleVertex . locationModule . unAddress) <$> TermEvaluator (lookupEnv name)
appendGraph (vertex (Variable (unName name)) `connect` graph)
appendGraph :: (Effectful m, Member (State (Graph Vertex)) effects) => Graph Vertex -> m effects ()
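
[Note: the refactor above replaces packageGraph/moduleGraph (which returned whole graphs) with packageVertex/moduleVertex (which return bare vertices), so the inclusion helpers now build the singleton graphs themselves before connecting them. Below is a minimal, self-contained sketch of that vertex/connect pattern using the algebraic-graphs API, with an illustrative stand-in for the real Vertex type.]

import Algebra.Graph (Graph, connect, edgeList, vertex)

-- Illustrative stand-in for the module's real Vertex type.
data Vertex = Package String | Module String | Variable String
  deriving (Eq, Ord, Show)

-- Mirrors `appendGraph (vertex (packageVertex p) `connect` vertex v)`:
-- connecting two singleton graphs yields a single directed edge.
inclusionEdge :: Vertex -> Vertex -> Graph Vertex
inclusionEdge from to = vertex from `connect` vertex to

main :: IO ()
main = print (edgeList (inclusionEdge (Package "semantic") (Module "app/Main.hs")))
-- [(Package "semantic",Module "app/Main.hs")]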

View File

@@ -1,10 +1,14 @@
{-# LANGUAGE DataKinds, ScopedTypeVariables, TypeOperators #-}
{-# LANGUAGE DataKinds, GADTs, ScopedTypeVariables, TypeOperators #-}
module Parsing.TreeSitter
( parseToAST
( Timeout (..)
, parseToAST
) where
import Prologue
import Data.AST (AST, Node(Node))
import Control.Concurrent.Async
import Control.Monad
import Data.AST (AST, Node (Node))
import Data.Blob
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Data.Range
@@ -12,24 +16,61 @@ import Data.Source
import Data.Span
import Data.Term
import Foreign
import Foreign.C.Types (CBool(..))
import Foreign.C.Types (CBool (..))
import Foreign.Marshal.Array (allocaArray)
import qualified TreeSitter.Tree as TS
import qualified TreeSitter.Parser as TS
import qualified TreeSitter.Node as TS
import System.Timeout
import qualified TreeSitter.Language as TS
import qualified TreeSitter.Node as TS
import qualified TreeSitter.Parser as TS
import qualified TreeSitter.Tree as TS
newtype Timeout = Milliseconds Int
-- Change this to putStrLn if you want to debug the locking/cancellation code.
-- TODO: Someday we should run this all in Eff so that we can 'trace'.
dbg :: String -> IO ()
dbg = const (pure ())
runParser :: (Enum grammar, Bounded grammar) => Ptr TS.Parser -> Source -> IO (Maybe (AST [] grammar))
runParser parser blobSource = unsafeUseAsCStringLen (sourceBytes blobSource) $ \ (source, len) ->
alloca (\ rootPtr -> do
let acquire = do
dbg "Starting parse"
-- Change this to TS.ts_parser_loop_until_cancelled if you want to test out cancellation
TS.ts_parser_parse_string parser nullPtr source len
let release t
| t == nullPtr = dbg "Parse failed"
| otherwise = dbg "Parse completed" *> TS.ts_tree_delete t
let go treePtr = do
if treePtr == nullPtr
then pure Nothing
else do
TS.ts_tree_root_node_p treePtr rootPtr
fmap Just (peek rootPtr >>= anaM toAST)
bracket acquire release go)
-- | Parse 'Source' with the given 'TS.Language' and return its AST.
parseToAST :: (Bounded grammar, Enum grammar) => Ptr TS.Language -> Blob -> IO (AST [] grammar)
parseToAST language Blob{..} = bracket TS.ts_parser_new TS.ts_parser_delete $ \ parser -> do
-- Returns Nothing if the operation timed out.
parseToAST :: (Bounded grammar, Enum grammar) => Timeout -> Ptr TS.Language -> Blob -> IO (Maybe (AST [] grammar))
parseToAST (Milliseconds s) language Blob{..} = bracket TS.ts_parser_new TS.ts_parser_delete $ \ parser -> do
let parserTimeout = s * 1000
TS.ts_parser_halt_on_error parser (CBool 1)
TS.ts_parser_set_language parser language
unsafeUseAsCStringLen (sourceBytes blobSource) $ \ (source, len) -> do
alloca (\ rootPtr -> do
bracket (TS.ts_parser_parse_string parser nullPtr source len) TS.ts_tree_delete $ \ tree -> do
TS.ts_tree_root_node_p tree rootPtr
peek rootPtr >>= anaM toAST
)
parsing <- async (runParser parser blobSource)
-- Kick the parser off asynchronously and wait according to the provided timeout.
res <- timeout parserTimeout (wait parsing)
-- If we get a Nothing back, then we failed, so we need to disable the parser, which
-- will let the call to runParser terminate, cleaning up appropriately
when (isNothing res) (TS.ts_parser_set_enabled parser (CBool 0))
pure (join res)
toAST :: forall grammar . (Bounded grammar, Enum grammar) => TS.Node -> IO (Base (AST [] grammar) TS.Node)
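
[Note: the shape of the new timeout handling, stripped of the FFI details, is: run the blocking work under 'async', bound the 'wait' with 'timeout', and if the timeout fires flip a flag the worker observes so it can finish on its own (the role ts_parser_set_enabled plays above). A self-contained sketch under those simplifications follows.]

import Control.Concurrent.Async (async, wait)
import Control.Monad (join, when)
import Data.IORef (IORef, newIORef, readIORef, writeIORef)
import Data.Maybe (isNothing)
import System.Timeout (timeout)

-- Stand-in for the C-side cancellation flag: the worker polls it between steps.
workUntilDisabled :: IORef Bool -> Int -> IO (Maybe Int)
workUntilDisabled enabled n = do
  ok <- readIORef enabled
  if not ok
    then pure Nothing                       -- cancelled: give up cleanly
    else if n >= 5000000
      then pure (Just n)                    -- finished the work
      else workUntilDisabled enabled (n + 1)

parseWithTimeout :: Int -> IO (Maybe Int)
parseWithTimeout micros = do
  enabled <- newIORef True
  worker  <- async (workUntilDisabled enabled 0)
  -- Wait for the worker, but only up to the timeout.
  res <- timeout micros (wait worker)
  -- Mirrors `when (isNothing res) (TS.ts_parser_set_enabled parser (CBool 0))`:
  -- on timeout, tell the worker to stop so it can clean up on its own.
  when (isNothing res) (writeIORef enabled False)
  -- res :: Maybe (Maybe Int); join collapses "timed out" and "worker gave up".
  pure (join res)

main :: IO ()
main = parseWithTimeout 1000 >>= print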

View File

@@ -7,7 +7,6 @@ module Semantic.CLI
) where
import Data.Project
import Data.Language (Language)
import Data.List (intercalate)
import Data.List.Split (splitWhen)
import Data.Version (showVersion)
@@ -18,7 +17,7 @@ import Prologue
import Rendering.Renderer
import qualified Semantic.AST as AST
import qualified Semantic.Diff as Diff
import Semantic.Graph as Semantic (Graph, GraphType(..), Vertex, graph, style)
import qualified Semantic.Graph as Graph
import Semantic.IO as IO
import qualified Semantic.Log as Log
import qualified Semantic.Parse as Parse
@@ -29,9 +28,6 @@ import Text.Read
main :: IO ()
main = customExecParser (prefs showHelpOnEmpty) arguments >>= uncurry Task.runTaskWithOptions
runGraph :: Semantic.GraphType -> Maybe FilePath -> FilePath -> Language -> [FilePath] -> Task.TaskEff (Graph Vertex)
runGraph graphType rootDir dir excludeDirs = Semantic.graph graphType <=< Task.readProject rootDir dir excludeDirs
-- | A parser for the application's command-line arguments.
--
-- Returns a 'Task' to read the input, run the requested operation, and write the output to the specified output path or stdout.
@@ -88,14 +84,16 @@ arguments = info (version <*> helper <*> ((,) <$> optionsParser <*> argumentsPar
graphCommand = command "graph" (info graphArgumentsParser (progDesc "Compute a graph for a directory or entry point"))
graphArgumentsParser = do
graphType <- flag ImportGraph ImportGraph (long "imports" <> help "Compute an import graph (default)")
<|> flag' CallGraph (long "calls" <> help "Compute a call graph")
graphType <- flag Graph.ImportGraph Graph.ImportGraph (long "imports" <> help "Compute an import graph (default)")
<|> flag' Graph.CallGraph (long "calls" <> help "Compute a call graph")
let style = Graph.style
includePackages <- switch (long "packages" <> help "Include a vertex for the package, with edges from it to each module")
serializer <- flag (Task.serialize (DOT style)) (Task.serialize (DOT style)) (long "dot" <> help "Output in DOT graph format (default)")
<|> flag' (Task.serialize JSON) (long "json" <> help "Output JSON graph")
rootDir <- rootDirectoryOption
excludeDirs <- excludeDirsOption
File{..} <- argument filePathReader (metavar "DIR:LANGUAGE | FILE")
pure $ runGraph graphType rootDir filePath (fromJust fileLanguage) excludeDirs >>= serializer
pure $ Task.readProject rootDir filePath (fromJust fileLanguage) excludeDirs >>= Graph.runGraph graphType includePackages >>= serializer
rootDirectoryOption = optional (strOption (long "root" <> help "Root directory of project. Optional, defaults to entry file/directory." <> metavar "DIR"))
excludeDirsOption = many (strOption (long "exclude-dir" <> help "Exclude a directory (e.g. vendor)" <> metavar "DIR"))
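
[Note: the new --packages switch slots into the existing optparse-applicative pattern: 'flag' with identical default and active values makes --imports a documented no-op default, 'flag'' supplies the alternative, and 'switch' yields a Bool. A minimal sketch of that pattern with an illustrative options record (not the project's real CLI types) follows.]

import Control.Applicative ((<**>), (<|>))
import Options.Applicative

data GraphType = ImportGraph | CallGraph deriving Show

-- Illustrative record; the real parser builds a Task pipeline instead.
data GraphOptions = GraphOptions { graphType :: GraphType, includePackages :: Bool }
  deriving Show

graphOptions :: Parser GraphOptions
graphOptions = GraphOptions
  <$> (    flag  ImportGraph ImportGraph (long "imports" <> help "Compute an import graph (default)")
       <|> flag' CallGraph               (long "calls"   <> help "Compute a call graph"))
  <*> switch (long "packages" <> help "Include a vertex for the package")

main :: IO ()
main = execParser (info (graphOptions <**> helper) fullDesc) >>= print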

View File

@@ -1,6 +1,6 @@
{-# LANGUAGE GADTs, TypeOperators #-}
module Semantic.Graph
( graph
( runGraph
, GraphType(..)
, Graph
, Vertex
@@ -39,18 +39,20 @@ import Semantic.Task as Task
data GraphType = ImportGraph | CallGraph
graph :: Members '[Distribute WrappedTask, Files, Resolution, Task, Exc SomeException, Telemetry, Trace] effs
=> GraphType
-> Project
-> Eff effs (Graph Vertex)
graph graphType project
runGraph :: Members '[Distribute WrappedTask, Files, Resolution, Task, Exc SomeException, Telemetry, Trace] effs
=> GraphType
-> Bool
-> Project
-> Eff effs (Graph Vertex)
runGraph graphType includePackages project
| SomeAnalysisParser parser prelude <- someAnalysisParser
(Proxy :: Proxy '[ Evaluatable, Declarations1, FreeVariables1, Functor, Eq1, Ord1, Show1 ]) (projectLanguage project) = do
package <- parsePackage parser prelude project
let analyzeTerm = case graphType of
let analyzeTerm = withTermSpans . case graphType of
ImportGraph -> id
CallGraph -> graphingTerms
analyze runGraphAnalysis (evaluatePackageWith graphingModules (withTermSpans . graphingLoadErrors . analyzeTerm) package) >>= extractGraph
analyzeModule = (if includePackages then graphingPackages else id) . graphingModules
analyze runGraphAnalysis (evaluatePackageWith analyzeModule analyzeTerm package) >>= extractGraph
where extractGraph result = case result of
(Right ((_, graph), _), _) -> pure graph
_ -> Task.throwError (toException (Exc.ErrorCall ("graphImports: import graph rendering failed " <> show result)))
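
[Note: the wiring in runGraph treats each analysis as a wrapper around the recursive step, so optional analyses compose with plain function composition and drop out as 'id'. A toy sketch of just that composition shape; the types are deliberately simplified, whereas the real code composes SubtermAlgebras over an effect stack.]

type Step = Int -> String      -- stand-in for the recursive evaluator
type Analysis = Step -> Step   -- stand-in for an algebra transformer

withTermSpans', graphingTerms', graphingPackages', graphingModules' :: Analysis
withTermSpans'    recur n = "span("    <> recur n <> ")"
graphingTerms'    recur n = "terms("   <> recur n <> ")"
graphingPackages' recur n = "package(" <> recur n <> ")"
graphingModules'  recur n = "module("  <> recur n <> ")"

-- Mirrors `analyzeTerm = withTermSpans . case graphType of ...` above.
analyzeTerm' :: Bool -> Analysis
analyzeTerm' callGraph = withTermSpans' . (if callGraph then graphingTerms' else id)

-- Mirrors `analyzeModule = (if includePackages then graphingPackages else id) . graphingModules`.
analyzeModule' :: Bool -> Analysis
analyzeModule' includePackages =
  (if includePackages then graphingPackages' else id) . graphingModules'

main :: IO ()
main = do
  putStrLn (analyzeTerm'   True  show 42)   -- span(terms(42))
  putStrLn (analyzeModule' False show 42)   -- module(42)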

View File

@@ -180,12 +180,21 @@ runTaskF = interpret $ \ task -> case task of
logError :: Member Telemetry effs => Options -> Level -> Blob -> Error.Error String -> [(String, String)] -> Eff effs ()
logError Options{..} level blob err = writeLog level (Error.formatError optionsPrintSource (optionsIsTerminal && optionsEnableColour) blob err)
data ParserCancelled = ParserTimedOut deriving (Show, Typeable)
instance Exception ParserCancelled
defaultTimeout :: Timeout
defaultTimeout = Milliseconds 5000
-- | Parse a 'Blob' in 'IO'.
runParser :: Members '[Reader Options, Telemetry, Exc SomeException, IO, Trace] effs => Blob -> Parser term -> Eff effs term
runParser blob@Blob{..} parser = case parser of
ASTParser language ->
time "parse.tree_sitter_ast_parse" languageTag $
IO.rethrowing (parseToAST language blob)
IO.rethrowing (parseToAST defaultTimeout language blob)
>>= maybeM (throwError (SomeException ParserTimedOut))
AssignmentParser parser assignment -> do
ast <- runParser blob parser `catchError` \ (SomeException err) -> do
writeStat (Stat.increment "parse.parse_failures" languageTag)
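
[Note: the ASTParser case above turns the timed-out Nothing into an exception via maybeM and the Exc effect. The same idea in plain IO, for illustration only.]

import Control.Exception (Exception, throwIO)

data ParserCancelled = ParserTimedOut deriving Show
instance Exception ParserCancelled

-- `>>= orTimedOut` plays the role of `>>= maybeM (throwError ...)` above,
-- but with an ordinary IO exception instead of the Exc effect.
orTimedOut :: Maybe a -> IO a
orTimedOut = maybe (throwIO ParserTimedOut) pure

main :: IO ()
main = do
  ok <- pure (Just "parsed") >>= orTimedOut
  putStrLn ok
  -- pure Nothing >>= orTimedOut   -- would throw ParserTimedOut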

View File

@@ -1,9 +1,20 @@
module Semantic.IO.Spec (spec) where
import Prelude hiding (readFile)
import Control.Concurrent.Async
import Foreign
import Foreign.C.Types (CBool (..))
import Semantic.IO
import System.Exit (ExitCode(..))
import System.IO (IOMode(..))
import System.Exit (ExitCode (..))
import System.IO (IOMode (..))
import Parsing.TreeSitter
import System.Timeout
import qualified TreeSitter.Language as TS
import qualified TreeSitter.Node as TS
import qualified TreeSitter.Parser as TS
import qualified TreeSitter.Tree as TS
import SpecHelpers
@@ -64,6 +75,24 @@ spec = parallel $ do
h <- openFileForReading "test/fixtures/cli/diff-null-both-sides.json"
readBlobPairsFromHandle h `shouldThrow` (== ExitFailure 1)
describe "cancelable parsing" $
it "should be cancelable asynchronously" $ do
p <- TS.ts_parser_new
churn <- async $ do
TS.ts_parser_loop_until_cancelled p nullPtr nullPtr 0
pure True
res <- timeout 500 (wait churn)
res `shouldBe` Nothing
TS.ts_parser_set_enabled p (CBool 0)
done <- timeout 500 (wait churn)
done `shouldBe` (Just True)
TS.ts_parser_delete p
describe "readBlobsFromHandle" $ do
it "returns blobs for valid JSON encoded parse input" $ do
h <- openFileForReading "test/fixtures/cli/parse.json"

@@ -1 +1 @@
Subproject commit 897785918ce87f51e541777978f33de09619dcc7
Subproject commit 4d08262bc306fe8e233feff4714a9c77b83edd77