Mirror of https://github.com/github/semantic.git (synced 2024-11-22 23:29:37 +03:00)

Commit 2fc456568d: Merge remote-tracking branch 'origin/master' into unicode-math

.github/workflows/haskell.yml (vendored, 2 changes)
@@ -54,7 +54,7 @@ jobs:
    - name: Build & test
      run: |
        cabal v2-build --project-file=cabal.project.ci
        cabal v2-build --project-file=cabal.project.ci semantic:exe:semantic
        cabal v2-run --project-file=cabal.project.ci semantic:test
        cabal v2-run --project-file=cabal.project.ci semantic-codeql:test
        cabal v2-run --project-file=cabal.project.ci semantic-core:test
@@ -66,7 +66,7 @@ We use `cabal's` [Nix-style local builds][nix] for development. To get started q
git clone git@github.com:github/semantic.git
cd semantic
script/bootstrap
cabal v2-build
cabal v2-build all
cabal v2-test
cabal v2-run semantic -- --help
```
@@ -1,7 +1,7 @@
-- ATTENTION: care must be taken to keep this file in sync with cabal.project.ci and script/ghci-flags. If you add a package here, add it there (and add a package stanza with ghc-options to enable errors in CI at the bottom of that file).

-- Local packages
packages: .
packages: semantic
          semantic-analysis
          semantic-ast
          semantic-codeql
@@ -1,7 +1,7 @@
-- ATTENTION: care must be taken to keep this file in sync with cabal.project and script/ghci-flags. If you add a package here, add it there (and add a package stanza with ghc-options to enable errors in CI at the bottom of this file).

-- Local packages
packages: .
packages: semantic
          semantic-analysis
          semantic-ast
          semantic-codeql
@@ -48,6 +48,10 @@ function flags {
  # .hs source dirs
  # TODO: would be nice to figure this out from cabal.project & the .cabal files
  echo "-isemantic/app"
  echo "-isemantic/src"
  echo "-isemantic/bench"
  echo "-isemantic/test"
  echo "-isemantic-analysis/src"
  echo "-isemantic-ast/src"
  echo "-isemantic-codeql/src"
@@ -68,10 +72,6 @@ function flags {
  echo "-isemantic-tsx/src"
  echo "-isemantic-typescript/src"
  echo "-isemantic-tags/src"
  echo "-iapp"
  echo "-isrc"
  echo "-ibench"
  echo "-itest"

  # disable automatic selection of packages
  echo "-hide-all-packages"
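The TODO in the hunk above (deriving the `-i` flags from cabal.project and the `.cabal` files instead of hard-coding them) could be prototyped along the following lines. This is a hypothetical sketch, not part of the commit: the module, the naive one-line `hs-source-dirs` assumption, and the flag format are all mine.

```haskell
-- Hypothetical sketch (not part of this commit): derive GHCi "-i" flags from the
-- hs-source-dirs fields of the checked-in .cabal files instead of hard-coding them.
module Main (main) where

import Data.Char (isSpace)
import Data.List (stripPrefix)
import System.Environment (getArgs)
import System.FilePath (takeDirectory, (</>))

-- Naively scan a .cabal file for "hs-source-dirs:" lines and prefix each listed
-- directory with the package directory, mirroring flags like "-isemantic-ast/src".
sourceDirFlags :: FilePath -> String -> [String]
sourceDirFlags cabalFile contents =
  [ "-i" ++ (takeDirectory cabalFile </> dir)
  | line <- map (dropWhile isSpace) (lines contents)
  , Just rest <- [stripPrefix "hs-source-dirs:" line]
  , dir <- words rest
  ]

main :: IO ()
main = do
  cabalFiles <- getArgs   -- e.g. semantic/semantic.cabal semantic-ast/semantic-ast.cabal
  contentsOf <- mapM readFile cabalFiles
  mapM_ putStrLn (concat (zipWith sourceDirFlags cabalFiles contentsOf))
```

Run with `runghc` against the `.cabal` files listed in cabal.project; multi-line `hs-source-dirs` fields would need real parsing (e.g. via the Cabal library).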
@@ -15,7 +15,7 @@ PROFILE_DIR="$PROFILES_DIR/$TODAY/$NOW-$CURRENT_BRANCH-$HEAD_SHA/"
OUTFILE="$PROFILE_DIR/profile.out.log"
ERRFILE="$PROFILE_DIR/profile.err.log"

cabal v2-build
cabal v2-build all

mkdir -p "$PROFILE_DIR"
@@ -54,13 +54,11 @@ common dependencies
  , bytestring ^>= 0.10.8.2
  , containers ^>= 0.6.0.1
  , directory ^>= 1.3.3.0
  , fastsum ^>= 0.1.1.1
  , fused-effects ^>= 1
  , fused-effects-exceptions ^>= 1
  , fused-effects-resumable ^>= 0.1
  , hashable >= 1.2.7 && < 1.4
  , tree-sitter ^>= 0.9.0.1
  , mtl ^>= 2.2.2
  , network ^>= 2.8.0.0
  , pathtype ^>= 0.8.1
  , process ^>= 1.6.3.0
@@ -91,7 +89,6 @@ library
  , Control.Effect.Sum.Project
  , Control.Effect.Timeout
  -- General datatype definitions & generic algorithms
  , Data.AST
  , Data.Blob
  , Data.Blob.IO
  , Data.Duration
@@ -106,9 +103,6 @@ library
  , Data.Map.Monoidal
  , Data.Maybe.Exts
  , Data.Semigroup.App
  , Data.Scientific.Exts
  , Data.Term
  , Numeric.Exts
  -- Parser glue
  , Parsing.Parser
  , Parsing.TreeSitter
@@ -153,16 +147,13 @@ library
  , haskeline ^>= 0.7.5.0
  , hostname ^>= 1.0
  , hscolour ^>= 1.24.4
  , kdt ^>= 0.2.4
  , lens >= 4.17 && < 4.19
  , mersenne-random-pure64 ^>= 0.2.2.0
  , network-uri ^>= 2.6.1.0
  , optparse-applicative >= 0.14.3 && < 0.16
  , parallel ^>= 3.2.2.0
  , parsers ^>= 0.12.9
  , prettyprinter >= 1.2 && < 2
  , pretty-show ^>= 1.9.5
  , profunctors ^>= 5.3
  , proto-lens >= 0.5 && < 0.7
  , reducers ^>= 3.12.3
  , semantic-go ^>= 0
@@ -209,16 +200,12 @@ test-suite test
  type: exitcode-stdio-1.0
  hs-source-dirs: test
  main-is: Spec.hs
  ghc-options: -Werror
  other-modules: Data.Functor.Classes.Generic.Spec
               , Data.Functor.Listable
               , Data.Graph.Spec
               , Data.Language.Spec
               , Data.Scientific.Spec
               , Data.Semigroup.App.Spec
               , Integration.Spec
               , Numeric.Spec
               , Parsing.Spec
               , Semantic.Spec
               , Semantic.CLI.Spec
               , Semantic.IO.Spec
@@ -59,10 +59,6 @@ runParser ::
  -> Parser term
  -> m term
runParser blob@Blob{..} parser = case parser of
  ASTParser language ->
    time "parse.tree_sitter_ast_parse" languageTag $ do
      config <- asks config
      executeParserAction (parseToAST (configTreeSitterParseTimeout config) language blob)

  UnmarshalParser language -> do
    (time "parse.tree_sitter_precise_ast_parse" languageTag $ do
@@ -50,10 +50,6 @@ runParser
  -> Parser term
  -> m term
runParser timeout blob@Blob{..} parser = case parser of
  ASTParser language ->
    parseToAST timeout language blob
      >>= either (throwError . SomeException) pure

  UnmarshalParser language ->
    parseToPreciseAST timeout timeout language blob
      >>= either (throwError . SomeException) pure
@@ -29,7 +29,6 @@ module Parsing.Parser
  ) where

import AST.Unmarshal
import Data.AST
import Data.Language
import Data.Map (Map)
import qualified Data.Map as Map
@@ -49,8 +48,6 @@ import qualified TreeSitter.Language as TS (Language)

-- | A parser from 'Source' onto some term type.
data Parser term where
  -- | A parser producing 'AST' using a 'TS.Language'.
  ASTParser :: (Bounded grammar, Enum grammar, Show grammar) => Ptr TS.Language -> Parser (AST grammar)
  -- | A parser 'Unmarshal'ing to a precise AST type using a 'TS.Language'.
  UnmarshalParser :: Unmarshal t => Ptr TS.Language -> Parser (t Loc)
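For readers unfamiliar with the GADT above: each `Parser` constructor fixes the term type index (`AST grammar` for `ASTParser`, `t Loc` for `UnmarshalParser`), which is what lets `runParser` return differently typed results per constructor. A minimal standalone illustration of that pattern follows; the `Demo` type and its values are made up for this sketch.

```haskell
{-# LANGUAGE GADTs #-}
-- Minimal standalone sketch of the GADT pattern used by 'Parser': each
-- constructor pins down the result type, so case analysis refines it.
module Main (main) where

data Demo result where
  IntDemo  :: Demo Int
  TextDemo :: String -> Demo String

runDemo :: Demo result -> result
runDemo IntDemo        = 42    -- here result ~ Int
runDemo (TextDemo str) = str   -- here result ~ String

main :: IO ()
main = do
  print (runDemo IntDemo)
  putStrLn (runDemo (TextDemo "precise parsers return t Loc"))
```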
@@ -9,26 +9,20 @@
module Parsing.TreeSitter
( TSParseException (..)
, Duration(..)
, parseToAST
, parseToPreciseAST
) where

import Control.Carrier.Reader
import Control.Exception as Exc
import Control.Monad
import Control.Monad.IO.Class
import Data.Functor.Foldable
import Foreign
import GHC.Generics

import Data.AST (AST, Node (Node))
import Data.Blob
import Data.Duration
import Data.Maybe.Exts
import Data.Term
import Source.Loc
import qualified Source.Source as Source
import Source.Span
import qualified System.Timeout as System

import qualified TreeSitter.Cursor as TS
@@ -45,18 +39,6 @@ data TSParseException
  | UnmarshalFailure String
  deriving (Eq, Show, Generic)

-- | Parse a 'Blob' with the given 'TS.Language' and return its AST.
-- Returns 'Nothing' if the operation timed out.
parseToAST :: ( Bounded grammar
              , Enum grammar
              , MonadIO m
              )
           => Duration
           -> Ptr TS.Language
           -> Blob
           -> m (Either TSParseException (AST grammar))
parseToAST parseTimeout language blob = runParse parseTimeout language blob (anaM toAST <=< peek)

parseToPreciseAST
  :: ( MonadIO m
     , TS.Unmarshal t
@@ -102,22 +84,3 @@ runParse parseTimeout language Blob{..} action =
        TS.withRootNode treePtr action
      else
        Exc.throw IncompatibleVersions

toAST :: forall grammar . (Bounded grammar, Enum grammar) => TS.Node -> IO (Base (AST grammar) TS.Node)
toAST node@TS.Node{..} = do
  let count = fromIntegral nodeChildCount
  children <- allocaArray count $ \ childNodesPtr -> do
    _ <- with nodeTSNode (`TS.ts_node_copy_child_nodes` childNodesPtr)
    peekArray count childNodesPtr
  pure $! In (Node (toEnum (min (fromIntegral nodeSymbol) (fromEnum (maxBound :: grammar)))) (Loc (nodeRange node) (nodeSpan node))) children

anaM :: (Corecursive t, Monad m, Traversable (Base t)) => (a -> m (Base t a)) -> a -> m t
anaM g = a where a = pure . embed <=< traverse a <=< g


nodeRange :: TS.Node -> Range
nodeRange node = Range (fromIntegral (TS.nodeStartByte node)) (fromIntegral (TS.nodeEndByte node))

nodeSpan :: TS.Node -> Span
nodeSpan node = TS.nodeStartPoint node `seq` TS.nodeEndPoint node `seq` Span (pointPos (TS.nodeStartPoint node)) (pointPos (TS.nodeEndPoint node))
  where pointPos TS.TSPoint{..} = pointRow `seq` pointColumn `seq` Pos (1 + fromIntegral pointRow) (1 + fromIntegral pointColumn)
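The `anaM` above is a monadic anamorphism: it unfolds a seed into a tree one layer at a time, sequencing the effects of the seed-expanding function (here, peeking tree-sitter nodes in IO). A self-contained sketch of how it behaves, using recursion-schemes' list base functor; the countdown example is made up and assumes `ListF` is exported from Data.Functor.Foldable, as in recent recursion-schemes releases.

```haskell
-- Standalone sketch of the monadic anamorphism above: unfold a countdown list
-- in IO, logging each seed as it is expanded.
module Main (main) where

import Control.Monad ((<=<))
import Data.Functor.Foldable (Base, Corecursive (embed), ListF (..))

anaM :: (Corecursive t, Monad m, Traversable (Base t)) => (a -> m (Base t a)) -> a -> m t
anaM g = a where a = pure . embed <=< traverse a <=< g

-- One unfold step: 0 ends the list; anything else emits the seed and recurses on n - 1.
countdown :: Int -> IO (ListF Int Int)
countdown 0 = pure Nil
countdown n = do
  putStrLn ("expanding seed " ++ show n)
  pure (Cons n (n - 1))

main :: IO ()
main = do
  xs <- anaM countdown 3 :: IO [Int]
  print xs  -- [3,2,1]
```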
@@ -7,9 +7,7 @@
{-# LANGUAGE ViewPatterns #-}
{-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-exported-signatures -Wno-partial-type-signatures -O0 #-}
module Semantic.Util
( mergeErrors
, reassociate
, parseFile
( parseFile
, parseFileQuiet
) where
@@ -17,12 +15,10 @@ import Prelude hiding (readFile)

import Analysis.File
import Control.Carrier.Parse.Simple
import Control.Carrier.Resumable.Either (SomeError (..))
import Control.Effect.Reader
import Control.Exception hiding (evaluate)
import Control.Monad
import qualified Data.Language as Language
import Data.Sum
import Parsing.Parser
import Semantic.Config
import Semantic.Task
@@ -40,9 +36,3 @@ fileForPath (Path.absRel -> p) = File p (point (Pos 1 1)) (Language.forPath p)
runTask', runTaskQuiet :: ParseC TaskC a -> IO a
runTask' task = runTaskWithOptions debugOptions (asks configTreeSitterParseTimeout >>= \ timeout -> runParse timeout task) >>= either (die . displayException) pure
runTaskQuiet task = runTaskWithOptions defaultOptions (asks configTreeSitterParseTimeout >>= \ timeout -> runParse timeout task) >>= either (die . displayException) pure

mergeErrors :: Either (SomeError (Sum errs)) (Either (SomeError err) result) -> Either (SomeError (Sum (err ': errs))) result
mergeErrors = either (\ (SomeError sum) -> Left (SomeError (weaken sum))) (either (\ (SomeError err) -> Left (SomeError (inject err))) Right)

reassociate :: Either (SomeError err1) (Either (SomeError err2) (Either (SomeError err3) (Either (SomeError err4) (Either (SomeError err5) (Either (SomeError err6) (Either (SomeError err7) (Either (SomeError err8) result))))))) -> Either (SomeError (Sum '[err8, err7, err6, err5, err4, err3, err2, err1])) result
reassociate = mergeErrors . mergeErrors . mergeErrors . mergeErrors . mergeErrors . mergeErrors . mergeErrors . mergeErrors . Right
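`mergeErrors` and `reassociate` above flatten a tower of nested `Either`s into a single `Either` whose error side is fastsum's open `Sum`, using `weaken`/`inject` to place each error into the combined type. A simplified sketch of the same shape with a plain, closed sum type; `Combined` and `merge` are made up for illustration.

```haskell
-- Simplified sketch of the error-flattening idea behind 'mergeErrors': fold one
-- nested Either layer into a single combined error type.
module Main (main) where

data Combined e1 e2 = FirstErr e1 | SecondErr e2
  deriving Show

merge :: Either e1 (Either e2 result) -> Either (Combined e1 e2) result
merge = either (Left . FirstErr) (either (Left . SecondErr) Right)

main :: IO ()
main = do
  print (merge (Right (Right True) :: Either String (Either Int Bool)))  -- Right True
  print (merge (Right (Left 404)   :: Either String (Either Int Bool)))  -- Left (SecondErr 404)
  print (merge (Left "outer"       :: Either String (Either Int Bool)))  -- Left (FirstErr "outer")
```

`reassociate` is then just eight applications of this step, one per error layer.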
@@ -20,9 +20,7 @@ import Data.Bifunctor.Join
import Data.Edit
import qualified Data.Language as Language
import Data.List.NonEmpty
import Data.Term
import Data.Text as T (Text, pack)
import Data.Sum
import Source.Loc
import Source.Span
import Test.LeanCheck
@@ -34,11 +32,6 @@ class Listable1 l where
  -- | The tiers for @l :: * -> *@, parameterized by the tiers for @a :: *@.
  liftTiers :: [Tier a] -> [Tier (l a)]

-- | A suitable definition of 'tiers' for 'Listable1' type constructors parameterized by 'Listable' types.
tiers1 :: (Listable a, Listable1 l) => [Tier (l a)]
tiers1 = liftTiers tiers


-- | Lifting of 'Listable' to @* -> * -> *@.
class Listable2 l where
  -- | The tiers for @l :: * -> * -> *@, parameterized by the tiers for @a :: *@ & @b :: *@.
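`Listable1` and `tiers1` above lift LeanCheck's size-tiered enumeration from concrete types to type constructors. For contrast, an ordinary `Listable` instance looks like the following standalone sketch; the `Shape` type is made up, while `cons1`, `cons2`, and `\/` are LeanCheck combinators.

```haskell
-- Standalone sketch of the plain LeanCheck 'Listable' pattern that
-- 'Listable1'/'tiers1' generalise to higher-kinded types.
module Main (main) where

import Test.LeanCheck

data Shape = Circle Int | Rect Int Int
  deriving Show

instance Listable Shape where
  -- Enumerate values in tiers of increasing size, one branch per constructor.
  tiers = cons1 Circle \/ cons2 Rect

main :: IO ()
main = mapM_ print (take 8 (list :: [Shape]))
```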
@@ -85,22 +78,6 @@ instance Listable1 NonEmpty where
instance Listable2 p => Listable1 (Join p) where
  liftTiers tiers = liftCons1 (liftTiers2 tiers tiers) Join

instance Listable1 f => Listable2 (TermF f) where
  liftTiers2 annotationTiers recurTiers = liftCons2 annotationTiers (liftTiers recurTiers) In

instance (Listable1 f, Listable a) => Listable1 (TermF f a) where
  liftTiers = liftTiers2 tiers

instance (Listable1 f, Listable a, Listable b) => Listable (TermF f a b) where
  tiers = tiers1

instance Listable1 f => Listable1 (Term f) where
  liftTiers annotationTiers = go
    where go = liftCons1 (liftTiers2 annotationTiers go) Term

instance (Listable1 f, Listable a) => Listable (Term f a) where
  tiers = tiers1

instance Listable2 Edit where
  liftTiers2 t1 t2 = liftCons1 t2 Insert \/ liftCons1 t1 Delete \/ liftCons2 t1 t2 Compare
@@ -108,16 +85,6 @@ instance (Listable a, Listable b) => Listable (Edit a b) where
  tiers = tiers2


instance (Listable1 f, Listable1 (Sum (g ': fs))) => Listable1 (Sum (f ': g ': fs)) where
  liftTiers tiers = (inject `mapT` ((liftTiers :: [Tier a] -> [Tier (f a)]) tiers)) \/ (weaken `mapT` ((liftTiers :: [Tier a] -> [Tier (Sum (g ': fs) a)]) tiers))

instance Listable1 f => Listable1 (Sum '[f]) where
  liftTiers tiers = inject `mapT` ((liftTiers :: [Tier a] -> [Tier (f a)]) tiers)

instance (Listable1 (Sum fs), Listable a) => Listable (Sum fs a) where
  tiers = tiers1


instance Listable Name.Name where
  tiers = cons1 Name.name
@@ -124,6 +124,7 @@ tsxSkips = Path.relFile <$>

typescriptSkips :: [Path.RelFile]
typescriptSkips = Path.relFile <$>
  [ "npm/node_modules/slide/lib/async-map-ordered.js"
  , "npm/node_modules/request/node_modules/har-validator/node_modules/ajv/dist/regenerator.min.js"
  ]

buildExamples :: TaskSession -> LanguageExample -> Path.RelDir -> IO Tasty.TestTree
semantic/test/Generators.hs (new file, 13 lines)
@@ -0,0 +1,13 @@
{-# LANGUAGE OverloadedStrings #-}
module Generators
( source
) where

import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Source.Source

source :: MonadGen m => Hedgehog.Range Int -> m Source.Source.Source
source r = Gen.frequency [ (1, empty), (20, nonEmpty) ]
  where empty    = pure mempty
        nonEmpty = Source.Source.fromUTF8 <$> Gen.utf8 r (Gen.frequency [ (1, pure '\r'), (1, pure '\n'), (20, Gen.unicode) ])
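A plausible use of the new `source` generator is a Hedgehog property over `Source` values, e.g. checking a Monoid law. This is a hypothetical sketch, not part of the commit; the module layout and property name are made up, and it assumes `Source` has the `Eq` and `Show` instances that `forAll` and `===` require.

```haskell
{-# LANGUAGE OverloadedStrings #-}
-- Hypothetical usage sketch for the 'source' generator above: check the right
-- identity Monoid law on generated Source values.
module Main (main) where

import           Generators (source)
import           Hedgehog
import qualified Hedgehog.Range as Range

prop_sourceRightIdentity :: Property
prop_sourceRightIdentity = property $ do
  src <- forAll (source (Range.linear 0 100))
  src <> mempty === src

main :: IO ()
main = do
  _ <- checkParallel (Group "Generators" [("prop_sourceRightIdentity", prop_sourceRightIdentity)])
  pure ()
```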
@@ -32,7 +32,7 @@ renderDiff ref new = unsafePerformIO $ do
  useJD <- (Path.hasExtension ".json" (Path.relPath ref) &&) <$> fmap isJust (Path.findExecutable "jd")
  pure $ if useJD
    then ["jd", "-set", ref, new]
    else ["git", "diff", ref, new]
    else ["diff", ref, new]
{-# NOINLINE renderDiff #-}

testForParseFixture :: (String, [Blob] -> ParseC TaskC Builder, [File Language], Path.RelFile) -> TestTree
@@ -45,11 +45,11 @@ testForParseFixture (format, runParse, files, expected) =

parseFixtures :: [(String, [Blob] -> ParseC TaskC Builder, [File Language], Path.RelFile)]
parseFixtures =
  [ ("s-expression", run . parseTermBuilder TermSExpression, path, Path.relFile "test/fixtures/ruby/corpus/and-or.parseA.txt")
  [ ("s-expression", run . parseTermBuilder TermSExpression, path, Path.relFile "semantic/test/fixtures/ruby/corpus/and-or.parseA.txt")
  , ("symbols", run . parseSymbolsBuilder Serializing.Format.JSON, path'', prefix </> Path.file "parse-tree.symbols.json")
  , ("protobuf symbols", run . parseSymbolsBuilder Serializing.Format.Proto, path'', prefix </> Path.file "parse-tree.symbols.protobuf.bin")
  ]
  where path = [File (Path.absRel "test/fixtures/ruby/corpus/and-or.A.rb") lowerBound Ruby]
        path'' = [File (Path.absRel "test/fixtures/ruby/corpus/method-declaration.A.rb") lowerBound Ruby]
        prefix = Path.relDir "test/fixtures/cli"
  where path = [File (Path.absRel "semantic/test/fixtures/ruby/corpus/and-or.A.rb") lowerBound Ruby]
        path'' = [File (Path.absRel "semantic/test/fixtures/ruby/corpus/method-declaration.A.rb") lowerBound Ruby]
        prefix = Path.relDir "semantic/test/fixtures/cli"
        run = runReader defaultLanguageModes
@@ -14,8 +14,8 @@ spec :: Spec
spec = do
  describe "readFile" $ do
    it "returns a blob for extant files" $ do
      Just blob <- readBlobFromFile (File (Path.absRel "semantic.cabal") lowerBound Unknown)
      blobFilePath blob `shouldBe` "semantic.cabal"
      Just blob <- readBlobFromFile (File (Path.absRel "semantic/semantic.cabal") lowerBound Unknown)
      blobFilePath blob `shouldBe` "semantic/semantic.cabal"

    it "throws for absent files" $ do
      readBlobFromFile (File (Path.absRel "/dev/doesnotexist") lowerBound Unknown) `shouldThrow` anyIOException
@@ -24,57 +24,57 @@ spec = do
    let a = Blob.fromSource (Path.relFile "method.rb") Ruby "def foo; end"
    let b = Blob.fromSource (Path.relFile "method.rb") Ruby "def bar(x); end"
    it "returns blobs for valid JSON encoded diff input" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff.json"
      blobs `shouldBe` [Compare a b]

    it "returns blobs when there's no before" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff-no-before.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff-no-before.json"
      blobs `shouldBe` [Insert b]

    it "returns blobs when there's null before" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff-null-before.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff-null-before.json"
      blobs `shouldBe` [Insert b]

    it "returns blobs when there's no after" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff-no-after.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff-no-after.json"
      blobs `shouldBe` [Delete a]

    it "returns blobs when there's null after" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff-null-after.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff-null-after.json"
      blobs `shouldBe` [Delete a]


    it "returns blobs for unsupported language" $ do
      h <- openFileForReading "test/fixtures/cli/diff-unsupported-language.json"
      h <- openFileForReading "semantic/test/fixtures/cli/diff-unsupported-language.json"
      blobs <- readBlobPairsFromHandle h
      let b' = Blob.fromSource (Path.relFile "test.kt") Unknown "fun main(args: Array<String>) {\nprintln(\"hi\")\n}\n"
      blobs `shouldBe` [Insert b']

    it "detects language based on filepath for empty language" $ do
      blobs <- blobsFromFilePath "test/fixtures/cli/diff-empty-language.json"
      blobs <- blobsFromFilePath "semantic/test/fixtures/cli/diff-empty-language.json"
      blobs `shouldBe` [Compare a b]

    it "throws on blank input" $ do
      h <- openFileForReading "test/fixtures/cli/blank.json"
      h <- openFileForReading "semantic/test/fixtures/cli/blank.json"
      readBlobPairsFromHandle h `shouldThrow` jsonException

    it "throws if language field not given" $ do
      h <- openFileForReading "test/fixtures/cli/diff-no-language.json"
      h <- openFileForReading "semantic/test/fixtures/cli/diff-no-language.json"
      readBlobsFromHandle h `shouldThrow` jsonException

    it "throws if null on before and after" $ do
      h <- openFileForReading "test/fixtures/cli/diff-null-both-sides.json"
      h <- openFileForReading "semantic/test/fixtures/cli/diff-null-both-sides.json"
      readBlobPairsFromHandle h `shouldThrow` jsonException

  describe "readBlobsFromHandle" $ do
    it "returns blobs for valid JSON encoded parse input" $ do
      h <- openFileForReading "test/fixtures/cli/parse.json"
      h <- openFileForReading "semantic/test/fixtures/cli/parse.json"
      blobs <- readBlobsFromHandle h
      let a = Blob.fromSource (Path.relFile "method.rb") Ruby "def foo; end"
      blobs `shouldBe` [a]

    it "throws on blank input" $ do
      h <- openFileForReading "test/fixtures/cli/blank.json"
      h <- openFileForReading "semantic/test/fixtures/cli/blank.json"
      readBlobsFromHandle h `shouldThrow` jsonException

  where blobsFromFilePath path = do
@@ -5,11 +5,8 @@ module Main (allTests, legacySpecs, main, tests) where
import qualified Data.Functor.Classes.Generic.Spec
import qualified Data.Graph.Spec
import qualified Data.Language.Spec
import qualified Data.Scientific.Spec
import qualified Data.Semigroup.App.Spec
import qualified Integration.Spec
import qualified Numeric.Spec
import qualified Parsing.Spec
import qualified Tags.Spec
import qualified Semantic.Spec
import qualified Semantic.CLI.Spec
@@ -24,10 +21,8 @@ import Test.Tasty.Hspec as Tasty
tests :: (?session :: TaskSession) => [TestTree]
tests =
  [ Data.Language.Spec.testTree
  , Data.Scientific.Spec.testTree
  , Data.Semigroup.App.Spec.testTree
  , Integration.Spec.testTree
  , Numeric.Spec.testTree
  , Semantic.CLI.Spec.testTree
  , Semantic.Stat.Spec.testTree
  ]
@@ -53,7 +48,6 @@ legacySpecs = parallel $ do
  describe "Tags.Spec" Tags.Spec.spec
  describe "Semantic" Semantic.Spec.spec
  describe "Semantic.IO" Semantic.IO.Spec.spec
  describe "Parsing" Parsing.Spec.spec


main :: IO ()
@@ -40,7 +40,6 @@ import Data.Monoid as X (First (..), Last (..), Monoid (..))
import Data.Proxy as X
import Data.Semigroup as X (Semigroup (..))
import Data.Semilattice.Lower as X
import Data.Term as X
import Data.Traversable as X (for)
import Debug.Trace as X (traceM, traceShowM)
import Parsing.Parser as X
Some files were not shown because too many files have changed in this diff.