mirror of https://github.com/github/semantic.git synced 2024-12-21 13:51:44 +03:00

Merge branch 'master' into jonabc/licensing

Timothy Clem 2017-07-11 13:50:46 -07:00 committed by GitHub
commit ed080707b2
103 changed files with 888 additions and 801 deletions

@ -1 +1 @@
Subproject commit d9c324ffce1d252da76ee8b585e20262cc3a3a57
Subproject commit debe919f846e2f28dd4700e0cf39889e5fd5994a

@ -1 +1 @@
Subproject commit 148edad8bf31c70ede718cff0ba1f5cb3abc401e
Subproject commit bbb3639e95e07c7b19268f54c4307fc4142288a6

@ -1 +1 @@
Subproject commit 7c0c68390ae6c7a456499cc1087773cd4d02185c
Subproject commit b54bb16d4bee3dcabf728a3dd7d3015c9f970782

@ -1 +1 @@
Subproject commit 8a6082ee4acc58abb9760a3367318977bde84e0a
Subproject commit af8b9373c7b260e84e89c38be63e01bd26878184


@ -20,13 +20,17 @@ library
, Command
, Command.Files
, Data.Align.Generic
, Data.Blob
, Data.Functor.Both
, Data.Functor.Classes.Eq.Generic
, Data.Functor.Classes.Show.Generic
, Data.Functor.Listable
, Data.Mergeable
, Data.Mergeable.Generic
, Data.Range
, Data.Record
, Data.Source
, Data.Span
, Data.Syntax
, Data.Syntax.Algebra
, Data.Syntax.Assignment
@ -59,7 +63,6 @@ library
, Patch
, Paths_semantic_diff
, Prologue
, Range
, Renderer
, Renderer.JSON
, Renderer.Patch
@ -71,8 +74,6 @@ library
, SemanticCmdLine
, SES
, SES.Myers
, Source
, SourceSpan
, SplitDiff
, Syntax
, Term
@ -147,7 +148,6 @@ test-suite test
, SemanticCmdLineSpec
, InterpreterSpec
, PatchOutputSpec
, RangeSpec
, SES.Myers.Spec
, SourceSpec
, SpecHelpers


@ -14,13 +14,13 @@ import Data.Bifunctor.Join
import Data.Functor.Both
import Data.List (partition)
import Data.Maybe (fromJust)
import Data.Range
import Data.Source
import Data.Record
import Data.These
import Diff
import Info
import Patch
import Range
import Source hiding (break, drop, take)
import SplitDiff
import Term
@ -59,7 +59,7 @@ alignSyntax :: (Applicative f, HasField fields Range, Foldable g) => (forall a.
alignSyntax toJoinThese toNode getRange sources (infos :< syntax) =
catMaybes $ wrapInBranch <$> alignBranch getRange (join (toList syntax)) bothRanges
where bothRanges = modifyJoin (fromThese [] []) lineRanges
lineRanges = toJoinThese $ actualLineRangesWithin . byteRange <$> infos <*> sources
lineRanges = toJoinThese $ sourceLineRangesWithin . byteRange <$> infos <*> sources
wrapInBranch = applyThese $ toJoinThese (makeNode <$> infos)
makeNode info (range, children) = toNode (setByteRange info range :< children)


@ -10,38 +10,39 @@ import Control.Exception (catch, IOException)
import Data.Aeson
import Data.These
import Data.Functor.Both
import qualified Data.Blob as Blob
import Data.Source
import Data.String
import Language
import Prologue hiding (readFile)
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Prelude (fail)
import Source hiding (path)
import System.FilePath
-- | Read a file to a SourceBlob, transcoding to UTF-8 along the way.
readFile :: FilePath -> Maybe Language -> IO SourceBlob
-- | Read a file to a Blob, transcoding to UTF-8 along the way.
readFile :: FilePath -> Maybe Language -> IO Blob.Blob
readFile path language = do
raw <- (Just <$> B.readFile path) `catch` (const (pure Nothing) :: IOException -> IO (Maybe ByteString))
pure $ fromMaybe (emptySourceBlob path) (sourceBlob path language . Source <$> raw)
pure $ fromMaybe (Blob.emptyBlob path) (Blob.sourceBlob path language . fromBytes <$> raw)
-- | Return a language based on a FilePath's extension, or Nothing if extension is not found or not supported.
languageForFilePath :: FilePath -> Maybe Language
languageForFilePath = languageForType . toS . takeExtension
-- | Read JSON encoded blob pairs from a handle.
readBlobPairsFromHandle :: Handle -> IO [Both SourceBlob]
readBlobPairsFromHandle = fmap toSourceBlobPairs . readFromHandle
readBlobPairsFromHandle :: Handle -> IO [Both Blob.Blob]
readBlobPairsFromHandle = fmap toBlobPairs . readFromHandle
where
toSourceBlobPairs BlobDiff{..} = toSourceBlobPair <$> blobs
toSourceBlobPair blobs = Join (fromThese empty empty (runJoin (toSourceBlob <$> blobs)))
where empty = emptySourceBlob (mergeThese const (runJoin (path <$> blobs)))
toBlobPairs BlobDiff{..} = toBlobPair <$> blobs
toBlobPair blobs = Join (fromThese empty empty (runJoin (toBlob <$> blobs)))
where empty = Blob.emptyBlob (mergeThese const (runJoin (path <$> blobs)))
-- | Read JSON encoded blobs from a handle.
readBlobsFromHandle :: Handle -> IO [SourceBlob]
readBlobsFromHandle = fmap toSourceBlobs . readFromHandle
where toSourceBlobs BlobParse{..} = fmap toSourceBlob blobs
readBlobsFromHandle :: Handle -> IO [Blob.Blob]
readBlobsFromHandle = fmap toBlobs . readFromHandle
where toBlobs BlobParse{..} = fmap toBlob blobs
readFromHandle :: FromJSON a => Handle -> IO a
readFromHandle h = do
@ -50,8 +51,8 @@ readFromHandle h = do
Just d -> pure d
Nothing -> die ("invalid input on " <> show h <> ", expecting JSON")
toSourceBlob :: Blob -> SourceBlob
toSourceBlob Blob{..} = sourceBlob path language' (Source (encodeUtf8 content))
toBlob :: Blob -> Blob.Blob
toBlob Blob{..} = Blob.sourceBlob path language' (fromText content)
where language' = case language of
"" -> languageForFilePath path
_ -> readMaybe language
@ -66,10 +67,11 @@ newtype BlobParse = BlobParse { blobs :: [Blob] }
type BlobPair = Join These Blob
data Blob = Blob
{ path :: String
{ path :: FilePath
, content :: Text
, language :: String
} deriving (Show, Generic, FromJSON)
}
deriving (Show, Generic, FromJSON)
instance FromJSON BlobPair where
parseJSON = withObject "BlobPair" $ \o -> do
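A minimal usage sketch (editorial, not part of the diff) of the Command.Files API above, with hypothetical paths; readFile falls back to an empty blob when the file cannot be read, and languageForFilePath guesses the language from the file extension:
loadOne :: IO Blob.Blob
loadOne = readFile "src/main.rb" (languageForFilePath "src/main.rb")
loadMany :: IO [Blob.Blob]
loadMany = readBlobsFromHandle stdin  -- assuming stdin is in scope via the project's Prologue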

src/Data/Blob.hs (new file, 54 lines)

@ -0,0 +1,54 @@
module Data.Blob
( Blob(..)
, BlobKind(..)
, modeToDigits
, defaultPlainBlob
, emptyBlob
, nullBlob
, blobExists
, sourceBlob
, nullOid
) where
import Data.Source as Source
import Language
import Numeric
import Prologue
-- | The source, oid, path, and Maybe BlobKind of a blob.
data Blob = Blob
{ blobSource :: Source -- ^ The UTF-8 encoded source text of the blob.
, blobOid :: ByteString -- ^ The Git object ID (SHA-1) of the blob.
, blobPath :: FilePath -- ^ The file path to the blob.
, blobKind :: Maybe BlobKind -- ^ The kind of blob, Nothing denotes a blob that doesn't exist (e.g. on one side of a diff for adding a new file or deleting a file).
, blobLanguage :: Maybe Language -- ^ The language of this blob. Nothing denotes a language we don't support yet.
}
deriving (Show, Eq)
-- | The kind and file mode of a 'Blob'.
data BlobKind = PlainBlob Word32 | ExecutableBlob Word32 | SymlinkBlob Word32
deriving (Show, Eq)
modeToDigits :: BlobKind -> ByteString
modeToDigits (PlainBlob mode) = toS $ showOct mode ""
modeToDigits (ExecutableBlob mode) = toS $ showOct mode ""
modeToDigits (SymlinkBlob mode) = toS $ showOct mode ""
-- | The default plain blob mode
defaultPlainBlob :: BlobKind
defaultPlainBlob = PlainBlob 0o100644
emptyBlob :: FilePath -> Blob
emptyBlob filepath = Blob mempty nullOid filepath Nothing Nothing
nullBlob :: Blob -> Bool
nullBlob Blob{..} = blobOid == nullOid || nullSource blobSource
blobExists :: Blob -> Bool
blobExists Blob{..} = isJust blobKind
sourceBlob :: FilePath -> Maybe Language -> Source -> Blob
sourceBlob filepath language source = Blob source nullOid filepath (Just defaultPlainBlob) language
nullOid :: ByteString
nullOid = "0000000000000000000000000000000000000000"
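An editorial usage sketch of the Data.Blob API above (the Ruby constructor of Language and the path/content are assumptions for illustration):
import qualified Data.Text as T
rubyBlob :: Blob
rubyBlob = sourceBlob "main.rb" (Just Ruby) (fromText (T.pack "puts 'hi'"))  -- hypothetical path/content
-- blobExists rubyBlob == True, because sourceBlob fills in Just defaultPlainBlob.
-- blobExists (emptyBlob "gone.rb") == False, because emptyBlob leaves blobKind as Nothing.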

src/Data/Range.hs (new file, 39 lines)

@ -0,0 +1,39 @@
{-# LANGUAGE DeriveAnyClass #-}
module Data.Range
( Range(..)
, rangeLength
, offsetRange
, intersectsRange
) where
import Data.Semigroup
import Prologue
import Test.LeanCheck
-- | A half-open interval of integers, defined by start & end indices.
data Range = Range { start :: {-# UNPACK #-} !Int, end :: {-# UNPACK #-} !Int }
deriving (Eq, Show, Generic, NFData)
-- | Return the length of the range.
rangeLength :: Range -> Int
rangeLength range = end range - start range
-- | Offset a range by a constant delta.
offsetRange :: Range -> Int -> Range
offsetRange a b = Range (start a + b) (end a + b)
-- | Test two ranges for intersection.
intersectsRange :: Range -> Range -> Bool
intersectsRange range1 range2 = start range1 < end range2 && start range2 < end range1
-- Instances
instance Semigroup Range where
Range start1 end1 <> Range start2 end2 = Range (min start1 start2) (max end1 end2)
instance Ord Range where
a <= b = start a <= start b
instance Listable Range where
tiers = cons2 Range
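A few editorial worked values for the operations above (hypothetical ranges):
-- rangeLength (Range 3 7)                 == 4
-- offsetRange (Range 3 7) 10              == Range 13 17
-- intersectsRange (Range 0 5) (Range 4 9) == True
-- Range 0 5 <> Range 4 9                  == Range 0 9   (the Semigroup takes the enclosing range)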

src/Data/Source.hs (new file, 159 lines)

@ -0,0 +1,159 @@
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
module Data.Source
( Source
, sourceBytes
, fromBytes
-- Measurement
, sourceLength
, nullSource
, totalRange
, totalSpan
-- En/decoding
, fromText
, toText
-- Slicing
, slice
, dropSource
-- Splitting
, sourceLines
, sourceLineRanges
, sourceLineRangesWithin
-- Conversion
, spanToRange
, spanToRangeInLineRanges
, sourceLineRangesByLineNumber
, rangeToSpan
-- Listable
, ListableByteString(..)
) where
import Data.Array
import qualified Data.ByteString as B
import Data.Char (ord)
import Data.List (span)
import Data.Range
import Data.Span
import Data.String (IsString(..))
import qualified Data.Text as T
import Prologue
import Test.LeanCheck
-- | The contents of a source file, represented as a 'ByteString'.
newtype Source = Source { sourceBytes :: B.ByteString }
deriving (Eq, IsString, Show)
fromBytes :: ByteString -> Source
fromBytes = Source
-- Measurement
sourceLength :: Source -> Int
sourceLength = B.length . sourceBytes
nullSource :: Source -> Bool
nullSource = B.null . sourceBytes
-- | Return a 'Range' that covers the entire text.
totalRange :: Source -> Range
totalRange = Range 0 . B.length . sourceBytes
-- | Return a 'Span' that covers the entire text.
totalSpan :: Source -> Span
totalSpan source = Span (Pos 1 1) (Pos (length ranges) (succ (end lastRange - start lastRange)))
where ranges = sourceLineRanges source
Just lastRange = getLast (foldMap (Last . Just) ranges)
-- En/decoding
-- | Return a 'Source' from 'Text', encoded as UTF-8.
fromText :: T.Text -> Source
fromText = Source . encodeUtf8
-- | Return the 'Text' decoded from the 'Source'.
toText :: Source -> Text
toText = decodeUtf8 . sourceBytes
-- | Return a 'Source' that contains a slice of the given 'Source'.
slice :: Range -> Source -> Source
slice range = take . drop
where drop = dropSource (start range)
take = takeSource (rangeLength range)
dropSource :: Int -> Source -> Source
dropSource i = Source . drop . sourceBytes
where drop = B.drop i
takeSource :: Int -> Source -> Source
takeSource i = Source . take . sourceBytes
where take = B.take i
-- Splitting
-- | Split the contents of the source after newlines.
sourceLines :: Source -> [Source]
sourceLines source = (`slice` source) <$> sourceLineRanges source
-- | Compute the 'Range's of each line in a 'Source'.
sourceLineRanges :: Source -> [Range]
sourceLineRanges source = sourceLineRangesWithin (totalRange source) source
-- | Compute the 'Range's of each line in a 'Range' of a 'Source'.
sourceLineRangesWithin :: Range -> Source -> [Range]
sourceLineRangesWithin range = uncurry (zipWith Range) . ((start range:) &&& (<> [ end range ])) . fmap (+ succ (start range)) . B.elemIndices (toEnum (ord '\n')) . sourceBytes . slice range
-- Conversion
-- | Compute the byte 'Range' corresponding to a given 'Span' in a 'Source'.
spanToRange :: Source -> Span -> Range
spanToRange = spanToRangeInLineRanges . sourceLineRangesByLineNumber
spanToRangeInLineRanges :: Array Int Range -> Span -> Range
spanToRangeInLineRanges lineRanges Span{..} = Range
(start (lineRanges ! posLine spanStart) + pred (posColumn spanStart))
(start (lineRanges ! posLine spanEnd) + pred (posColumn spanEnd))
sourceLineRangesByLineNumber :: Source -> Array Int Range
sourceLineRangesByLineNumber source = listArray (1, length lineRanges) lineRanges
where lineRanges = sourceLineRanges source
-- | Compute the 'Span' corresponding to a given byte 'Range' in a 'Source'.
rangeToSpan :: Source -> Range -> Span
rangeToSpan source (Range rangeStart rangeEnd) = Span startPos endPos
where startPos = Pos (firstLine + 1) (rangeStart - start firstRange + 1)
endPos = Pos (firstLine + length lineRanges) (rangeEnd - start lastRange + 1)
firstLine = length before
(before, rest) = span ((< rangeStart) . end) (sourceLineRanges source)
(lineRanges, _) = span ((<= rangeEnd) . start) rest
Just firstRange = getFirst (foldMap (First . Just) lineRanges)
Just lastRange = getLast (foldMap (Last . Just) lineRanges)
-- Instances
instance Semigroup Source where
Source a <> Source b = Source (a <> b)
instance Monoid Source where
mempty = Source B.empty
mappend = (<>)
instance Listable Source where
tiers = (Source . unListableByteString) `mapT` tiers
newtype ListableByteString = ListableByteString { unListableByteString :: B.ByteString }
instance Listable ListableByteString where
tiers = (ListableByteString . encodeUtf8 . T.pack) `mapT` strings
where strings = foldr ((\\//) . listsOf . toTiers) []
[ ['a'..'z'] <> ['A'..'Z'] <> ['0'..'9']
, [' '..'/'] <> [':'..'@'] <> ['['..'`'] <> ['{'..'~']
, [chr 0x00..chr 0x1f] <> [chr 127] -- Control characters.
, [chr 0xa0..chr 0x24f] ] -- Non-ASCII.
instance StringConv Source ByteString where
strConv _ = sourceBytes
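An editorial round-trip sketch of the slicing and line/offset conversions above, on a hypothetical three-line source:
import qualified Data.Text as T
demo :: Source
demo = fromText (T.pack "one\ntwo\nthree")
-- sourceLineRanges demo                        == [ Range 0 4, Range 4 8, Range 8 13 ]
-- toText (slice (Range 4 8) demo)              == "two\n"
-- rangeToSpan demo (Range 5 7)                 == Span (Pos 2 2) (Pos 2 4)   -- the "wo" of "two"
-- spanToRange demo (Span (Pos 2 2) (Pos 2 4))  == Range 5 7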

src/Data/Span.hs (new file, 62 lines)

@ -0,0 +1,62 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-- | Source position and span information
--
-- Mostly taken from purescript's SourcePos definition.
module Data.Span
( Span(..)
, Pos(..)
, emptySpan
) where
import Data.Aeson ((.=), (.:))
import qualified Data.Aeson as A
import Data.Semigroup
import Prologue
import Test.LeanCheck
-- | Source position information
data Pos = Pos
{ posLine :: !Int
, posColumn :: !Int
}
deriving (Show, Read, Eq, Ord, Generic, Hashable, NFData)
instance A.ToJSON Pos where
toJSON Pos{..} =
A.toJSON [posLine, posColumn]
instance A.FromJSON Pos where
parseJSON arr = do
[line, col] <- A.parseJSON arr
pure $ Pos line col
data Span = Span
{ spanStart :: Pos
, spanEnd :: Pos
}
deriving (Show, Read, Eq, Ord, Generic, Hashable, NFData)
emptySpan :: Span
emptySpan = Span (Pos 1 1) (Pos 1 1)
instance Semigroup Span where
Span start1 end1 <> Span start2 end2 = Span (min start1 start2) (max end1 end2)
instance A.ToJSON Span where
toJSON Span{..} =
A.object [ "start" .= spanStart
, "end" .= spanEnd
]
instance A.FromJSON Span where
parseJSON = A.withObject "Span" $ \o ->
Span <$>
o .: "start" <*>
o .: "end"
instance Listable Pos where
tiers = cons2 Pos
instance Listable Span where
tiers = cons2 Span
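An editorial sketch of the JSON shape implied by the instances above (key order in the encoded object may vary):
spanJSON :: A.Value
spanJSON = A.toJSON (Span (Pos 1 1) (Pos 2 5))
-- spanJSON is the object {"start":[1,1],"end":[2,5]}, and A.fromJSON spanJSON
-- recovers A.Success (Span (Pos 1 1) (Pos 2 5)).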


@ -1,13 +1,13 @@
{-# LANGUAGE DataKinds, GADTs, InstanceSigs, MultiParamTypeClasses, RankNTypes, ScopedTypeVariables, StandaloneDeriving, TypeFamilies, TypeOperators #-}
-- | Assignment of AST onto some other structure (typically terms).
--
-- Parsing yields an AST represented as a Rose tree labelled with symbols in the language's grammar and source locations (byte Range and SourceSpan). An Assignment represents a (partial) map from AST nodes onto some other structure; in essence, it's a parser that operates over trees. (For our purposes, this structure is typically Terms annotated with source locations.) Assignments are able to match based on symbol, sequence, and hierarchy; thus, in @x = y@, both @x@ and @y@ might have the same symbol, @Identifier@, the left can be assigned to a variable declaration, while the right can be assigned to a variable reference.
-- Parsing yields an AST represented as a Rose tree labelled with symbols in the language's grammar and source locations (byte Range and Span). An Assignment represents a (partial) map from AST nodes onto some other structure; in essence, it's a parser that operates over trees. (For our purposes, this structure is typically Terms annotated with source locations.) Assignments are able to match based on symbol, sequence, and hierarchy; thus, in @x = y@, both @x@ and @y@ might have the same symbol, @Identifier@, the left can be assigned to a variable declaration, while the right can be assigned to a variable reference.
--
-- Assignments can be any of the following primitive rules:
--
-- 1. 'symbol' rules match a node against a specific symbol in the source language's grammar; they succeed iff a) there is a current node, and b) its symbol is equal to the argument symbol. Matching a 'symbol' rule does not advance past the current node, meaning that you can match a node against a symbol and also e.g. match against the node's 'children'. This also means that some care must be taken, as repeating a symbol with 'many' or 'some' (see below) will never advance past the current node and could therefore loop forever.
--
-- 2. 'location' rules always succeed, and produce the current node's Location (byte Range and SourceSpan). If there is no current node (i.e. if matching has advanced past the root node or past the last child node when operating within a 'children' rule), the location is instead the end of the most recently matched node, specified as a zero-width Range and SourceSpan. 'location' rules do not advance past the current node, meaning that you can both match a node's 'location' and other properties.
-- 2. 'location' rules always succeed, and produce the current node's Location (byte Range and Span). If there is no current node (i.e. if matching has advanced past the root node or past the last child node when operating within a 'children' rule), the location is instead the end of the most recently matched node, specified as a zero-width Range and Span. 'location' rules do not advance past the current node, meaning that you can both match a node's 'location' and other properties.
--
-- 3. 'source' rules succeed whenever there is a current node (i.e. matching has not advanced past the root node or the last child node when operating within a 'children' rule), and produce its source as a ByteString. 'source' is intended to match leaf nodes such as e.g. comments. 'source' rules advance past the current node.
--
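An editorial sketch of how these primitive rules compose, in the style of the language-specific assignments later in this diff (makeTerm, the Assignment alias, and the Comment symbol/constructor are assumed from those modules):
comment :: Assignment
comment = makeTerm <$> symbol Comment <*> (Comment.Comment <$> source)
-- symbol Comment matches the node and yields its Location, source yields the node's
-- bytes, and makeTerm pairs the Location with the constructed syntax.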
@ -94,11 +94,10 @@ import qualified Data.IntMap.Lazy as IntMap
import Data.Ix (inRange)
import Data.List.NonEmpty (nonEmpty)
import Data.Record
import qualified Data.Source as Source (Source, fromBytes, slice, sourceBytes, sourceLines)
import GHC.Stack
import qualified Info
import Prologue hiding (Alt, get, Location, state)
import Range (offsetRange)
import qualified Source (Source(..), drop, slice, sourceText, actualLines)
import System.Console.ANSI
import Text.Parser.TreeSitter.Language
import Text.Show hiding (show)
@ -115,26 +114,26 @@ data AssignmentF ast grammar a where
Source :: HasCallStack => AssignmentF ast grammar ByteString
Children :: HasCallStack => Assignment ast grammar a -> AssignmentF ast grammar a
Choose :: HasCallStack => IntMap.IntMap a -> AssignmentF ast grammar a
Many :: HasCallStack => Assignment ast grammar a -> AssignmentF ast grammar [a]
Alt :: HasCallStack => a -> a -> AssignmentF ast grammar a
Empty :: HasCallStack => AssignmentF ast grammar a
Throw :: HasCallStack => Error grammar -> AssignmentF ast grammar a
Catch :: HasCallStack => a -> (Error grammar -> a) -> AssignmentF ast grammar a
-- | Zero-width production of the current location.
--
-- If assigning at the end of input or at the end of a list of children, the location will be returned as an empty Range and SourceSpan at the current offset. Otherwise, it will be the Range and SourceSpan of the current node.
-- If assigning at the end of input or at the end of a list of children, the location will be returned as an empty Range and Span at the current offset. Otherwise, it will be the Range and Span of the current node.
location :: HasCallStack => Assignment ast grammar (Record Location)
location = Location `Then` return
-- | Zero-width projection of the current node.
--
-- Since this is zero-width, care must be taken not to repeat it without chaining on other rules. I.e. 'many (project f *> b)' is fine, but 'many (project f)' is not.
-- Since this is zero-width, care must be taken not to repeat it without chaining on other rules. I.e. @many (project f *> b)@ is fine, but @many (project f)@ is not.
project :: HasCallStack => (forall x. Base ast x -> a) -> Assignment ast grammar a
project projection = Project projection `Then` return
-- | Zero-width match of a node with the given symbol, producing the current node's location.
--
-- Since this is zero-width, care must be taken not to repeat it without chaining on other rules. I.e. 'many (symbol A *> b)' is fine, but 'many (symbol A)' is not.
-- Since this is zero-width, care must be taken not to repeat it without chaining on other rules. I.e. @many (symbol A *> b)@ is fine, but @many (symbol A)@ is not.
symbol :: (Enum grammar, Eq grammar, HasCallStack) => grammar -> Assignment ast grammar (Record Location)
symbol s = withFrozenCallStack $ Choose (IntMap.singleton (fromEnum s) ()) `Then` (const location)
@ -155,7 +154,7 @@ while predicate step = many $ do
-- | A location specified as possibly-empty intervals of bytes and line/column positions.
type Location = '[Info.Range, Info.SourceSpan]
type Location = '[Info.Range, Info.Span]
-- | An AST node labelled with symbols and source location.
type AST grammar = Cofree [] (Record (Maybe grammar ': Location))
@ -167,7 +166,7 @@ data Result grammar a = Result { resultError :: Maybe (Error grammar), resultVal
data Error grammar where
Error
:: HasCallStack
=> { errorPos :: Info.SourcePos
=> { errorPos :: Info.Pos
, errorCause :: ErrorCause grammar
} -> Error grammar
@ -182,15 +181,13 @@ data ErrorCause grammar
-- | Pretty-print an Error with reference to the source where it occurred.
printError :: Show grammar => Source.Source -> Error grammar -> IO ()
printError source error@Error{..}
= do
withSGRCode [SetConsoleIntensity BoldIntensity] . putStrErr . (showSourcePos Nothing errorPos) . showString ": " $ ""
withSGRCode [SetColor Foreground Vivid Red] . putStrErr . (showString "error") . showString ": " . showExpectation error . showChar '\n' . showString (toS context) . (if isSuffixOf "\n" context then identity else showChar '\n') . showString (replicate (succ (Info.column errorPos + lineNumberDigits)) ' ') $ ""
withSGRCode [SetColor Foreground Vivid Green] . putStrErr . (showChar '^') . showChar '\n' . showString (prettyCallStack callStack) $ ""
where context = maybe "\n" (Source.sourceText . sconcat) (nonEmpty [ Source.Source (toS (showLineNumber i)) <> Source.Source ": " <> l | (i, l) <- zip [1..] (Source.actualLines source), inRange (Info.line errorPos - 2, Info.line errorPos) i ])
printError source error@Error{..} = do
withSGRCode [SetConsoleIntensity BoldIntensity] . putStrErr . showPos Nothing errorPos . showString ": " $ ""
withSGRCode [SetColor Foreground Vivid Red] . putStrErr . showString "error" . showString ": " . showExpectation error . showChar '\n' . showString (toS context) . (if isSuffixOf "\n" context then identity else showChar '\n') . showString (replicate (succ (Info.posColumn errorPos + lineNumberDigits)) ' ') $ ""
withSGRCode [SetColor Foreground Vivid Green] . putStrErr . showChar '^' . showChar '\n' . showString (prettyCallStack callStack) $ ""
where context = maybe "\n" (Source.sourceBytes . sconcat) (nonEmpty [ Source.fromBytes (toS (showLineNumber i)) <> Source.fromBytes ": " <> l | (i, l) <- zip [1..] (Source.sourceLines source), inRange (Info.posLine errorPos - 2, Info.posLine errorPos) i ])
showLineNumber n = let s = show n in replicate (lineNumberDigits - length s) ' ' <> s
lineNumberDigits = succ (floor (logBase 10 (fromIntegral (Info.line errorPos) :: Double)))
lineNumberDigits = succ (floor (logBase 10 (fromIntegral (Info.posLine errorPos) :: Double)))
putStrErr = hPutStr stderr
withSGRCode :: [SGR] -> IO a -> IO ()
@ -218,11 +215,11 @@ showSymbols [a, b] = shows a . showString " or " . shows b
showSymbols [a, b, c] = shows a . showString ", " . shows b . showString ", or " . shows c
showSymbols (h:t) = shows h . showString ", " . showSymbols t
showSourcePos :: Maybe FilePath -> Info.SourcePos -> ShowS
showSourcePos path Info.SourcePos{..} = maybe (showParen True (showString "interactive")) showString path . showChar ':' . shows line . showChar ':' . shows column
showPos :: Maybe FilePath -> Info.Pos -> ShowS
showPos path Info.Pos{..} = maybe (showParen True (showString "interactive")) showString path . showChar ':' . shows posLine . showChar ':' . shows posColumn
-- | Run an assignment over an AST exhaustively.
assign :: (HasField fields Info.Range, HasField fields Info.SourceSpan, HasField fields (Maybe grammar), Symbol grammar, Enum grammar, Eq grammar, Traversable f, HasCallStack) => Assignment (Cofree f (Record fields)) grammar a -> Source.Source -> Cofree f (Record fields) -> Result grammar a
assign :: (HasField fields Info.Range, HasField fields Info.Span, HasField fields (Maybe grammar), Symbol grammar, Enum grammar, Eq grammar, Traversable f, HasCallStack) => Assignment (Cofree f (Record fields)) grammar a -> Source.Source -> Cofree f (Record fields) -> Result grammar a
assign = assignBy (\ (r :< _) -> getField r :. getField r :. getField r :. Nil)
assignBy :: (Symbol grammar, Enum grammar, Eq grammar, Recursive ast, Foldable (Base ast), HasCallStack) => (forall x. Base ast x -> Record (Maybe grammar ': Location)) -> Assignment ast grammar a -> Source.Source -> ast -> Result grammar a
@ -241,21 +238,23 @@ runAssignment toRecord = iterFreer run . fmap ((pure .) . (,))
where run :: AssignmentF ast grammar x -> (x -> AssignmentState ast -> Result grammar (a, AssignmentState ast)) -> AssignmentState ast -> Result grammar (a, AssignmentState ast)
run assignment yield initialState = case (assignment, stateNodes) of
(Location, node : _) -> yield (rtail (toRecord (F.project node))) state
(Location, []) -> yield (Info.Range stateOffset stateOffset :. Info.SourceSpan statePos statePos :. Nil) state
(Location, []) -> yield (Info.Range stateOffset stateOffset :. Info.Span statePos statePos :. Nil) state
(Project projection, node : _) -> yield (projection (F.project node)) state
(Source, node : _) -> yield (Source.sourceText (Source.slice (offsetRange (Info.byteRange (toRecord (F.project node))) (negate stateOffset)) stateSource)) (advanceState (rtail . toRecord) state)
(Source, node : _) -> yield (Source.sourceBytes (Source.slice (Info.byteRange (toRecord (F.project node))) stateSource)) (advanceState (rtail . toRecord) state)
(Children childAssignment, node : _) -> case assignAllFrom toRecord childAssignment state { stateNodes = toList (F.project node) } of
Result _ (Just (a, state')) -> yield a (advanceState (rtail . toRecord) state' { stateNodes = stateNodes })
Result err Nothing -> Result err Nothing
(Choose choices, node : _) | Just symbol :. _ <- toRecord (F.project node), Just a <- IntMap.lookup (fromEnum symbol) choices -> yield a state
-- Nullability: some rules, e.g. 'pure a' and 'many a', should match at the end of input. Either side of an alternation may be nullable, ergo Alt can match at the end of input.
(Many _, []) -> yield [] state
(Many rule, _) -> uncurry yield (runMany rule state)
-- Nullability: some rules, e.g. @pure a@ and @many a@, should match at the end of input. Either side of an alternation may be nullable, ergo Alt can match at the end of input.
(Alt a b, _) -> yield a state <|> yield b state
(Throw e, _) -> Result (Just e) Nothing
(Catch during handler, _) -> case yield during state of
Result _ (Just (a, state')) -> pure (a, state')
Result err Nothing -> maybe empty (flip yield state . handler) err
(_, []) -> Result (Just (Error statePos (UnexpectedEndOfInput expectedSymbols))) Nothing
(_, node:_) -> let Info.SourceSpan startPos _ = Info.sourceSpan (toRecord (F.project node)) in Result (Error startPos . UnexpectedSymbol expectedSymbols <$> rhead (toRecord (F.project node)) <|> Just (Error startPos (ParseError expectedSymbols))) Nothing
(_, node:_) -> let Info.Span startPos _ = Info.sourceSpan (toRecord (F.project node)) in Result (Error startPos . UnexpectedSymbol expectedSymbols <$> rhead (toRecord (F.project node)) <|> Just (Error startPos (ParseError expectedSymbols))) Nothing
where state@AssignmentState{..} = case assignment of
Choose choices | all ((== Regular) . symbolType) (choiceSymbols choices) -> dropAnonymous (rhead . toRecord) initialState
_ -> initialState
@ -263,6 +262,11 @@ runAssignment toRecord = iterFreer run . fmap ((pure .) . (,))
Choose choices -> choiceSymbols choices
_ -> []
choiceSymbols choices = (toEnum :: Int -> grammar) <$> IntMap.keys choices
runMany :: Assignment ast grammar v -> AssignmentState ast -> ([v], AssignmentState ast)
runMany rule state = case runAssignment toRecord rule state of
Result _ (Just (a, state')) -> let (as, state'') = runMany rule state' in as `seq` (a : as, state'')
_ -> ([], state)
{-# INLINE run #-}
dropAnonymous :: (Symbol grammar, Recursive ast) => (forall x. Base ast x -> Maybe grammar) -> AssignmentState ast -> AssignmentState ast
dropAnonymous toSymbol state = state { stateNodes = dropWhile ((`notElem` [Just Regular, Nothing]) . fmap symbolType . toSymbol . F.project) (stateNodes state) }
@ -271,43 +275,47 @@ dropAnonymous toSymbol state = state { stateNodes = dropWhile ((`notElem` [Just
advanceState :: Recursive ast => (forall x. Base ast x -> Record Location) -> AssignmentState ast -> AssignmentState ast
advanceState toLocation state@AssignmentState{..}
| node : rest <- stateNodes
, range :. span :. Nil <- toLocation (F.project node) = AssignmentState (Info.end range) (Info.spanEnd span) (Source.drop (Info.end range - stateOffset) stateSource) rest
, range :. span :. Nil <- toLocation (F.project node) = AssignmentState (Info.end range) (Info.spanEnd span) stateSource rest
| otherwise = state
-- | State kept while running 'Assignment's.
data AssignmentState ast = AssignmentState
{ stateOffset :: Int -- ^ The offset into the Source thus far reached, measured in bytes.
, statePos :: Info.SourcePos -- ^ The (1-indexed) line/column position in the Source thus far reached.
, statePos :: Info.Pos -- ^ The (1-indexed) line/column position in the Source thus far reached.
, stateSource :: Source.Source -- ^ The remaining Source. Equal to dropping 'stateOffset' bytes off the original input Source.
, stateNodes :: [ast] -- ^ The remaining nodes to assign. Note that 'children' rules recur into subterms, and thus this does not necessarily reflect all of the terms remaining to be assigned in the overall algorithm, only those “in scope.”
}
deriving (Eq, Show)
makeState :: Source.Source -> [ast] -> AssignmentState ast
makeState source nodes = AssignmentState 0 (Info.SourcePos 1 1) source nodes
makeState = AssignmentState 0 (Info.Pos 1 1)
-- Instances
instance Enum grammar => Alternative (Assignment ast grammar) where
empty :: HasCallStack => Assignment ast grammar a
empty = Empty `Then` return
empty = Choose mempty `Then` return
(<|>) :: HasCallStack => Assignment ast grammar a -> Assignment ast grammar a -> Assignment ast grammar a
a <|> b = case (a, b) of
(_, Empty `Then` _) -> a
(Empty `Then` _, _) -> b
(Choose choices1 `Then` continue1, Choose choices2 `Then` continue2) -> Choose (IntMap.union (fmap continue1 choices1) (fmap continue2 choices2)) `Then` identity
_ -> wrap $ Alt a b
Return a <|> _ = Return a
a <|> b | Just c <- (liftA2 (<>) `on` choices) a b = Choose c `Then` identity
| otherwise = wrap $ Alt a b
where choices :: Assignment ast grammar a -> Maybe (IntMap (Assignment ast grammar a))
choices (Choose choices `Then` continue) = Just (continue <$> choices)
choices (Many rule `Then` continue) = fmap (const (Many rule `Then` continue)) <$> choices rule
choices _ = Nothing
many :: HasCallStack => Assignment ast grammar a -> Assignment ast grammar [a]
many a = Many a `Then` return
instance Show grammar => Show1 (AssignmentF ast grammar) where
liftShowsPrec sp sl d a = case a of
Location -> showString "Location" . sp d (Info.Range 0 0 :. Info.SourceSpan (Info.SourcePos 0 0) (Info.SourcePos 0 0) :. Nil)
Location -> showString "Location" . sp d (Info.Range 0 0 :. Info.Span (Info.Pos 1 1) (Info.Pos 1 1) :. Nil)
Project projection -> showsUnaryWith (const (const (showChar '_'))) "Project" d projection
Source -> showString "Source" . showChar ' ' . sp d ""
Children a -> showsUnaryWith (liftShowsPrec sp sl) "Children" d a
Choose choices -> showsUnaryWith (liftShowsPrec (liftShowsPrec sp sl) (liftShowList sp sl)) "Choose" d (IntMap.toList choices)
Many a -> showsUnaryWith (liftShowsPrec (\ d a -> sp d [a]) (sl . pure)) "Many" d a
Alt a b -> showsBinaryWith sp sp "Alt" d a b
Empty -> showString "Empty"
Throw e -> showsUnaryWith showsPrec "Throw" d e
Catch during handler -> showsBinaryWith sp (const (const (showChar '_'))) "Catch" d during handler


@ -31,12 +31,19 @@ instance Eq1 Variable where liftEq = genericLiftEq
instance Show1 Variable where liftShowsPrec = genericLiftShowsPrec
data Class a = Class { classIdentifier :: !a, classSuperclasses :: ![a], classScope :: ![a] }
data Class a = Class { classIdentifier :: !a, classSuperclasses :: ![a], classBody :: ![a] }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Class where liftEq = genericLiftEq
instance Show1 Class where liftShowsPrec = genericLiftShowsPrec
-- | A decorator in Python
data Decorator a = Decorator { decoratorIdentifier :: !a, decoratorParamaters :: ![a], decoratorBody :: !a }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Decorator where liftEq = genericLiftEq
instance Show1 Decorator where liftShowsPrec = genericLiftShowsPrec
-- TODO: Generics, constraints.


@ -4,7 +4,6 @@ module Data.Syntax.Literal where
import Data.Align.Generic
import Data.Functor.Classes.Eq.Generic
import Data.Functor.Classes.Show.Generic
import Data.Union
import GHC.Generics
import Prologue hiding (Set)
@ -53,7 +52,7 @@ instance Show1 Range where liftShowsPrec = genericLiftShowsPrec
-- Strings, symbols
newtype String a = String { stringElements :: [Union '[InterpolationElement, TextElement] a] }
newtype String a = String { stringElements :: [a] }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 String where liftEq = genericLiftEq


@ -14,6 +14,14 @@ data If a = If { ifCondition :: !a, ifThenBody :: !a, ifElseBody :: !a }
instance Eq1 If where liftEq = genericLiftEq
instance Show1 If where liftShowsPrec = genericLiftShowsPrec
-- | Else statement. The else condition is any term that, upon successful completion, continues evaluation to the elseBody, e.g. `for ... else` in Python.
data Else a = Else { elseCondition :: !a, elseBody :: !a }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Else where liftEq = genericLiftEq
instance Show1 Else where liftShowsPrec = genericLiftShowsPrec
-- TODO: Alternative definition would flatten if/else if/else chains: data If a = If ![(a, a)] !(Maybe a)
-- | A pattern-matching or computed jump control-flow statement, like 'switch' in C or JavaScript, or 'case' in Ruby or Haskell.
@ -30,6 +38,14 @@ newtype Pattern a = Pattern a
instance Eq1 Pattern where liftEq = genericLiftEq
instance Show1 Pattern where liftShowsPrec = genericLiftShowsPrec
-- | A let statement or local binding, like 'a as b' or 'let a = b'.
data Let a = Let { letVariable :: !a, letValue :: !a, letBody :: !a }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Let where liftEq = genericLiftEq
instance Show1 Let where liftShowsPrec = genericLiftShowsPrec
-- Assignment
-- | Assignment to a variable or other lvalue.
@ -108,15 +124,13 @@ newtype Throw a = Throw a
instance Eq1 Throw where liftEq = genericLiftEq
instance Show1 Throw where liftShowsPrec = genericLiftShowsPrec
data Try with a = Try !a ![with a]
data Try a = Try !a ![a]
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
-- deriving instance (Eq a, Eq (with a)) => Eq (Try with a)
-- deriving instance (Show a, Show (with a)) => Show (Try with a)
instance Eq1 with => Eq1 (Try with) where liftEq = genericLiftEq
instance Show1 with => Show1 (Try with) where liftShowsPrec = genericLiftShowsPrec
instance Eq1 Try where liftEq = genericLiftEq
instance Show1 Try where liftShowsPrec = genericLiftShowsPrec
data Catch a = Catch !(Maybe a) !a
data Catch a = Catch !a !a
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Catch where liftEq = genericLiftEq


@ -10,6 +10,9 @@ import Prologue hiding (Product)
data Annotation a = Annotation { annotationSubject :: !a, annotationType :: !a }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)
instance Eq1 Annotation where liftEq = genericLiftEq
instance Show1 Annotation where liftShowsPrec = genericLiftShowsPrec
newtype Product a = Product { productElements :: [a] }
deriving (Eq, Foldable, Functor, GAlign, Generic1, Show, Traversable)


@ -1,8 +1,8 @@
{-# LANGUAGE DataKinds, ScopedTypeVariables, TypeFamilies, TypeOperators #-}
module FDoc.RecursionSchemes where
import Data.Range
import Data.Record
import Range
import Category
import Term
import Syntax


@ -1,8 +1,8 @@
{-# LANGUAGE DataKinds, TypeOperators #-}
module FDoc.Term where
import Data.Range
import Data.Record
import Range
import Category
import Term
import Syntax


@ -8,23 +8,22 @@ module Info
, Category(..)
, category
, setCategory
, SourceSpan(..)
, SourcePos(..)
, SourceSpans(..)
, Span(..)
, Pos(..)
, sourceSpan
, setSourceSpan
, setSpan
) where
import Category
import Data.Range
import Data.Record
import Range
import SourceSpan
import Data.Span
-- | The default set of fields produced by our parsers.
type DefaultFields = '[ Range, Category, SourceSpan ]
type DefaultFields = '[ Range, Category, Span ]
-- | A type alias for HasField constraints commonly used throughout semantic-diff.
type HasDefaultFields fields = (HasField fields Category, HasField fields Range, HasField fields SourceSpan)
type HasDefaultFields fields = (HasField fields Category, HasField fields Range, HasField fields Span)
byteRange :: HasField fields Range => Record fields -> Range
byteRange = getField
@ -38,8 +37,8 @@ category = getField
setCategory :: HasField fields Category => Record fields -> Category -> Record fields
setCategory = setField
sourceSpan :: HasField fields SourceSpan => Record fields -> SourceSpan
sourceSpan :: HasField fields Span => Record fields -> Span
sourceSpan = getField
setSourceSpan :: HasField fields SourceSpan => Record fields -> SourceSpan -> Record fields
setSourceSpan = setField
setSpan :: HasField fields Span => Record fields -> Span -> Record fields
setSpan = setField


@ -1,9 +1,9 @@
{-# LANGUAGE DataKinds #-}
module Language.C where
import Data.Source
import Info
import Prologue
import Source
import qualified Syntax as S
import Term


@ -1,11 +1,11 @@
{-# LANGUAGE DataKinds #-}
module Language.Go where
import Prologue
import Data.Source
import Info
import Source
import Term
import Prologue
import qualified Syntax as S
import Term
termAssignment
:: Source -- ^ The source of the term.


@ -7,10 +7,10 @@ module Language.Markdown
import CMark
import Data.Record
import Data.Source
import Data.Syntax.Assignment (Location)
import Info
import Prologue hiding (Location)
import Source
import Text.Parser.TreeSitter.Language (Symbol(..), SymbolType(..))
data Grammar
@ -38,13 +38,15 @@ data Grammar
cmarkParser :: Source -> Cofree [] (Record (NodeType ': Location))
cmarkParser source = toTerm (totalRange source) (totalSpan source) $ commonmarkToNode [ optSourcePos, optSafe ] (toText source)
where toTerm :: Range -> SourceSpan -> Node -> Cofree [] (Record (NodeType ': Location))
where toTerm :: Range -> Span -> Node -> Cofree [] (Record (NodeType ': Location))
toTerm within withinSpan (Node position t children) =
let range = maybe within (sourceSpanToRange source . toSpan) position
let range = maybe within (spanToRangeInLineRanges lineRanges . toSpan) position
span = maybe withinSpan toSpan position
in cofree $ (t :. range :. span :. Nil) :< (toTerm range span <$> children)
toSpan PosInfo{..} = SourceSpan (SourcePos startLine startColumn) (SourcePos endLine (succ endColumn))
toSpan PosInfo{..} = Span (Pos startLine startColumn) (Pos (max startLine endLine) (succ (if endLine <= startLine then max startColumn endColumn else endColumn)))
lineRanges = sourceLineRangesByLineNumber source
toGrammar :: NodeType -> Grammar
toGrammar DOCUMENT{} = Document


@ -62,7 +62,7 @@ paragraph :: Assignment
paragraph = makeTerm <$> symbol Paragraph <*> children (Markup.Paragraph <$> many inlineElement)
list :: Assignment
list = (cofree .) . (:<) <$> symbol List <*> (project (\ (((CMark.LIST CMark.ListAttributes{..}) :. _) :< _) -> case listType of
list = (cofree .) . (:<) <$> symbol List <*> (project (\ ((CMark.LIST CMark.ListAttributes{..} :. _) :< _) -> case listType of
CMark.BULLET_LIST -> inj . Markup.UnorderedList
CMark.ORDERED_LIST -> inj . Markup.OrderedList) <*> children (many item))
@ -71,7 +71,7 @@ item = makeTerm <$> symbol Item <*> children (many blockElement)
section :: Assignment
section = makeTerm <$> symbol Heading <*> (heading >>= \ headingTerm -> Markup.Section headingTerm <$> while (((<) `on` level) headingTerm) blockElement)
where heading = makeTerm <$> symbol Heading <*> (Markup.Heading <$> project (\ ((CMark.HEADING level :. _) :< _) -> level) <*> children (many inlineElement))
where heading = makeTerm <$> symbol Heading <*> (project (\ ((CMark.HEADING level :. _) :< _) -> Markup.Heading level) <*> children (many inlineElement))
level term = case term of
_ | Just section <- prj (unwrap term) -> level (Markup.sectionHeading section)
_ | Just heading <- prj (unwrap term) -> Markup.headingLevel heading
@ -81,10 +81,10 @@ blockQuote :: Assignment
blockQuote = makeTerm <$> symbol BlockQuote <*> children (Markup.BlockQuote <$> many blockElement)
codeBlock :: Assignment
codeBlock = makeTerm <$> symbol CodeBlock <*> (Markup.Code <$> project (\ (((CMark.CODE_BLOCK language _) :. _) :< _) -> nullText language) <*> source)
codeBlock = makeTerm <$> symbol CodeBlock <*> (project (\ ((CMark.CODE_BLOCK language _ :. _) :< _) -> Markup.Code (nullText language)) <*> source)
thematicBreak :: Assignment
thematicBreak = makeTerm <$> symbol ThematicBreak <*> (Markup.ThematicBreak <$ source)
thematicBreak = makeTerm <$> symbol ThematicBreak <*> pure Markup.ThematicBreak <* source
htmlBlock :: Assignment
htmlBlock = makeTerm <$> symbol HTMLBlock <*> (Markup.HTMLBlock <$> source)
@ -105,19 +105,19 @@ text :: Assignment
text = makeTerm <$> symbol Text <*> (Markup.Text <$> source)
link :: Assignment
link = makeTerm <$> symbol Link <*> (uncurry Markup.Link <$> project (\ (((CMark.LINK url title) :. _) :< _) -> (toS url, nullText title))) <* source
link = makeTerm <$> symbol Link <*> project (\ ((CMark.LINK url title :. _) :< _) -> Markup.Link (toS url) (nullText title)) <* source
image :: Assignment
image = makeTerm <$> symbol Image <*> (uncurry Markup.Image <$> project (\ (((CMark.IMAGE url title) :. _) :< _) -> (toS url, nullText title))) <* source
image = makeTerm <$> symbol Image <*> project (\ ((CMark.IMAGE url title :. _) :< _) -> Markup.Image (toS url) (nullText title)) <* source
code :: Assignment
code = makeTerm <$> symbol Code <*> (Markup.Code Nothing <$> source)
lineBreak :: Assignment
lineBreak = makeTerm <$> symbol LineBreak <*> (Markup.LineBreak <$ source)
lineBreak = makeTerm <$> symbol LineBreak <*> pure Markup.LineBreak <* source
softBreak :: Assignment
softBreak = makeTerm <$> symbol SoftBreak <*> (Markup.LineBreak <$ source)
softBreak = makeTerm <$> symbol SoftBreak <*> pure Markup.LineBreak <* source
-- Implementation details


@ -6,4 +6,5 @@ import Text.Parser.TreeSitter.Language
import Text.Parser.TreeSitter.Python
-- | Statically-known rules corresponding to symbols in the grammar.
-- v1 - bump this to regenerate
mkSymbolDatatype (mkName "Grammar") tree_sitter_python


@ -19,6 +19,7 @@ import qualified Data.Syntax.Declaration as Declaration
import qualified Data.Syntax.Expression as Expression
import qualified Data.Syntax.Literal as Literal
import qualified Data.Syntax.Statement as Statement
import qualified Data.Syntax.Type as Type
import Data.Union
import GHC.Generics
import GHC.Stack
@ -28,7 +29,9 @@ import qualified Term
type Syntax =
'[ Comment.Comment
, Declaration.Class
, Declaration.Comprehension
, Declaration.Decorator
, Declaration.Function
, Declaration.Import
, Declaration.Variable
@ -54,16 +57,24 @@ type Syntax =
, Redirect
, Statement.Assignment
, Statement.Break
, Statement.Catch
, Statement.Continue
, Statement.Else
, Statement.Finally
, Statement.ForEach
, Statement.If
, Statement.Let
, Statement.NoOp
, Statement.Return
, Statement.Throw
, Statement.Try
, Statement.While
, Statement.Yield
, Language.Python.Syntax.Ellipsis
, Syntax.Empty
, Syntax.Error Error
, Syntax.Identifier
, Type.Annotation
, []
]
@ -90,7 +101,12 @@ assignment :: Assignment
assignment = makeTerm <$> symbol Module <*> children (many declaration)
declaration :: Assignment
declaration = handleError $ comment <|> statement <|> expression
declaration = handleError $ classDefinition
<|> comment
<|> decoratedDefinition
<|> expression
<|> functionDefinition
<|> statement
statement :: Assignment
statement = assertStatement
@ -99,21 +115,29 @@ statement = assertStatement
<|> breakStatement
<|> continueStatement
<|> deleteStatement
<|> exceptClause
<|> execStatement
<|> expressionStatement
<|> finallyClause
<|> forStatement
<|> globalStatement
<|> ifStatement
<|> identifier
<|> import'
<|> importAlias
<|> importFrom
<|> nonlocalStatement
<|> passStatement
<|> printStatement
<|> raiseStatement
<|> returnStatement
<|> tryStatement
<|> whileStatement
<|> wildcardImport
<|> withStatement
expressionStatement :: Assignment
expressionStatement = symbol ExpressionStatement *> children expression
expressionStatement = symbol ExpressionStatement *> children declaration
expression :: Assignment
expression = await
@ -123,10 +147,11 @@ expression = await
<|> comparisonOperator
<|> comprehension
<|> conditionalExpression
<|> defaultParameter
<|> dottedName
<|> ellipsis
<|> expressionList
<|> lambda
<|> keywordArgument
<|> keywordIdentifier
<|> literal
<|> memberAccess
@ -134,8 +159,74 @@ expression = await
<|> subscript
<|> statement
<|> tuple
<|> type'
<|> typedParameter
<|> unaryOperator
defaultParameter :: Assignment
defaultParameter = makeTerm <$> symbol DefaultParameter <*> children (Statement.Assignment <$> expression <*> expression)
decoratedDefinition :: Assignment
decoratedDefinition = makeTerm <$> symbol DecoratedDefinition <*> (children $ do
(a, b) <- (symbol Decorator *> (children ((,) <$> expression <*> (symbol ArgumentList *> children ((many expression) <|> (many emptyTerm))))))
dec <- declaration
pure (Declaration.Decorator a b dec))
keywordArgument :: Assignment
keywordArgument = makeTerm <$> symbol KeywordArgument <*> children (Declaration.Variable <$> expression <*> emptyTerm <*> expression)
withStatement :: Assignment
withStatement = makeTerm <$> symbol WithStatement <*> (children $ do
(value, variable) <- (symbol WithItem *> (children $ (,) <$> identifier <*> identifier))
body <- expression
pure (Statement.Let variable value body))
forStatement :: Assignment
forStatement = symbol ForStatement >>= \ loc -> children (make loc <$> (makeTerm <$> symbol Variables <*> children (many expression)) <*> expressionList <*> (makeTerm <$> location <*> many expression) <*> (optional (makeTerm <$> symbol ElseClause <*> children (many declaration))))
where
make loc variables expressionList forBody forElseClause = case forElseClause of
Nothing -> makeTerm loc (Statement.ForEach variables expressionList forBody)
Just a -> makeTerm loc (Statement.Else (makeTerm loc $ Statement.ForEach variables expressionList forBody) a)
whileStatement :: Assignment
whileStatement = symbol WhileStatement >>= \ loc -> children (make loc <$> expression <*> (makeTerm <$> location <*> many expression) <*> (optional (makeTerm <$> symbol ElseClause <*> children (many declaration))))
where
make loc whileCondition whileBody whileElseClause = case whileElseClause of
Nothing -> makeTerm loc (Statement.While whileCondition whileBody)
Just a -> makeTerm loc (Statement.Else (makeTerm loc $ Statement.While whileCondition whileBody) a)
tryStatement :: Assignment
tryStatement = makeTerm <$> symbol TryStatement <*> children (Statement.Try <$> expression <*> (many (expression <|> elseClause)))
where elseClause = makeTerm <$> symbol ElseClause <*> children (Statement.Else <$> emptyTerm <*> (makeTerm <$> location <*> (many expression)))
functionDefinition :: Assignment
functionDefinition = (symbol FunctionDefinition >>= \ loc -> children (makeFunctionDeclaration loc <$> identifier <*> (symbol Parameters *> children (many expression)) <*> (optional (symbol Type *> children expression)) <*> (makeTerm <$> location <*> many declaration)))
<|> (symbol AsyncFunctionDefinition >>= \ loc -> children (makeAsyncFunctionDeclaration loc <$> async' <*> identifier <*> (symbol Parameters *> children (many expression)) <*> (optional (symbol Type *> children expression)) <*> (makeTerm <$> location <*> many declaration)))
<|> (symbol Lambda >>= \ loc -> children (makeFunctionDeclaration loc <$> (makeTerm <$> symbol AnonLambda <*> (Syntax.Identifier <$> source)) <*> ((symbol LambdaParameters *> children (many expression)) <|> (pure [])) <*> (optional (symbol Type *> children expression)) <*> (makeTerm <$> location <*> many declaration)))
where
makeFunctionDeclaration loc functionName' functionParameters ty functionBody = makeTerm loc $ Type.Annotation (makeTerm loc $ Declaration.Function functionName' functionParameters functionBody) (maybe (makeTerm loc Syntax.Empty) identity ty)
makeAsyncFunctionDeclaration loc async' functionName' functionParameters ty functionBody = makeTerm loc $ Type.Annotation (makeTerm loc $ Type.Annotation (makeTerm loc $ Declaration.Function functionName' functionParameters functionBody) (maybe (makeTerm loc Syntax.Empty) identity ty)) async'
async' :: Assignment
async' = makeTerm <$> symbol AnonAsync <*> (Syntax.Identifier <$> source)
classDefinition :: Assignment
classDefinition = makeTerm <$> symbol ClassDefinition <*> children (Declaration.Class <$> identifier <*> argumentList <*> (many declaration))
where argumentList = symbol ArgumentList *> children (many expression)
<|> pure []
typedParameter :: Assignment
typedParameter = makeTerm <$> symbol TypedParameter <*> children (Type.Annotation <$> identifier <*> type')
type' :: Assignment
type' = symbol Type *> children expression
-- TODO: support As expressions
exceptClause :: Assignment
exceptClause = makeTerm <$> symbol ExceptClause <*> children (Statement.Catch <$> (makeTerm <$> location <*> (many identifier)) <*> (makeTerm <$> location <*> (many expression)))
finallyClause :: Assignment
finallyClause = makeTerm <$> symbol FinallyClause <*> children (Statement.Finally <$> expression)
dottedName :: Assignment
dottedName = makeTerm <$> symbol DottedName <*> children (Expression.ScopeResolution <$> many expression)
@ -265,6 +356,12 @@ import' = makeTerm <$> symbol ImportStatement <*> children (Declaration.Import <
importFrom :: Assignment
importFrom = makeTerm <$> symbol ImportFromStatement <*> children (Declaration.Import <$> many expression)
importAlias :: Assignment
importAlias = makeTerm <$> symbol AliasedImport <*> children (flip Statement.Let <$> expression <*> expression <*> emptyTerm)
wildcardImport :: Assignment
wildcardImport = makeTerm <$> symbol WildcardImport <*> (Syntax.Identifier <$> source)
assertStatement :: Assignment
assertStatement = makeTerm <$ symbol AssertStatement <*> location <*> children (Expression.Call <$> (makeTerm <$> symbol AnonAssert <*> (Syntax.Identifier <$> source)) <*> many expression)
@ -289,7 +386,7 @@ await :: Assignment
await = makeTerm <$> symbol Await <*> children (Expression.Call <$> (makeTerm <$> symbol AnonAwait <*> (Syntax.Identifier <$> source)) <*> many expression)
returnStatement :: Assignment
returnStatement = makeTerm <$> symbol ReturnStatement <*> (Statement.Return <$> children expressionList)
returnStatement = makeTerm <$> symbol ReturnStatement <*> children (Statement.Return <$> (expressionList <|> emptyTerm))
deleteStatement :: Assignment
deleteStatement = makeTerm <$> symbol DeleteStatement <*> children (Expression.Call <$> deleteIdentifier <* symbol ExpressionList <*> children (many expression))
@ -334,12 +431,6 @@ boolean = makeTerm <$> symbol Grammar.True <*> (Literal.true <$ source)
none :: Assignment
none = makeTerm <$> symbol None <*> (Literal.Null <$ source)
lambda :: Assignment
lambda = makeTerm <$> symbol Lambda <*> children (Declaration.Function <$> lambdaIdentifier <*> lambdaParameters <*> lambdaBody)
where lambdaIdentifier = makeTerm <$> symbol AnonLambda <*> (Syntax.Identifier <$> source)
lambdaParameters = many identifier
lambdaBody = expression
comprehension :: Assignment
comprehension = makeTerm <$> symbol GeneratorExpression <*> children (comprehensionDeclaration expression)
<|> makeTerm <$> symbol ListComprehension <*> children (comprehensionDeclaration expression)


@ -2,9 +2,9 @@
module Language.Ruby where
import Data.List (partition)
import Data.Source
import Info
import Prologue
import Source hiding (null)
import Language
import qualified Syntax as S
import Term


@ -1,10 +1,10 @@
{-# LANGUAGE DataKinds #-}
module Language.TypeScript where
import Data.Source
import Info
import Prologue
import Source
import Language
import Prologue
import qualified Syntax as S
import Term


@ -11,11 +11,11 @@ module Parser
) where
import qualified CMark
import Data.Functor.Foldable hiding (fold, Nil)
import Data.Record
import Data.Source as Source
import qualified Data.Syntax as Syntax
import Data.Syntax.Assignment
import Data.Functor.Foldable hiding (fold, Nil)
import qualified Data.Text as T
import Data.Union
import Info hiding (Empty, Go)
import Language
@ -24,7 +24,6 @@ import qualified Language.Markdown.Syntax as Markdown
import qualified Language.Python.Syntax as Python
import qualified Language.Ruby.Syntax as Ruby
import Prologue hiding (Location)
import Source
import Syntax hiding (Go)
import System.IO (hPutStrLn)
import System.Console.ANSI
@ -88,15 +87,15 @@ runParser parser = case parser of
let errors = termErrors term `asTypeOf` toList err
traverse_ (printError source) errors
unless (Prologue.null errors) $ do
withSGRCode [SetConsoleIntensity BoldIntensity, SetColor Foreground Vivid Red] . hPutStrLn stderr . (shows (Prologue.length errors) . showChar ' ' . showString (if Prologue.length errors == 1 then "error" else "errors")) $ ""
withSGRCode [SetConsoleIntensity BoldIntensity, SetColor Foreground Vivid Red] . hPutStrLn stderr . (shows (length errors) . showChar ' ' . showString (if length errors == 1 then "error" else "errors")) $ ""
pure term
Nothing -> pure (errorTerm source err)
TreeSitterParser language tslanguage -> treeSitterParser language tslanguage
MarkdownParser -> pure . cmarkParser
LineByLineParser -> lineByLineParser
LineByLineParser -> pure . lineByLineParser
errorTerm :: Syntax.Error (Error grammar) :< fs => Source -> Maybe (Error grammar) -> Term (Union fs) (Record Location)
errorTerm source err = cofree ((totalRange source :. totalSpan source :. Nil) :< inj (Syntax.Error (fromMaybe (Error (SourcePos 0 0) (UnexpectedEndOfInput [])) err)))
errorTerm source err = cofree ((totalRange source :. totalSpan source :. Nil) :< inj (Syntax.Error (fromMaybe (Error (Pos 0 0) (UnexpectedEndOfInput [])) err)))
termErrors :: (Syntax.Error (Error grammar) :< fs, Functor (Union fs), Foldable (Union fs)) => Term (Union fs) a -> [Error grammar]
termErrors = cata $ \ (_ :< s) -> case s of
@ -104,13 +103,6 @@ termErrors = cata $ \ (_ :< s) -> case s of
_ -> fold s
-- | A fallback parser that treats a file simply as rows of strings.
lineByLineParser :: Source -> IO (SyntaxTerm Text DefaultFields)
lineByLineParser source = pure . cofree . root $ case foldl' annotateLeaves ([], 0) lines of
(leaves, _) -> cofree <$> leaves
where
lines = actualLines source
root children = (sourceRange :. Program :. rangeToSourceSpan source sourceRange :. Nil) :< Indexed children
sourceRange = Source.totalRange source
leaf byteIndex line = (Range byteIndex (byteIndex + T.length line) :. Program :. rangeToSourceSpan source (Range byteIndex (byteIndex + T.length line)) :. Nil) :< Leaf line
annotateLeaves (accum, byteIndex) line =
(accum <> [ leaf byteIndex (Source.toText line) ] , byteIndex + Source.length line)
lineByLineParser :: Source -> SyntaxTerm Text DefaultFields
lineByLineParser source = cofree $ (totalRange source :. Program :. totalSpan source :. Nil) :< Indexed (zipWith toLine [1..] (sourceLineRanges source))
where toLine line range = cofree $ (range :. Program :. Span (Pos line 1) (Pos line (end range)) :. Nil) :< Leaf (toText (slice range source))

View File

@ -1,82 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}
module Range where
import qualified Data.Char as Char
import Data.List (span)
import Data.List.NonEmpty (nonEmpty)
import Data.Semigroup
import Data.String
import Prologue
import Test.LeanCheck
-- | A half-open interval of integers, defined by start & end indices.
data Range = Range { start :: Int, end :: Int }
deriving (Eq, Show, Generic, NFData)
-- | Return the length of the range.
rangeLength :: Range -> Int
rangeLength range = end range - start range
-- | Offset a range by a constant delta.
offsetRange :: Range -> Int -> Range
offsetRange a b = Range (start a + b) (end a + b)
-- | Divide a range in two at the given coordinate.
--
-- Passing a coordinate that does not lie between start and end will result in one of the ranges being empty.
divideRange :: Range -> Int -> (Range, Range)
divideRange Range{..} at = (Range start divider, Range divider end)
where divider = max (min end at) start
-- | Break a string down into words and sequences of punctuation. Return a list
-- | of strings with ranges, assuming that the first character in the string is
-- | at the given index.
rangesAndWordsFrom :: Int -> String -> [(Range, String)]
rangesAndWordsFrom _ "" = []
rangesAndWordsFrom startIndex string = fromMaybe [] $ take isWord <|> take isPunctuation <|> skip Char.isSpace
where
save parsed = (Range startIndex $ endFor parsed, parsed)
take = parse (Just . save)
skip = parse (const Nothing)
endFor parsed = startIndex + length parsed
parse transform predicate = case span predicate string of
([], _) -> Nothing
(parsed, rest) -> Just . maybe identity (:) (transform parsed) $ rangesAndWordsFrom (endFor parsed) rest
-- | Is this a word character?
-- | Word characters are defined as in [Ruby’s `\p{Word}` syntax](http://ruby-doc.org/core-2.1.1/Regexp.html#class-Regexp-label-Character+Properties), i.e.:
-- | > A member of one of the following Unicode general category _Letter_, _Mark_, _Number_, _Connector_Punctuation_
isWord c = Char.isLetter c || Char.isNumber c || Char.isMark c || Char.generalCategory c == Char.ConnectorPunctuation
isPunctuation c = not (Char.isSpace c || isWord c)
-- | Return Just the last index from a non-empty range, or if the range is empty, Nothing.
maybeLastIndex :: Range -> Maybe Int
maybeLastIndex (Range start end) | start == end = Nothing
maybeLastIndex (Range _ end) = Just $ end - 1
-- | Test two ranges for intersection.
intersectsRange :: Range -> Range -> Bool
intersectsRange range1 range2 = start range1 < end range2 && start range2 < end range1
-- Return the (possibly empty, possibly ill-formed) intersection of two ranges.
intersectionRange :: Range -> Range -> Range
intersectionRange range1 range2 = Range (max (start range1) (start range2)) (min (end range1) (end range2))
-- | Return a range that contains both the given ranges.
unionRange :: Range -> Range -> Range
unionRange (Range start1 end1) (Range start2 end2) = Range (min start1 start2) (max end1 end2)
-- | Return a range that contains all the ranges in a Foldable, or the passed Range if the Foldable is empty.
unionRangesFrom :: Foldable f => Range -> f Range -> Range
unionRangesFrom range = maybe range sconcat . nonEmpty . toList
-- Instances
instance Semigroup Range where
a <> b = unionRange a b
instance Ord Range where
a <= b = start a <= start b
instance Listable Range where
tiers = cons2 Range
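For reference while reading the removal, a few hand-worked values that follow directly from the definitions above (illustrative only; they assume these combinators carry over unchanged to the new Data.Range module):

rangeExamples :: Bool
rangeExamples = and
  [ divideRange (Range 0 10) 3  == (Range 0 3, Range 3 10)    -- divider inside the range
  , divideRange (Range 0 10) 15 == (Range 0 10, Range 10 10)  -- divider clamped to the end
  , intersectsRange (Range 0 5) (Range 3 8)                   -- half-open intervals overlap
  , unionRangesFrom (Range 0 0) [Range 2 4, Range 7 9] == Range 2 9
  ]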

View File

@ -9,6 +9,7 @@ module Renderer.JSON
import Data.Aeson (ToJSON, toJSON, encode, object, (.=))
import Data.Aeson as A hiding (json)
import Data.Bifunctor.Join
import Data.Blob
import Data.Functor.Both (Both)
import qualified Data.Map as Map
import Data.Record
@ -17,7 +18,6 @@ import Info
import Language
import Patch
import Prologue hiding ((++))
import Source
import Syntax as S
--
@ -25,11 +25,11 @@ import Syntax as S
--
-- | Render a diff to a string representing its JSON.
renderJSONDiff :: ToJSON a => Both SourceBlob -> a -> Map.Map Text Value
renderJSONDiff :: ToJSON a => Both Blob -> a -> Map.Map Text Value
renderJSONDiff blobs diff = Map.fromList
[ ("diff", toJSON diff)
, ("oids", toJSON (decodeUtf8 . oid <$> toList blobs))
, ("paths", toJSON (path <$> toList blobs))
, ("oids", toJSON (decodeUtf8 . blobOid <$> toList blobs))
, ("paths", toJSON (blobPath <$> toList blobs))
]
instance StringConv (Map Text Value) ByteString where
@ -73,7 +73,7 @@ instance ToJSONFields Range where
instance ToJSONFields Category where
toJSONFields c = ["category" .= case c of { Other s -> s ; _ -> toS c }]
instance ToJSONFields SourceSpan where
instance ToJSONFields Span where
toJSONFields sourceSpan = [ "sourceSpan" .= sourceSpan ]
instance ToJSONFields a => ToJSONFields (Maybe a) where
@ -120,5 +120,5 @@ instance ToJSON a => ToJSON (File a) where
instance StringConv [Value] ByteString where
strConv _ = toS . (<> "\n") . encode
renderJSONTerm :: ToJSON a => SourceBlob -> a -> [Value]
renderJSONTerm SourceBlob{..} = pure . toJSON . File path blobLanguage
renderJSONTerm :: ToJSON a => Blob -> a -> [Value]
renderJSONTerm Blob{..} = pure . toJSON . File blobPath blobLanguage
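A sketch of the map renderJSONDiff assembles, using hypothetical blobs (the oids, path, and sources below are made up; it assumes the Data.Blob, Data.Functor.Both, and Language imports used by the specs elsewhere in this change, plus OverloadedStrings):

exampleBlobs :: Both Blob
exampleBlobs = both
  (Blob "x = 1\n" "abc123" "foo.rb" (Just defaultPlainBlob) (Just Ruby))
  (Blob "x = 2\n" "def456" "foo.rb" (Just defaultPlainBlob) (Just Ruby))

-- renderJSONDiff exampleBlobs someDiff, encoded to JSON, has the shape:
--   {"diff": ..., "oids": ["abc123", "def456"], "paths": ["foo.rb", "foo.rb"]}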

View File

@ -9,25 +9,25 @@ module Renderer.Patch
import Alignment
import Data.Bifunctor.Join
import Data.Blob
import qualified Data.ByteString.Char8 as ByteString
import Data.Functor.Both as Both
import Data.List (span, unzip)
import Data.Range
import Data.Record
import Data.Source
import Data.These
import Diff
import Patch
import Prologue hiding (fst, snd)
import Range
import qualified Source
import Source hiding (break, drop, length, null, take)
import SplitDiff
-- | Render a timed-out file as a truncated diff.
truncatePatch :: Both SourceBlob -> ByteString
truncatePatch :: Both Blob -> ByteString
truncatePatch blobs = header blobs <> "#timed_out\nTruncating diff: timeout reached.\n"
-- | Render a diff in the traditional patch format.
renderPatch :: (HasField fields Range, Traversable f) => Both SourceBlob -> Diff f (Record fields) -> File
renderPatch :: (HasField fields Range, Traversable f) => Both Blob -> Diff f (Record fields) -> File
renderPatch blobs diff = File $ if not (ByteString.null text) && ByteString.last text /= '\n'
then text <> "\n\\ No newline at end of file\n"
else text
@ -65,11 +65,11 @@ rowIncrement :: Join These a -> Both (Sum Int)
rowIncrement = Join . fromThese (Sum 0) (Sum 0) . runJoin . (Sum 1 <$)
-- | Given the before and after sources, render a hunk to a string.
showHunk :: Functor f => HasField fields Range => Both SourceBlob -> Hunk (SplitDiff f (Record fields)) -> ByteString
showHunk :: Functor f => HasField fields Range => Both Blob -> Hunk (SplitDiff f (Record fields)) -> ByteString
showHunk blobs hunk = maybeOffsetHeader <>
mconcat (showChange sources <$> changes hunk) <>
showLines (snd sources) ' ' (maybeSnd . runJoin <$> trailingContext hunk)
where sources = source <$> blobs
where sources = blobSource <$> blobs
maybeOffsetHeader = if lengthA > 0 && lengthB > 0
then offsetHeader
else mempty
@ -90,11 +90,11 @@ showLines source prefix lines = fromMaybe "" . mconcat $ fmap prepend . showLine
-- | Given a source, render a line to a string.
showLine :: Functor f => HasField fields Range => Source -> Maybe (SplitDiff f (Record fields)) -> Maybe ByteString
showLine source line | Just line <- line = Just . sourceText . (`slice` source) $ getRange line
showLine source line | Just line <- line = Just . sourceBytes . (`slice` source) $ getRange line
| otherwise = Nothing
-- | Returns the header given two source blobs and a hunk.
header :: Both SourceBlob -> ByteString
header :: Both Blob -> ByteString
header blobs = ByteString.intercalate "\n" ([filepathHeader, fileModeHeader] <> maybeFilepaths) <> "\n"
where filepathHeader = "diff --git a/" <> pathA <> " b/" <> pathB
fileModeHeader = case (modeA, modeB) of
@ -108,19 +108,19 @@ header blobs = ByteString.intercalate "\n" ([filepathHeader, fileModeHeader] <>
]
(Nothing, Nothing) -> ""
blobOidHeader = "index " <> oidA <> ".." <> oidB
modeHeader :: ByteString -> Maybe SourceKind -> ByteString -> ByteString
modeHeader :: ByteString -> Maybe BlobKind -> ByteString -> ByteString
modeHeader ty maybeMode path = case maybeMode of
Just _ -> ty <> "/" <> path
Nothing -> "/dev/null"
maybeFilepaths = if (nullOid == oidA && Source.null (snd sources)) || (nullOid == oidB && Source.null (fst sources)) then [] else [ beforeFilepath, afterFilepath ]
maybeFilepaths = if (nullOid == oidA && nullSource (snd sources)) || (nullOid == oidB && nullSource (fst sources)) then [] else [ beforeFilepath, afterFilepath ]
beforeFilepath = "--- " <> modeHeader "a" modeA pathA
afterFilepath = "+++ " <> modeHeader "b" modeB pathB
sources = source <$> blobs
(pathA, pathB) = case runJoin $ toS . path <$> blobs of
sources = blobSource <$> blobs
(pathA, pathB) = case runJoin $ toS . blobPath <$> blobs of
("", path) -> (path, path)
(path, "") -> (path, path)
paths -> paths
(oidA, oidB) = runJoin $ oid <$> blobs
(oidA, oidB) = runJoin $ blobOid <$> blobs
(modeA, modeB) = runJoin $ blobKind <$> blobs
-- | A hunk representing no changes.
@ -128,13 +128,13 @@ emptyHunk :: Hunk (SplitDiff a annotation)
emptyHunk = Hunk { offset = mempty, changes = [], trailingContext = [] }
-- | Render a diff as a series of hunks.
hunks :: (Traversable f, HasField fields Range) => Diff f (Record fields) -> Both SourceBlob -> [Hunk (SplitDiff [] (Record fields))]
hunks _ blobs | sources <- source <$> blobs
hunks :: (Traversable f, HasField fields Range) => Diff f (Record fields) -> Both Blob -> [Hunk (SplitDiff [] (Record fields))]
hunks _ blobs | sources <- blobSource <$> blobs
, sourcesEqual <- runBothWith (==) sources
, sourcesNull <- runBothWith (&&) (Source.null <$> sources)
, sourcesNull <- runBothWith (&&) (nullSource <$> sources)
, sourcesEqual || sourcesNull
= [emptyHunk]
hunks diff blobs = hunksInRows (pure 1) $ alignDiff (source <$> blobs) diff
hunks diff blobs = hunksInRows (pure 1) $ alignDiff (blobSource <$> blobs) diff
-- | Given beginning line numbers, turn rows in a split diff into hunks in a
-- | patch.

View File

@ -19,13 +19,16 @@ module Renderer.TOC
import Data.Aeson
import Data.Align (crosswalk)
import Data.Blob
import Data.Functor.Both hiding (fst, snd)
import qualified Data.Functor.Both as Both
import Data.Functor.Listable
import Data.List.NonEmpty (nonEmpty)
import Data.Proxy
import Data.Record
import Data.Source as Source
import Data.Text (toLower)
import qualified Data.Text as T
import Data.Text.Listable
import Data.These
import Data.Union
@ -35,7 +38,6 @@ import Patch
import Prologue
import qualified Data.List as List
import qualified Data.Map as Map hiding (null)
import Source hiding (null)
import Syntax as S
import Data.Syntax.Algebra (RAlgebra)
import qualified Data.Syntax as Syntax
@ -60,14 +62,14 @@ data JSONSummary
= JSONSummary
{ summaryCategoryName :: Text
, summaryTermName :: Text
, summarySourceSpan :: SourceSpan
, summarySpan :: Span
, summaryChangeType :: Text
}
| ErrorSummary { error :: Text, errorSpan :: SourceSpan }
| ErrorSummary { error :: Text, errorSpan :: Span }
deriving (Generic, Eq, Show)
instance ToJSON JSONSummary where
toJSON JSONSummary{..} = object [ "changeType" .= summaryChangeType, "category" .= summaryCategoryName, "term" .= summaryTermName, "span" .= summarySourceSpan ]
toJSON JSONSummary{..} = object [ "changeType" .= summaryChangeType, "category" .= summaryCategoryName, "term" .= summaryTermName, "span" .= summarySpan ]
toJSON ErrorSummary{..} = object [ "error" .= error, "span" .= errorSpan ]
isValidSummary :: JSONSummary -> Bool
@ -171,7 +173,7 @@ dedupe = foldl' go []
similarDeclaration = (==) `on` fmap (toLower . declarationIdentifier) . getDeclaration
-- | Construct a 'JSONSummary' from an 'Entry'. Returns 'Nothing' for 'Unchanged' patches.
entrySummary :: (HasField fields (Maybe Declaration), HasField fields SourceSpan) => Entry (Record fields) -> Maybe JSONSummary
entrySummary :: (HasField fields (Maybe Declaration), HasField fields Span) => Entry (Record fields) -> Maybe JSONSummary
entrySummary entry = case entry of
Unchanged _ -> Nothing
Changed a -> recordSummary a "modified"
@ -180,31 +182,31 @@ entrySummary entry = case entry of
Replaced a -> recordSummary a "modified"
-- | Construct a 'JSONSummary' from a node annotation and a change type label.
recordSummary :: (HasField fields (Maybe Declaration), HasField fields SourceSpan) => Record fields -> Text -> Maybe JSONSummary
recordSummary :: (HasField fields (Maybe Declaration), HasField fields Span) => Record fields -> Text -> Maybe JSONSummary
recordSummary record = case getDeclaration record of
Just (ErrorDeclaration text) -> Just . const (ErrorSummary text (sourceSpan record))
Just declaration -> Just . JSONSummary (toCategoryName declaration) (declarationIdentifier declaration) (sourceSpan record)
Nothing -> const Nothing
renderToCDiff :: (HasField fields (Maybe Declaration), HasField fields SourceSpan, Traversable f) => Both SourceBlob -> Diff f (Record fields) -> Summaries
renderToCDiff :: (HasField fields (Maybe Declaration), HasField fields Span, Traversable f) => Both Blob -> Diff f (Record fields) -> Summaries
renderToCDiff blobs = uncurry Summaries . bimap toMap toMap . List.partition isValidSummary . diffTOC
where toMap [] = mempty
toMap as = Map.singleton summaryKey (toJSON <$> as)
summaryKey = toS $ case runJoin (path <$> blobs) of
summaryKey = toS $ case runJoin (blobPath <$> blobs) of
(before, after) | null before -> after
| null after -> before
| before == after -> after
| otherwise -> before <> " -> " <> after
renderToCTerm :: (HasField fields (Maybe Declaration), HasField fields SourceSpan, Traversable f) => SourceBlob -> Term f (Record fields) -> Summaries
renderToCTerm :: (HasField fields (Maybe Declaration), HasField fields Span, Traversable f) => Blob -> Term f (Record fields) -> Summaries
renderToCTerm blob = uncurry Summaries . bimap toMap toMap . List.partition isValidSummary . termToC
where toMap [] = mempty
toMap as = Map.singleton (toS (path blob)) (toJSON <$> as)
toMap as = Map.singleton (toS (blobPath blob)) (toJSON <$> as)
diffTOC :: (HasField fields (Maybe Declaration), HasField fields SourceSpan, Traversable f) => Diff f (Record fields) -> [JSONSummary]
diffTOC :: (HasField fields (Maybe Declaration), HasField fields Span, Traversable f) => Diff f (Record fields) -> [JSONSummary]
diffTOC = mapMaybe entrySummary . dedupe . tableOfContentsBy declaration
termToC :: (HasField fields (Maybe Declaration), HasField fields SourceSpan, Traversable f) => Term f (Record fields) -> [JSONSummary]
termToC :: (HasField fields (Maybe Declaration), HasField fields Span, Traversable f) => Term f (Record fields) -> [JSONSummary]
termToC = mapMaybe (flip recordSummary "unchanged") . termTableOfContentsBy declaration
-- The user-facing category name
@ -212,7 +214,7 @@ toCategoryName :: Declaration -> Text
toCategoryName declaration = case declaration of
FunctionDeclaration _ -> "Function"
MethodDeclaration _ -> "Method"
SectionDeclaration _ -> "Section"
SectionDeclaration x -> "Heading " <> show (T.length (T.takeWhile (== '#') x))
ErrorDeclaration _ -> "ParseError"
instance Listable Declaration where
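The new Section case derives the heading level from the leading '#' characters of the heading text; for example (the first value matches the Markdown fixture exercised in TOCSpec later in this change):

-- toCategoryName (SectionDeclaration "## Two")  ==  "Heading 2"
-- toCategoryName (SectionDeclaration "# One")   ==  "Heading 1"
-- toCategoryName (MethodDeclaration "foo")      ==  "Method"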

View File

@ -9,10 +9,12 @@ module Semantic
import Algorithm hiding (diff)
import Data.Align.Generic (GAlign)
import Data.Blob
import Data.Functor.Both as Both
import Data.Functor.Classes (Eq1, Show1)
import Data.Proxy
import Data.Record
import Data.Source
import qualified Data.Syntax.Declaration as Declaration
import Data.Union
import Diff
@ -26,7 +28,6 @@ import Parser
import Prologue
import Renderer
import Semantic.Task as Task
import Source
import Term
import Text.Show
@ -39,37 +40,37 @@ import Text.Show
-- - Built-in concurrency where appropriate.
-- - Easy to consume this interface from other applications (e.g. a cmdline or web server app).
parseBlobs :: (Monoid output, StringConv output ByteString) => TermRenderer output -> [SourceBlob] -> Task ByteString
parseBlobs :: (Monoid output, StringConv output ByteString) => TermRenderer output -> [Blob] -> Task ByteString
parseBlobs renderer = fmap toS . distributeFoldMap (parseBlob renderer) . filter blobExists
-- | A task to parse a 'SourceBlob' and render the resulting 'Term'.
parseBlob :: TermRenderer output -> SourceBlob -> Task output
parseBlob renderer blob@SourceBlob{..} = case (renderer, blobLanguage) of
(ToCTermRenderer, Just Language.Markdown) -> parse markdownParser source >>= decorate (markupSectionAlgebra (Proxy :: Proxy Markdown.Error) source) >>= render (renderToCTerm blob)
(ToCTermRenderer, Just Language.Python) -> parse pythonParser source >>= decorate (declarationAlgebra (Proxy :: Proxy Python.Error) source) . hoistCofree (weaken :: Union fs a -> Union (Declaration.Method ': fs) a) >>= render (renderToCTerm blob)
(ToCTermRenderer, _) -> parse syntaxParser source >>= decorate (syntaxDeclarationAlgebra source) >>= render (renderToCTerm blob)
(JSONTermRenderer, Just Language.Markdown) -> parse markdownParser source >>= render (renderJSONTerm blob)
(JSONTermRenderer, Just Language.Python) -> parse pythonParser source >>= render (renderJSONTerm blob)
(JSONTermRenderer, _) -> parse syntaxParser source >>= decorate identifierAlgebra >>= render (renderJSONTerm blob)
(SExpressionTermRenderer, Just Language.Markdown) -> parse markdownParser source >>= decorate (ConstructorLabel . constructorLabel) >>= render renderSExpressionTerm . fmap keepConstructorLabel
(SExpressionTermRenderer, Just Language.Python) -> parse pythonParser source >>= decorate (ConstructorLabel . constructorLabel) >>= render renderSExpressionTerm . fmap keepConstructorLabel
(SExpressionTermRenderer, _) -> parse syntaxParser source >>= render renderSExpressionTerm . fmap keepCategory
-- | A task to parse a 'Blob' and render the resulting 'Term'.
parseBlob :: TermRenderer output -> Blob -> Task output
parseBlob renderer blob@Blob{..} = case (renderer, blobLanguage) of
(ToCTermRenderer, Just Language.Markdown) -> parse markdownParser blobSource >>= decorate (markupSectionAlgebra (Proxy :: Proxy Markdown.Error) blobSource) >>= render (renderToCTerm blob)
(ToCTermRenderer, Just Language.Python) -> parse pythonParser blobSource >>= decorate (declarationAlgebra (Proxy :: Proxy Python.Error) blobSource) . hoistCofree (weaken :: Union fs a -> Union (Declaration.Method ': fs) a) >>= render (renderToCTerm blob)
(ToCTermRenderer, _) -> parse syntaxParser blobSource >>= decorate (syntaxDeclarationAlgebra blobSource) >>= render (renderToCTerm blob)
(JSONTermRenderer, Just Language.Markdown) -> parse markdownParser blobSource >>= render (renderJSONTerm blob)
(JSONTermRenderer, Just Language.Python) -> parse pythonParser blobSource >>= render (renderJSONTerm blob)
(JSONTermRenderer, _) -> parse syntaxParser blobSource >>= decorate identifierAlgebra >>= render (renderJSONTerm blob)
(SExpressionTermRenderer, Just Language.Markdown) -> parse markdownParser blobSource >>= decorate (ConstructorLabel . constructorLabel) >>= render renderSExpressionTerm . fmap keepConstructorLabel
(SExpressionTermRenderer, Just Language.Python) -> parse pythonParser blobSource >>= decorate (ConstructorLabel . constructorLabel) >>= render renderSExpressionTerm . fmap keepConstructorLabel
(SExpressionTermRenderer, _) -> parse syntaxParser blobSource >>= render renderSExpressionTerm . fmap keepCategory
(IdentityTermRenderer, Just Language.Markdown) -> pure Nothing
(IdentityTermRenderer, Just Language.Python) -> pure Nothing
(IdentityTermRenderer, _) -> Just <$> parse syntaxParser source
(IdentityTermRenderer, _) -> Just <$> parse syntaxParser blobSource
where syntaxParser = parserForLanguage blobLanguage
diffBlobPairs :: (Monoid output, StringConv output ByteString) => DiffRenderer output -> [Both SourceBlob] -> Task ByteString
diffBlobPairs :: (Monoid output, StringConv output ByteString) => DiffRenderer output -> [Both Blob] -> Task ByteString
diffBlobPairs renderer = fmap toS . distributeFoldMap (diffBlobPair renderer) . filter (any blobExists)
-- | A task to parse a pair of 'SourceBlob's, diff them, and render the 'Diff'.
diffBlobPair :: DiffRenderer output -> Both SourceBlob -> Task output
-- | A task to parse a pair of 'Blob's, diff them, and render the 'Diff'.
diffBlobPair :: DiffRenderer output -> Both Blob -> Task output
diffBlobPair renderer blobs = case (renderer, effectiveLanguage) of
(ToCDiffRenderer, Just Language.Markdown) -> run (\ source -> parse markdownParser source >>= decorate (markupSectionAlgebra (Proxy :: Proxy Markdown.Error) source)) diffLinearly (renderToCDiff blobs)
(ToCDiffRenderer, Just Language.Python) -> run (\ source -> parse pythonParser source >>= decorate (declarationAlgebra (Proxy :: Proxy Python.Error) source) . hoistCofree (weaken :: Union fs a -> Union (Declaration.Method ': fs) a)) diffLinearly (renderToCDiff blobs)
(ToCDiffRenderer, _) -> run (\ source -> parse syntaxParser source >>= decorate (syntaxDeclarationAlgebra source)) diffTerms (renderToCDiff blobs)
(ToCDiffRenderer, Just Language.Markdown) -> run (\ blobSource -> parse markdownParser blobSource >>= decorate (markupSectionAlgebra (Proxy :: Proxy Markdown.Error) blobSource)) diffLinearly (renderToCDiff blobs)
(ToCDiffRenderer, Just Language.Python) -> run (\ blobSource -> parse pythonParser blobSource >>= decorate (declarationAlgebra (Proxy :: Proxy Python.Error) blobSource) . hoistCofree (weaken :: Union fs a -> Union (Declaration.Method ': fs) a)) diffLinearly (renderToCDiff blobs)
(ToCDiffRenderer, _) -> run (\ blobSource -> parse syntaxParser blobSource >>= decorate (syntaxDeclarationAlgebra blobSource)) diffTerms (renderToCDiff blobs)
(JSONDiffRenderer, Just Language.Markdown) -> run (parse markdownParser) diffLinearly (renderJSONDiff blobs)
(JSONDiffRenderer, Just Language.Python) -> run (parse pythonParser) diffLinearly (renderJSONDiff blobs)
(JSONDiffRenderer, _) -> run (decorate identifierAlgebra <=< parse syntaxParser) diffTerms (renderJSONDiff blobs)
@ -79,18 +80,18 @@ diffBlobPair renderer blobs = case (renderer, effectiveLanguage) of
(SExpressionDiffRenderer, Just Language.Markdown) -> run (decorate (ConstructorLabel . constructorLabel) <=< parse markdownParser) diffLinearly (renderSExpressionDiff . mapAnnotations keepConstructorLabel)
(SExpressionDiffRenderer, Just Language.Python) -> run (decorate (ConstructorLabel . constructorLabel) <=< parse pythonParser) diffLinearly (renderSExpressionDiff . mapAnnotations keepConstructorLabel)
(SExpressionDiffRenderer, _) -> run (parse syntaxParser) diffTerms (renderSExpressionDiff . mapAnnotations keepCategory)
(IdentityDiffRenderer, _) -> run (\ source -> parse syntaxParser source >>= decorate (syntaxDeclarationAlgebra source)) diffTerms Just
(IdentityDiffRenderer, _) -> run (\ blobSource -> parse syntaxParser blobSource >>= decorate (syntaxDeclarationAlgebra blobSource)) diffTerms Just
where effectiveLanguage = runBothWith (<|>) (blobLanguage <$> blobs)
syntaxParser = parserForLanguage effectiveLanguage
run :: Functor f => (Source -> Task (Term f a)) -> (Both (Term f a) -> Diff f a) -> (Diff f a -> output) -> Task output
run parse diff renderer = distributeFor blobs (parse . source) >>= diffTermPair blobs diff >>= render renderer
run parse diff renderer = distributeFor blobs (parse . blobSource) >>= diffTermPair blobs diff >>= render renderer
diffLinearly :: (Eq1 f, GAlign f, Show1 f, Traversable f) => Both (Term f (Record fields)) -> Diff f (Record fields)
diffLinearly = decoratingWith constructorLabel (diffTermsWith linearly comparableByConstructor)
-- | A task to diff a pair of 'Term's, producing insertion/deletion 'Patch'es for non-existent 'SourceBlob's.
diffTermPair :: Functor f => Both SourceBlob -> Differ f a -> Both (Term f a) -> Task (Diff f a)
-- | A task to diff a pair of 'Term's, producing insertion/deletion 'Patch'es for non-existent 'Blob's.
diffTermPair :: Functor f => Both Blob -> Differ f a -> Both (Term f a) -> Task (Diff f a)
diffTermPair blobs differ terms = case runJoin (blobExists <$> blobs) of
(True, False) -> pure (deleting (Both.fst terms))
(False, True) -> pure (inserting (Both.snd terms))
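A minimal usage sketch, mirroring what the specs later in this change do (the helper name is hypothetical; it assumes runTask from Semantic.Task and toS from Prologue are in scope):

-- Diff a pair of blobs and render table-of-contents summaries as JSON bytes.
tocForPair :: Both Blob -> IO ByteString
tocForPair blobs = toS <$> runTask (diffBlobPair ToCDiffRenderer blobs)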

View File

@ -18,11 +18,11 @@ import qualified Control.Concurrent.Async as Async
import Control.Monad.Free.Freer
import Data.Functor.Both as Both
import Data.Record
import Data.Source
import Data.Syntax.Algebra (RAlgebra, decoratorWithAlgebra)
import Diff
import Parser
import Prologue
import Source
import Term
data TaskF output where

View File

@ -1,169 +0,0 @@
{-# LANGUAGE GeneralizedNewtypeDeriving, MultiParamTypeClasses #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
module Source where
import qualified Data.ByteString as B
import Data.List (span)
import Data.String (IsString(..))
import qualified Data.Text as T
import Language
import Numeric
import Range
import Prologue
import SourceSpan
import System.IO (FilePath)
import Test.LeanCheck
-- | The source, oid, path, and Maybe SourceKind of a blob.
data SourceBlob = SourceBlob
{ source :: Source -- ^ The UTF-8 encoded source text of the blob.
, oid :: ByteString -- ^ The Git object ID (SHA-1) of the blob.
, path :: FilePath -- ^ The file path to the blob.
, blobKind :: Maybe SourceKind -- ^ The kind of blob, Nothing denotes a blob that doesn't exist (e.g. on one side of a diff for adding a new file or deleting a file).
, blobLanguage :: Maybe Language -- ^ The language of this blob. Nothing denotes a language we don't support yet.
} deriving (Show, Eq)
-- | The contents of a source file, represented as a ByteString.
newtype Source = Source { sourceText :: B.ByteString }
deriving (Eq, IsString, Show)
-- | The kind of a blob, along with its file mode.
data SourceKind = PlainBlob Word32 | ExecutableBlob Word32 | SymlinkBlob Word32
deriving (Show, Eq)
modeToDigits :: SourceKind -> ByteString
modeToDigits (PlainBlob mode) = toS $ showOct mode ""
modeToDigits (ExecutableBlob mode) = toS $ showOct mode ""
modeToDigits (SymlinkBlob mode) = toS $ showOct mode ""
-- | The default plain blob mode
defaultPlainBlob :: SourceKind
defaultPlainBlob = PlainBlob 0o100644
emptySourceBlob :: FilePath -> SourceBlob
emptySourceBlob filepath = SourceBlob Source.empty Source.nullOid filepath Nothing Nothing
nullBlob :: SourceBlob -> Bool
nullBlob SourceBlob{..} = oid == nullOid || Source.null source
blobExists :: SourceBlob -> Bool
blobExists SourceBlob{..} = isJust blobKind
sourceBlob :: FilePath -> Maybe Language -> Source -> SourceBlob
sourceBlob filepath language source = SourceBlob source Source.nullOid filepath (Just defaultPlainBlob) language
-- | Map blobs with Nothing blobKind to empty blobs.
idOrEmptySourceBlob :: SourceBlob -> SourceBlob
idOrEmptySourceBlob blob = if isNothing (blobKind blob)
then blob { oid = nullOid, blobKind = Nothing }
else blob
nullOid :: ByteString
nullOid = "0000000000000000000000000000000000000000"
empty :: Source
empty = Source B.empty
-- | Return a Source from Text.
fromText :: T.Text -> Source
fromText = Source . encodeUtf8
-- | Return a Source that contains a slice of the given Source.
slice :: Range -> Source -> Source
slice range = take . drop
where drop = Source.drop (start range)
take = Source.take (rangeLength range)
drop :: Int -> Source -> Source
drop i = Source . drop . sourceText
where drop = B.drop i
take :: Int -> Source -> Source
take i = Source . take . sourceText
where take = B.take i
-- | Return the Text contained in the Source, decoded from UTF-8.
toText :: Source -> Text
toText = decodeUtf8 . sourceText
-- | Split the source into the longest prefix of elements that do not satisfy the predicate and the rest without copying.
break :: (Word8 -> Bool) -> Source -> (Source, Source)
break predicate (Source text) = let (start, remainder) = B.break predicate text in (Source start, Source remainder)
-- | Split the contents of the source after newlines.
actualLines :: Source -> [Source]
actualLines = fmap Source . actualLines' . sourceText
where actualLines' text
| B.null text = [ text ]
| otherwise = case B.break (== toEnum (fromEnum '\n')) text of
(l, lines') -> case B.uncons lines' of
Nothing -> [ l ]
Just (_, lines') -> (l <> B.singleton (toEnum (fromEnum '\n'))) : actualLines' lines'
-- | Compute the 'Range's of each line in a 'Source'.
actualLineRanges :: Source -> [Range]
actualLineRanges = Prologue.drop 1 . scanl toRange (Range 0 0) . actualLines
where toRange previous string = Range (end previous) $ end previous + B.length (sourceText string)
-- | Compute the 'Range's of each line in a 'Range' of a 'Source'.
actualLineRangesWithin :: Range -> Source -> [Range]
actualLineRangesWithin range = Prologue.drop 1 . scanl toRange (Range (start range) (start range)) . actualLines . slice range
where toRange previous string = Range (end previous) $ end previous + B.length (sourceText string)
-- | Compute the byte 'Range' corresponding to a given 'SourceSpan' in a 'Source'.
sourceSpanToRange :: Source -> SourceSpan -> Range
sourceSpanToRange source SourceSpan{..} = Range start end
where start = pred (sumLengths leadingRanges + column spanStart)
end = start + sumLengths (Prologue.take (line spanEnd - line spanStart) remainingRanges) + (column spanEnd - column spanStart)
(leadingRanges, remainingRanges) = splitAt (pred (line spanStart)) (actualLineRanges source)
sumLengths = sum . fmap rangeLength
-- | Compute the 'SourceSpan' corresponding to a given byte 'Range' in a 'Source'.
rangeToSourceSpan :: Source -> Range -> SourceSpan
rangeToSourceSpan source (Range rangeStart rangeEnd) = SourceSpan startPos endPos
where startPos = SourcePos (firstLine + 1) (rangeStart - start firstRange + 1)
endPos = SourcePos (firstLine + Prologue.length lineRanges) (rangeEnd - start lastRange + 1)
firstLine = Prologue.length before
(before, rest) = span ((< rangeStart) . end) (actualLineRanges source)
(lineRanges, _) = span ((<= rangeEnd) . start) rest
Just firstRange = getFirst (foldMap (First . Just) lineRanges)
Just lastRange = getLast (foldMap (Last . Just) lineRanges)
-- | Return a 'Range' that covers the entire text.
totalRange :: Source -> Range
totalRange = Range 0 . B.length . sourceText
-- | Return a 'SourceSpan' that covers the entire text.
totalSpan :: Source -> SourceSpan
totalSpan source = SourceSpan (SourcePos 1 1) (SourcePos (Prologue.length ranges) (succ (end lastRange - start lastRange)))
where ranges = actualLineRanges source
Just lastRange = getLast (foldMap (Last . Just) ranges)
length :: Source -> Int
length = B.length . sourceText
null :: Source -> Bool
null = B.null . sourceText
instance Semigroup Source where
Source a <> Source b = Source (a <> b)
instance Monoid Source where
mempty = Source.empty
mappend = (<>)
instance Listable Source where
tiers = (Source . unListableByteString) `mapT` tiers
newtype ListableByteString = ListableByteString { unListableByteString :: B.ByteString }
instance Listable ListableByteString where
tiers = (ListableByteString . encodeUtf8 . T.pack) `mapT` strings
where strings = foldr ((\\//) . listsOf . toTiers) []
[ ['a'..'z'] <> ['A'..'Z'] <> ['0'..'9']
, [' '..'/'] <> [':'..'@'] <> ['['..'`'] <> ['{'..'~']
, [chr 0x00..chr 0x1f] <> [chr 127] -- Control characters.
, [chr 0xa0..chr 0x24f] ] -- Non-ASCII.
instance StringConv Source ByteString where
strConv _ = sourceText
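A few hand-worked values against the definitions above, for reference while reading the removal (illustrative only; string literals use the IsString instance of Source):

sourceExamples :: Bool
sourceExamples = and
  [ actualLineRanges "foo\nbar" == [Range 0 4, Range 4 7]        -- each line range includes its newline
  , slice (Range 4 7) "foo\nbar" == "bar"
  , totalRange "foo\nbar" == Range 0 7
  , totalSpan "foo\nbar" == SourceSpan (SourcePos 1 1) (SourcePos 2 4)
  ]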

View File

@ -1,100 +0,0 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# OPTIONS_GHC -funbox-strict-fields #-}
-- |
-- Source position and span information
-- Mostly taken from purescript's SourcePos definition.
--
module SourceSpan where
import Data.Aeson ((.=), (.:))
import qualified Data.Aeson as A
import Data.List.NonEmpty (nonEmpty)
import Data.Semigroup
import Data.These
import Prologue
import Test.LeanCheck
-- |
-- Source position information
--
data SourcePos = SourcePos
{ -- |
-- Line number
--
line :: Int
-- |
-- Column number
--
, column :: Int
} deriving (Show, Read, Eq, Ord, Generic, Hashable, NFData)
displaySourcePos :: SourcePos -> Text
displaySourcePos SourcePos{..} =
"line " <> show line <> ", column " <> show column
instance A.ToJSON SourcePos where
toJSON SourcePos{..} =
A.toJSON [line, column]
instance A.FromJSON SourcePos where
parseJSON arr = do
[line, col] <- A.parseJSON arr
pure $ SourcePos line col
data SourceSpan = SourceSpan
{ -- |
-- Start of the span
--
spanStart :: SourcePos
-- End of the span
--
, spanEnd :: SourcePos
} deriving (Show, Read, Eq, Ord, Generic, Hashable, NFData)
displayStartEndPos :: SourceSpan -> Text
displayStartEndPos sp =
displaySourcePos (spanStart sp) <> " - " <> displaySourcePos (spanEnd sp)
unionSourceSpansFrom :: Foldable f => SourceSpan -> f SourceSpan -> SourceSpan
unionSourceSpansFrom sourceSpan = maybe sourceSpan sconcat . nonEmpty . toList
unionSourceSpan :: SourceSpan -> SourceSpan -> SourceSpan
unionSourceSpan (SourceSpan start1 end1) (SourceSpan start2 end2) = SourceSpan (min start1 start2) (max end1 end2)
emptySourceSpan :: SourceSpan
emptySourceSpan = SourceSpan (SourcePos 1 1) (SourcePos 1 1)
instance Semigroup SourceSpan where
a <> b = unionSourceSpan a b
instance A.ToJSON SourceSpan where
toJSON SourceSpan{..} =
A.object [ "start" .= spanStart
, "end" .= spanEnd
]
instance A.FromJSON SourceSpan where
parseJSON = A.withObject "SourceSpan" $ \o ->
SourceSpan <$>
o .: "start" <*>
o .: "end"
newtype SourceSpans = SourceSpans { unSourceSpans :: These SourceSpan SourceSpan }
deriving (Eq, Show)
instance A.ToJSON SourceSpans where
toJSON (SourceSpans spans) = case spans of
(This span) -> A.object ["delete" .= span]
(That span) -> A.object ["insert" .= span]
(These span1 span2) -> A.object ["replace" .= (span1, span2)]
toEncoding (SourceSpans spans) = case spans of
(This span) -> A.pairs $ "delete" .= span
(That span) -> A.pairs $ "insert" .= span
(These span1 span2) -> A.pairs $ "replace" .= (span1, span2)
instance Listable SourcePos where
tiers = cons2 SourcePos
instance Listable SourceSpan where
tiers = cons2 SourceSpan
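For reference, the JSON encoding these instances define: a position encodes as a [line, column] pair and a span as an object with "start" and "end". The round-trip check below is illustrative and uses the module's existing qualified Data.Aeson import:

-- toJSON (SourcePos 1 1)                               -- [1,1]
-- toJSON (SourceSpan (SourcePos 1 1) (SourcePos 2 4))  -- {"start": [1,1], "end": [2,4]}
spanJsonRoundTrip :: Bool
spanJsonRoundTrip = A.decode (A.encode sp) == Just sp
  where sp = SourceSpan (SourcePos 1 1) (SourcePos 2 4)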

View File

@ -7,35 +7,35 @@ module TreeSitter
import Prologue hiding (Constructor)
import Category
import Data.ByteString.Unsafe (unsafeUseAsCStringLen)
import Data.Functor.Foldable hiding (Nil)
import Data.Ix
import Data.Range
import Data.Record
import Data.Source
import Data.Span
import qualified Data.Syntax.Assignment as A
import Language
import qualified Language.C as C
import qualified Language.Go as Go
import qualified Language.TypeScript as TS
import qualified Language.Ruby as Ruby
import Range
import Source
import qualified Syntax
import Foreign
import Foreign.C.String (peekCString)
import Foreign.Marshal.Array (allocaArray)
import Data.Text.Foreign (withCStringLen)
import qualified Syntax as S
import Term
import Text.Parser.TreeSitter hiding (Language(..))
import qualified Text.Parser.TreeSitter as TS
import SourceSpan
import Info
-- | Returns a TreeSitter parser for the given language and TreeSitter grammar.
treeSitterParser :: Language -> Ptr TS.Language -> Source -> IO (Term (Syntax.Syntax Text) (Record DefaultFields))
treeSitterParser language grammar source = bracket ts_document_new ts_document_free $ \ document -> do
ts_document_set_language document grammar
withCStringLen (toText source) $ \ (sourceText, len) -> do
ts_document_set_input_string_with_length document sourceText len
unsafeUseAsCStringLen (sourceBytes source) $ \ (sourceBytes, len) -> do
ts_document_set_input_string_with_length document sourceBytes len
ts_document_parse_halt_on_error document
term <- documentToTerm language document source
pure term
@ -45,7 +45,7 @@ treeSitterParser language grammar source = bracket ts_document_new ts_document_f
parseToAST :: (Bounded grammar, Enum grammar) => Ptr TS.Language -> Source -> IO (Cofree [] (Record (Maybe grammar ': A.Location)))
parseToAST language source = bracket ts_document_new ts_document_free $ \ document -> do
ts_document_set_language document language
root <- withCStringLen (toText source) $ \ (source, len) -> do
root <- unsafeUseAsCStringLen (sourceBytes source) $ \ (source, len) -> do
ts_document_set_input_string_with_length document source len
ts_document_parse_halt_on_error document
alloca (\ rootPtr -> do
@ -102,16 +102,16 @@ isNonEmpty = (/= Empty) . category . extract
nodeRange :: Node -> Range
nodeRange Node{..} = Range (fromIntegral nodeStartByte) (fromIntegral nodeEndByte)
nodeSpan :: Node -> SourceSpan
nodeSpan Node{..} = nodeStartPoint `seq` nodeEndPoint `seq` SourceSpan (pointPos nodeStartPoint) (pointPos nodeEndPoint)
where pointPos TSPoint{..} = pointRow `seq` pointColumn `seq` SourcePos (1 + fromIntegral pointRow) (1 + fromIntegral pointColumn)
nodeSpan :: Node -> Span
nodeSpan Node{..} = nodeStartPoint `seq` nodeEndPoint `seq` Span (pointPos nodeStartPoint) (pointPos nodeEndPoint)
where pointPos TSPoint{..} = pointRow `seq` pointColumn `seq` Pos (1 + fromIntegral pointRow) (1 + fromIntegral pointColumn)
assignTerm :: Language -> Source -> Record DefaultFields -> [ SyntaxTerm Text '[ Range, Category, SourceSpan ] ] -> IO [ SyntaxTerm Text '[ Range, Category, SourceSpan ] ] -> IO (SyntaxTerm Text '[ Range, Category, SourceSpan ])
assignTerm :: Language -> Source -> Record DefaultFields -> [ SyntaxTerm Text DefaultFields ] -> IO [ SyntaxTerm Text DefaultFields ] -> IO (SyntaxTerm Text DefaultFields)
assignTerm language source annotation children allChildren =
cofree . (annotation :<) <$> case assignTermByLanguage language source (category annotation) children of
Just a -> pure a
_ -> defaultTermAssignment source (category annotation) children allChildren
where assignTermByLanguage :: Language -> Source -> Category -> [ SyntaxTerm Text '[ Range, Category, SourceSpan ] ] -> Maybe (S.Syntax Text (SyntaxTerm Text '[ Range, Category, SourceSpan ]))
where assignTermByLanguage :: Language -> Source -> Category -> [ SyntaxTerm Text DefaultFields ] -> Maybe (S.Syntax Text (SyntaxTerm Text DefaultFields))
assignTermByLanguage language = case language of
C -> C.termAssignment
Language.Go -> Go.termAssignment
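A hand-worked example of the node conversion above (the Node and TSPoint field values are hypothetical; tree-sitter points are zero-indexed, while the resulting Pos values are one-indexed):

-- For a node covering bytes 4..7, starting at row 0, column 4 and ending at row 0, column 7:
--   nodeRange node  ==  Range 4 7
--   nodeSpan  node  ==  Span (Pos 1 5) (Pos 1 8)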

View File

@ -10,14 +10,14 @@ import Data.Functor.Both as Both
import Data.Functor.Listable
import Data.List (nub)
import Data.Monoid hiding ((<>))
import Data.Range
import Data.Record
import qualified Data.Source as Source
import qualified Data.Text as Text
import Data.These
import Patch
import Prologue hiding (fst, snd)
import qualified Prologue
import Range
import qualified Source
import SplitDiff
import Syntax
import Term
@ -222,9 +222,9 @@ toAlignBranchInputs elements = (sources, join . (`evalState` both 0 0) . travers
alignBranchElement element = case element of
Child key contents -> Child key <$> joinCrosswalk lines contents
Margin contents -> Margin <$> joinCrosswalk lines contents
where lines = fmap Source.toText . Source.actualLines . Source.fromText
where lines = fmap Source.toText . Source.sourceLines . Source.fromText
sources = foldMap Source.fromText <$> bothContents elements
ranges = fmap (filter (\ (Range start end) -> start /= end)) $ Source.actualLineRangesWithin <$> (Source.totalRange <$> sources) <*> sources
ranges = fmap (filter (\ (Range start end) -> start /= end)) $ Source.sourceLineRangesWithin <$> (Source.totalRange <$> sources) <*> sources
bothContents = foldMap (modifyJoin (fromThese [] []) . fmap (:[]) . branchElementContents)
branchElementContents (Child _ contents) = contents
branchElementContents (Margin contents) = contents

View File

@ -1,12 +1,12 @@
module CommandSpec where
import Command
import Data.Blob
import Data.Functor.Both as Both
import Data.Maybe
import Data.String
import Language
import Prologue hiding (readFile, toList)
import Source
import Test.Hspec hiding (shouldBe, shouldNotBe, shouldThrow, errorCall)
import Test.Hspec.Expectations.Pretty
@ -15,7 +15,7 @@ spec = parallel $ do
describe "readFile" $ do
it "returns a blob for extant files" $ do
blob <- readFile "semantic-diff.cabal" Nothing
path blob `shouldBe` "semantic-diff.cabal"
blobPath blob `shouldBe` "semantic-diff.cabal"
it "returns a nullBlob for absent files" $ do
blob <- readFile "this file should not exist" Nothing
@ -30,26 +30,26 @@ spec = parallel $ do
it "returns blobs when there's no before" $ do
blobs <- blobsFromFilePath "test/fixtures/input/diff-no-before.json"
blobs `shouldBe` [both (emptySourceBlob "method.rb") b]
blobs `shouldBe` [both (emptyBlob "method.rb") b]
it "returns blobs when there's null before" $ do
blobs <- blobsFromFilePath "test/fixtures/input/diff-null-before.json"
blobs `shouldBe` [both (emptySourceBlob "method.rb") b]
blobs `shouldBe` [both (emptyBlob "method.rb") b]
it "returns blobs when there's no after" $ do
blobs <- blobsFromFilePath "test/fixtures/input/diff-no-after.json"
blobs `shouldBe` [both a (emptySourceBlob "method.rb")]
blobs `shouldBe` [both a (emptyBlob "method.rb")]
it "returns blobs when there's null after" $ do
blobs <- blobsFromFilePath "test/fixtures/input/diff-null-after.json"
blobs `shouldBe` [both a (emptySourceBlob "method.rb")]
blobs `shouldBe` [both a (emptyBlob "method.rb")]
it "returns blobs for unsupported language" $ do
h <- openFile "test/fixtures/input/diff-unsupported-language.json" ReadMode
blobs <- readBlobPairsFromHandle h
let b' = sourceBlob "test.kt" Nothing "fun main(args: Array<String>) {\nprintln(\"hi\")\n}\n"
blobs `shouldBe` [both (emptySourceBlob "test.kt") b']
blobs `shouldBe` [both (emptyBlob "test.kt") b']
it "detects language based on filepath for empty language" $ do
blobs <- blobsFromFilePath "test/fixtures/input/diff-empty-language.json"
@ -79,4 +79,4 @@ spec = parallel $ do
blobs <- readBlobPairsFromHandle h
pure blobs
data Fixture = Fixture { shas :: Both String, expectedBlobs :: [Both SourceBlob] }
data Fixture = Fixture { shas :: Both String, expectedBlobs :: [Both Blob] }

View File

@ -3,10 +3,10 @@ module Data.Syntax.Assignment.Spec where
import Data.ByteString.Char8 as B (words, length)
import Data.Record
import Data.Source
import Data.Syntax.Assignment
import Info
import Prologue
import Source hiding (source, length)
import Test.Hspec
import Text.Parser.TreeSitter.Language (Symbol(..), SymbolType(..))
@ -14,20 +14,20 @@ spec :: Spec
spec = do
describe "Applicative" $
it "matches in sequence" $
runAssignment headF ((,) <$> red <*> red) (makeState "helloworld" [node Red 0 5 [], node Red 5 10 []]) `shouldBe` Result Nothing (Just ((Out "hello", Out "world"), AssignmentState 10 (Info.SourcePos 1 11) "" []))
runAssignment headF ((,) <$> red <*> red) (makeState "helloworld" [node Red 0 5 [], node Red 5 10 []]) `shouldBe` Result Nothing (Just ((Out "hello", Out "world"), AssignmentState 10 (Info.Pos 1 11) "helloworld" []))
describe "Alternative" $ do
it "attempts multiple alternatives" $
runAssignment headF (green <|> red) (makeState "hello" [node Red 0 5 []]) `shouldBe` Result Nothing (Just (Out "hello", AssignmentState 5 (Info.SourcePos 1 6) "" []))
runAssignment headF (green <|> red) (makeState "hello" [node Red 0 5 []]) `shouldBe` Result Nothing (Just (Out "hello", AssignmentState 5 (Info.Pos 1 6) "hello" []))
it "matches repetitions" $
let s = "colourless green ideas sleep furiously"
w = words s
(_, nodes) = foldl (\ (i, prev) word -> (i + B.length word + 1, prev <> [node Red i (i + B.length word) []])) (0, []) w in
resultValue (runAssignment headF (many red) (makeState (Source s) nodes)) `shouldBe` Just (Out <$> w, AssignmentState (B.length s) (Info.SourcePos 1 (succ (B.length s))) "" [])
resultValue (runAssignment headF (many red) (makeState (fromBytes s) nodes)) `shouldBe` Just (Out <$> w, AssignmentState (B.length s) (Info.Pos 1 (succ (B.length s))) (fromBytes s) [])
it "matches one-or-more repetitions against one or more input nodes" $
resultValue (runAssignment headF (some red) (makeState "hello" [node Red 0 5 []])) `shouldBe` Just ([Out "hello"], AssignmentState 5 (Info.SourcePos 1 6) "" [])
resultValue (runAssignment headF (some red) (makeState "hello" [node Red 0 5 []])) `shouldBe` Just ([Out "hello"], AssignmentState 5 (Info.Pos 1 6) "hello" [])
describe "symbol" $ do
it "matches nodes with the same symbol" $
@ -42,24 +42,24 @@ spec = do
assignBy headF source "hi" (node Red 0 2 []) `shouldBe` Result Nothing (Just "hi")
it "advances past the current node" $
snd <$> runAssignment headF source (makeState "hi" [ node Red 0 2 [] ]) `shouldBe` Result Nothing (Just (AssignmentState 2 (Info.SourcePos 1 3) "" []))
snd <$> runAssignment headF source (makeState "hi" [ node Red 0 2 [] ]) `shouldBe` Result Nothing (Just (AssignmentState 2 (Info.Pos 1 3) "hi" []))
describe "children" $ do
it "advances past the current node" $
snd <$> runAssignment headF (children (pure (Out ""))) (makeState "a" [node Red 0 1 []]) `shouldBe` Result Nothing (Just (AssignmentState 1 (Info.SourcePos 1 2) "" []))
snd <$> runAssignment headF (children (pure (Out ""))) (makeState "a" [node Red 0 1 []]) `shouldBe` Result Nothing (Just (AssignmentState 1 (Info.Pos 1 2) "a" []))
it "matches if its subrule matches" $
() <$ runAssignment headF (children red) (makeState "a" [node Blue 0 1 [node Red 0 1 []]]) `shouldBe` Result Nothing (Just ())
it "does not match if its subrule does not match" $
(runAssignment headF (children red) (makeState "a" [node Blue 0 1 [node Green 0 1 []]])) `shouldBe` Result (Just (Error (Info.SourcePos 1 1) (UnexpectedSymbol [Red] Green))) Nothing
(runAssignment headF (children red) (makeState "a" [node Blue 0 1 [node Green 0 1 []]])) `shouldBe` Result (Just (Error (Info.Pos 1 1) (UnexpectedSymbol [Red] Green))) Nothing
it "matches nested children" $
runAssignment headF
(symbol Red *> children (symbol Green *> children (symbol Blue *> source)))
(makeState "1" [ node Red 0 1 [ node Green 0 1 [ node Blue 0 1 [] ] ] ])
`shouldBe`
Result Nothing (Just ("1", AssignmentState 1 (Info.SourcePos 1 2) "" []))
Result Nothing (Just ("1", AssignmentState 1 (Info.Pos 1 2) "1" []))
it "continues after children" $
resultValue (runAssignment headF
@ -68,7 +68,7 @@ spec = do
(makeState "BC" [ node Red 0 1 [ node Green 0 1 [] ]
, node Blue 1 2 [] ]))
`shouldBe`
Just (["B", "C"], AssignmentState 2 (Info.SourcePos 1 3) "" [])
Just (["B", "C"], AssignmentState 2 (Info.Pos 1 3) "BC" [])
it "matches multiple nested children" $
runAssignment headF
@ -76,20 +76,20 @@ spec = do
(makeState "12" [ node Red 0 2 [ node Green 0 1 [ node Blue 0 1 [] ]
, node Green 1 2 [ node Blue 1 2 [] ] ] ])
`shouldBe`
Result Nothing (Just (["1", "2"], AssignmentState 2 (Info.SourcePos 1 3) "" []))
Result Nothing (Just (["1", "2"], AssignmentState 2 (Info.Pos 1 3) "12" []))
describe "runAssignment" $ do
it "drops anonymous nodes before matching symbols" $
runAssignment headF red (makeState "magenta red" [node Magenta 0 7 [], node Red 8 11 []]) `shouldBe` Result Nothing (Just (Out "red", AssignmentState 11 (Info.SourcePos 1 12) "" []))
runAssignment headF red (makeState "magenta red" [node Magenta 0 7 [], node Red 8 11 []]) `shouldBe` Result Nothing (Just (Out "red", AssignmentState 11 (Info.Pos 1 12) "magenta red" []))
it "does not drop anonymous nodes after matching" $
runAssignment headF red (makeState "red magenta" [node Red 0 3 [], node Magenta 4 11 []]) `shouldBe` Result Nothing (Just (Out "red", AssignmentState 3 (Info.SourcePos 1 4) " magenta" [node Magenta 4 11 []]))
runAssignment headF red (makeState "red magenta" [node Red 0 3 [], node Magenta 4 11 []]) `shouldBe` Result Nothing (Just (Out "red", AssignmentState 3 (Info.Pos 1 4) "red magenta" [node Magenta 4 11 []]))
it "does not drop anonymous nodes when requested" $
runAssignment headF ((,) <$> magenta <*> red) (makeState "magenta red" [node Magenta 0 7 [], node Red 8 11 []]) `shouldBe` Result Nothing (Just ((Out "magenta", Out "red"), AssignmentState 11 (Info.SourcePos 1 12) "" []))
runAssignment headF ((,) <$> magenta <*> red) (makeState "magenta red" [node Magenta 0 7 [], node Red 8 11 []]) `shouldBe` Result Nothing (Just ((Out "magenta", Out "red"), AssignmentState 11 (Info.Pos 1 12) "magenta red" []))
node :: symbol -> Int -> Int -> [AST symbol] -> AST symbol
node symbol start end children = cofree $ (Just symbol :. Range start end :. Info.SourceSpan (Info.SourcePos 1 (succ start)) (Info.SourcePos 1 (succ end)) :. Nil) :< children
node symbol start end children = cofree $ (Just symbol :. Range start end :. Info.Span (Info.Pos 1 (succ start)) (Info.Pos 1 (succ end)) :. Nil) :< children
data Grammar = Red | Green | Blue | Magenta
deriving (Enum, Eq, Show)

View File

@ -1,11 +1,11 @@
module PatchOutputSpec where
import Prologue
import Data.Blob
import Data.Functor.Both
import Data.Range
import Data.Record
import Range
import Renderer.Patch
import Source
import Syntax
import Test.Hspec (Spec, describe, it, parallel)
import Test.Hspec.Expectations.Pretty
@ -14,4 +14,4 @@ spec :: Spec
spec = parallel $ do
describe "hunks" $ do
it "empty diffs have empty hunks" $
hunks (wrap $ pure (Range 0 0 :. Nil) :< Leaf ("" :: Text)) (both (SourceBlob Source.empty "abcde" "path2.txt" (Just defaultPlainBlob) Nothing) (SourceBlob Source.empty "xyz" "path2.txt" (Just defaultPlainBlob) Nothing)) `shouldBe` [Hunk {offset = pure 0, changes = [], trailingContext = []}]
hunks (wrap $ pure (Range 0 0 :. Nil) :< Leaf ("" :: Text)) (both (Blob mempty "abcde" "path2.txt" (Just defaultPlainBlob) Nothing) (Blob mempty "xyz" "path2.txt" (Just defaultPlainBlob) Nothing)) `shouldBe` [Hunk {offset = pure 0, changes = [], trailingContext = []}]

View File

@ -1,33 +0,0 @@
module RangeSpec where
import Prologue
import Range
import Test.Hspec (Spec, describe, it, parallel)
import Test.Hspec.Expectations.Pretty
spec :: Spec
spec = parallel $ do
describe "rangesAndWordsFrom" $ do
it "should produce no ranges for the empty string" $
rangesAndWordsFrom 0 mempty `shouldBe` []
it "should produce no ranges for whitespace" $
rangesAndWordsFrom 0 " \t\n " `shouldBe` []
it "should produce a list containing the range of the string for a single-word string" $
rangesAndWordsFrom 0 "word" `shouldBe` [ (Range 0 4, "word") ]
it "should produce a list of ranges for whitespace-separated words" $
rangesAndWordsFrom 0 "wordOne wordTwo" `shouldBe` [ (Range 0 7, "wordOne"), (Range 8 15, "wordTwo") ]
it "should skip multiple whitespace characters" $
rangesAndWordsFrom 0 "a b" `shouldBe` [ (Range 0 1, "a"), (Range 3 4, "b") ]
it "should skip whitespace at the start" $
rangesAndWordsFrom 0 " a b" `shouldBe` [ (Range 2 3, "a"), (Range 4 5, "b") ]
it "should skip whitespace at the end" $
rangesAndWordsFrom 0 "a b " `shouldBe` [ (Range 0 1, "a"), (Range 2 3, "b") ]
it "should produce ranges offset by its start index" $
rangesAndWordsFrom 100 "a b" `shouldBe` [ (Range 100 101, "a"), (Range 102 103, "b") ]

View File

@ -1,5 +1,6 @@
module SemanticSpec where
import Data.Blob
import Data.Functor.Both as Both
import Language
import Patch
@ -7,7 +8,6 @@ import Prologue
import Renderer
import Semantic
import Semantic.Task
import Source
import Syntax
import Test.Hspec hiding (shouldBe, shouldNotBe, shouldThrow, errorCall)
import Test.Hspec.Expectations.Pretty
@ -29,12 +29,12 @@ spec = parallel $ do
describe "diffTermPair" $ do
it "produces an Insert when the first blob is missing" $ do
result <- runTask (diffTermPair (both (emptySourceBlob "/foo") (sourceBlob "/foo" Nothing "")) (runBothWith replacing) (pure (cofree (() :< []))))
result <- runTask (diffTermPair (both (emptyBlob "/foo") (sourceBlob "/foo" Nothing "")) (runBothWith replacing) (pure (cofree (() :< []))))
(() <$) <$> result `shouldBe` pure (Insert ())
it "produces a Delete when the second blob is missing" $ do
result <- runTask (diffTermPair (both (sourceBlob "/foo" Nothing "") (emptySourceBlob "/foo")) (runBothWith replacing) (pure (cofree (() :< []))))
result <- runTask (diffTermPair (both (sourceBlob "/foo" Nothing "") (emptyBlob "/foo")) (runBothWith replacing) (pure (cofree (() :< []))))
(() <$) <$> result `shouldBe` pure (Delete ())
where
methodsBlob = SourceBlob (Source "def foo\nend\n") "ff7bbbe9495f61d9e1e58c597502d152bab1761e" "methods.rb" (Just defaultPlainBlob) (Just Ruby)
methodsBlob = Blob "def foo\nend\n" "ff7bbbe9495f61d9e1e58c597502d152bab1761e" "methods.rb" (Just defaultPlainBlob) (Just Ruby)

View File

@ -1,51 +1,51 @@
module SourceSpec where
import Data.Range
import Data.Source
import Data.Span
import qualified Data.Text as Text
import Prologue hiding (list)
import Range
import Source
import SourceSpan
import Test.Hspec
import Test.Hspec.LeanCheck
import Test.LeanCheck
spec :: Spec
spec = parallel $ do
describe "actualLineRanges" $ do
describe "sourceLineRanges" $ do
prop "produces 1 more range than there are newlines" $
\ source -> Prologue.length (actualLineRanges source) `shouldBe` succ (Text.count "\n" (toText source))
\ source -> length (sourceLineRanges source) `shouldBe` succ (Text.count "\n" (toText source))
prop "produces exhaustive ranges" $
\ source -> foldMap (`slice` source) (actualLineRanges source) `shouldBe` source
\ source -> foldMap (`slice` source) (sourceLineRanges source) `shouldBe` source
describe "sourceSpanToRange" $ do
describe "spanToRange" $ do
prop "computes single-line ranges" . forAll (unListableByteString `mapT` tiers) $
\ s -> let source = Source s
spans = zipWith (\ i Range {..} -> SourceSpan (SourcePos i 1) (SourcePos i (succ (end - start)))) [1..] ranges
ranges = actualLineRanges source in
sourceSpanToRange source <$> spans `shouldBe` ranges
\ s -> let source = fromBytes s
spans = zipWith (\ i Range {..} -> Span (Pos i 1) (Pos i (succ (end - start)))) [1..] ranges
ranges = sourceLineRanges source in
spanToRange source <$> spans `shouldBe` ranges
prop "computes multi-line ranges" $
\ source ->
sourceSpanToRange source (totalSpan source) `shouldBe` totalRange source
spanToRange source (totalSpan source) `shouldBe` totalRange source
prop "computes sub-line ranges" $
\ s -> let source = "*" <> s <> "*" in
sourceSpanToRange source (insetSpan (totalSpan source)) `shouldBe` insetRange (totalRange source)
spanToRange source (insetSpan (totalSpan source)) `shouldBe` insetRange (totalRange source)
prop "inverse of rangeToSourceSpan" $
\ a b -> let s = a <> "\n" <> b in sourceSpanToRange s (totalSpan s) `shouldBe` totalRange s
prop "inverse of rangeToSpan" $
\ a b -> let s = a <> "\n" <> b in spanToRange s (totalSpan s) `shouldBe` totalRange s
describe "rangeToSourceSpan" $ do
prop "inverse of sourceSpanToRange" $
\ a b -> let s = a <> "\n" <> b in rangeToSourceSpan s (totalRange s) `shouldBe` totalSpan s
describe "rangeToSpan" $ do
prop "inverse of spanToRange" $
\ a b -> let s = a <> "\n" <> b in rangeToSpan s (totalRange s) `shouldBe` totalSpan s
describe "totalSpan" $ do
prop "covers single lines" $
\ n -> totalSpan (fromText (Text.replicate n "*")) `shouldBe` SourceSpan (SourcePos 1 1) (SourcePos 1 (max 1 (succ n)))
\ n -> totalSpan (fromText (Text.replicate n "*")) `shouldBe` Span (Pos 1 1) (Pos 1 (max 1 (succ n)))
prop "covers multiple lines" $
\ n -> totalSpan (fromText (Text.intersperse '\n' (Text.replicate n "*"))) `shouldBe` SourceSpan (SourcePos 1 1) (SourcePos (max 1 n) (if n > 0 then 2 else 1))
\ n -> totalSpan (fromText (Text.intersperse '\n' (Text.replicate n "*"))) `shouldBe` Span (Pos 1 1) (Pos (max 1 n) (if n > 0 then 2 else 1))
prop "preserves characters" . forAll (toTiers (list +| [chr 0xa0..chr 0x24f])) $
\ c -> Text.unpack (toText (fromText (Text.singleton c))) `shouldBe` [c]
@ -54,9 +54,9 @@ spec = parallel $ do
\ s -> fromText (toText s) `shouldBe` s
insetSpan :: SourceSpan -> SourceSpan
insetSpan sourceSpan = sourceSpan { spanStart = (spanStart sourceSpan) { column = succ (column (spanStart sourceSpan)) }
, spanEnd = (spanEnd sourceSpan) { column = pred (column (spanEnd sourceSpan)) } }
insetSpan :: Span -> Span
insetSpan sourceSpan = sourceSpan { spanStart = (spanStart sourceSpan) { posColumn = succ (posColumn (spanStart sourceSpan)) }
, spanEnd = (spanEnd sourceSpan) { posColumn = pred (posColumn (spanEnd sourceSpan)) } }
insetRange :: Range -> Range
insetRange Range {..} = Range (succ start) (pred end)

View File

@ -9,7 +9,6 @@ import qualified Data.Syntax.Assignment.Spec
import qualified DiffSpec
import qualified InterpreterSpec
import qualified PatchOutputSpec
import qualified RangeSpec
import qualified SES.Myers.Spec
import qualified SourceSpec
import qualified TermSpec
@ -30,7 +29,6 @@ main = hspec $ do
describe "Diff" DiffSpec.spec
describe "Interpreter" InterpreterSpec.spec
describe "PatchOutput" PatchOutputSpec.spec
describe "Range" RangeSpec.spec
describe "SES.Myers" SES.Myers.Spec.spec
describe "Source" SourceSpec.spec
describe "Term" TermSpec.spec

View File

@ -7,9 +7,11 @@ module SpecHelpers
, unListableDiff
) where
import Data.Blob
import qualified Data.ByteString as B
import Data.Functor.Both
import Data.Functor.Listable
import Data.Source
import Diff
import Language
import Patch
@ -17,7 +19,6 @@ import Prologue hiding (readFile)
import Renderer
import Semantic
import Semantic.Task
import Source
import System.FilePath
import Term
@ -33,15 +34,15 @@ parseFilePath path = do
blob <- readFile path
runTask (parseBlob SExpressionTermRenderer blob)
-- | Read a file to a SourceBlob.
-- | Read a file to a Blob.
--
-- NB: This is intentionally duplicated from Command.Files because eventually
-- we want to be able to test a core Semantic library that has no knowledge of
-- the filesystem or Git. The tests, however, will still leverage reading files.
readFile :: FilePath -> IO SourceBlob
readFile :: FilePath -> IO Blob
readFile path = do
source <- (Just . Source <$> B.readFile path) `catch` (const (pure Nothing) :: IOException -> IO (Maybe Source))
pure $ fromMaybe (emptySourceBlob path) (sourceBlob path (languageForFilePath path) <$> source)
source <- (Just . fromBytes <$> B.readFile path) `catch` (const (pure Nothing) :: IOException -> IO (Maybe Source))
pure $ fromMaybe (emptyBlob path) (sourceBlob path (languageForFilePath path) <$> source)
-- | Returns a Maybe Language based on the FilePath's extension.
languageForFilePath :: FilePath -> Maybe Language

View File

@ -4,9 +4,11 @@ module TOCSpec where
import Data.Aeson
import Category as C
import Data.Blob
import Data.Functor.Both
import Data.Functor.Listable
import Data.Record
import Data.Source
import Data.Text.Listable
import Data.These
import Diff
@ -20,7 +22,6 @@ import Renderer.TOC
import RWS
import Semantic
import Semantic.Task
import Source
import SpecHelpers
import Syntax as S
import Term
@ -50,7 +51,7 @@ spec = parallel $ do
\ diff -> let diff' = fmap (1 <$) <$> mapAnnotations (const (0 :: Int)) (wrap (pure 0 :< Indexed [unListableDiff diff :: Diff (Syntax ()) Int])) in
tableOfContentsBy (\ (n :< _) -> if n == 0 then Just n else Nothing) diff' `shouldBe`
if Prologue.null diff' then [Unchanged 0]
else replicate (Prologue.length diff') (Changed 0)
else replicate (length diff') (Changed 0)
describe "diffTOC" $ do
it "blank if there are no methods" $
@ -148,11 +149,17 @@ spec = parallel $ do
output <- runTask (diffBlobPair ToCDiffRenderer blobs)
toS output `shouldBe` ("{\"changes\":{\"test/fixtures/toc/ruby/methods.A.rb -> test/fixtures/toc/ruby/methods.X.rb\":[{\"span\":{\"start\":[4,1],\"end\":[5,4]},\"category\":\"Method\",\"term\":\"baz\",\"changeType\":\"removed\"}]},\"errors\":{\"test/fixtures/toc/ruby/methods.A.rb -> test/fixtures/toc/ruby/methods.X.rb\":[{\"span\":{\"start\":[1,1],\"end\":[3,1]},\"error\":\"def bar\\nen\\n\"}]}}\n" :: ByteString)
it "summarizes Markdown headings" $ do
blobs <- blobsForPaths (both "markdown/headings.A.md" "markdown/headings.B.md")
output <- runTask (diffBlobPair ToCDiffRenderer blobs)
toS output `shouldBe` ("{\"changes\":{\"test/fixtures/toc/markdown/headings.A.md -> test/fixtures/toc/markdown/headings.B.md\":[{\"span\":{\"start\":[5,1],\"end\":[5,7]},\"category\":\"Heading 2\",\"term\":\"## Two\",\"changeType\":\"added\"}]},\"errors\":{}}\n" :: ByteString)
type Diff' = SyntaxDiff Text (Maybe Declaration ': DefaultFields)
type Term' = SyntaxTerm Text (Maybe Declaration ': DefaultFields)
numTocSummaries :: Diff' -> Int
numTocSummaries diff = Prologue.length $ filter isValidSummary (diffTOC diff)
numTocSummaries diff = length $ filter isValidSummary (diffTOC diff)
-- Return a diff where body is inserted in the expressions of a function. The function is present in both sides of the diff.
programWithChange :: Term' -> Diff'
@ -211,11 +218,11 @@ isMethodOrFunction a = case runCofree (unListableF a) of
(a :< _) | getField a == C.SingletonMethod -> True
_ -> False
blobsForPaths :: Both FilePath -> IO (Both SourceBlob)
blobsForPaths :: Both FilePath -> IO (Both Blob)
blobsForPaths = traverse (readFile . ("test/fixtures/toc/" <>))
sourceSpanBetween :: (Int, Int) -> (Int, Int) -> SourceSpan
sourceSpanBetween (s1, e1) (s2, e2) = SourceSpan (SourcePos s1 e1) (SourcePos s2 e2)
sourceSpanBetween :: (Int, Int) -> (Int, Int) -> Span
sourceSpanBetween (s1, e1) (s2, e2) = Span (Pos s1 e1) (Pos s2 e2)
blankDiff :: Diff'
blankDiff = wrap (pure arrayInfo :< Indexed [ inserting (cofree $ literalInfo :< Leaf "\"a\"") ])
@ -223,8 +230,8 @@ blankDiff = wrap (pure arrayInfo :< Indexed [ inserting (cofree $ literalInfo :<
arrayInfo = Nothing :. Range 0 3 :. ArrayLiteral :. sourceSpanBetween (1, 1) (1, 5) :. Nil
literalInfo = Nothing :. Range 1 2 :. StringLiteral :. sourceSpanBetween (1, 2) (1, 4) :. Nil
blankDiffBlobs :: Both SourceBlob
blankDiffBlobs = both (SourceBlob (fromText "[]") nullOid "a.js" (Just defaultPlainBlob) (Just TypeScript)) (SourceBlob (fromText "[a]") nullOid "b.js" (Just defaultPlainBlob) (Just TypeScript))
blankDiffBlobs :: Both Blob
blankDiffBlobs = both (Blob (fromText "[]") nullOid "a.js" (Just defaultPlainBlob) (Just TypeScript)) (Blob (fromText "[a]") nullOid "b.js" (Just defaultPlainBlob) (Just TypeScript))
instance Listable Text where
tiers = unListableText `mapT` tiers

View File

@ -2,5 +2,6 @@
(ExpressionStatements
(BooleanOperator
(Identifier)
{ (Other "||") -> (Other "&&") }
{ (Other "||")
->(Other "&&") }
(Identifier))))

View File

@ -2,5 +2,6 @@
(ExpressionStatements
(BooleanOperator
(Identifier)
{ (Other "&&") -> (Other "||") }
{ (Other "&&")
->(Other "||") }
(Identifier))))

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))+}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))+}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))-}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))-}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
{ (StringLiteral)

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
{ (StringLiteral)

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))

View File

@ -2,11 +2,9 @@
(ExpressionStatements
(Operator
(Other "delete")
{
(SubscriptAccess
{ (SubscriptAccess
(Identifier)
(StringLiteral))
->
(MemberAccess
->(MemberAccess
(Identifier)
(Identifier)) })))

View File

@ -2,11 +2,9 @@
(ExpressionStatements
(Operator
(Other "delete")
{
(MemberAccess
{ (MemberAccess
(Identifier)
(Identifier))
->
(SubscriptAccess
->(SubscriptAccess
(Identifier)
(StringLiteral)) })))

View File

@ -13,49 +13,49 @@
{ (Identifier)
->(Identifier) }))
(Export
(Other "export_specifier"
{ (Identifier)
->(Identifier)}
{ (Identifier)
->(Identifier)})
(Other "export_specifier"
{ (Identifier)
->(Identifier)}
{ (Identifier)
->(Identifier)})
(Other "export_specifier"
{ (Identifier)
->(Identifier)}))
(Export
(Other "export_specifier"
{ (Identifier)
->(Identifier) }
{ (Identifier)
->(Identifier) })
(Other "export_specifier"
{ (Identifier)
->(Identifier) }
{ (Identifier)
->(Identifier) })
(Other "export_specifier"
{ (Identifier)
->(Identifier) }))
(Export
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)}))
(Export
->(Identifier) }))
(Export
(VarAssignment
{ (Identifier)
->(Identifier)}
->(Identifier) }
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarAssignment
{ (Identifier)
->(Identifier)}
->(Identifier) }
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)}))
(Export
{ (Identifier)
->(Identifier)})
->(Identifier) }))
(Export
{ (Identifier)
->(Identifier) })
{+(Export
(Function
(Identifier)

View File

@ -64,16 +64,16 @@
(Function
(Params)))
(Export
{+(Function
(Identifier)
(Params))+}
{-(Other "export_specifier"
(Identifier)
(Identifier))-})
{+(Export
{+(Function
(Identifier)
(Params))+}
{-(Other "export_specifier"
(Identifier)
(Identifier))-})
{+(Export
(Other "export_specifier"
(Identifier)
(Identifier)))+}
(Identifier)
(Identifier)))+}
(Export
{ (StringLiteral)
->(StringLiteral) })

View File

@ -1,9 +1,14 @@
(Program
(If
{ (Identifier) -> (MemberAccess (Identifier) (Identifier)) }
{ (Identifier)
->(MemberAccess
(Identifier)
(Identifier)) }
(ExpressionStatements
(ExpressionStatements
(FunctionCall
(Identifier)
{ (Identifier) -> (Identifier) }))
{ +(ExpressionStatements (Identifier))+ })))
{ (Identifier)
->(Identifier) }))
{+(ExpressionStatements
(Identifier))+})))

View File

@ -1,9 +1,14 @@
(Program
(If
{ (MemberAccess (Identifier) (Identifier)) -> (Identifier) }
{ (MemberAccess
(Identifier)
(Identifier))
->(Identifier) }
(ExpressionStatements
(ExpressionStatements
(FunctionCall
(Identifier)
{ (Identifier) -> (Identifier) }))
{ -(ExpressionStatements (Identifier))- })))
{ (Identifier)
->(Identifier) }))
{-(ExpressionStatements
(Identifier))-})))

View File

@ -2,5 +2,6 @@
(ExpressionStatements
(RelationalOperator
(Identifier)
{ (Other "<") -> (Other "<=") }
{ (Other "<")
->(Other "<=") }
(Identifier))))

View File

@ -2,5 +2,6 @@
(ExpressionStatements
(RelationalOperator
(Identifier)
{ (Other "<=") -> (Other "<") }
{ (Other "<=")
->(Other "<") }
(Identifier))))

View File

@ -1,6 +1,8 @@
(Program
(ExpressionStatements
(Operator
{ (Other "typeof") -> (Identifier) }
{ (Identifier) -> (Other "instanceof") }
{ +(Identifier)+ })))
{ (Other "typeof")
->(Identifier) }
{ (Identifier)
->(Other "instanceof") }
{+(Identifier)+})))

View File

@ -1,6 +1,8 @@
(Program
(ExpressionStatements
(Operator
{ (Identifier) -> (Other "typeof") }
{ (Other "instanceof") -> (Identifier) }
{ -(Identifier)- })))
{ (Identifier)
->(Other "typeof") }
{ (Other "instanceof")
->(Identifier) }
{-(Identifier)-})))

View File

@ -6,3 +6,6 @@ def c(d):
def g(g, h,):
i
def h(i=j):
i

View File

@ -1,2 +1,2 @@
import a, b
import b.c as d
import a, b as c
import b.c as d, e

View File

@ -0,0 +1,3 @@
# One
Just some text

View File

@ -0,0 +1,5 @@
# One
Just some text
## Two

View File

@ -65,7 +65,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -77,7 +77,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -11,7 +11,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -23,7 +23,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -65,7 +65,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -77,7 +77,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -11,7 +11,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -23,7 +23,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -72,7 +72,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -84,7 +84,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -74,7 +74,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -86,7 +86,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -65,7 +65,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -77,7 +77,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -11,7 +11,7 @@
(Other "ambient_declaration"
(Class
(Identifier)
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params
@ -23,7 +23,7 @@
(Identifier)
(Ty
(Other "predefined_type")))
(Other "ambient_method_declaration"
(Method
(Identifier)
(Other "call_signature"
(Params)

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))+}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))+}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))-}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))-}

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
{ (StringLiteral)

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
{ (StringLiteral)

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))

View File

@ -2,7 +2,7 @@
(ExpressionStatements
(Constructor
(FunctionCall
(Identifier)
(Other "reserved_identifier")
(Identifier)
(NumberLiteral)
(StringLiteral)))))

View File

@ -13,49 +13,49 @@
{ (Identifier)
->(Identifier) }))
(Export
(Other "export_specifier"
{ (Identifier)
->(Identifier)}
{ (Identifier)
->(Identifier)})
(Other "export_specifier"
{ (Identifier)
->(Identifier)}
{ (Identifier)
->(Identifier)})
(Other "export_specifier"
{ (Identifier)
->(Identifier)}))
(Export
(Other "export_specifier"
{ (Identifier)
->(Identifier) }
{ (Identifier)
->(Identifier) })
(Other "export_specifier"
{ (Identifier)
->(Identifier) }
{ (Identifier)
->(Identifier) })
(Other "export_specifier"
{ (Identifier)
->(Identifier) }))
(Export
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)}))
(Export
->(Identifier) }))
(Export
(VarAssignment
{ (Identifier)
->(Identifier)}
->(Identifier) }
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarAssignment
{ (Identifier)
->(Identifier)}
->(Identifier) }
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)})
->(Identifier) })
(VarDecl
{ (Identifier)
->(Identifier)}))
(Export
{ (Identifier)
->(Identifier)})
->(Identifier) }))
(Export
{ (Identifier)
->(Identifier) })
{+(Export
(Function
(Identifier)

View File

@ -64,16 +64,16 @@
(Function
(Params)))
(Export
{+(Function
(Identifier)
(Params))+}
{-(Other "export_specifier"
(Identifier)
(Identifier))-})
{+(Export
{+(Function
(Identifier)
(Params))+}
{-(Other "export_specifier"
(Identifier)
(Identifier))-})
{+(Export
(Other "export_specifier"
(Identifier)
(Identifier)))+}
(Identifier)
(Identifier)))+}
(Export
{ (StringLiteral)
->(StringLiteral) })

View File

@ -5,7 +5,7 @@
(Other "type_parameter"
(Identifier)))
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -2,7 +2,7 @@
(Interface
(Identifier)
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -5,7 +5,7 @@
(Other "type_parameter"
(Identifier)))
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -2,7 +2,7 @@
(Interface
(Identifier)
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -6,7 +6,7 @@
(Other "type_parameter"
(Identifier)))-}
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
{ (StringLiteral)

View File

@ -6,7 +6,7 @@
(Other "type_parameter"
(Identifier)))+}
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
{ (StringLiteral)

View File

@ -5,7 +5,7 @@
(Other "type_parameter"
(Identifier)))
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -2,7 +2,7 @@
(Interface
(Identifier)
(Other "property_signature"
(Identifier)
(Other "reserved_identifier")
(Ty
(Other "literal_type"
(StringLiteral))))

View File

@ -5,6 +5,7 @@
(Method
(Other "accessibility_modifier")
(Other "readonly")
(Other "reserved_identifier")
(Identifier)
(Params)
(Ty

View File

@ -5,6 +5,7 @@
(Method
(Other "accessibility_modifier")
(Other "readonly")
(Other "reserved_identifier")
(Identifier)
(Params)
(Ty

View File

@ -5,6 +5,7 @@
(Method
(Other "accessibility_modifier")
{+(Other "readonly")+}
{+(Other "reserved_identifier")+}
(Identifier)
(Params)
(Ty

Some files were not shown because too many files have changed in this diff