Mirror of https://github.com/github/semantic.git, synced 2024-12-20 21:31:48 +03:00
Merge pull request #318 from github/remove-reprinter-and-rewriter
Remove experimental reprinting and rewriting facilities.
This commit is contained in: commit 2592c9b93b
@@ -165,12 +165,6 @@ library
    , Data.Patch
    , Data.Project
    , Data.Quieterm
-   , Data.Reprinting.Errors
-   , Data.Reprinting.Fragment
-   , Data.Reprinting.Operator
-   , Data.Reprinting.Scope
-   , Data.Reprinting.Splice
-   , Data.Reprinting.Token
    , Data.Semigroup.App
    , Data.Scientific.Exts
    -- À la carte syntax types
@@ -196,9 +190,7 @@ library
    , Language.Go.Syntax
    , Language.Go.Type
    , Language.JSON.Assignment
-   , Language.JSON.PrettyPrint
    , Language.Ruby.Assignment
-   , Language.Ruby.PrettyPrint
    , Language.Ruby.Syntax
    , Language.TSX.Assignment
    , Language.TSX.Syntax
@@ -213,7 +205,6 @@ library
    , Language.PHP.Assignment
    , Language.PHP.Syntax
    , Language.Python.Assignment
-   , Language.Python.PrettyPrint
    , Language.Python.Syntax
    , Numeric.Exts
    -- Parser glue
@@ -224,10 +215,6 @@ library
    , Rendering.Graph
    , Rendering.JSON
    , Rendering.TOC
-   , Reprinting.Tokenize
-   , Reprinting.Translate
-   , Reprinting.Typeset
-   , Reprinting.Pipeline
    -- High-level flow & operational functionality (logging, stats, etc.)
    , Semantic.Analysis
    -- API
@@ -362,10 +349,6 @@ test-suite test
    , Integration.Spec
    , Numeric.Spec
    , Parsing.Spec
-   , Reprinting.Spec
-   , Rewriting.Go.Spec
-   , Rewriting.JSON.Spec
-   , Rewriting.Python.Spec
    , Rendering.TOC.Spec
    , Semantic.Spec
    , Semantic.CLI.Spec
@@ -1,13 +0,0 @@
module Data.Reprinting.Errors ( TranslationError (..) ) where

import Data.Reprinting.Token
import Data.Reprinting.Scope

-- | Represents failure occurring in a 'Concrete' machine during the translation
-- phases of the reprinting pipeline.
data TranslationError
  = UnbalancedPair Scope [Scope]
    -- ^ Thrown if an unbalanced 'Enter'/'Exit' pair is encountered.
  | NoTranslation Element [Scope]
    -- ^ Thrown if no translation found for a given element.
    deriving (Eq, Show)
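To make the failure mode concrete: the translation machinery pushes a 'Scope' on every 'Enter' and pops on every 'Exit', and a mismatch produces 'UnbalancedPair'. A minimal, invented illustration of that check (assuming the module above; checkExit is not part of the original API):

-- Invented illustration: pop the scope stack on an Exit and fail when the
-- popped scope does not match what should close next.
checkExit :: Scope -> [Scope] -> Either TranslationError [Scope]
checkExit s (top : rest) | s == top = Right rest
checkExit s stack                   = Left (UnbalancedPair s stack)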
@@ -1,38 +0,0 @@
{-# LANGUAGE RankNTypes #-}

module Data.Reprinting.Fragment
  ( Fragment(..)
  , copy
  , insert
  , defer
  ) where

import Data.Text (Text)
import Streaming
import Streaming.Prelude (yield)

import Data.Reprinting.Scope
import Data.Reprinting.Token

-- | An intermediate representation of concrete syntax in the reprinting pipeline.
data Fragment
  = Verbatim Text
    -- ^ Verbatim copy of original 'Text' (un-refactored).
  | New Element [Scope] Text
    -- ^ New 'Text' to be inserted, along with original 'Element' and 'Scope',
    -- allowing later steps to re-write.
  | Defer Element [Scope]
    -- ^ To be handled further down the pipeline.
    deriving (Eq, Show)

-- | Copy along some original, un-refactored 'Text'.
copy :: Monad m => Text -> Stream (Of Fragment) m ()
copy = yield . Verbatim

-- | Insert some new 'Text'.
insert :: Monad m => Element -> [Scope] -> Text -> Stream (Of Fragment) m ()
insert el c = yield . New el c

-- | Defer processing an element to a later stage.
defer :: Monad m => Element -> [Scope] -> Stream (Of Fragment) m ()
defer el = yield . Defer el
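To give a feel for how these smart constructors compose (an invented example, assuming the modules above plus OverloadedStrings): verbatim text, a refactored insertion, and a deferred separator interleave in a single stream of fragments.

-- Invented example: one verbatim chunk, one insertion (carrying the Element
-- and Scope stack it came from), and a Sep deferred to a later stage.
demo :: Monad m => Stream (Of Fragment) m ()
demo = do
  copy "x = "
  insert (Truth True) [] "true"
  defer Sep [Imperative]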
@@ -1,40 +0,0 @@
module Data.Reprinting.Operator
  ( Operator (..)
  , Direction (..)
  ) where

data Direction
  = Less
  | Greater
    deriving (Show, Eq)

-- | A sum type representing every conceivable infix operator a
-- language can define. These are handled by instances of 'Concrete'
-- and given appropriate precedence.
data Operator
  = Add
  | Multiply
  | Subtract
  | Divide
  | Modulus
  | Raise
  | FloorDivide
  | RegexMatch
  | RegexNotMatch
  | LogicalOr
  | LogicalAnd
  | LogicalNot
  | LogicalXor
  | BinaryOr
  | BinaryAnd
  | BinaryXor
  | BinaryComplement
  | NumericNegate
  | LeftShift
  | RightShift
  | Eql
  | StrictEql
  | Compare Direction
  | CompareEql Direction
  | Spaceship
    deriving (Show, Eq)
@@ -1,52 +0,0 @@
module Data.Reprinting.Scope
  ( Scope (..)
  , precedenceOf
  , imperativeDepth
  ) where

import Data.Reprinting.Operator


-- | A 'Scope' represents a scope in which other tokens can be
-- interpreted. For example, in the 'Imperative' context a 'Sep'
-- could be a semicolon or newline, whereas in a 'List' context a
-- 'Sep' is probably going to be a comma.
-- TODO: look into sharing control-flow constructs with 'Flow'
data Scope
  = List                -- ^ List literals (usually comma-separated, in square brackets)
  | Hash                -- ^ Hashes (key-value pairs, in curly brackets)
  | Pair                -- ^ Colon-separated key-value pairs
  | Slice               -- ^ Range-selection context, as in Go or Python
  | Method              -- ^ Member-function declaration
  | Atom                -- ^ Quoted symbols, e.g. Ruby Symbol
  | Function            -- ^ Function declaration
  | Namespace           -- ^ Namespace/module context
  | Call                -- ^ Function call (usually comma-separated arguments)
  | Params              -- ^ Function parameters (ibid.)
  | Return              -- ^ Zero or more values
  | Loop                -- ^ @for@, @while@, @foreach@ loops
  | If                  -- ^ Conditionals
  | Case                -- ^ @case@ or @switch@ context
  | InfixL Operator Int -- ^ Left-associative operators, with context
  | Prefix Operator     -- ^ Prefix operators
  | Indexing            -- ^ Single-element array/list indexing
  | Imperative          -- ^ ALGOL-style top-to-bottom interpretation
  | Interpolation       -- ^ String interpolation
  | Catch               -- ^ @try@
  | Finally             -- ^ @except@
  | BeginBlock          -- ^ Ruby-specific: @BEGIN@
  | EndBlock            -- ^ Ruby-specific: @END@
  | Class               -- ^ Class definition
    deriving (Show, Eq)

precedenceOf :: [Scope] -> Int
precedenceOf cs = case filter isInfix cs of
  (InfixL _ n : _) -> n
  _                -> 0
  where isInfix (InfixL _ _) = True
        isInfix _            = False


-- | Depth of imperative scope.
imperativeDepth :: [Scope] -> Int
imperativeDepth = length . filter (== Imperative)
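For instance (reading directly off the two definitions above, assuming this module and its Operator import, with the stack ordered innermost-first):

-- An addition nested inside a multiplication, inside two imperative blocks.
stack :: [Scope]
stack = [InfixL Add 6, InfixL Multiply 7, Imperative, Imperative]

-- precedenceOf stack    == 6   (the innermost InfixL frame wins)
-- imperativeDepth stack == 2   (two Imperative frames)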
@@ -1,67 +0,0 @@
{-# LANGUAGE RankNTypes #-}

module Data.Reprinting.Splice
  ( Fragment(..)
  , copy
  , insert
  , defer
  , Splice(..)
  , emit
  , emitIf
  , layout
  , indent
  , layouts
  , space
  , Whitespace(..)
  , Indentation(..)
  ) where

import Prologue hiding (Element)

import Streaming
import Streaming.Prelude (yield)

import Data.Reprinting.Fragment

-- | The final representation of concrete syntax in the reprinting pipeline.
data Splice
  = Emit Text
  | Layout Whitespace
    deriving (Eq, Show)

-- | Emit some 'Text' as a 'Splice'.
emit :: Monad m => Text -> Stream (Of Splice) m ()
emit = yield . Emit

-- | Emit the provided 'Text' if the given predicate is true.
emitIf :: Monad m => Bool -> Text -> Stream (Of Splice) m ()
emitIf p = when p . emit

-- | Construct a layout 'Splice'.
layout :: Monad m => Whitespace -> Stream (Of Splice) m ()
layout = yield . Layout

-- | @indent w n@ emits @w@ 'Spaces' @n@ times.
indent :: Monad m => Int -> Int -> Stream (Of Splice) m ()
indent width times
  | times > 0 = replicateM_ times (layout (Indent width Spaces))
  | otherwise = pure ()

-- | Construct multiple layouts.
layouts :: Monad m => [Whitespace] -> Stream (Of Splice) m ()
layouts = traverse_ (yield . Layout)

-- | Single space.
space :: Monad m => Stream (Of Splice) m ()
space = yield (Layout Space)

-- | Indentation, spacing, and other whitespace.
data Whitespace
  = HardWrap
  | SoftWrap
  | Space
  | Indent Int Indentation
    deriving (Eq, Show)

data Indentation = Tabs | Spaces
  deriving (Eq, Show)
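A small invented example of the layout helpers (assuming the module above and OverloadedStrings): the stream below yields Emit "{", Layout HardWrap, Layout (Indent 2 Spaces), Emit "x", in that order.

-- Invented example: open a block, hard-wrap, indent one level of two
-- spaces, then emit the first line's text.
openBlock :: Monad m => Stream (Of Splice) m ()
openBlock = do
  emit "{"
  layout HardWrap
  indent 2 1
  emit "x"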
@@ -1,78 +0,0 @@
module Data.Reprinting.Token
  ( Token (..)
  , isChunk
  , isControl
  , Element (..)
  , Control (..)
  , Flow (..)
  ) where

import Data.Reprinting.Scope
import Data.Text (Text)
import Source.Source (Source)

-- | 'Token' encapsulates 'Element' and 'Control' tokens, as well as sliced
-- portions of the original 'Source' for a given AST.
data Token
  = Chunk Source     -- ^ Verbatim 'Source' from AST, unmodified.
  | Element Element  -- ^ Content token to be rendered.
  | Control Control  -- ^ AST's context.
    deriving (Show, Eq)

isChunk :: Token -> Bool
isChunk (Chunk _) = True
isChunk _ = False

isControl :: Token -> Bool
isControl (Control _) = True
isControl _ = False

-- | 'Element' tokens describe atomic pieces of source code to be
-- output to a rendered document. These tokens are language-agnostic
-- and are interpreted into language-specific representations at a
-- later point in the reprinting pipeline.
data Element
  = Run Text    -- ^ A literal chunk of text.
  | Truth Bool  -- ^ A boolean value.
  | Glyph Text  -- ^ A glyph like 'a' or #a.
  | Nullity     -- ^ @null@ or @nil@ or some other zero value.
  | Sep         -- ^ Some sort of delimiter, interpreted in some 'Context'.
  | Sym         -- ^ Some sort of symbol, interpreted in some 'Context'.
  | Open        -- ^ The beginning of some 'Context', such as an @[@ or @{@.
  | Close       -- ^ The opposite of 'Open'.
  | Access      -- ^ Member/method access
  | Resolve     -- ^ Namespace/package resolution
  | Assign      -- ^ Variable binding
  | Self        -- ^ @self@ or @this@
  | Superclass  -- ^ @super@
  | Flow Flow   -- ^ Control-flow token (@if@, @else@, @for@...)
  | Extends     -- ^ Subclassing indicator (syntax varies)
    deriving (Eq, Show)

-- | Helper datum to corral control-flow entities like @while@, @for@,
-- etc. Usually corresponds to a keyword in a given language.
data Flow
  = Break
  | Continue
  | Else
  | For
  | Foreach
  | In      -- ^ Usually associated with 'Foreach' loops
  | Rescue  -- ^ AKA @catch@ in most languages
  | Retry
  | Switch  -- ^ AKA @case@
  | Then    -- ^ The true-branch of @if@-statements
  | Try
  | While
  | Yield
    deriving (Eq, Show)

-- | 'Control' tokens describe information about some AST's context.
-- Though these are ultimately rendered as whitespace (or nothing) on
-- the page, they are needed to provide information as to how deeply
-- subsequent entries in the pipeline should indent.
data Control
  = Enter Scope
  | Exit Scope
  | Log String
    deriving (Eq, Show)
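Putting the three token kinds together, a plausible (invented) stream for the source text "[1]" — a real stream would carry 'Chunk' slices of the original 'Source' wherever text is reused verbatim:

-- Invented example (assumes OverloadedStrings): scope controls bracket the
-- element tokens, so later stages know that Sep should render as "," and
-- Open/Close as square brackets inside a List scope.
listTokens :: [Token]
listTokens =
  [ Control (Enter List)
  , Element Open
  , Element (Run "1")
  , Element Close
  , Control (Exit List)
  ]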
@@ -8,13 +8,11 @@ import Data.JSON.Fields
 import qualified Data.Set as Set
 import Data.Sum
 import Data.Term
-import qualified Data.Reprinting.Token as Token
 import GHC.Types (Constraint)
 import GHC.TypeLits
 import Diffing.Algorithm
 import Prelude
 import Prologue
-import Reprinting.Tokenize hiding (Element)
 import Source.Loc
 import Source.Range as Range
 import Source.Span as Span
@@ -133,9 +131,6 @@ instance Evaluatable Identifier where
   ref _ _ (Identifier name) = lookupSlot (Declaration name)


-instance Tokenize Identifier where
-  tokenize = yield . Token.Run . formatName . Data.Syntax.name

 instance FreeVariables1 Identifier where
   liftFreeVariables _ (Identifier x) = Set.singleton x

@@ -163,9 +158,6 @@ data Empty a = Empty
 instance Evaluatable Empty where
   eval _ _ _ = unit

-instance Tokenize Empty where
-  tokenize = ignore

 -- | Syntax representing a parsing or assignment error.
 data Error a = Error { errorCallStack :: ErrorStack, errorExpected :: [String], errorActual :: Maybe String, errorChildren :: [a] }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -173,10 +165,6 @@ data Error a = Error { errorCallStack :: ErrorStack, errorExpected :: [String],

 instance Evaluatable Error

-instance Tokenize Error where
-  -- TODO: Consider producing comments like "ERROR: due to.." instead of ignoring.
-  tokenize = ignore

 errorSyntax :: Error.Error String -> [a] -> Error a
 errorSyntax Error.Error{..} = Error (ErrorStack $ errorSite <$> getCallStack callStack) errorExpected errorActual

@@ -239,8 +227,5 @@ instance Hashable1 Context where liftHashWithSalt = foldl
 instance Evaluatable Context where
   eval eval _ Context{..} = eval contextSubject

-instance Tokenize Context where
-  tokenize Context{..} = sequenceA_ (sepTrailing contextTerms) *> contextSubject

 instance Declarations1 Context where
   liftDeclaredName declaredName = declaredName . contextSubject
@@ -7,7 +7,6 @@ import Prologue
 import Data.Abstract.Evaluatable
 import Data.JSON.Fields
 import Diffing.Algorithm
-import Reprinting.Tokenize as Token

 -- | An unnested comment (line or block).
 newtype Comment a = Comment { commentContent :: Text }
@@ -17,9 +16,6 @@ newtype Comment a = Comment { commentContent :: Text }
 instance Evaluatable Comment where
   eval _ _ _ = unit

-instance Tokenize Comment where
-  tokenize = yield . Run . commentContent

 -- TODO: nested comment types
 -- TODO: documentation comment types
 -- TODO: literate programming comment types? alternatively, consider those as markup
@@ -13,9 +13,7 @@ import Data.Abstract.Evaluatable
 import Data.Abstract.Name (__self)
 import qualified Data.Abstract.ScopeGraph as ScopeGraph
 import Data.JSON.Fields
-import qualified Data.Reprinting.Scope as Scope
 import Diffing.Algorithm
-import Reprinting.Tokenize hiding (Superclass)
 import Source.Span

 data Function a = Function { functionContext :: ![a], functionName :: !a, functionParameters :: ![a], functionBody :: !a }
@@ -59,11 +57,6 @@ declareFunction name accessControl span kind = do
   name' <- declareMaybeName name Default accessControl span kind (Just associatedScope)
   pure (name', associatedScope)

-instance Tokenize Function where
-  tokenize Function{..} = within' Scope.Function $ do
-    functionName
-    within' Scope.Params $ sequenceA_ (sep functionParameters)
-    functionBody

 instance Declarations1 Function where
   liftDeclaredName declaredName = declaredName . functionName
@@ -101,12 +94,6 @@ instance Evaluatable Method where
   v <- function name params methodBody associatedScope
   v <$ assign addr v

-instance Tokenize Data.Syntax.Declaration.Method where
-  tokenize Method{..} = within' Scope.Method $ do
-    methodName
-    within' Scope.Params $ sequenceA_ (sep methodParameters)
-    methodBody

 instance Declarations1 Method where
   liftDeclaredName declaredName = declaredName . methodName
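In these instances, within', sep, and yield come from the likewise-removed Reprinting.Tokenize DSL. Assuming within' s m brackets m with Enter s/Exit s controls and sep interleaves Sep elements, a hand-written analogue of what tokenize Function{..} would stream for f(a, b) looks like this invented sketch (using Streaming.Prelude's yield over Data.Reprinting.Token directly, rather than the DSL):

-- Invented analogue of the Function instance's output, spelled out as a
-- flat stream of Tokens; assumes OverloadedStrings.
functionShape :: Monad m => Stream (Of Token) m ()
functionShape = do
  yield (Control (Enter Scope.Function))
  yield (Element (Run "f"))              -- functionName
  yield (Control (Enter Scope.Params))
  yield (Element (Run "a"))
  yield (Element Sep)
  yield (Element (Run "b"))
  yield (Control (Exit Scope.Params))
  yield (Element (Run "..."))            -- functionBody, in reality a subtree
  yield (Control (Exit Scope.Function))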
@@ -9,7 +9,6 @@ import Data.Abstract.Module (ModuleInfo (..))
 import Data.JSON.Fields
 import qualified Data.Text as T
 import Diffing.Algorithm
-import Reprinting.Tokenize
 import Source.Span

 -- A file directive like the Ruby constant `__FILE__`.
@@ -20,10 +19,6 @@ data File a = File
 instance Evaluatable File where
   eval _ _ File = currentModule >>= string . T.pack . modulePath

--- We may need a separate token class for these given additional languages
-instance Tokenize File where
-  tokenize _ = yield . Run $ "__FILE__"

 -- A line directive like the Ruby constant `__LINE__`.
 data Line a = Line
@@ -32,7 +27,3 @@ data Line a = Line

 instance Evaluatable Line where
   eval _ _ Line = currentSpan >>= integer . fromIntegral . line . start

--- PT TODO: proper token for this
-instance Tokenize Line where
-  tokenize _ = yield . Run $ "__FILE__"
@@ -11,13 +11,9 @@ import Data.Abstract.Name as Name
 import Data.Abstract.Number (liftIntegralFrac, liftReal, liftedExponent, liftedFloorDiv)
 import Data.Fixed
 import Data.JSON.Fields
-import Data.List (intersperse)
 import qualified Data.List.NonEmpty as NonEmpty
 import qualified Data.Map.Strict as Map
-import qualified Data.Reprinting.Scope as Scope
-import qualified Data.Reprinting.Token as Token
 import Diffing.Algorithm hiding (Delete)
-import Reprinting.Tokenize hiding (Superclass)
 import qualified Data.Abstract.ScopeGraph as ScopeGraph

 -- | Typical prefix function application, like `f(x)` in many languages, or `f x` in Haskell.
@@ -34,13 +30,6 @@ instance Evaluatable Call where
     args <- traverse eval callParams
     call op args

-instance Tokenize Call where
-  tokenize Call{..} = within Scope.Call $ do
-    -- TODO: callContext
-    callFunction
-    within' Scope.Params $ sequenceA_ (sep callParams)
-    callBlock

 data LessThan a = LessThan { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically LessThan
@@ -49,9 +38,6 @@ instance Evaluatable LessThan where
   eval eval _ t = traverse eval t >>= go where
     go (LessThan a b) = liftComparison (Concrete (<)) a b

-instance Tokenize LessThan where
-  tokenize LessThan{..} = within' (Scope.InfixL (Compare Less) 4) $ lhs *> yield Token.Sym <* rhs

 data LessThanEqual a = LessThanEqual { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically LessThanEqual
@@ -60,9 +46,6 @@ instance Evaluatable LessThanEqual where
   eval eval _ t = traverse eval t >>= go where
     go (LessThanEqual a b) = liftComparison (Concrete (<=)) a b

-instance Tokenize LessThanEqual where
-  tokenize LessThanEqual{..} = within' (Scope.InfixL (CompareEql Less) 4) $ lhs *> yield Token.Sym <* rhs

 data GreaterThan a = GreaterThan { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically GreaterThan
@@ -71,9 +54,6 @@ instance Evaluatable GreaterThan where
   eval eval _ t = traverse eval t >>= go where
     go (GreaterThan a b) = liftComparison (Concrete (>)) a b

-instance Tokenize GreaterThan where
-  tokenize GreaterThan{..} = within' (Scope.InfixL (Compare Greater) 4) $ lhs *> yield Token.Sym <* rhs

 data GreaterThanEqual a = GreaterThanEqual { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically GreaterThanEqual
@@ -82,9 +62,6 @@ instance Evaluatable GreaterThanEqual where
   eval eval _ t = traverse eval t >>= go where
     go (GreaterThanEqual a b) = liftComparison (Concrete (>=)) a b

-instance Tokenize GreaterThanEqual where
-  tokenize GreaterThanEqual{..} = within' (Scope.InfixL (CompareEql Greater) 4) $ lhs *> yield Token.Sym <* rhs

 data Equal a = Equal { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Equal
@@ -95,9 +72,6 @@ instance Evaluatable Equal where
     -- We need some mechanism to customize this behavior per-language.
     go (Equal a b) = liftComparison (Concrete (==)) a b

-instance Tokenize Equal where
-  tokenize Equal{..} = within' (Scope.InfixL Eql 4) $ lhs *> yield Token.Sym <* rhs

 data StrictEqual a = StrictEqual { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically StrictEqual
@@ -108,9 +82,6 @@ instance Evaluatable StrictEqual where
     -- We need some mechanism to customize this behavior per-language.
     go (StrictEqual a b) = liftComparison (Concrete (==)) a b

-instance Tokenize StrictEqual where
-  tokenize StrictEqual{..} = within' (Scope.InfixL StrictEql 4) $ lhs *> yield Token.Sym <* rhs

 data Comparison a = Comparison { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Comparison
@@ -119,9 +90,6 @@ instance Evaluatable Comparison where
   eval eval _ t = traverse eval t >>= go where
     go (Comparison a b) = liftComparison (Concrete (==)) a b

-instance Tokenize Comparison where
-  tokenize Comparison{..} = within' (Scope.InfixL Spaceship 4) $ lhs *> yield Token.Sym <* rhs

 data Plus a = Plus { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Plus
@@ -130,9 +98,6 @@ instance Evaluatable Plus where
   eval eval _ t = traverse eval t >>= go where
     go (Plus a b) = liftNumeric2 add a b where add = liftReal (+)

-instance Tokenize Plus where
-  tokenize Plus{..} = within' (Scope.InfixL Add 6) $ lhs *> yield Token.Sym <* rhs

 data Minus a = Minus { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Minus
@@ -141,9 +106,6 @@ instance Evaluatable Minus where
   eval eval _ t = traverse eval t >>= go where
     go (Minus a b) = liftNumeric2 (liftReal (-)) a b

-instance Tokenize Minus where
-  tokenize Minus{..} = within' (Scope.InfixL Subtract 6) $ lhs *> yield Token.Sym <* rhs

 data Times a = Times { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Times
@@ -152,9 +114,6 @@ instance Evaluatable Times where
   eval eval _ t = traverse eval t >>= go where
     go (Times a b) = liftNumeric2 (liftReal (*)) a b

-instance Tokenize Times where
-  tokenize Times{..} = within' (Scope.InfixL Multiply 7) $ lhs *> yield Token.Sym <* rhs

 data DividedBy a = DividedBy { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically DividedBy
@@ -163,9 +122,6 @@ instance Evaluatable DividedBy where
   eval eval _ t = traverse eval t >>= go where
     go (DividedBy a b) = liftNumeric2 (liftIntegralFrac div (/)) a b

-instance Tokenize DividedBy where
-  tokenize DividedBy{..} = within' (Scope.InfixL Divide 7) $ lhs *> yield Token.Sym <* rhs

 data Modulo a = Modulo { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Modulo
@@ -174,9 +130,6 @@ instance Evaluatable Modulo where
   eval eval _ t = traverse eval t >>= go where
     go (Modulo a b) = liftNumeric2 (liftIntegralFrac mod mod') a b

-instance Tokenize Modulo where
-  tokenize Modulo{..} = within' (Scope.InfixL Modulus 7) $ lhs *> yield Token.Sym <* rhs

 data Power a = Power { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Power
@@ -185,9 +138,6 @@ instance Evaluatable Power where
   eval eval _ t = traverse eval t >>= go where
     go (Power a b) = liftNumeric2 liftedExponent a b

-instance Tokenize Power where
-  tokenize Power{..} = within' (Scope.InfixL Raise 9) $ lhs *> yield Token.Sym <* rhs

 newtype Negate a = Negate { value :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Negate
@@ -196,9 +146,6 @@ instance Evaluatable Negate where
   eval eval _ t = traverse eval t >>= go where
     go (Negate a) = liftNumeric negate a

-instance Tokenize Negate where
-  tokenize Negate{..} = within' (Scope.Prefix NumericNegate) $ yield Token.Sym <* value

 data FloorDivision a = FloorDivision { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically FloorDivision
@@ -207,9 +154,6 @@ instance Evaluatable FloorDivision where
   eval eval _ t = traverse eval t >>= go where
     go (FloorDivision a b) = liftNumeric2 liftedFloorDiv a b

-instance Tokenize FloorDivision where
-  tokenize FloorDivision{..} = within' (Scope.InfixL FloorDivide 7) $ lhs *> yield Token.Sym <* rhs

 -- | Regex matching operators (Ruby's =~ and !~)
 data Matches a = Matches { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -217,18 +161,12 @@ data Matches a = Matches { lhs :: a, rhs :: a }

 instance Evaluatable Matches

-instance Tokenize Matches where
-  tokenize Matches{..} = within' (Scope.InfixL RegexMatch 1) $ lhs *> yield Token.Sym <* rhs

 data NotMatches a = NotMatches { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically NotMatches

 instance Evaluatable NotMatches

-instance Tokenize NotMatches where
-  tokenize NotMatches{..} = within' (Scope.InfixL RegexNotMatch 1) $ lhs *> yield Token.Sym <* rhs

 data Or a = Or { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Or
@@ -238,9 +176,6 @@ instance Evaluatable Or where
     a' <- eval a
     ifthenelse a' (pure a') (eval b)

-instance Tokenize Or where
-  tokenize Or{..} = within' (Scope.InfixL LogicalOr 2) $ lhs *> yield Token.Sym <* rhs

 data And a = And { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically And
@@ -250,9 +185,6 @@ instance Evaluatable And where
     a' <- eval a
     ifthenelse a' (eval b) (pure a')

-instance Tokenize And where
-  tokenize And{..} = within' (Scope.InfixL LogicalAnd 2) $ lhs *> yield Token.Sym <* rhs

 newtype Not a = Not { value :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Not
@@ -260,9 +192,6 @@ newtype Not a = Not { value :: a }
 instance Evaluatable Not where
   eval eval _ (Not a) = eval a >>= asBool >>= boolean . not

-instance Tokenize Not where
-  tokenize Not{..} = within' (Scope.Prefix LogicalNot) $ yield Token.Sym <* value

 data XOr a = XOr { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically XOr
@@ -271,9 +200,6 @@ instance Evaluatable XOr where
   -- N.B. we have to use Monad rather than Applicative/Traversable on 'And' and 'Or' so that we don't evaluate both operands
   eval eval _ (XOr a b) = liftA2 (/=) (eval a >>= asBool) (eval b >>= asBool) >>= boolean

-instance Tokenize XOr where
-  tokenize XOr{..} = within' (Scope.InfixL LogicalXor 2) $ lhs *> yield Token.Sym <* rhs

 -- | Javascript delete operator
 newtype Delete a = Delete { value :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -319,9 +245,6 @@ instance Evaluatable BOr where
     b' <- eval b >>= castToInteger
     liftBitwise2 (.|.) a' b'

-instance Tokenize BOr where
-  tokenize BOr{..} = within' (Scope.InfixL BinaryOr 4) $ lhs *> yield Token.Sym <* rhs

 data BAnd a = BAnd { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically BAnd
@@ -332,9 +255,6 @@ instance Evaluatable BAnd where
     b' <- eval b >>= castToInteger
     liftBitwise2 (.&.) a' b'

-instance Tokenize BAnd where
-  tokenize BAnd{..} = within' (Scope.InfixL BinaryAnd 5) $ lhs *> yield Token.Sym <* rhs

 data BXOr a = BXOr { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically BXOr
@@ -345,9 +265,6 @@ instance Evaluatable BXOr where
     b' <- eval b >>= castToInteger
     liftBitwise2 xor a' b'

-instance Tokenize BXOr where
-  tokenize BXOr{..} = within' (Scope.InfixL BinaryXor 5) $ lhs *> yield Token.Sym <* rhs

 data LShift a = LShift { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically LShift
@@ -360,9 +277,6 @@ instance Evaluatable LShift where
     where
       shiftL' a b = shiftL a (fromIntegral (toInteger b))

-instance Tokenize LShift where
-  tokenize LShift{..} = within' (Scope.InfixL LeftShift 4) $ lhs *> yield Token.Sym <* rhs

 data RShift a = RShift { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically RShift
@@ -375,9 +289,6 @@ instance Evaluatable RShift where
     where
       shiftR' a b = shiftR a (fromIntegral (toInteger b))

-instance Tokenize RShift where
-  tokenize RShift{..} = within' (Scope.InfixL RightShift 4) $ lhs *> yield Token.Sym <* rhs

 data UnsignedRShift a = UnsignedRShift { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically UnsignedRShift
@@ -397,9 +308,6 @@ instance Evaluatable Complement where
     a' <- eval a >>= castToInteger
     liftBitwise complement a'

-instance Tokenize Complement where
-  tokenize Complement{..} = within' (Scope.Prefix BinaryComplement) $ yield Token.Sym <* value

 -- | Member Access (e.g. a.b)
 data MemberAccess a = MemberAccess { lhs :: a, rhs :: a }
   deriving (Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -448,9 +356,6 @@ instance Evaluatable MemberAccess where
     Nothing -> throwEvalError (ReferenceError lhsValue rhs)


-instance Tokenize MemberAccess where
-  tokenize MemberAccess{..} = lhs *> yield Access <* rhs

 -- | Subscript (e.g a[1])
 data Subscript a = Subscript { lhs :: a, rhs :: [a] }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -462,18 +367,12 @@ instance Evaluatable Subscript where
   eval eval _ (Subscript l [r]) = join (index <$> eval l <*> eval r)
   eval _ _ (Subscript _ _)      = throwUnspecializedError (UnspecializedError "Eval unspecialized for subscript with slices")

-instance Tokenize Subscript where
-  tokenize Subscript{..} = lhs *> within' Scope.Indexing (sequenceA_ (intersperse (yield Token.Sep) rhs))

 data Member a = Member { lhs :: a, rhs :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically Member

 instance Evaluatable Member where

-instance Tokenize Member where
-  tokenize Member{..} = lhs *> yield Token.Access <* rhs

 -- | Enumeration (e.g. a[1:10:1] in Python (start at index 1, stop at index 10, step 1 element from start to stop))
 data Enumeration a = Enumeration { enumerationStart :: !a, enumerationEnd :: !a, enumerationStep :: !a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -482,9 +381,6 @@ data Enumeration a = Enumeration { enumerationStart :: !a, enumerationEnd :: !a,
 -- TODO: Implement Eval instance for Enumeration
 instance Evaluatable Enumeration

-instance Tokenize Enumeration where
-  tokenize Enumeration{..} = within Scope.Slice $ enumerationStart *> enumerationEnd *> enumerationStep

 -- | InstanceOf (e.g. a instanceof b in JavaScript)
 data InstanceOf a = InstanceOf { instanceOfSubject :: !a, instanceOfObject :: !a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -503,10 +399,6 @@ instance Hashable1 ScopeResolution where liftHashWithSalt = foldl

 instance Evaluatable ScopeResolution

-instance Tokenize ScopeResolution where
-  tokenize (ScopeResolution (a :| rest)) =
-    a *> for_ rest (yield Token.Resolve *>)

 instance Declarations1 ScopeResolution where
   liftDeclaredName declaredName = declaredName . NonEmpty.last . scopes

@@ -584,16 +476,10 @@ data Super a = Super

 instance Evaluatable Super

-instance Tokenize Super where
-  tokenize _ = yield Token.Superclass

 data This a = This
   deriving (Diffable, Eq, Foldable, Functor, Generic1, Ord, Show, Traversable, FreeVariables1, Declarations1, ToJSONFields1, Hashable1, NFData1)
   deriving (Eq1, Show1, Ord1) via Generically This

-instance Tokenize This where
-  tokenize _ = yield Self

 instance Evaluatable This where
   eval _ _ This = do
     span <- ask @Span
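All of the removed infix instances above share one shape: bracket the operands in an InfixL scope carrying the operator and its precedence, and put a bare Sym element between them. The tokenizer never spells the operator out; a later, language-specific translation step mapped Sym to concrete text by inspecting the scope stack, roughly like this invented sketch (the real dispatch lived in the removed Reprinting.Translate; renderSym and its table are hypothetical, and OverloadedStrings is assumed):

-- Invented sketch: render a Sym element from the innermost operator scope;
-- Nothing stands in for where a TranslationError would arise.
renderSym :: [Scope] -> Maybe Text
renderSym (InfixL op _ : _) = case op of
  Add      -> Just "+"
  Subtract -> Just "-"
  Multiply -> Just "*"
  Divide   -> Just "/"
  _        -> Nothing
renderSym (Prefix LogicalNot : _) = Just "!"
renderSym _ = Nothing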
@@ -7,12 +7,10 @@ import Prologue hiding (Set, hash, null)

 import Data.Abstract.Evaluatable as Eval
 import Data.JSON.Fields
-import qualified Data.Reprinting.Scope as Scope
 import Data.Scientific.Exts
 import qualified Data.Text as T
 import Diffing.Algorithm
 import Numeric.Exts
-import Reprinting.Tokenize as Tok
 import Text.Read (readMaybe)

 -- Boolean
@@ -31,9 +29,6 @@ false = Boolean False
 instance Evaluatable Boolean where
   eval _ _ (Boolean x) = boolean x

-instance Tokenize Boolean where
-  tokenize = yield . Truth . booleanContent

 -- | A literal integer of unspecified width. No particular base is implied.
 newtype Integer a = Integer { integerContent :: Text }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -44,9 +39,6 @@ instance Evaluatable Data.Syntax.Literal.Integer where
   eval _ _ (Data.Syntax.Literal.Integer x) =
     either (const (throwEvalError (IntegerFormatError x))) pure (parseInteger x) >>= integer

-instance Tokenize Data.Syntax.Literal.Integer where
-  tokenize = yield . Run . integerContent

 -- | A literal float of unspecified width.

 newtype Float a = Float { floatContent :: Text }
@@ -58,9 +50,6 @@ instance Evaluatable Data.Syntax.Literal.Float where
   eval _ _ (Float s) =
     either (const (throwEvalError (FloatFormatError s))) pure (parseScientific s) >>= float

-instance Tokenize Data.Syntax.Literal.Float where
-  tokenize = yield . Run . floatContent

 -- Rational literals e.g. `2/3r`
 newtype Rational a = Rational { value :: Text }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -73,9 +62,6 @@ instance Evaluatable Data.Syntax.Literal.Rational where
       parsed = readMaybe @Prelude.Integer (T.unpack trimmed)
     in maybe (throwEvalError (RationalFormatError r)) (pure . toRational) parsed >>= rational

-instance Tokenize Data.Syntax.Literal.Rational where
-  tokenize (Rational t) = yield . Run $ t

 -- Complex literals e.g. `3 + 2i`
 newtype Complex a = Complex { value :: Text }
   deriving (Diffable, Eq, Foldable, Functor, Generic1, Hashable1, Ord, Show, Traversable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
@@ -84,9 +70,6 @@ newtype Complex a = Complex { value :: Text }
 -- TODO: Implement Eval instance for Complex
 instance Evaluatable Complex

-instance Tokenize Complex where
-  tokenize (Complex v) = yield . Run $ v

 -- Strings, symbols

 newtype String a = String { stringElements :: [a] }
@@ -98,18 +81,12 @@ newtype String a = String { stringElements :: [a] }
 -- TODO: Implement Eval instance for String
 instance Evaluatable Data.Syntax.Literal.String

-instance Tokenize Data.Syntax.Literal.String where
-  tokenize = sequenceA_

 newtype Character a = Character { characterContent :: Text }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Character

 instance Evaluatable Data.Syntax.Literal.Character

-instance Tokenize Character where
-  tokenize = yield . Glyph . characterContent

 -- | An interpolation element within a string literal.
 newtype InterpolationElement a = InterpolationElement { interpolationBody :: a }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@@ -118,9 +95,6 @@ newtype InterpolationElement a = InterpolationElement { interpolationBody :: a }
 -- TODO: Implement Eval instance for InterpolationElement
 instance Evaluatable InterpolationElement

-instance Tokenize InterpolationElement where
-  tokenize = sequenceA_

 -- | A sequence of textual contents within a string literal.
 newtype TextElement a = TextElement { textElementContent :: Text }
   deriving (Diffable, Eq, Foldable, Functor, Generic1, Hashable1, Ord, Show, Traversable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
@@ -129,9 +103,6 @@ newtype TextElement a = TextElement { textElementContent :: Text }
 instance Evaluatable TextElement where
   eval _ _ (TextElement x) = string x

-instance Tokenize TextElement where
-  tokenize = yield . Run . textElementContent

 isTripleQuoted :: TextElement a -> Bool
 isTripleQuoted (TextElement t) =
   let trip = "\"\"\""
@@ -148,18 +119,12 @@ newtype EscapeSequence a = EscapeSequence { value :: Text }
 -- TODO: Implement Eval instance for EscapeSequence
 instance Evaluatable EscapeSequence

-instance Tokenize EscapeSequence where
-  tokenize (EscapeSequence e) = yield . Run $ e

 data Null a = Null
   deriving (Eq, Ord, Show, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Null

 instance Evaluatable Null where eval _ _ _ = pure null

-instance Tokenize Null where
-  tokenize _ = yield Nullity

 newtype Symbol a = Symbol { symbolElements :: [a] }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Symbol
@@ -167,9 +132,6 @@ newtype Symbol a = Symbol { symbolElements :: [a] }
 -- TODO: Implement Eval instance for Symbol
 instance Evaluatable Symbol

-instance Tokenize Symbol where
-  tokenize s = within Scope.Atom (yield Sym *> sequenceA_ s)

 newtype SymbolElement a = SymbolElement { symbolContent :: Text }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically SymbolElement
@@ -177,9 +139,6 @@ newtype SymbolElement a = SymbolElement { symbolContent :: Text }
 instance Evaluatable SymbolElement where
   eval _ _ (SymbolElement s) = string s

-instance Tokenize SymbolElement where
-  tokenize = yield . Run . symbolContent

 newtype Regex a = Regex { regexContent :: Text }
   deriving (Diffable, Eq, Foldable, Functor, Generic1, Hashable1, Ord, Show, Traversable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Regex
@@ -190,9 +149,6 @@ newtype Regex a = Regex { regexContent :: Text }
 instance Evaluatable Regex where
   eval _ _ (Regex x) = string x

-instance Tokenize Regex where
-  tokenize = yield . Run . regexContent

 -- Collections

 newtype Array a = Array { arrayElements :: [a] }
@@ -202,9 +158,6 @@ newtype Array a = Array { arrayElements :: [a] }
 instance Evaluatable Array where
   eval eval _ Array{..} = array =<< traverse eval arrayElements

-instance Tokenize Array where
-  tokenize = list . arrayElements

 newtype Hash a = Hash { hashElements :: [a] }
   deriving (Eq, Ord, Show, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Hash
@@ -214,9 +167,6 @@ instance Evaluatable Hash where
   elements <- traverse (eval >=> asPair) (hashElements t)
   Eval.hash elements

-instance Tokenize Hash where
-  tokenize = Tok.hash . hashElements

 data KeyValue a = KeyValue { key :: !a, value :: !a }
   deriving (Eq, Ord, Show, Foldable, Traversable, Functor, Generic1, Hashable1, Diffable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically KeyValue
@@ -227,9 +177,6 @@ instance Evaluatable KeyValue where
   v <- eval value
   kvPair k v

-instance Tokenize KeyValue where
-  tokenize (KeyValue k v) = pair k v

 newtype Tuple a = Tuple { tupleContents :: [a] }
   deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
   deriving (Eq1, Ord1, Show1) via Generically Tuple
@ -10,12 +10,9 @@ import Data.Aeson (ToJSON1 (..))
|
||||
import Data.JSON.Fields
|
||||
import qualified Data.Abstract.ScopeGraph as ScopeGraph
|
||||
import qualified Data.Map.Strict as Map
|
||||
import qualified Data.Reprinting.Scope as Scope
|
||||
import qualified Data.Reprinting.Token as Token
|
||||
import Data.Semigroup.App
|
||||
import Data.Semigroup.Foldable
|
||||
import Diffing.Algorithm
|
||||
import Reprinting.Tokenize (Tokenize (..), imperative, within', yield)
|
||||
|
||||
-- | Imperative sequence of statements/declarations s.t.:
|
||||
--
|
||||
@ -33,9 +30,6 @@ instance Evaluatable Statements where
|
||||
eval eval _ (Statements xs) =
|
||||
maybe unit (runApp . foldMap1 (App . eval)) (nonEmpty xs)
|
||||
|
||||
instance Tokenize Statements where
|
||||
tokenize = imperative
|
||||
|
||||
newtype StatementBlock a = StatementBlock { statements :: [a] }
|
||||
deriving (Diffable, Eq, Foldable, Functor, Generic1, Hashable1, Ord, Show, Traversable, FreeVariables1, Declarations1, ToJSONFields1, NFData1)
|
||||
deriving (Eq1, Show1, Ord1) via Generically StatementBlock
|
||||
@ -46,9 +40,6 @@ instance Evaluatable StatementBlock where
|
||||
eval eval _ (StatementBlock xs) =
|
||||
maybe unit (runApp . foldMap1 (App . eval)) (nonEmpty xs)
|
||||
|
||||
instance Tokenize StatementBlock where
|
||||
tokenize = imperative
|
||||
|
||||
-- | Conditional. This must have an else block, which can be filled with some default value when omitted in the source, e.g. 'pure ()' for C-style if-without-else or 'pure Nothing' for Ruby-style, in both cases assuming some appropriate Applicative context into which the If will be lifted.
|
||||
data If a = If { ifCondition :: !a, ifThenBody :: !a, ifElseBody :: !a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
@ -59,13 +50,6 @@ instance Evaluatable If where
|
||||
bool <- eval cond
|
||||
ifthenelse bool (eval if') (eval else')
|
||||
|
||||
instance Tokenize If where
|
||||
tokenize If{..} = within' Scope.If $ do
|
||||
ifCondition
|
||||
yield (Token.Flow Token.Then)
|
||||
ifThenBody
|
||||
yield (Token.Flow Token.Else)
|
||||
ifElseBody
|
||||
|
||||
-- | Else statement. The else condition is any term, that upon successful completion, continues evaluation to the elseBody, e.g. `for ... else` in Python.
|
||||
data Else a = Else { elseCondition :: !a, elseBody :: !a }
|
||||
@ -75,8 +59,6 @@ data Else a = Else { elseCondition :: !a, elseBody :: !a }
|
||||
-- TODO: Implement Eval instance for Else
|
||||
instance Evaluatable Else
|
||||
|
||||
instance Tokenize Else where
|
||||
tokenize Else{..} = within' Scope.If (yield (Token.Flow Token.Else) *> elseCondition *> yield Token.Sep *> elseBody)
|
||||
|
||||
-- TODO: Alternative definition would flatten if/else if/else chains: data If a = If ![(a, a)] !(Maybe a)
|
||||
|
||||
@ -96,12 +78,6 @@ data Match a = Match { matchSubject :: !a, matchPatterns :: !a }
|
||||
-- TODO: Implement Eval instance for Match
|
||||
instance Evaluatable Match
|
||||
|
||||
instance Tokenize Match where
|
||||
tokenize Match{..} = do
|
||||
yield (Token.Flow Token.Switch)
|
||||
matchSubject
|
||||
yield (Token.Flow Token.In) -- This may need further refinement
|
||||
matchPatterns
|
||||
|
||||
-- | A pattern in a pattern-matching or computed jump control-flow statement, like 'case' in C or JavaScript, 'when' in Ruby, or the left-hand side of '->' in the body of Haskell 'case' expressions.
|
||||
data Pattern a = Pattern { value :: !a, patternBody :: !a }
|
||||
@ -111,9 +87,6 @@ data Pattern a = Pattern { value :: !a, patternBody :: !a }
|
||||
-- TODO: Implement Eval instance for Pattern
|
||||
instance Evaluatable Pattern
|
||||
|
||||
instance Tokenize Pattern where
|
||||
tokenize Pattern{..} = within' Scope.Case (value *> patternBody)
|
||||
|
||||
-- | A let statement or local binding, like 'a as b' or 'let a = b'.
|
||||
data Let a = Let { letVariable :: !a, letValue :: !a, letBody :: !a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
@ -164,10 +137,6 @@ instance Evaluatable Assignment where
|
||||
assign lhs rhs
|
||||
pure rhs
|
||||
|
||||
instance Tokenize Assignment where
|
||||
-- Should we be using 'assignmentContext' in here?
|
||||
tokenize Assignment{..} = assignmentTarget *> yield Token.Assign <* assignmentValue
|
||||
|
||||
-- | Post increment operator (e.g. 1++ in Go, or i++ in C).
|
||||
newtype PostIncrement a = PostIncrement { value :: a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
@ -212,9 +181,6 @@ newtype Return a = Return { value :: a }
|
||||
instance Evaluatable Return where
|
||||
eval eval _ (Return x) = eval x >>= earlyReturn
|
||||
|
||||
instance Tokenize Return where
|
||||
tokenize (Return x) = within' Scope.Return x
|
||||
|
||||
newtype Yield a = Yield { value :: a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
deriving (Eq1, Show1, Ord1) via Generically Yield
|
||||
@ -222,9 +188,6 @@ newtype Yield a = Yield { value :: a }
|
||||
-- TODO: Implement Eval instance for Yield
|
||||
instance Evaluatable Yield
|
||||
|
||||
instance Tokenize Yield where
|
||||
tokenize (Yield y) = yield (Token.Flow Token.Yield) *> y
|
||||
|
||||
|
||||
newtype Break a = Break { value :: a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
@ -233,9 +196,6 @@ newtype Break a = Break { value :: a }
|
||||
instance Evaluatable Break where
|
||||
eval eval _ (Break x) = eval x >>= throwBreak
|
||||
|
||||
instance Tokenize Break where
|
||||
tokenize (Break b) = yield (Token.Flow Token.Break) *> b
|
||||
|
||||
newtype Continue a = Continue { value :: a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
deriving (Eq1, Show1, Ord1) via Generically Continue
|
||||
@ -243,9 +203,6 @@ newtype Continue a = Continue { value :: a }
|
||||
instance Evaluatable Continue where
|
||||
eval eval _ (Continue x) = eval x >>= throwContinue
|
||||
|
||||
instance Tokenize Continue where
|
||||
tokenize (Continue c) = yield (Token.Flow Token.Continue) *> c
|
||||
|
||||
newtype Retry a = Retry { value :: a }
|
||||
deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
|
||||
deriving (Eq1, Show1, Ord1) via Generically Retry
|
||||
@ -253,9 +210,6 @@ newtype Retry a = Retry { value :: a }
-- TODO: Implement Eval instance for Retry
instance Evaluatable Retry

instance Tokenize Retry where
  tokenize (Retry r) = yield (Token.Flow Token.Retry) *> r

newtype NoOp a = NoOp { value :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically NoOp
@ -279,14 +233,6 @@ data ForEach a = ForEach { forEachBinding :: !a, forEachSubject :: !a, forEachBo
-- TODO: Implement Eval instance for ForEach
instance Evaluatable ForEach

instance Tokenize ForEach where
  tokenize ForEach{..} = within' Scope.Loop $ do
    yield (Token.Flow Token.Foreach)
    forEachBinding
    yield (Token.Flow Token.In)
    forEachSubject
    forEachBody

data While a = While { whileCondition :: !a, whileBody :: !a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically While
@ -294,12 +240,6 @@ data While a = While { whileCondition :: !a, whileBody :: !a }
instance Evaluatable While where
  eval eval _ While{..} = while (eval whileCondition) (eval whileBody)

instance Tokenize While where
  tokenize While{..} = within' Scope.Loop $ do
    yield (Token.Flow Token.While)
    whileCondition
    whileBody

data DoWhile a = DoWhile { doWhileCondition :: !a, doWhileBody :: !a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically DoWhile
@ -324,13 +264,6 @@ data Try a = Try { tryBody :: !a, tryCatch :: ![a] }
-- TODO: Implement Eval instance for Try
instance Evaluatable Try

instance Tokenize Try where
  tokenize Try{..} = do
    yield (Token.Flow Token.Try)
    tryBody
    yield (Token.Flow Token.Rescue)
    sequenceA_ tryCatch

data Catch a = Catch { catchException :: !a, catchBody :: !a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically Catch
@ -338,9 +271,6 @@ data Catch a = Catch { catchException :: !a, catchBody :: !a }
-- TODO: Implement Eval instance for Catch
instance Evaluatable Catch

instance Tokenize Catch where
  tokenize Data.Syntax.Statement.Catch{..} = within' Scope.Catch $ catchException *> catchBody

newtype Finally a = Finally { value :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically Finally
@ -348,9 +278,6 @@ newtype Finally a = Finally { value :: a }
-- TODO: Implement Eval instance for Finally
instance Evaluatable Finally

instance Tokenize Finally where
  tokenize (Finally f) = within' Scope.Finally f

-- Scoping

-- | ScopeEntry (e.g. `BEGIN {}` block in Ruby or Perl).
@ -361,9 +288,6 @@ newtype ScopeEntry a = ScopeEntry { terms :: [a] }
-- TODO: Implement Eval instance for ScopeEntry
instance Evaluatable ScopeEntry

instance Tokenize ScopeEntry where
  tokenize (ScopeEntry t) = within' Scope.BeginBlock (sequenceA_ t)


-- | ScopeExit (e.g. `END {}` block in Ruby or Perl).
newtype ScopeExit a = ScopeExit { terms :: [a] }
@ -372,6 +296,3 @@ newtype ScopeExit a = ScopeExit { terms :: [a] }

-- TODO: Implement Eval instance for ScopeExit
instance Evaluatable ScopeExit

instance Tokenize ScopeExit where
  tokenize (ScopeExit t) = within' Scope.EndBlock (sequenceA_ t)
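Every Tokenize instance removed above follows the same shape: enter a Scope with within', yield keyword-level Flow tokens, and run the subterm tokenizers in source order. As a minimal sketch of the removed interface, here is what an instance for a hypothetical Until node could have looked like (Until is illustrative only and never existed in this module; reusing Token.While for its flow token is likewise an assumption):

newtype Until a = Until { untilBody :: a }

instance Tokenize Until where
  tokenize (Until b) = within' Scope.Loop $ do
    yield (Token.Flow Token.While) -- assumed: no dedicated Until flow token
    b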
@ -7,7 +7,6 @@ import Data.JSON.Fields
import Diffing.Algorithm
import Prelude hiding (Bool, Float, Int, Double)
import Prologue hiding (Map)
import Reprinting.Tokenize

data Array a = Array { arraySize :: Maybe a, arrayElementType :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@ -26,11 +25,6 @@ data Annotation a = Annotation { annotationSubject :: a, annotationType :: a }
instance Evaluatable Annotation where
  eval eval _ Annotation{..} = eval annotationSubject

instance Tokenize Annotation where
  -- FIXME: This ignores annotationType.
  -- TODO: Not sure what this should look like yet
  tokenize Annotation{..} = annotationSubject


data Function a = Function { functionParameters :: [a], functionReturn :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@ -1,86 +0,0 @@
module Language.JSON.PrettyPrint
  ( defaultBeautyOpts
  , defaultJSONPipeline
  , printingJSON
  , beautifyingJSON
  , minimizingJSON
  ) where

import Prologue

import Control.Effect.Error
import Streaming
import qualified Streaming.Prelude as Streaming

import Data.Reprinting.Errors
import Data.Reprinting.Scope
import Data.Reprinting.Splice
import Data.Reprinting.Token

-- | Default printing pipeline for JSON.
defaultJSONPipeline :: (Member (Error TranslationError) sig, Carrier sig m)
                    => Stream (Of Fragment) m a
                    -> Stream (Of Splice) m a
defaultJSONPipeline
  = beautifyingJSON defaultBeautyOpts
  . printingJSON

-- | Print JSON syntax.
printingJSON :: Monad m
             => Stream (Of Fragment) m a
             -> Stream (Of Fragment) m a
printingJSON = Streaming.map step where
  step s@(Defer el cs) =
    let ins = New el cs
    in case (el, listToMaybe cs) of
      (Truth True, _)    -> ins "true"
      (Truth False, _)   -> ins "false"
      (Nullity, _)       -> ins "null"

      (Open, Just List)  -> ins "["
      (Close, Just List) -> ins "]"
      (Open, Just Hash)  -> ins "{"
      (Close, Just Hash) -> ins "}"

      (Sep, Just List)   -> ins ","
      (Sep, Just Pair)   -> ins ":"
      (Sep, Just Hash)   -> ins ","

      _ -> s
  step x = x

-- TODO: Fill out and implement configurable options like indentation count,
-- tabs vs. spaces, etc.
data JSONBeautyOpts = JSONBeautyOpts { jsonIndent :: Int, jsonUseTabs :: Bool }
  deriving (Eq, Show)

defaultBeautyOpts :: JSONBeautyOpts
defaultBeautyOpts = JSONBeautyOpts 2 False

-- | Produce JSON with configurable whitespace and layout.
beautifyingJSON :: (Member (Error TranslationError) sig, Carrier sig m)
                => JSONBeautyOpts
                -> Stream (Of Fragment) m a
                -> Stream (Of Splice) m a
beautifyingJSON _ s = Streaming.for s step where
  step (Defer el cs)   = effect (throwError (NoTranslation el cs))
  step (Verbatim txt)  = emit txt
  step (New el cs txt) = case (el, cs) of
    (Open, Hash:_)     -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs)
    (Close, Hash:rest) -> layout HardWrap *> indent 2 (hashDepth rest) *> emit txt
    (Sep, List:_)      -> emit txt *> space
    (Sep, Pair:_)      -> emit txt *> space
    (Sep, Hash:_)      -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs)
    _                  -> emit txt

-- | Produce whitespace-minimal JSON.
minimizingJSON :: (Member (Error TranslationError) sig, Carrier sig m)
               => Stream (Of Fragment) m a
               -> Stream (Of Splice) m a
minimizingJSON s = Streaming.for s step where
  step (Defer el cs)  = effect (throwError (NoTranslation el cs))
  step (Verbatim txt) = emit txt
  step (New _ _ txt)  = emit txt

hashDepth :: [Scope] -> Int
hashDepth = length . filter (== Hash)
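Of the stages above, printingJSON is the simplest to observe in isolation: it rewrites only Defer fragments, passing Verbatim and New fragments through untouched. A small sketch of the expected behaviour, assuming the definitions above (runIdentity comes from Data.Functor.Identity):

-- Feed two deferred elements through printingJSON and collect the output.
demo :: [Fragment]
demo = runIdentity . Streaming.toList_ . printingJSON $
  Streaming.each [Defer (Truth True) [], Defer Sep [Hash]]
-- expected: [New (Truth True) [] "true", New Sep [Hash] ","]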
@ -1,70 +0,0 @@
{-# LANGUAGE RankNTypes #-}

module Language.Python.PrettyPrint ( printingPython ) where

import Control.Effect.Error
import Streaming
import qualified Streaming.Prelude as Streaming

import Data.Reprinting.Errors
import Data.Reprinting.Splice
import Data.Reprinting.Token as Token
import Data.Reprinting.Scope
import Data.Reprinting.Operator

-- | Print Python syntax.
printingPython :: (Member (Error TranslationError) sig, Carrier sig m)
               => Stream (Of Fragment) m a
               -> Stream (Of Splice) m a
printingPython s = Streaming.for s step

step :: (Member (Error TranslationError) sig, Carrier sig m) => Fragment -> Stream (Of Splice) m ()
step (Verbatim txt) = emit txt
step (New _ _ txt) = emit txt
step (Defer el cs) = case (el, cs) of
  -- Function declarations
  (Open, Function:_) -> emit "def" *> space
  (Open, Params:Function:_) -> emit "("
  (Close, Params:Function:_) -> emit "):"
  (Close, Function:xs) -> endContext (imperativeDepth xs)

  -- Return statements
  (Open, Return:_) -> emit "return" *> space
  (Close, Return:_) -> pure ()
  (Open, Imperative:Return:_) -> pure ()
  (Sep, Imperative:Return:_) -> emit "," *> space
  (Close, Imperative:Return:_) -> pure () -- Don't hardwrap or indent for return statements

  -- If statements
  (Open, If:_) -> emit "if" *> space
  (Flow Then, If:_) -> emit ":"
  (Flow Else, If:xs) -> endContext (imperativeDepth xs) *> emit "else:"
  (Close, If:_) -> pure ()

  -- Booleans
  (Truth True, _) -> emit "True"
  (Truth False, _) -> emit "False"

  -- Infix binary operators
  (Open, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
  (Sym, InfixL Add _:_) -> space *> emit "+" *> space
  (Sym, InfixL Multiply _:_) -> space *> emit "*" *> space
  (Sym, InfixL Subtract _:_) -> space *> emit "-" *> space
  (Close, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"

  -- General params handling
  (Open, Params:_) -> emit "("
  (Sep, Params:_) -> emit "," *> space
  (Close, Params:_) -> emit ")"

  -- Imperative context and whitespace handling
  (Open, [Imperative]) -> pure () -- Don't indent at the top-level imperative context...
  (Close, [Imperative]) -> layout HardWrap -- but end the program with a newline.
  (Open, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
  (Sep, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
  (Close, Imperative:_) -> pure ()

  _ -> effect (throwError (NoTranslation el cs))

  where
    endContext times = layout HardWrap *> indent 4 (pred times)
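The InfixL cases are where the pipeline's precedence-sensitivity lives: an operator wraps itself in parentheses only when its own precedence p is lower than that of the enclosing context, computed by precedenceOf over the rest of the scope stack. So for 2 * (1 + 3) the Add node, sitting inside a higher-precedence Multiply scope, emits its parentheses, while 1 + 2 * 3 prints bare. emitIf itself lived in Data.Reprinting.Splice (also removed in this PR); a sketch of its assumed definition:

-- Emit the given text only when the condition holds.
emitIf :: Monad m => Bool -> Text -> Stream (Of Splice) m ()
emitIf p = when p . emit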
@ -1,58 +0,0 @@
{-# LANGUAGE Rank2Types #-}

module Language.Ruby.PrettyPrint ( printingRuby ) where

import Control.Effect.Error
import Streaming
import qualified Streaming.Prelude as Streaming

import Data.Reprinting.Scope
import Data.Reprinting.Errors
import Data.Reprinting.Operator
import Data.Reprinting.Splice
import Data.Reprinting.Token as Token

-- | Print Ruby syntax.
printingRuby :: (Member (Error TranslationError) sig, Carrier sig m)
             => Stream (Of Fragment) m a
             -> Stream (Of Splice) m a
printingRuby s = Streaming.for s step

step :: (Member (Error TranslationError) sig, Carrier sig m)
     => Fragment
     -> Stream (Of Splice) m ()
step (Verbatim txt) = emit txt
step (New _ _ txt) = emit txt
step (Defer el cs) = case (el, cs) of
  (Open, Method:_) -> emit "def" *> space
  (Close, Method:xs) -> endContext (imperativeDepth xs) *> emit "end"

  -- TODO: do..end vs {..} should be configurable.
  (Open, Function:_) -> space *> emit "do" *> space
  (Open, Params:Function:_) -> emit "|"
  (Close, Params:Function:_) -> emit "|"
  (Close, Function:xs) -> endContext (imperativeDepth xs) *> emit "end"

  -- TODO: Parens for calls are a style choice, make configurable.
  (Open, Params:_) -> emit "("
  (Sep, Params:_) -> emit "," *> space
  (Close, Params:_) -> emit ")"

  (Open, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
  (Sym, InfixL Add _:_) -> space *> emit "+" *> space
  (Sym, InfixL Multiply _:_) -> space *> emit "*" *> space
  (Sym, InfixL Subtract _:_) -> space *> emit "-" *> space
  (Close, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"

  (Open, [Imperative]) -> pure ()
  (Open, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
  (Sep, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
  (Close, [Imperative]) -> layout HardWrap
  (Close, Imperative:xs) -> indent 2 (pred (imperativeDepth xs))

  (Sep, Call:_) -> emit "."

  _ -> effect (throwError (NoTranslation el cs))

  where
    endContext times = layout HardWrap *> indent 2 (pred times)
@ -18,13 +18,10 @@ import qualified Data.Abstract.ScopeGraph as ScopeGraph
import Data.JSON.Fields
import qualified Data.Language as Language
import qualified Data.Map.Strict as Map
import qualified Data.Reprinting.Scope as Scope
import qualified Data.Reprinting.Token as Token
import Data.Semigroup.App
import Data.Semigroup.Foldable
import qualified Data.Text as T
import Diffing.Algorithm
import Reprinting.Tokenize hiding (Superclass)
import System.FilePath.Posix

-- TODO: Fully sort out ruby require/load mechanics
@ -85,13 +82,6 @@ instance Evaluatable Send where
      call boundFunc args -- TODO pass through sendBlock
    maybe callFunction (`withScopeAndFrame` callFunction) lhsFrame

instance Tokenize Send where
  tokenize Send{..} = within Scope.Call $ do
    maybe (pure ()) (\r -> r *> yield Sep) sendReceiver
    fromMaybe (pure ()) sendSelector
    within' Scope.Params $ sequenceA_ (sep sendArgs)
    fromMaybe (pure ()) sendBlock

data Require a = Require { requireRelative :: Bool, requirePath :: !a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically Require
@ -106,13 +96,6 @@ instance Evaluatable Require where
    insertFrameLink ScopeGraph.Import (Map.singleton moduleScope moduleFrame)
    pure v -- Returns True if the file was loaded, False if it was already loaded. http://ruby-doc.org/core-2.5.0/Kernel.html#method-i-require

instance Tokenize Require where
  tokenize Require{..} = do
    yield . Run $ if requireRelative
      then "require_relative"
      else "require"
    within' Scope.Params requirePath

doRequire :: ( Member (Boolean value) sig
             , Member (Modules address value) sig
             , Carrier sig m
@ -130,11 +113,6 @@ data Load a = Load { loadPath :: a, loadWrap :: Maybe a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically Load

instance Tokenize Load where
  tokenize Load{..} = do
    yield (Run "load")
    within' Scope.Params $ loadPath *> fromMaybe (pure ()) loadWrap

instance Evaluatable Load where
  eval eval _ (Load x Nothing) = do
    path <- eval x >>= asString
@ -228,14 +206,6 @@ instance Evaluatable Class where
instance Declarations1 Class where
  liftDeclaredName declaredName = declaredName . classIdentifier

instance Tokenize Class where
  tokenize Class{..} = within' Scope.Class $ do
    classIdentifier
    case classSuperClass of
      Just a -> yield Token.Extends *> a
      Nothing -> pure ()
    classBody


data Module a = Module { moduleIdentifier :: !a, moduleStatements :: ![a] }
  deriving (Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@ -280,12 +250,6 @@ instance Evaluatable Module where
instance Declarations1 Module where
  liftDeclaredName declaredName = declaredName . moduleIdentifier

instance Tokenize Module where
  tokenize Module{..} = do
    yield (Run "module")
    moduleIdentifier
    within' Scope.Namespace $ sequenceA_ moduleStatements


data LowPrecedenceAnd a = LowPrecedenceAnd { lhs :: a, rhs :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@ -298,11 +262,6 @@ instance Evaluatable LowPrecedenceAnd where
      cond <- a
      ifthenelse cond b (pure cond)

-- TODO: These should probably be expressed with a new context/token,
-- rather than a literal run, and need to take surrounding precedence
-- into account.
instance Tokenize LowPrecedenceAnd where
  tokenize LowPrecedenceAnd{..} = lhs *> yield (Token.Run "and") <* rhs

data LowPrecedenceOr a = LowPrecedenceOr { lhs :: a, rhs :: a }
  deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
@ -315,9 +274,6 @@ instance Evaluatable LowPrecedenceOr where
      cond <- a
      ifthenelse cond (pure cond) b

instance Tokenize LowPrecedenceOr where
  tokenize LowPrecedenceOr{..} = lhs *> yield (Token.Run "or") <* rhs

data Assignment a = Assignment { assignmentContext :: ![a], assignmentTarget :: !a, assignmentValue :: !a }
  deriving (Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, NFData1)
  deriving (Eq1, Show1, Ord1) via Generically Assignment
@ -351,9 +307,6 @@ instance Evaluatable Assignment where
    assign lhs rhs
    pure rhs

instance Tokenize Assignment where
  -- Should we be using 'assignmentContext' in here?
  tokenize Assignment{..} = assignmentTarget *> yield Token.Assign <* assignmentValue

-- | A call to @super@ without parentheses in Ruby is known as "zsuper", which has
-- the semantics of invoking @super()@ but implicitly passing the current function's
@ -363,6 +316,3 @@ data ZSuper a = ZSuper
  deriving (Eq1, Show1, Ord1) via Generically ZSuper

instance Evaluatable ZSuper

instance Tokenize ZSuper where
  tokenize _ = yield $ Run "super"
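As a worked example of the Send instance above, here is the token stream for a receiverless call like foo(1, 2):

--   Enter Call, <foo>, Enter Params, Open, <1>, Sep, <2>, Close,
--   Exit Params, Exit Call
--
-- where <foo>, <1>, <2> abbreviate the element tokens of the subterms.
-- Scope.Call uses within rather than within', so no Open/Close elements
-- bracket the call itself; Scope.Params uses within' and gets both.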
@ -1,178 +0,0 @@
{- |

This module represents the top-level interface for @semantic@'s
reprinting functionality. Reprinting here is defined as the
conversion, given some 'Source' code, of the parsed (and possibly
modified) syntax tree corresponding to that source, back into a
document representing that source code.

The approach is based on techniques drawn from:

* /A Pretty Good Formatting Pipeline/ by Bagge and Hasu (2010)
* /Scrap Your Reprinter/ by Orchard et al (2017)

The reprinter was designed with the following goals in mind:

* Laziness: a node that was unmodified in a refactoring pass
  should draw its representation from the original source file,
  rather than being explicitly pretty-printed. The only nodes
  that do not consult the original document are those that were
  synthesized during a refactoring operation.
* Generality: each syntax node should have one and only one
  declaration that describes how reprinting takes place. No node
  should be concerned with language-specific syntactic issues.
* Precedence-sensitivity: semantic syntax nodes do not contain
  information about parenthesization of binary operators.
  Binary operators should report their precedence and the
  pipeline should insert parentheses as necessary.
* Modularity: each stage in the reprinting pipeline
  should be testable independently.
* Time/space efficiency: the reprinting algorithm should scale
  to trees with hundreds of thousands of nodes without linear
  space usage.
* Roundtrip correctness: reprinting an unmodified syntax tree
  should produce source text exactly corresponding to the original
  file.

The reprinter takes the form of a pipeline operating over a stream of
tokens. Each stage in the pipeline converts a given token to a
lower-level representation, ultimately resulting in a 'Doc' data type
from the @prettyprinter@ library (to which we defer the nitty-gritty
details of actually pretty-printing text). A representation of the
stages of the pipeline follows:

@

[Start]
The Pipeline starts with a tree, where terms are annotated with 'History' to
denote what's been refactored.
(Language-agnostic)
    |
    | AST
    |
    v
[Tokenize]
A subterm algebra converting a tree (terms) to a stream of tokens.
(Language-agnostic)
    |
    | Seq Token
    |
    v
[Translate]
A stack machine interface through which tokens are interpreted to splices
(with context). A splice is a concrete representation of syntax, to which
additional language specific transformations can be applied.
(Language-agnostic)
    |
    | Seq Fragment
    |
    v
[PrettyPrint] --> <Format> --> <Beautify> --> <...>
A language specific stack machine interface allowing further refinement of the
sequence of splices. Language machines should emit specific keywords,
punctuation, and layout rules. Additional steps can be added for project
specific style, formatting, and even post-processing (minimizers, etc).
(Language-specific, Project-specific)
    |
    | Seq Splice
    |
    v
[Typeset]
A stack machine that converts splices to a Doc. (Language-agnostic)
    |
    | Doc
    |
    v
[Print]
A simple function that produces 'Text' or 'Source' with the desired layout
settings from a 'Doc'. (Language-agnostic)
    |
    | Text
    |
    v

@

-}

{-# LANGUAGE AllowAmbiguousTypes, RankNTypes, ScopedTypeVariables #-}
module Reprinting.Pipeline
  ( runReprinter
  , runTokenizing
  , runContextualizing
  , runTranslating
  ) where

import Control.Effect as Effect
import Control.Effect.Error as Effect
import Control.Effect.State as Effect
import Data.Text.Prettyprint.Doc
import Data.Text.Prettyprint.Doc.Render.Text
import Streaming
import qualified Streaming.Prelude as Streaming

import Data.Reprinting.Errors
import Data.Reprinting.Scope
import Data.Reprinting.Splice
import Data.Reprinting.Token
import Data.Term
import Reprinting.Tokenize
import Reprinting.Translate
import Reprinting.Typeset
import qualified Source.Source as Source

-- | Run the reprinting pipeline given the original 'Source', a language specific
-- translation function (as a function over 'Stream's) and the provided 'Term'.
runReprinter :: Tokenize a
             => Source.Source
             -> (Stream (Of Fragment) TranslatorC () -> Stream (Of Splice) TranslatorC ())
             -> Term a History
             -> Either TranslationError Source.Source
runReprinter src translating
  = fmap go
  . Effect.run
  . Effect.runError
  . evalState @[Scope] mempty
  . Streaming.mconcat_
  . typesetting
  . translating
  . contextualizing
  . tokenizing src
  where go = Source.fromText . renderStrict . layoutPretty defaultLayoutOptions

-- | Run the reprinting pipeline up to tokenizing.
runTokenizing :: Tokenize a
              => Source.Source
              -> Term a History
              -> [Token]
runTokenizing src
  = runIdentity
  . Streaming.toList_
  . tokenizing src

-- | Run the reprinting pipeline up to contextualizing.
runContextualizing :: Tokenize a
                   => Source.Source
                   -> Term a History
                   -> Either TranslationError [Fragment]
runContextualizing src
  = Effect.run
  . Effect.runError
  . evalState @[Scope] mempty
  . Streaming.toList_
  . contextualizing
  . tokenizing src

runTranslating :: Tokenize a
               => Source.Source
               -> (Stream (Of Fragment) TranslatorC () -> Stream (Of Splice) TranslatorC ())
               -> Term a History
               -> Either TranslationError [Splice]
runTranslating src translating
  = Effect.run
  . Effect.runError
  . evalState @[Scope] mempty
  . Streaming.toList_
  . translating
  . contextualizing
  . tokenizing src
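Taken together, runReprinter is just the four diagram stages composed under a State/Error effect stack, with the typeset Docs concatenated and rendered back into Source. A hypothetical wrapper showing an invocation with the Ruby machine from Language.Ruby.PrettyPrint (the name reprintRuby is illustrative, not part of the library):

reprintRuby :: Tokenize a => Source.Source -> Term a History -> Either TranslationError Source.Source
reprintRuby src = runReprinter src printingRuby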
@ -1,251 +0,0 @@
{-# LANGUAGE GADTs, LambdaCase, RankNTypes, UndecidableInstances #-}

module Reprinting.Tokenize
  ( module Token
  , module Scope
  , module Operator
  , History (..)
  , mark
  , remark
  -- * The Reprinter monad
  , Tokenizer
  , yield
  , control
  , within
  , within'
  , log
  , ignore
  , sep
  , sepTrailing
  , list
  , hash
  , pair
  , imperative
  -- * Tokenize interface
  , Tokenize (..)
  -- * Invocation/results
  , tokenizing
  ) where

import Prelude hiding (fail, filter, log)
import Prologue hiding (Element, hash)
import Streaming hiding (Sum)
import qualified Streaming.Prelude as Streaming

import Data.History
import Data.List (intersperse)
import Data.Reprinting.Operator as Operator
import Data.Reprinting.Scope (Scope)
import qualified Data.Reprinting.Scope as Scope
import Data.Reprinting.Token as Token
import Data.Term
import Source.Range
import Source.Source (Source)
import qualified Source.Source as Source

-- | The 'Tokenizer' monad represents a context in which 'Control'
-- tokens and 'Element' tokens can be sent to some downstream
-- consumer. Its primary interface is through the 'Tokenize'
-- typeclass, and is compiled to a 'Stream' by
-- 'tokenizing'.
data Tokenizer a where
  Pure :: a -> Tokenizer a
  Bind :: Tokenizer a -> (a -> Tokenizer b) -> Tokenizer b

  Tell :: Token -> Tokenizer ()

  Get :: Tokenizer State
  Put :: State -> Tokenizer ()

-- Tokenizers are compiled directly into Stream values. Note that the
-- state parameter is internal to the tokenizer being run: the invoker
-- of 'tokenizing' doesn't need to keep track of it at all.
compile :: Monad m => State -> Tokenizer a -> Stream (Of Token) m (State, a)
compile p = \case
  Pure a   -> pure (p, a)
  Bind a f -> compile p a >>= (\(new, v) -> compile new (f v))
  Tell t   -> Streaming.yield t $> (p, ())
  Get      -> pure (p, p)
  Put p'   -> pure (p', ())

instance Functor Tokenizer where fmap = liftA

instance Applicative Tokenizer where
  pure = Pure
  (<*>) = ap

instance Monad Tokenizer where (>>=) = Bind

data Strategy
  = Reprinting
  | PrettyPrinting
  deriving (Eq, Show)

data Filter
  = AllowAll
  | ForbidData
  deriving (Eq, Show)

data State = State
  { source   :: Source   -- We need to be able to slice
  , history  :: History  -- What's the history of the term we're examining
  , strategy :: Strategy -- What are we doing right now?
  , cursor   :: Int      -- Where do we begin slices?
  , filter   :: Filter   -- Should we ignore data tokens?
  } deriving (Show, Eq)

-- Builtins

-- | Yield an 'Element' token in a 'Tokenizer' context.
yield :: Element -> Tokenizer ()
yield e = do
  on <- filter <$> Get
  when (on == AllowAll) . Tell . Element $ e

-- | Yield a 'Control' token.
control :: Control -> Tokenizer ()
control = Tell . Control

-- | Yield a 'Chunk' of some 'Source'.
chunk :: Source -> Tokenizer ()
chunk = Tell . Chunk

-- | Ensures that the final chunk is emitted.
finish :: Tokenizer ()
finish = do
  crs <- asks cursor
  log ("Finishing, cursor is " <> show crs)
  src <- asks source
  chunk (Source.drop crs src)

-- State handling

asks :: (State -> a) -> Tokenizer a
asks f = f <$> Get

modify :: (State -> State) -> Tokenizer ()
modify f = Get >>= \x -> Put . f $! x

allowAll, forbidData :: Tokenizer ()
allowAll = modify (\x -> x { filter = AllowAll })
forbidData = modify (\x -> x { filter = ForbidData })

move :: Int -> Tokenizer ()
move c = modify (\x -> x { cursor = c })

withHistory :: Annotated t History
            => t
            -> Tokenizer a
            -> Tokenizer a
withHistory t act = do
  old <- asks history
  modify (\x -> x { history = annotation t })
  act <* modify (\x -> x { history = old })

withStrategy :: Strategy -> Tokenizer a -> Tokenizer a
withStrategy s act = do
  old <- Get
  Put (old { strategy = s })
  res <- act
  new <- Get
  Put (new { strategy = strategy old })
  pure res

-- The reprinting algorithm.

-- | A subterm algebra inspired by the /Scrap Your Reprinter/ algorithm.
descend :: Tokenize constr => SubtermAlgebra constr (Term a History) (Tokenizer ())
descend t = do
  (State src hist strat crs _) <- asks id
  let into s = withHistory (subterm s) (subtermRef s)
  case (hist, strat) of
    (Unmodified _, _) -> do
      tokenize (fmap into t)
      forbidData
    (Refactored _, PrettyPrinting) -> do
      allowAll
      tokenize (fmap into t)
    (Refactored r, Reprinting) -> do
      allowAll
      let delimiter = Range crs (start r)
      unless (delimiter == Range 0 0) $ do
        log ("slicing: " <> show delimiter)
        chunk (Source.slice src delimiter)
      move (start r)
      tokenize (fmap (withStrategy PrettyPrinting . into) t)
      move (end r)


-- Combinators

-- | Emit a log message to the token stream. Useful for debugging.
log :: String -> Tokenizer ()
log = control . Log

-- | Emit an Enter for the given context, then run the provided
-- action, then emit a corresponding Exit.
within :: Scope -> Tokenizer () -> Tokenizer ()
within c r = control (Enter c) *> r <* control (Exit c)

-- | Like 'within', but adds 'Open' and 'Close' elements around the action.
within' :: Scope -> Tokenizer () -> Tokenizer ()
within' c x = within c $ yield Token.Open *> x <* yield Token.Close

-- | Emit a sequence of tokens interspersed with 'Sep'.
sep :: Foldable t => t (Tokenizer ()) -> [Tokenizer ()]
sep = intersperse (yield Token.Sep) . toList

-- | Emit a sequence of tokens each with trailing 'Sep'.
sepTrailing :: Foldable t => t (Tokenizer ()) -> [Tokenizer ()]
sepTrailing = foldr (\x acc -> x : yield Token.Sep : acc) mempty

-- | Emit a sequence of tokens within a 'List' Scope with appropriate 'Open',
-- 'Close' tokens surrounding.
list :: Foldable t => t (Tokenizer ()) -> Tokenizer ()
list = within' Scope.List . sequenceA_ . sep

-- | Emit a sequence of tokens within a 'Hash' Scope with appropriate
-- 'Open', 'Close' tokens surrounding and interspersing 'Sep'.
hash :: Foldable t => t (Tokenizer ()) -> Tokenizer ()
hash = within' Scope.Hash . sequenceA_ . sep

-- | Emit key-value tokens with a 'Sep' within a scoped 'Pair'.
pair :: Tokenizer () -> Tokenizer () -> Tokenizer ()
pair k v = within Scope.Pair $ k *> yield Token.Sep <* v

-- | Emit a sequence of tokens within an 'Imperative' scope with
-- appropriate 'Open', 'Close' tokens surrounding and interspersing
-- 'Sep'.
imperative :: Foldable t => t (Tokenizer ()) -> Tokenizer ()
imperative = within' Scope.Imperative . sequenceA_ . sep

-- | Shortcut for @const (pure ())@, useful for when no action
-- should be taken.
ignore :: a -> Tokenizer ()
ignore = const (pure ())

-- | An instance of the 'Tokenize' typeclass describes how to emit tokens to
-- pretty-print the value of the supplied constructor in its AST context.
class (Show1 constr, Traversable constr) => Tokenize constr where
  -- | Should emit control and data tokens.
  tokenize :: FAlgebra constr (Tokenizer ())

tokenizing :: (Monad m, Tokenize a)
           => Source
           -> Term a History
           -> Stream (Of Token) m ()
tokenizing src term = pipe
  where pipe  = fmap snd $ compile state go
        state = State src (termAnnotation term) Reprinting 0 ForbidData
        go    = forbidData *> foldSubterms descend term <* finish

-- | Sums of reprintable terms are reprintable.
instance (Apply Show1 fs, Apply Functor fs, Apply Foldable fs, Apply Traversable fs, Apply Tokenize fs) => Tokenize (Sum fs) where
  tokenize = apply @Tokenize tokenize

-- | Annotated terms are reprintable and operate in a context derived from the annotation.
instance Tokenize a => Tokenize (TermF a History) where
  tokenize t = withHistory t (tokenize (termFOut t))

instance Tokenize [] where
  tokenize = imperative
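For intuition about the bracketing combinators, a worked expansion (not part of the module) of list applied to two element tokenizers a and b:

--   list [a, b]
--     = within' Scope.List (sequenceA_ (sep [a, b]))
--     = within Scope.List (yield Open *> (a *> yield Sep *> b) <* yield Close)
--
-- which, assuming the data filter allows elements through, emits:
--   Enter List, Open, <a>, Sep, <b>, Close, Exit List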
@ -1,50 +0,0 @@
{-# LANGUAGE LambdaCase #-}

module Reprinting.Translate
  ( contextualizing
  , TranslatorC
  ) where

import Control.Effect.Error
import Control.Effect.Pure
import Control.Effect.State
import Control.Monad
import Streaming
import qualified Streaming.Prelude as Streaming

import Data.Reprinting.Errors
import Data.Reprinting.Scope
import Data.Reprinting.Splice
import Data.Reprinting.Token
import qualified Source.Source as Source

type TranslatorC
  = StateC [Scope]
  ( ErrorC TranslationError PureC)

contextualizing :: Stream (Of Token) TranslatorC a
                -> Stream (Of Fragment) TranslatorC a
contextualizing = Streaming.mapMaybeM $ \case
  Chunk source -> pure . Just . Verbatim . Source.toText $ source
  Element t -> Just <$> case t of
    Run f -> get >>= \c -> pure (New t c f)
    _ -> get >>= pure . Defer t
  Control ctl -> Nothing <$ case ctl of
    Enter c -> enterScope c
    Exit c -> exitScope c
    _ -> pure ()

enterScope :: (Member (State [Scope]) sig, Carrier sig m)
           => Scope
           -> m ()
enterScope c = modify (c :)

exitScope :: ( Member (State [Scope]) sig
             , Member (Error TranslationError) sig
             , Carrier sig m
             )
          => Scope
          -> m ()
exitScope c = get >>= \case
  (x:xs) -> when (x == c) (put xs)
  cs -> throwError (UnbalancedPair c cs)
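contextualizing is what turns the flat token stream into scope-annotated Fragments: Enter and Exit maintain a [Scope] stack in the State effect, and every Element token is paired with the stack as it stood at that point. A worked trace over the definitions above:

-- Input tokens:
--   Control (Enter Hash), Element Open, Element Sep, Control (Exit Hash)
-- Output fragments:
--   Defer Open [Hash], Defer Sep [Hash]
--
-- Control tokens produce no fragments; an Exit arriving with an empty
-- scope stack throws UnbalancedPair.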
@ -1,40 +0,0 @@
module Reprinting.Typeset
  ( typesetting
  , typesettingWithVisualWhitespace
  ) where

import Prologue

import Streaming
import qualified Streaming.Prelude as Streaming
import Data.Reprinting.Splice hiding (space)
import Data.Text.Prettyprint.Doc

typesetting :: Monad m => Stream (Of Splice) m x
            -> Stream (Of (Doc a)) m x
typesetting = Streaming.map step

step :: Splice -> Doc a
step (Emit t)                   = pretty t
step (Layout SoftWrap)          = softline
step (Layout HardWrap)          = hardline
step (Layout Space)             = space
step (Layout (Indent 0 Spaces)) = mempty
step (Layout (Indent n Spaces)) = stimes n space
step (Layout (Indent 0 Tabs))   = mempty
step (Layout (Indent n Tabs))   = stimes n "\t"

-- | Typeset, but show whitespace with printable characters for debugging purposes.
typesettingWithVisualWhitespace :: Monad m
                                => Stream (Of Splice) m x
                                -> Stream (Of (Doc a)) m x
typesettingWithVisualWhitespace = Streaming.map step where
  step :: Splice -> Doc a
  step (Emit t)                   = pretty t
  step (Layout SoftWrap)          = softline
  step (Layout HardWrap)          = "\\n" <> hardline
  step (Layout Space)             = "."
  step (Layout (Indent 0 Spaces)) = mempty
  step (Layout (Indent n Spaces)) = stimes n "."
  step (Layout (Indent 0 Tabs))   = mempty
  step (Layout (Indent n Tabs))   = stimes n "\t"
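typesetting is a pure, splice-by-splice mapping into the prettyprinter Doc algebra; all real layout decisions were made upstream. A small sketch of collecting the mapped Docs, assuming the definitions above (runIdentity from Data.Functor.Identity, OverloadedStrings for the Text literals):

docs :: [Doc a]
docs = runIdentity . Streaming.toList_ . typesetting $
  Streaming.each [Emit "x", Layout Space, Emit "=", Layout Space, Emit "1"]
-- i.e. [pretty "x", space, pretty "=", space, pretty "1"]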
@ -1,71 +0,0 @@
{-# LANGUAGE GADTs, OverloadedLists, TypeOperators #-}

module Reprinting.Spec (spec) where

import SpecHelpers

import Control.Effect.Parse
import Data.Foldable
import Streaming hiding (Sum)
import qualified Streaming.Prelude as Streaming

import Control.Rewriting
import qualified Data.Language as Language
import Data.Reprinting.Scope
import Data.Reprinting.Token
import Data.Sum
import qualified Data.Syntax.Literal as Literal
import Language.JSON.PrettyPrint
import Reprinting.Pipeline
import Reprinting.Tokenize

increaseNumbers :: (Literal.Float :< fs, Apply Functor fs) => Rule (Term (Sum fs) History)
increaseNumbers = do
  (Literal.Float c) <- target >>= guardTerm
  create (Literal.Float (c <> "0"))

spec :: Spec
spec = describe "reprinting" $ do
  context "JSON" $ do
    let path = "test/fixtures/javascript/reprinting/map.json"
    (src, tree) <- runIO $ do
      src <- blobSource <$> readBlobFromFile' (File path Language.JSON)
      tree <- parseFileQuiet jsonParser path
      pure (src, tree)

    describe "tokenization" $ do

      it "should pass over a pristine tree" $ do
        let tagged = mark Unmodified tree
        let toks = runIdentity . Streaming.toList_ $ tokenizing src tagged
        toks `shouldSatisfy` not . null
        head toks `shouldSatisfy` isControl
        last toks `shouldSatisfy` isChunk

      it "should emit control tokens but only 1 chunk for a wholly-modified tree" $ do
        let toks = runIdentity . Streaming.toList_ $ tokenizing src (mark Refactored tree)
        for_ @[] [List, Hash] $ \t -> do
          toks `shouldSatisfy` elem (Control (Enter t))
          toks `shouldSatisfy` elem (Control (Exit t))

    describe "pipeline" $ do

      it "should roundtrip exactly over a pristine tree" $ do
        let tagged = mark Unmodified tree
        let printed = runReprinter src defaultJSONPipeline tagged
        printed `shouldBe` Right src

      it "should roundtrip exactly over a wholly-modified tree" $ do
        let tagged = mark Refactored tree
        let printed = runReprinter src defaultJSONPipeline tagged
        printed `shouldBe` Right src

      it "should be able to parse the output of a refactor" $ do
        let maybeTagged = rewrite (mark Unmodified tree) (topDownAny increaseNumbers)
        tagged <- maybe (fail "rewrite failed") pure maybeTagged

        let eitherPrinted = runReprinter src defaultJSONPipeline tagged
        printed <- either (fail "reprinter failed") pure eitherPrinted

        tree' <- runTaskOrDie (runParseWithConfig (parse jsonParser (makeBlob printed path Language.JSON mempty)))
        length tree' `shouldSatisfy` (/= 0)
@ -1,40 +0,0 @@
{-# LANGUAGE TypeOperators #-}

module Rewriting.Go.Spec (spec) where

import Control.Rewriting
import Data.List
import Data.Sum
import qualified Data.Syntax.Literal as Lit
import qualified Data.Syntax.Statement as Stmt
import Data.Text (Text)
import SpecHelpers

-- This gets the Text contents of all integers.
integerMatcher :: (Lit.Integer :< fs) => Rewrite (Term (Sum fs) ann) Text
integerMatcher = enter Lit.integerContent

-- This matches all for-loops whose index variable is initially bound to 0,
-- e.g. `for i := 0; i < 10; i++`
loopMatcher :: ( Stmt.For :< fs
               , Stmt.Assignment :< fs
               , Lit.Integer :< fs)
            => Rule (Term (Sum fs) ann)
loopMatcher = target <* go where
  go = enter Stmt.forBefore
   >>> enter Stmt.assignmentValue
   >>> enter Lit.integerContent
   >>> ensure (== "0")


spec :: Spec
spec = describe "recursively" $ do
  it "extracts integers" $ do
    parsed <- parseFileQuiet goParser "test/fixtures/go/matching/integers.go"
    let matched = recursively integerMatcher parsed
    sort matched `shouldBe` ["1", "2", "3"]

  it "counts for loops" $ do
    parsed <- parseFileQuiet goParser "test/fixtures/go/matching/for.go"
    let matched = recursively @[] @(Term _ _) loopMatcher parsed
    length matched `shouldBe` 2
@ -1,57 +0,0 @@
{-# LANGUAGE TypeFamilies, TypeOperators #-}

module Rewriting.JSON.Spec (spec) where

import Prelude hiding (id, (.))
import SpecHelpers

import Control.Category
import Control.Rewriting as Rewriting
import qualified Data.ByteString as B
import Data.History as History
import Data.Sum
import qualified Data.Syntax.Literal as Literal
import Data.Text (Text)
import Language.JSON.PrettyPrint
import Reprinting.Pipeline
import qualified Source.Source as Source

-- Adds a "hi": "bye" key-value pair to any empty Hash.
onTrees :: ( Literal.TextElement :< syn
           , Literal.Hash :< syn
           , Literal.KeyValue :< syn
           , Apply Functor syn
           , term ~ Term (Sum syn) History
           ) => Rule term
onTrees = do
  Literal.Hash els <- Rewriting.target >>= guardTerm
  guard (null els)
  k <- create $ Literal.TextElement "\"hi\""
  v <- create $ Literal.TextElement "\"bye\""
  pair <- create $ Literal.KeyValue k v
  create (Literal.Hash (pair : els))

-- Matches only "hi" string literals.
isHi :: ( Literal.TextElement :< fs
        ) => Rewrite (Term (Sum fs) History) Text
isHi = enter Literal.textElementContent
   >>> ensure (== "\"hi\"")

spec :: Spec
spec = describe "rewriting" $ do
  let path = "test/fixtures/json/rewriting/add_keys.json"

  bytes <- runIO $ Source.fromUTF8 <$> B.readFile path

  refactored <- runIO $ do
    json <- parseFileQuiet jsonParser path
    let result = rewrite @Maybe (History.mark Unmodified json) (topDownAny onTrees)
    maybe (fail "rewrite failed") pure result

  it "should add keys to JSON values" $ do
    length (recursively @[] isHi refactored) `shouldBe` 1

  it "should round-trip correctly" $ do
    let res = runReprinter bytes defaultJSONPipeline refactored
    expected <- Source.fromUTF8 <$> B.readFile "test/fixtures/json/rewriting/add_keys_expected.json"
    res `shouldBe` Right expected
@ -1,35 +0,0 @@
{-# LANGUAGE TypeFamilies, TypeOperators #-}

module Rewriting.Python.Spec (spec) where

import Control.Arrow
import Control.Rewriting
import Data.Sum
import qualified Data.Syntax.Declaration as Decl
import qualified Data.Syntax.Literal as Lit
import SpecHelpers

-- This matches functions whose first body statement is a triple-quoted
-- string literal, i.e. a docstring.
docstringMatcher :: ( Decl.Function :< fs
                    , [] :< fs
                    , Lit.TextElement :< fs
                    , term ~ Term (Sum fs) ann
                    ) => Rewrite term (TermF Decl.Function ann term)
docstringMatcher =
  narrowF <* (enter Decl.functionBody
                >>> narrow @[]
                >>> mhead
                >>> narrow @Lit.TextElement
                >>> ensure Lit.isTripleQuoted)

spec :: Spec
spec = describe "matching/python" $ do
  it "matches top-level docstrings" $ do
    parsed <- parseFileQuiet pythonParser "test/fixtures/python/matching/docstrings.py"
    let matched = recursively @[] docstringMatcher parsed
    length matched `shouldBe` 2

  it "matches docstrings recursively" $ do
    parsed <- parseFileQuiet pythonParser "test/fixtures/python/matching/docstrings_nested.py"
    let matched = recursively @[] docstringMatcher parsed
    length matched `shouldBe` 3
@ -26,10 +26,6 @@ import qualified Integration.Spec
import qualified Numeric.Spec
import qualified Parsing.Spec
import qualified Rendering.TOC.Spec
import qualified Reprinting.Spec
import qualified Rewriting.Go.Spec
import qualified Rewriting.JSON.Spec
import qualified Rewriting.Python.Spec
import qualified Tags.Spec
import qualified Semantic.Spec
import qualified Semantic.CLI.Spec
@ -86,10 +82,6 @@ legacySpecs = parallel $ do
  describe "Diffing.Interpreter" Diffing.Interpreter.Spec.spec
  describe "Graphing.Calls" Graphing.Calls.Spec.spec
  describe "Rendering.TOC" Rendering.TOC.Spec.spec
  describe "Reprinting.Spec" Reprinting.Spec.spec
  describe "Rewriting.Go" Rewriting.Go.Spec.spec
  describe "Rewriting.JSON" Rewriting.JSON.Spec.spec
  describe "Rewriting.Python" Rewriting.Python.Spec.spec
  describe "Tags.Spec" Tags.Spec.spec
  describe "Semantic" Semantic.Spec.spec
  describe "Semantic.IO" Semantic.IO.Spec.spec