From a36af2e785335b7e4bb66d5f3aaa565842ab267a Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 19 Sep 2018 11:54:12 -0400 Subject: [PATCH 01/23] Remove Hungarian-style T- prefix from tokens and scopes. Prefixes on data constructors are generally an antipattern in Haskell: if you're concerned about name collisions, have clients use qualified imports for whatever modules they need. As such, this removes the T- prefixes from the `Token` and `Context` types. This also renames Context to Scope, which is a more exact and readable name. --- semantic.cabal | 4 +- src/Data/Reprinting/Errors.hs | 5 ++- src/Data/Reprinting/Fragment.hs | 11 ++--- src/Data/Reprinting/Operator.hs | 12 +++++ src/Data/Reprinting/Scope.hs | 37 +++++++++++++++ src/Data/Reprinting/Token.hs | 72 +++++++----------------------- src/Data/Syntax/Declaration.hs | 17 +++---- src/Data/Syntax/Expression.hs | 35 ++++++++------- src/Data/Syntax/Statement.hs | 23 +++++----- src/Language/JSON/PrettyPrint.hs | 29 ++++++------ src/Language/Python/PrettyPrint.hs | 55 ++++++++++++----------- src/Language/Ruby/PrettyPrint.hs | 47 ++++++++++--------- src/Language/Ruby/Syntax.hs | 19 ++++---- src/Reprinting/Pipeline.hs | 12 ++--- src/Reprinting/Tokenize.hs | 50 ++++++++++++--------- src/Reprinting/Translate.hs | 17 +++---- test/Reprinting/Spec.hs | 39 ++++++++-------- 17 files changed, 261 insertions(+), 223 deletions(-) create mode 100644 src/Data/Reprinting/Operator.hs create mode 100644 src/Data/Reprinting/Scope.hs diff --git a/semantic.cabal b/semantic.cabal index 3dbf608ff..9c29c3d04 100644 --- a/semantic.cabal +++ b/semantic.cabal @@ -100,9 +100,11 @@ library , Data.Range , Data.Record , Data.Reprinting.Errors - , Data.Reprinting.Token , Data.Reprinting.Fragment + , Data.Reprinting.Operator + , Data.Reprinting.Scope , Data.Reprinting.Splice + , Data.Reprinting.Token , Data.Semigroup.App , Data.Scientific.Exts , Data.Source diff --git a/src/Data/Reprinting/Errors.hs b/src/Data/Reprinting/Errors.hs index c887c5b2d..6f686710d 100644 --- a/src/Data/Reprinting/Errors.hs +++ b/src/Data/Reprinting/Errors.hs @@ -1,12 +1,13 @@ module Data.Reprinting.Errors ( TranslationError (..) ) where import Data.Reprinting.Token +import Data.Reprinting.Scope -- | Represents failure occurring in a 'Concrete' machine during the translation -- phases of the reprinting pipeline. data TranslationError - = UnbalancedPair Context [Context] + = UnbalancedPair Scope [Scope] -- ^ Thrown if an unbalanced 'Enter'/'Exit' pair is encountered. - | NoTranslation Element [Context] + | NoTranslation Element [Scope] -- ^ Thrown if no translation found for a given element. deriving (Eq, Show) diff --git a/src/Data/Reprinting/Fragment.hs b/src/Data/Reprinting/Fragment.hs index 1ddf18036..c92a0c833 100644 --- a/src/Data/Reprinting/Fragment.hs +++ b/src/Data/Reprinting/Fragment.hs @@ -10,16 +10,17 @@ module Data.Reprinting.Fragment import Data.Machine import Data.Text (Text) +import Data.Reprinting.Scope import Data.Reprinting.Token -- | An intermediate representation of concrete syntax in the reprinting pipeline. data Fragment = Verbatim Text -- ^ Verbatim copy of original 'Text' (un-refactored). - | New Element [Context] Text - -- ^ New 'Text' to be inserted, along with original 'Element' and `Context` + | New Element [Scope] Text + -- ^ New 'Text' to be inserted, along with original 'Element' and `Scope` -- allowing later steps to re-write. - | Defer Element [Context] + | Defer Element [Scope] -- ^ To be handled further down the pipeline. 
   deriving (Eq, Show)
@@ -28,9 +29,9 @@ copy :: Text -> Plan k Fragment ()
 copy = yield . Verbatim
 
 -- | Insert some new 'Text'.
-insert :: Element -> [Context] -> Text -> Plan k Fragment ()
+insert :: Element -> [Scope] -> Text -> Plan k Fragment ()
 insert el c = yield . New el c
 
 -- | Defer processing an element to a later stage.
-defer :: Element -> [Context] -> Plan k Fragment ()
+defer :: Element -> [Scope] -> Plan k Fragment ()
 defer el = yield . Defer el
diff --git a/src/Data/Reprinting/Operator.hs b/src/Data/Reprinting/Operator.hs
new file mode 100644
index 000000000..0df8bf813
--- /dev/null
+++ b/src/Data/Reprinting/Operator.hs
@@ -0,0 +1,12 @@
+module Data.Reprinting.Operator
+  ( Operator (..)
+  ) where
+
+-- | A sum type representing every conceivable infix operator a
+-- language can define. These are handled by instances of 'Concrete'
+-- and given appropriate precedence.
+data Operator
+  = Add
+  | Multiply
+  | Subtract
+  deriving (Show, Eq)
diff --git a/src/Data/Reprinting/Scope.hs b/src/Data/Reprinting/Scope.hs
new file mode 100644
index 000000000..afbe26d00
--- /dev/null
+++ b/src/Data/Reprinting/Scope.hs
@@ -0,0 +1,37 @@
+module Data.Reprinting.Scope
+  ( Scope (..)
+  , precedenceOf
+  , imperativeDepth
+  ) where
+
+import Data.Reprinting.Operator
+
+-- | A 'Scope' represents a scope in which other tokens can be
+-- interpreted. For example, in the 'Imperative' scope a 'Sep'
+-- could be a semicolon or newline, whereas in a 'List' scope a
+-- 'Sep' is probably going to be a comma.
+data Scope
+  = List
+  | Hash
+  | Pair
+  | Method
+  | Function
+  | Call
+  | Params
+  | Return
+  | If
+  | InfixL Operator Int
+  | Imperative
+  deriving (Show, Eq)
+
+precedenceOf :: [Scope] -> Int
+precedenceOf cs = case filter isInfix cs of
+  (InfixL _ n:_) -> n
+  _ -> 0
+  where isInfix (InfixL _ _) = True
+        isInfix _ = False
+
+
+-- | Depth of imperative scope.
+imperativeDepth :: [Scope] -> Int
+imperativeDepth = length . filter (== Imperative)
diff --git a/src/Data/Reprinting/Token.hs b/src/Data/Reprinting/Token.hs
index cf0474244..fa8dd1642 100644
--- a/src/Data/Reprinting/Token.hs
+++ b/src/Data/Reprinting/Token.hs
@@ -4,21 +4,18 @@ module Data.Reprinting.Token
   , isControl
   , Element (..)
   , Control (..)
-  , Context (..)
-  , imperativeDepth
-  , precedenceOf
-  , Operator (..)
   ) where
 
 import Data.Text (Text)
 import Data.Source (Source)
+import Data.Reprinting.Scope
 
 -- | 'Token' encapsulates 'Element' and 'Control' tokens, as well as sliced
 -- portions of the original 'Source' for a given AST.
 data Token
   = Chunk Source -- ^ Verbatim 'Source' from AST, unmodified.
-  | TElement Element -- ^ Content token to be rendered.
-  | TControl Control -- ^ AST's context.
+  | Element Element -- ^ Content token to be rendered.
+  | Control Control -- ^ AST's context.
   deriving (Show, Eq)
 
 isChunk :: Token -> Bool
@@ -26,7 +23,7 @@ isChunk (Chunk _) = True
 isChunk _ = False
 
 isControl :: Token -> Bool
-isControl (TControl _) = True
+isControl (Control _) = True
 isControl _ = False
 
 -- | 'Element' tokens describe atomic pieces of source code to be
@@ -34,15 +31,15 @@ isControl _ = False
 -- and are interpreted into language-specific representations at a
 -- later point in the reprinting pipeline.
 data Element
-  = Run Text -- ^ A literal chunk of text.
-  | Truth Bool -- ^ A boolean value.
-  | Nullity -- ^ @null@ or @nil@ or some other zero value.
-  | TSep -- ^ Some sort of delimiter, interpreted in some 'Context'.
-  | TSym -- ^ Some sort of symbol, interpreted in some 'Context'.
- | TThen - | TElse - | TOpen -- ^ The beginning of some 'Context', such as an @[@ or @{@. - | TClose -- ^ The opposite of 'TOpen'. + = Run Text -- ^ A literal chunk of text. + | Truth Bool -- ^ A boolean value. + | Nullity -- ^ @null@ or @nil@ or some other zero value. + | Sep -- ^ Some sort of delimiter, interpreted in some 'Context'. + | Sym -- ^ Some sort of symbol, interpreted in some 'Context'. + | Then + | Else + | Open -- ^ The beginning of some 'Context', such as an @[@ or @{@. + | Close -- ^ The opposite of 'TOpen'. deriving (Eq, Show) -- | 'Control' tokens describe information about some AST's context. @@ -50,46 +47,7 @@ data Element -- the page, they are needed to provide information as to how deeply -- subsequent entries in the pipeline should indent. data Control - = Enter Context - | Exit Context + = Enter Scope + | Exit Scope | Log String deriving (Eq, Show) - --- | A 'Context' represents a scope in which other tokens can be --- interpreted. For example, in the 'Imperative' context a 'TSep' --- could be a semicolon or newline, whereas in a 'List' context a --- 'TSep' is probably going to be a comma. -data Context - = TList - | THash - | TPair - | TMethod - | TFunction - | TCall - | TParams - | TReturn - | TIf - | TInfixL Operator Int - | Imperative - deriving (Show, Eq) - -precedenceOf :: [Context] -> Int -precedenceOf cs = case filter isInfix cs of - (TInfixL _ n:_) -> n - _ -> 0 - where isInfix (TInfixL _ _) = True - isInfix _ = False - - --- | Depth of imperative scope. -imperativeDepth :: [Context] -> Int -imperativeDepth = length . filter (== Imperative) - --- | A sum type representing every concievable infix operator a --- language can define. These are handled by instances of 'Concrete' --- and given appropriate precedence. -data Operator - = Add - | Multiply - | Subtract - deriving (Show, Eq) diff --git a/src/Data/Syntax/Declaration.hs b/src/Data/Syntax/Declaration.hs index f52e36d87..485d80c57 100644 --- a/src/Data/Syntax/Declaration.hs +++ b/src/Data/Syntax/Declaration.hs @@ -1,16 +1,17 @@ -{-# LANGUAGE DeriveAnyClass, MultiParamTypeClasses, ScopedTypeVariables, UndecidableInstances, TupleSections #-} +{-# LANGUAGE DeriveAnyClass, MultiParamTypeClasses, ScopedTypeVariables, TupleSections, UndecidableInstances #-} {-# OPTIONS_GHC -Wno-missing-export-lists #-} module Data.Syntax.Declaration where +import Control.Abstract.ScopeGraph import qualified Data.Abstract.Environment as Env import Data.Abstract.Evaluatable -import Control.Abstract.ScopeGraph import Data.JSON.Fields +import qualified Data.Map.Strict as Map +import qualified Data.Reprinting.Scope as Scope import qualified Data.Set as Set import Diffing.Algorithm import Prologue import Proto3.Suite.Class -import qualified Data.Map.Strict as Map import Reprinting.Tokenize data Function a = Function { functionContext :: ![a], functionName :: !a, functionParameters :: ![a], functionBody :: !a } @@ -35,9 +36,9 @@ instance Evaluatable Function where where paramNames = foldMap (maybeToList . declaredName . subterm) instance Tokenize Function where - tokenize Function{..} = within' TFunction $ do + tokenize Function{..} = within' Scope.Function $ do functionName - within' TParams $ sequenceA_ (sep functionParameters) + within' Scope.Params $ sequenceA_ (sep functionParameters) functionBody instance Declarations1 Function where @@ -67,10 +68,10 @@ instance Evaluatable Method where pure (Rval addr) where paramNames = foldMap (maybeToList . declaredName . 
subterm) -instance Tokenize Method where - tokenize Method{..} = within' TMethod $ do +instance Tokenize Data.Syntax.Declaration.Method where + tokenize Method{..} = within' Scope.Method $ do methodName - within' TParams $ sequenceA_ (sep methodParameters) + within' Scope.Params $ sequenceA_ (sep methodParameters) methodBody instance Declarations1 Method where diff --git a/src/Data/Syntax/Expression.hs b/src/Data/Syntax/Expression.hs index 80c58c117..e0f936d14 100644 --- a/src/Data/Syntax/Expression.hs +++ b/src/Data/Syntax/Expression.hs @@ -1,18 +1,21 @@ -{-# LANGUAGE DeriveAnyClass, MultiParamTypeClasses, ScopedTypeVariables, UndecidableInstances, DuplicateRecordFields #-} +{-# LANGUAGE DeriveAnyClass, DuplicateRecordFields, MultiParamTypeClasses, ScopedTypeVariables, UndecidableInstances #-} {-# OPTIONS_GHC -Wno-missing-export-lists #-} module Data.Syntax.Expression where -import Control.Abstract.ScopeGraph as ScopeGraph -import Data.Abstract.Evaluatable hiding (Member) -import Data.Abstract.Number (liftIntegralFrac, liftReal, liftedExponent, liftedFloorDiv) -import Data.Bits -import Data.Fixed -import Data.JSON.Fields -import Diffing.Algorithm hiding (Delete) -import Prologue hiding (index, Member, This, null) import Prelude hiding (null) +import Prologue hiding (Member, This, index, null) + +import Data.Fixed import Proto3.Suite.Class -import Reprinting.Tokenize + +import Control.Abstract.ScopeGraph as ScopeGraph +import Data.Abstract.Evaluatable hiding (Member) +import Data.Abstract.Number (liftIntegralFrac, liftReal, liftedExponent, liftedFloorDiv) +import Data.JSON.Fields +import qualified Data.Reprinting.Scope as Scope +import Diffing.Algorithm hiding (Delete) +import Reprinting.Tokenize +import qualified Data.Reprinting.Token as Token -- | Typical prefix function application, like `f(x)` in many languages, or `f x` in Haskell. 
data Call a = Call { callContext :: ![a], callFunction :: !a, callParams :: ![a], callBlock :: !a } @@ -30,10 +33,10 @@ instance Evaluatable Call where Rval <$> call op recv args instance Tokenize Call where - tokenize Call{..} = within TCall $ do + tokenize Call{..} = within Scope.Call $ do -- TODO: callContext callFunction - within' TParams $ sequenceA_ (sep callParams) + within' Scope.Params $ sequenceA_ (sep callParams) callBlock data LessThan a = LessThan { lhs :: a, rhs :: a } @@ -136,7 +139,7 @@ instance Evaluatable Plus where go (Plus a b) = liftNumeric2 add a b where add = liftReal (+) instance Tokenize Plus where - tokenize Plus{..} = within' (TInfixL Add 6) $ lhs *> yield TSym <* rhs + tokenize Plus{..} = within' (Scope.InfixL Add 6) $ lhs *> yield Token.Sym <* rhs data Minus a = Minus { lhs :: a, rhs :: a } deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, Named1, Message1) @@ -150,7 +153,7 @@ instance Evaluatable Minus where go (Minus a b) = liftNumeric2 sub a b where sub = liftReal (-) instance Tokenize Minus where - tokenize Minus{..} = within' (TInfixL Subtract 6) $ lhs *> yield TSym <* rhs + tokenize Minus{..} = within' (Scope.InfixL Subtract 6) $ lhs *> yield Token.Sym <* rhs data Times a = Times { lhs :: a, rhs :: a } deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, Named1, Message1) @@ -164,7 +167,7 @@ instance Evaluatable Times where go (Times a b) = liftNumeric2 mul a b where mul = liftReal (*) instance Tokenize Times where - tokenize Times{..} = within' (TInfixL Multiply 7) $ lhs *> yield TSym <* rhs + tokenize Times{..} = within' (Scope.InfixL Multiply 7) $ lhs *> yield Token.Sym <* rhs data DividedBy a = DividedBy { lhs :: a, rhs :: a } deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, Named1, Message1) @@ -540,7 +543,7 @@ newtype New a = New { newSubject :: [a] } deriving (Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, Named1, Message1) instance Declarations1 New where - liftDeclaredName _ (New []) = Nothing + liftDeclaredName _ (New []) = Nothing liftDeclaredName declaredName (New (subject : _)) = declaredName subject instance Eq1 New where liftEq = genericLiftEq diff --git a/src/Data/Syntax/Statement.hs b/src/Data/Syntax/Statement.hs index 65a5fb444..93ffc377e 100644 --- a/src/Data/Syntax/Statement.hs +++ b/src/Data/Syntax/Statement.hs @@ -2,18 +2,21 @@ {-# OPTIONS_GHC -Wno-missing-export-lists #-} module Data.Syntax.Statement where -import Data.Abstract.Evaluatable -import Control.Abstract.ScopeGraph +import Prologue + import qualified Data.Map.Strict as Map import Data.Aeson (ToJSON1 (..)) -import Data.JSON.Fields import Data.Semigroup.App import Data.Semigroup.Foldable -import Diffing.Algorithm -import Prelude -import Prologue import Proto3.Suite.Class + +import Data.Abstract.Evaluatable +import Control.Abstract.ScopeGraph +import Data.JSON.Fields +import Diffing.Algorithm import Reprinting.Tokenize +import qualified Data.Reprinting.Token as Token +import qualified Data.Reprinting.Scope as Scope -- | Imperative sequence of statements/declarations s.t.: -- @@ -52,11 +55,11 @@ instance Evaluatable If where Rval <$> ifthenelse bool (subtermAddress if') (subtermAddress else') instance Tokenize If where - tokenize If{..} = within' TIf $ do + tokenize 
If{..} = within' Scope.If $ do ifCondition - yield TThen + yield Token.Then ifThenBody - yield TElse + yield Token.Else ifElseBody -- | Else statement. The else condition is any term, that upon successful completion, continues evaluation to the elseBody, e.g. `for ... else` in Python. @@ -224,7 +227,7 @@ instance Evaluatable Return where eval (Return x) = Rval <$> (subtermAddress x >>= earlyReturn) instance Tokenize Return where - tokenize (Return x) = within' TReturn x + tokenize (Return x) = within' Scope.Return x newtype Yield a = Yield { value :: a } deriving (Declarations1, Diffable, Eq, Foldable, FreeVariables1, Functor, Generic1, Hashable1, Ord, Show, ToJSONFields1, Traversable, Named1, Message1) diff --git a/src/Language/JSON/PrettyPrint.hs b/src/Language/JSON/PrettyPrint.hs index 0b090fd9f..8f9bd2401 100644 --- a/src/Language/JSON/PrettyPrint.hs +++ b/src/Language/JSON/PrettyPrint.hs @@ -16,6 +16,7 @@ import Data.Machine import Data.Reprinting.Errors import Data.Reprinting.Splice import Data.Reprinting.Token +import Data.Reprinting.Scope -- | Default printing pipeline for JSON. defaultJSONPipeline :: (Member (Exc TranslationError) effs) @@ -34,14 +35,14 @@ printingJSON = repeatedly (await >>= step) where (Truth False, _) -> ins "false" (Nullity, _) -> ins "null" - (TOpen, Just TList) -> ins "[" - (TClose, Just TList) -> ins "]" - (TOpen, Just THash) -> ins "{" - (TClose, Just THash) -> ins "}" + (Open, Just List) -> ins "[" + (Close, Just List) -> ins "]" + (Open, Just Hash) -> ins "{" + (Close, Just Hash) -> ins "}" - (TSep, Just TList) -> ins "," - (TSep, Just TPair) -> ins ":" - (TSep, Just THash) -> ins "," + (Sep, Just List) -> ins "," + (Sep, Just Pair) -> ins ":" + (Sep, Just Hash) -> ins "," _ -> yield s step x = yield x @@ -61,11 +62,11 @@ beautifyingJSON _ = repeatedly (await >>= step) where step (Defer el cs) = lift (throwError (NoTranslation el cs)) step (Verbatim txt) = emit txt step (New el cs txt) = case (el, cs) of - (TOpen, THash:_) -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs) - (TClose, THash:rest) -> layout HardWrap *> indent 2 (hashDepth rest) *> emit txt - (TSep, TList:_) -> emit txt *> space - (TSep, TPair:_) -> emit txt *> space - (TSep, THash:_) -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs) + (Open, Hash:_) -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs) + (Close, Hash:rest) -> layout HardWrap *> indent 2 (hashDepth rest) *> emit txt + (Sep, List:_) -> emit txt *> space + (Sep, Pair:_) -> emit txt *> space + (Sep, Hash:_) -> emit txt *> layout HardWrap *> indent 2 (hashDepth cs) _ -> emit txt -- | Produce whitespace minimal JSON. @@ -76,5 +77,5 @@ minimizingJSON = repeatedly (await >>= step) where step (Verbatim txt) = emit txt step (New _ _ txt) = emit txt -hashDepth :: [Context] -> Int -hashDepth = length . filter (== THash) +hashDepth :: [Scope] -> Int +hashDepth = length . filter (== Hash) diff --git a/src/Language/Python/PrettyPrint.hs b/src/Language/Python/PrettyPrint.hs index 0de3631dc..426dc1e14 100644 --- a/src/Language/Python/PrettyPrint.hs +++ b/src/Language/Python/PrettyPrint.hs @@ -6,9 +6,12 @@ import Control.Monad.Effect import Control.Monad.Effect.Exception (Exc, throwError) import Control.Monad.Trans (lift) import Data.Machine + import Data.Reprinting.Errors import Data.Reprinting.Splice import Data.Reprinting.Token as Token +import Data.Reprinting.Scope +import Data.Reprinting.Operator -- | Print Python syntax. 
 printingPython :: (Member (Exc TranslationError) effs) => ProcessT (Eff effs) Fragment Splice
@@ -19,46 +22,46 @@ step (Verbatim txt) = emit txt
 step (New _ _ txt) = emit txt
 step (Defer el cs) = case (el, cs) of
   -- Function declarations
-  (TOpen, TFunction:_) -> emit "def" *> space
-  (TOpen, TParams:TFunction:_) -> emit "("
-  (TClose, TParams:TFunction:_) -> emit "):"
-  (TClose, TFunction:xs) -> endContext (imperativeDepth xs)
+  (Open, Function:_) -> emit "def" *> space
+  (Open, Params:Function:_) -> emit "("
+  (Close, Params:Function:_) -> emit "):"
+  (Close, Function:xs) -> endContext (imperativeDepth xs)

   -- Return statements
-  (TOpen, TReturn:_) -> emit "return" *> space
-  (TClose, TReturn:_) -> pure ()
-  (TOpen, Imperative:TReturn:_) -> pure ()
-  (TSep, Imperative:TReturn:_) -> emit "," *> space
-  (TClose, Imperative:TReturn:_) -> pure () -- Don't hardwarp or indent for return statements
+  (Open, Return:_) -> emit "return" *> space
+  (Close, Return:_) -> pure ()
+  (Open, Imperative:Return:_) -> pure ()
+  (Sep, Imperative:Return:_) -> emit "," *> space
+  (Close, Imperative:Return:_) -> pure () -- Don't hardwrap or indent for return statements

   -- If statements
-  (TOpen, TIf:_) -> emit "if" *> space
-  (TThen, TIf:_) -> emit ":"
-  (TElse, TIf:xs) -> endContext (imperativeDepth xs) *> emit "else:"
-  (TClose, TIf:_) -> pure ()
+  (Open, If:_) -> emit "if" *> space
+  (Then, If:_) -> emit ":"
+  (Else, If:xs) -> endContext (imperativeDepth xs) *> emit "else:"
+  (Close, If:_) -> pure ()

   -- Booleans
   (Truth True, _) -> emit "True"
   (Truth False, _) -> emit "False"

   -- Infix binary operators
-  (TOpen, TInfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
-  (TSym, TInfixL Add _:_) -> space *> emit "+" *> space
-  (TSym, TInfixL Multiply _:_) -> space *> emit "*" *> space
-  (TSym, TInfixL Subtract _:_) -> space *> emit "-" *> space
-  (TClose, TInfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"
+  (Open, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
+  (Sym, InfixL Add _:_) -> space *> emit "+" *> space
+  (Sym, InfixL Multiply _:_) -> space *> emit "*" *> space
+  (Sym, InfixL Subtract _:_) -> space *> emit "-" *> space
+  (Close, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"

   -- General params handling
-  (TOpen, TParams:_) -> emit "("
-  (TSep, TParams:_) -> emit "," *> space
-  (TClose, TParams:_) -> emit ")"
+  (Open, Params:_) -> emit "("
+  (Sep, Params:_) -> emit "," *> space
+  (Close, Params:_) -> emit ")"

   -- Imperative context and whitespace handling
-  (TOpen, [Imperative]) -> pure () -- Don't indent at the top-level imperative context...
-  (TClose, [Imperative]) -> layout HardWrap -- but end the program with a newline.
-  (TOpen, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
-  (TSep, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
-  (TClose, Imperative:_) -> pure ()
+  (Open, [Imperative]) -> pure () -- Don't indent at the top-level imperative context...
+  (Close, [Imperative]) -> layout HardWrap -- but end the program with a newline.
+  (Open, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
+  (Sep, Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
+  (Close, Imperative:_) -> pure ()

   _ -> lift (throwError (NoTranslation el cs))

diff --git a/src/Language/Ruby/PrettyPrint.hs b/src/Language/Ruby/PrettyPrint.hs
index b77600004..c39ce7983 100644
--- a/src/Language/Ruby/PrettyPrint.hs
+++ b/src/Language/Ruby/PrettyPrint.hs
@@ -6,7 +6,10 @@ import Control.Monad.Effect
 import Control.Monad.Effect.Exception (Exc, throwError)
 import Control.Monad.Trans (lift)
 import Data.Machine
+
+import Data.Reprinting.Scope
 import Data.Reprinting.Errors
+import Data.Reprinting.Operator
 import Data.Reprinting.Splice
 import Data.Reprinting.Token as Token

@@ -18,33 +21,33 @@ step :: (Member (Exc TranslationError) effs) => Fragment -> PlanT k Splice (Eff
 step (Verbatim txt) = emit txt
 step (New _ _ txt) = emit txt
 step (Defer el cs) = case (el, cs) of
-  (TOpen, TMethod:_) -> emit "def" *> space
-  (TClose, TMethod:xs) -> endContext (imperativeDepth xs) *> emit "end"
+  (Open, Method:_) -> emit "def" *> space
+  (Close, Method:xs) -> endContext (imperativeDepth xs) *> emit "end"

-  -- TODO: do..end vs {..} should be configurable.
-  (TOpen, TFunction:_) -> space *> emit "do" *> space
-  (TOpen, TParams:TFunction:_) -> emit "|"
-  (TClose, TParams:TFunction:_) -> emit "|"
-  (TClose, TFunction:xs) -> endContext (imperativeDepth xs) *> emit "end"
+  -- TODO: do..end vs {..} should be configurable.
+  (Open, Function:_) -> space *> emit "do" *> space
+  (Open, Params:Function:_) -> emit "|"
+  (Close, Params:Function:_) -> emit "|"
+  (Close, Function:xs) -> endContext (imperativeDepth xs) *> emit "end"

-  -- TODO: Parens for calls are a style choice, make configurable.
-  (TOpen, TParams:_) -> emit "("
-  (TSep, TParams:_) -> emit "," *> space
-  (TClose, TParams:_) -> emit ")"
+  -- TODO: Parens for calls are a style choice, make configurable.
+  (Open, Params:_) -> emit "("
+  (Sep, Params:_) -> emit "," *> space
+  (Close, Params:_) -> emit ")"

-  (TOpen, TInfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
-  (TSym, TInfixL Add _:_) -> space *> emit "+" *> space
-  (TSym, TInfixL Multiply _:_) -> space *> emit "*" *> space
-  (TSym, TInfixL Subtract _:_) -> space *> emit "-" *> space
-  (TClose, TInfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"
+  (Open, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) "("
+  (Sym, InfixL Add _:_) -> space *> emit "+" *> space
+  (Sym, InfixL Multiply _:_) -> space *> emit "*" *> space
+  (Sym, InfixL Subtract _:_) -> space *> emit "-" *> space
+  (Close, InfixL _ p:xs) -> emitIf (p < precedenceOf xs) ")"

-  (TOpen, [Imperative]) -> pure ()
-  (TOpen, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
-  (TSep, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
-  (TClose, [Imperative]) -> layout HardWrap
-  (TClose, Imperative:xs) -> indent 2 (pred (imperativeDepth xs))
+  (Open, [Imperative]) -> pure ()
+  (Open, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
+  (Sep, Imperative:xs) -> layout HardWrap *> indent 2 (imperativeDepth xs)
+  (Close, [Imperative]) -> layout HardWrap
+  (Close, Imperative:xs) -> indent 2 (pred (imperativeDepth xs))

-  (TSep, TCall:_) -> emit "."
+  (Sep, Call:_) -> emit "."
_ -> lift (throwError (NoTranslation el cs)) diff --git a/src/Language/Ruby/Syntax.hs b/src/Language/Ruby/Syntax.hs index 209efa628..4bf6da56e 100644 --- a/src/Language/Ruby/Syntax.hs +++ b/src/Language/Ruby/Syntax.hs @@ -2,21 +2,22 @@ {-# OPTIONS_GHC -Wno-missing-export-lists #-} module Language.Ruby.Syntax where -import Control.Abstract.Value (Boolean) import Control.Monad (unless) +import qualified Data.Text as T +import Prologue +import System.FilePath.Posix + +import Control.Abstract.Value (Boolean) import Data.Abstract.BaseError import Data.Abstract.Evaluatable import qualified Data.Abstract.Module as M import Data.Abstract.Path +import qualified Data.Reprinting.Scope as Scope import Data.JSON.Fields import qualified Data.Language as Language -import qualified Data.Text as T import Diffing.Algorithm -import Prologue import Proto3.Suite.Class import Reprinting.Tokenize -import System.FilePath.Posix - -- TODO: Fully sort out ruby require/load mechanics -- @@ -68,10 +69,10 @@ instance Evaluatable Send where Rval <$> call func recv args -- TODO pass through sendBlock instance Tokenize Send where - tokenize Send{..} = within TCall $ do - maybe (pure ()) (\r -> r *> yield TSep) sendReceiver + tokenize Send{..} = within Scope.Call $ do + maybe (pure ()) (\r -> r *> yield Sep) sendReceiver fromMaybe (pure ()) sendSelector - within' TParams $ sequenceA_ (sep sendArgs) + within' Scope.Params $ sequenceA_ (sep sendArgs) fromMaybe (pure ()) sendBlock data Require a = Require { requireRelative :: Bool, requirePath :: !a } @@ -98,7 +99,7 @@ doRequire :: ( Member (Boolean value) effects doRequire path = do result <- lookupModule path case result of - Nothing -> (,) . fst . snd <$> load path <*> boolean True + Nothing -> (,) . fst . snd <$> load path <*> boolean True Just (_, (env, _)) -> (env,) <$> boolean False diff --git a/src/Reprinting/Pipeline.hs b/src/Reprinting/Pipeline.hs index 3cfa49155..0a7a63406 100644 --- a/src/Reprinting/Pipeline.hs +++ b/src/Reprinting/Pipeline.hs @@ -108,14 +108,16 @@ import qualified Control.Monad.Effect.Exception as Exc import Control.Monad.Effect.State import Data.Machine hiding (Source) import Data.Machine.Runner +import Data.Text.Prettyprint.Doc +import Data.Text.Prettyprint.Doc.Render.Text + import Data.Record import Data.Reprinting.Errors +import Data.Reprinting.Scope import Data.Reprinting.Splice import Data.Reprinting.Token import qualified Data.Source as Source import Data.Term -import Data.Text.Prettyprint.Doc -import Data.Text.Prettyprint.Doc.Render.Text import Reprinting.Tokenize import Reprinting.Translate import Reprinting.Typeset @@ -137,7 +139,7 @@ runReprinter src translating tree . Effect.run . Exc.runError . fmap snd - . runState (mempty :: [Context]) + . runState (mempty :: [Scope]) . foldT $ source (tokenizing src tree) ~> contextualizing ~> translating @@ -169,7 +171,7 @@ runContextualizing src tree = Effect.run . Exc.runError . fmap snd - . runState (mempty :: [Context]) + . runState (mempty :: [Scope]) . runT $ source (tokenizing src tree) ~> contextualizing @@ -186,7 +188,7 @@ runTranslating src translating tree = Effect.run . Exc.runError . fmap snd - . runState (mempty :: [Context]) + . runState (mempty :: [Scope]) . 
runT $ source (tokenizing src tree) ~> contextualizing ~> translating diff --git a/src/Reprinting/Tokenize.hs b/src/Reprinting/Tokenize.hs index 11a93992b..7e8e82286 100644 --- a/src/Reprinting/Tokenize.hs +++ b/src/Reprinting/Tokenize.hs @@ -1,7 +1,9 @@ {-# LANGUAGE GADTs, LambdaCase, RankNTypes, UndecidableInstances #-} module Reprinting.Tokenize - ( module Data.Reprinting.Token + ( module Token + , module Scope + , module Operator , History (..) , mark , remark @@ -33,7 +35,10 @@ import Data.List (intersperse) import qualified Data.Machine as Machine import Data.Range import Data.Record -import Data.Reprinting.Token +import Data.Reprinting.Scope (Scope) +import qualified Data.Reprinting.Scope as Scope +import Data.Reprinting.Token as Token +import Data.Reprinting.Operator as Operator import Data.Source import Data.Term @@ -95,11 +100,11 @@ data State = State yield :: Element -> Tokenizer () yield e = do on <- filter <$> Get - when (on == AllowAll) . Tell . TElement $ e + when (on == AllowAll) . Tell . Element $ e -- | Yield a 'Control' token. control :: Control -> Tokenizer () -control = Tell . TControl +control = Tell . Control -- | Yield a 'Chunk' of some 'Source'. chunk :: Source -> Tokenizer () @@ -179,39 +184,40 @@ log = control . Log -- | Emit an Enter for the given context, then run the provided -- action, then emit a corresponding Exit. -within :: Context -> Tokenizer () -> Tokenizer () +within :: Scope -> Tokenizer () -> Tokenizer () within c r = control (Enter c) *> r <* control (Exit c) --- | Like 'within', but adds 'TOpen' and 'TClose' elements around the action. -within' :: Context -> Tokenizer () -> Tokenizer () -within' c x = within c $ yield TOpen *> x <* yield TClose +-- | Like 'within', but adds 'Open' and 'Close' elements around the action. +within' :: Scope -> Tokenizer () -> Tokenizer () +within' c x = within c $ yield Token.Open *> x <* yield Token.Close --- | Emit a sequence of tokens interspersed with 'TSep'. +-- | Emit a sequence of tokens interspersed with 'Sep'. sep :: Foldable t => t (Tokenizer ()) -> [Tokenizer ()] -sep = intersperse (yield TSep) . toList +sep = intersperse (yield Token.Sep) . toList --- | Emit a sequence of tokens each with trailing 'TSep'. +-- | Emit a sequence of tokens each with trailing 'Sep'. sepTrailing :: Foldable t => t (Tokenizer ()) -> [Tokenizer ()] -sepTrailing = foldr (\x acc -> x : yield TSep : acc) mempty +sepTrailing = foldr (\x acc -> x : yield Token.Sep : acc) mempty --- | Emit a sequence of tokens within a 'TList' Context with appropriate 'TOpen', +-- | Emit a sequence of tokens within a 'List' Scope with appropriate 'Open', -- 'TClose' tokens surrounding. list :: Foldable t => t (Tokenizer ()) -> Tokenizer () -list = within' TList . sequenceA_ . sep +list = within' Scope.List . sequenceA_ . sep --- | Emit a sequence of tokens within a 'THash' Context with appropriate --- 'TOpen', 'TClose' tokens surrounding and interspersing 'TSep'. +-- | Emit a sequence of tokens within a 'Hash' Scope with appropriate +-- 'Open', 'TClose' tokens surrounding and interspersing 'Sep'. hash :: Foldable t => t (Tokenizer ()) -> Tokenizer () -hash = within' THash . sequenceA_ . sep +hash = within' Scope.Hash . sequenceA_ . sep --- | Emit key value tokens with a 'TSep' within an TPair Context +-- | Emit key value tokens with a 'Sep' within a scoped 'Pair'. 
pair :: Tokenizer () -> Tokenizer () -> Tokenizer () -pair k v = within TPair $ k *> yield TSep <* v +pair k v = within Scope.Pair $ k *> yield Token.Sep <* v --- | Emit a sequence of tokens within an Imperative Context with appropriate --- 'TOpen', 'TClose' tokens surrounding and interspersing 'TSep'. +-- | Emit a sequence of tokens within an 'Imperative' scope with +-- appropriate 'Open', 'Close' tokens surrounding and interspersing +-- 'Sep'. imperative :: Foldable t => t (Tokenizer ()) -> Tokenizer () -imperative = within' Imperative . sequenceA_ . sep +imperative = within' Scope.Imperative . sequenceA_ . sep -- | Shortcut for @const (pure ())@, useful for when no action -- should be taken. diff --git a/src/Reprinting/Translate.hs b/src/Reprinting/Translate.hs index 72723ffa7..a1aa946d6 100644 --- a/src/Reprinting/Translate.hs +++ b/src/Reprinting/Translate.hs @@ -16,25 +16,26 @@ import Data.Machine import Data.Reprinting.Errors import Data.Reprinting.Splice import Data.Reprinting.Token +import Data.Reprinting.Scope import qualified Data.Source as Source -type Translator = Eff '[State [Context], Exc TranslationError] +type Translator = Eff '[State [Scope], Exc TranslationError] contextualizing :: ProcessT Translator Token Fragment contextualizing = repeatedly $ await >>= \case Chunk source -> yield . Verbatim . Source.toText $ source - TElement t -> case t of + Element t -> case t of Run f -> lift get >>= \c -> yield (New t c f) _ -> lift get >>= yield . Defer t - TControl ctl -> case ctl of - Enter c -> enterContext c - Exit c -> exitContext c + Control ctl -> case ctl of + Enter c -> enterScope c + Exit c -> exitScope c _ -> pure () -enterContext, exitContext :: Context -> PlanT k Fragment Translator () +enterScope, exitScope :: Scope -> PlanT k Fragment Translator () -enterContext c = lift (modify' (c :)) +enterScope c = lift (modify' (c :)) -exitContext c = lift get >>= \case +exitScope c = lift get >>= \case (x:xs) -> when (x == c) (lift (modify' (const xs))) cs -> lift (Exc.throwError (UnbalancedPair c cs)) diff --git a/test/Reprinting/Spec.hs b/test/Reprinting/Spec.hs index 14c6bef63..5149f154d 100644 --- a/test/Reprinting/Spec.hs +++ b/test/Reprinting/Spec.hs @@ -2,24 +2,27 @@ module Reprinting.Spec where -import SpecHelpers hiding (project, inject) +import SpecHelpers hiding (inject, project) -import Data.Functor.Foldable (embed, cata) -import qualified Data.Language as Language -import qualified Data.Syntax.Literal as Literal -import Data.Algebra -import Reprinting.Tokenize -import Reprinting.Pipeline -import Data.Sum -import Data.Foldable -import Semantic.IO -import Semantic.Util.Rewriting hiding (parseFile) -import Data.Blob -import Language.JSON.PrettyPrint -import Language.Ruby.PrettyPrint -import Language.Python.PrettyPrint +import Data.Foldable +import Data.Functor.Foldable (cata, embed) import qualified Data.Machine as Machine +import Data.Algebra +import Data.Blob +import qualified Data.Language as Language +import Data.Reprinting.Token +import Data.Reprinting.Scope +import Data.Sum +import qualified Data.Syntax.Literal as Literal +import Language.JSON.PrettyPrint +import Language.Python.PrettyPrint +import Language.Ruby.PrettyPrint +import Reprinting.Pipeline +import Reprinting.Tokenize +import Semantic.IO +import Semantic.Util.Rewriting hiding (parseFile) + spec :: Spec spec = describe "reprinting" $ do context "JSON" $ do @@ -40,9 +43,9 @@ spec = describe "reprinting" $ do it "should emit control tokens but only 1 chunk for a wholly-modified tree" $ do let toks = 
Machine.run $ tokenizing src (mark Refactored tree) - for_ @[] [TList, THash] $ \t -> do - toks `shouldSatisfy` elem (TControl (Enter t)) - toks `shouldSatisfy` elem (TControl (Exit t)) + for_ @[] [List, Hash] $ \t -> do + toks `shouldSatisfy` elem (Control (Enter t)) + toks `shouldSatisfy` elem (Control (Exit t)) describe "pipeline" $ do From 649dd2132f373faf925a6ef1cc48474b6f65ffda Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 18 Sep 2018 16:55:04 -0400 Subject: [PATCH 02/23] renameKey works well now --- src/Semantic/Util.hs | 2 ++ src/Semantic/Util/Rewriting.hs | 36 ++++++++++++++++++++-------------- 2 files changed, 23 insertions(+), 15 deletions(-) diff --git a/src/Semantic/Util.hs b/src/Semantic/Util.hs index 3e4d99181..a92d77cfa 100644 --- a/src/Semantic/Util.hs +++ b/src/Semantic/Util.hs @@ -38,6 +38,8 @@ import Semantic.Telemetry (LogQueue, StatQueue) import System.Exit (die) import System.FilePath.Posix (takeDirectory) import Text.Show.Pretty (ppShow) +import qualified Semantic.Util.Rewriting as R + justEvaluating = runM diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 7108ef1e6..8436e22a5 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -2,13 +2,17 @@ {-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-export-lists #-} module Semantic.Util.Rewriting where -import Prelude hiding (id, (.), readFile) +import Prelude hiding (id, readFile, (.)) +import Prologue + +import Control.Category +import qualified Data.ByteString.Char8 as BC +import Text.Show.Pretty (pPrint) import Control.Abstract import Control.Abstract.Matching -import Control.Category +import Control.Rewriting hiding (fromMatcher, target) import Data.Blob -import qualified Data.ByteString.Char8 as BC import Data.History import qualified Data.Language as Language import Data.Machine @@ -20,11 +24,10 @@ import qualified Data.Sum as Sum import qualified Data.Syntax.Literal as Literal import Data.Term import Language.JSON.PrettyPrint -import Language.Ruby.PrettyPrint import Language.Python.PrettyPrint +import Language.Ruby.PrettyPrint import Matching.Core import Parsing.Parser -import Prologue hiding (weaken) import Reprinting.Pipeline import Semantic.IO as IO import Semantic.Task @@ -81,18 +84,21 @@ testJSONFile = do tree <- parseFile jsonParser path pure (src, tree) -renameKey :: (Literal.TextElement :< fs, Literal.KeyValue :< fs, Apply Functor fs) => Term (Sum fs) (Record (History ': fields)) -> Term (Sum fs) (Record (History ': fields)) -renameKey p = case projectTerm p of - Just (Literal.KeyValue k v) - | Just (Literal.TextElement x) <- Sum.project (termOut k) - , x == "\"foo\"" - -> let newKey = termIn (termAnnotation k) (inject (Literal.TextElement "\"fooA\"")) - in remark Refactored (termIn (termAnnotation p) (inject (Literal.KeyValue newKey v))) - _ -> Term (fmap renameKey (unTerm p)) +renameKey :: ( Literal.TextElement :< fs + , Apply Functor fs + , term ~ Term (Sum fs) (Record (History : fields)) + ) + => Rewrite (env, term) m (Literal.KeyValue term) +renameKey = do + Literal.KeyValue k v <- id + guard (projectTerm k == (Just (Literal.TextElement "\"foo\""))) + new <- modified (Literal.TextElement "\"fooA\"") + pure (Literal.KeyValue new v) testRenameKey = do (src, tree) <- testJSONFile - let tagged = renameKey (mark Unmodified tree) + let (Right tagged) = applyPure (somewhere renameKey) () (mark Unmodified tree) + pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged increaseNumbers :: (Literal.Float :< fs, Apply 
Functor fs) => Term (Sum fs) (Record (History ': fields)) -> Term (Sum fs) (Record (History ': fields)) @@ -180,7 +186,7 @@ changeKV = auto $ either id injKV injKV :: (term, Literal.KeyValue term) -> term injKV (term, Literal.KeyValue k v) = case projectTerm v of Just (Literal.Array elems) -> remark Refactored (termIn ann (inject (Literal.KeyValue k (newArray elems)))) - _ -> term + _ -> term where newArray xs = termIn ann (inject (Literal.Array (xs <> [float]))) float = termIn ann (inject (Literal.Float "4")) ann = termAnnotation term From 62560cf462c49c27d6cf08f3fcd5521193f5fda5 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 18 Sep 2018 17:06:02 -0400 Subject: [PATCH 03/23] fix increaseNumbers --- src/Semantic/Util/Rewriting.hs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 8436e22a5..125cc4410 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -101,10 +101,10 @@ testRenameKey = do pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged -increaseNumbers :: (Literal.Float :< fs, Apply Functor fs) => Term (Sum fs) (Record (History ': fields)) -> Term (Sum fs) (Record (History ': fields)) -increaseNumbers p = case Sum.project (termOut p) of - Just (Literal.Float t) -> remark Refactored (termIn (termAnnotation p) (inject (Literal.Float (t <> "0")))) - Nothing -> Term (fmap increaseNumbers (unTerm p)) +increaseNumbers :: (term ~ Term (Sum fs) (Record (History : fields))) => Rewrite (env, term) m (Literal.Float term) +increaseNumbers = do + (Literal.Float c) <- id + pure (Literal.Float (c <> "0")) addKVPair :: forall effs syntax ann fields term . ( Apply Functor syntax From 9a1f6e9835e30530517d3920b51f69b35c2f4e08 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 19 Sep 2018 11:58:44 -0400 Subject: [PATCH 04/23] bump api --- src/Data/Syntax.hs | 2 +- src/Semantic/Util/Rewriting.hs | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/Data/Syntax.hs b/src/Data/Syntax.hs index 226913c4f..7d4245b0c 100644 --- a/src/Data/Syntax.hs +++ b/src/Data/Syntax.hs @@ -18,7 +18,7 @@ import GHC.TypeLits import Diffing.Algorithm hiding (Empty) import Prelude import Prologue -import Reprinting.Tokenize hiding (Context, Element) +import Reprinting.Tokenize hiding (Element) import qualified Assigning.Assignment as Assignment import qualified Data.Error as Error import Proto3.Suite.Class diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 125cc4410..b3c839cca 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -88,7 +88,7 @@ renameKey :: ( Literal.TextElement :< fs , Apply Functor fs , term ~ Term (Sum fs) (Record (History : fields)) ) - => Rewrite (env, term) m (Literal.KeyValue term) + => Rewrite (env, term) (Literal.KeyValue term) renameKey = do Literal.KeyValue k v <- id guard (projectTerm k == (Just (Literal.TextElement "\"foo\""))) @@ -97,11 +97,11 @@ renameKey = do testRenameKey = do (src, tree) <- testJSONFile - let (Right tagged) = applyPure (somewhere renameKey) () (mark Unmodified tree) + let (Right tagged) = rewrite (somewhere' renameKey) () (mark Unmodified tree) pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged -increaseNumbers :: (term ~ Term (Sum fs) (Record (History : fields))) => Rewrite (env, term) m (Literal.Float term) +increaseNumbers :: (term ~ Term (Sum fs) (Record (History : fields))) => Rewrite (env, term) 
(Literal.Float term) increaseNumbers = do (Literal.Float c) <- id pure (Literal.Float (c <> "0")) From 0cc5fc5e190fc018ceaaa072dc20fe8b33cda24f Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 19 Sep 2018 12:06:09 -0400 Subject: [PATCH 05/23] port addKVPair --- src/Semantic/Util/Rewriting.hs | 36 ++++++++++++---------------------- 1 file changed, 13 insertions(+), 23 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index b3c839cca..a90a1363f 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -106,32 +106,22 @@ increaseNumbers = do (Literal.Float c) <- id pure (Literal.Float (c <> "0")) -addKVPair :: forall effs syntax ann fields term . - ( Apply Functor syntax - , Literal.Hash :< syntax - , Literal.Array :< syntax - , Literal.TextElement :< syntax - , Literal.KeyValue :< syntax - , ann ~ Record (History ': fields) - , term ~ Term (Sum syntax) ann - ) => - ProcessT (Eff effs) (Either term (term, Literal.Hash term)) term -addKVPair = repeatedly $ do - t <- await - Data.Machine.yield (either id injKVPair t) - where - injKVPair :: (term, Literal.Hash term) -> term - injKVPair (origTerm, Literal.Hash xs) = - remark Refactored (injectTerm ann (Literal.Hash (xs <> [newItem]))) - where - newItem = termIn ann (inject (Literal.KeyValue k v)) - k = termIn ann (inject (Literal.TextElement "\"added\"")) - v = termIn ann (inject (Literal.Array [])) - ann = termAnnotation origTerm +addKVPair :: ( Literal.TextElement :< syn + , Literal.KeyValue :< syn + , Literal.Array :< syn + , Apply Functor syn + , term ~ Term (Sum syn) (Record (History : fields)) + ) => Rewrite (env, term) (Literal.Hash term) +addKVPair = do + Literal.Hash els <- id + k <- modified $ Literal.TextElement "\"added\"" + v <- modified $ Literal.Array [] + pair <- modified $ (Literal.KeyValue k v) + pure (Literal.Hash (pair : els)) testAddKVPair = do (src, tree) <- testJSONFile - tagged <- runM $ cata (toAlgebra (fromMatcher matchHash ~> addKVPair)) (mark Unmodified tree) + let (Right tagged) = rewrite (somewhere addKVPair markRefactored) () (mark Unmodified tree) printToTerm $ runReprinter src defaultJSONPipeline tagged overwriteFloats :: forall effs syntax ann fields term . From 7f81565efa72a9b2a36dbe0692b8b1daf63ff4e8 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 19 Sep 2018 12:38:10 -0400 Subject: [PATCH 06/23] broken overwriteFloats, not sure what's up --- src/Semantic/Util/Rewriting.hs | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index a90a1363f..c5ea3a1ed 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -124,22 +124,13 @@ testAddKVPair = do let (Right tagged) = rewrite (somewhere addKVPair markRefactored) () (mark Unmodified tree) printToTerm $ runReprinter src defaultJSONPipeline tagged -overwriteFloats :: forall effs syntax ann fields term . 
- ( Apply Functor syntax - , Literal.Float :< syntax - , ann ~ Record (History ': fields) - , term ~ Term (Sum syntax) ann - ) => - ProcessT (Eff effs) (Either term (term, Literal.Float term)) term -overwriteFloats = repeatedly $ do - t <- await - Data.Machine.yield (either id injFloat t) - where injFloat :: (term, Literal.Float term) -> term - injFloat (term, _) = remark Refactored (termIn (termAnnotation term) (inject (Literal.Float "0"))) +overwriteFloats :: Rewrite (env, term) (Literal.Float term) +overwriteFloats = pure (Literal.Float "0") testOverwriteFloats = do (src, tree) <- testJSONFile - tagged <- runM $ cata (toAlgebra (fromMatcher matchFloat ~> overwriteFloats)) (mark Unmodified tree) + let (Right tagged) = rewrite (somewhere overwriteFloats markRefactored) () (mark Unmodified tree) + pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged findKV :: From ba246ca80508d81db4f07261f6e63838fa620531 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 19 Sep 2018 12:58:18 -0400 Subject: [PATCH 07/23] last few fixes. now to investigate bugs --- src/Data/Term.hs | 6 ++++ src/Semantic/Util/Rewriting.hs | 60 ++++++++-------------------------- 2 files changed, 20 insertions(+), 46 deletions(-) diff --git a/src/Data/Term.hs b/src/Data/Term.hs index 22faabefb..355c89714 100644 --- a/src/Data/Term.hs +++ b/src/Data/Term.hs @@ -6,6 +6,7 @@ module Data.Term , termOut , injectTerm , projectTerm +, guardTerm , TermF(..) , termSize , hoistTerm @@ -37,6 +38,11 @@ termOut = termFOut . unTerm projectTerm :: forall f syntax ann . (f :< syntax) => Term (Sum syntax) ann -> Maybe (f (Term (Sum syntax) ann)) projectTerm = Sum.project . termOut +guardTerm :: forall m f syntax ann . (f :< syntax, Alternative m) + => Term (Sum syntax) ann + -> m (f (Term (Sum syntax) ann)) +guardTerm = Sum.projectGuard . 
termOut + data TermF syntax ann recur = In { termFAnnotation :: ann, termFOut :: syntax recur } deriving (Eq, Ord, Foldable, Functor, Show, Traversable) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index c5ea3a1ed..7e80138f0 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -1,5 +1,5 @@ {-# LANGUAGE ScopedTypeVariables, TypeFamilies, TypeOperators #-} -{-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-export-lists #-} +{-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-export-lists -Wno-incomplete-uni-patterns #-} module Semantic.Util.Rewriting where import Prelude hiding (id, readFile, (.)) @@ -9,24 +9,19 @@ import Control.Category import qualified Data.ByteString.Char8 as BC import Text.Show.Pretty (pPrint) -import Control.Abstract import Control.Abstract.Matching import Control.Rewriting hiding (fromMatcher, target) import Data.Blob import Data.History import qualified Data.Language as Language -import Data.Machine -import Data.Machine.Runner import Data.Project hiding (readFile) import Data.Record import qualified Data.Source as Source -import qualified Data.Sum as Sum import qualified Data.Syntax.Literal as Literal import Data.Term import Language.JSON.PrettyPrint import Language.Python.PrettyPrint import Language.Ruby.PrettyPrint -import Matching.Core import Parsing.Parser import Reprinting.Pipeline import Semantic.IO as IO @@ -133,14 +128,6 @@ testOverwriteFloats = do pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged -findKV :: - ( Literal.KeyValue :< syntax - , Literal.TextElement :< syntax - , term ~ Term (Sum syntax) ann - ) => - Text -> ProcessT (Eff effs) term (Either term (term, Literal.KeyValue term)) -findKV name = fromMatcher (kvMatcher name) - kvMatcher :: forall fs ann term . ( Literal.KeyValue :< fs , Literal.TextElement :< fs @@ -153,42 +140,23 @@ kvMatcher name = matchM projectTerm target <* matchKey where match Literal.textElementContent $ ensure (== name) -changeKV :: forall effs syntax ann fields term . - ( Apply Functor syntax - , Literal.KeyValue :< syntax - , Literal.Array :< syntax - , Literal.Float :< syntax - , ann ~ Record (History ': fields) - , term ~ Term (Sum syntax) ann - ) => - ProcessT (Eff effs) (Either term (term, Literal.KeyValue term)) term -changeKV = auto $ either id injKV - where - injKV :: (term, Literal.KeyValue term) -> term - injKV (term, Literal.KeyValue k v) = case projectTerm v of - Just (Literal.Array elems) -> remark Refactored (termIn ann (inject (Literal.KeyValue k (newArray elems)))) - _ -> term - where newArray xs = termIn ann (inject (Literal.Array (xs <> [float]))) - float = termIn ann (inject (Literal.Float "4")) - ann = termAnnotation term +changeKV :: ( Apply Functor syntax + , Literal.Array :< syntax + , Literal.Float :< syntax + , term ~ Term (Sum syntax) (Record (History : fields)) + ) + => Rewrite (env, term) (Literal.KeyValue term) +changeKV = do + (Literal.KeyValue k v) <- id + (Literal.Array vals) <- guardTerm v + let float = remark Refactored (injectTerm (annotation v) (Literal.Float "4")) + let newArr = remark Refactored (injectTerm (annotation v) (Literal.Array (float:vals))) + pure (Literal.KeyValue k newArr) testChangeKV = do (src, tree) <- testJSONFile - tagged <- runM $ cata (toAlgebra (findKV "\"bar\"" ~> changeKV)) (mark Unmodified tree) + let (Right tagged) = rewrite (somewhere' changeKV) () (mark Unmodified tree) printToTerm $ runReprinter src defaultJSONPipeline tagged --- Temporary, until new KURE system lands. 
-fromMatcher :: Matcher from to -> ProcessT (Eff effs) from (Either from (from, to)) -fromMatcher m = auto go where go x = maybe (Left x) (\y -> Right (x, y)) (stepMatcher x m) - --- Turn a 'ProccessT' into an FAlgebra. -toAlgebra :: (Traversable (Base t), Corecursive t) - => ProcessT (Eff effs) t t - -> FAlgebra (Base t) (Eff effs t) -toAlgebra m t = do - inner <- sequenceA t - res <- runT1 (source (Just (embed inner)) ~> m) - pure (fromMaybe (embed inner) res) - parseFile :: Parser term -> FilePath -> IO term parseFile parser = runTask . (parse parser <=< readBlob . file) From 2c28df61354817d431955a1a14fb6759bac28652 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Thu, 4 Oct 2018 11:43:25 -0400 Subject: [PATCH 08/23] fallout from the Record-ectomy --- src/Semantic/Util/Rewriting.hs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 695f04818..e75361af3 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -80,7 +80,7 @@ testJSONFile = do renameKey :: ( Literal.TextElement :< fs , Apply Functor fs - , term ~ Term (Sum fs) (Record History) + , term ~ Term (Sum fs) History ) => Rewrite (env, term) (Literal.KeyValue term) renameKey = do @@ -95,7 +95,7 @@ testRenameKey = do pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged -increaseNumbers :: (term ~ Term (Sum fs) (Record (History : fields))) => Rewrite (env, term) (Literal.Float term) +increaseNumbers :: (term ~ Term (Sum fs) History) => Rewrite (env, term) (Literal.Float term) increaseNumbers = do (Literal.Float c) <- id pure (Literal.Float (c <> "0")) @@ -104,7 +104,7 @@ addKVPair :: ( Literal.TextElement :< syn , Literal.KeyValue :< syn , Literal.Array :< syn , Apply Functor syn - , term ~ Term (Sum syn) (Record History) + , term ~ Term (Sum syn) History ) => Rewrite (env, term) (Literal.Hash term) addKVPair = do Literal.Hash els <- id @@ -127,7 +127,7 @@ testOverwriteFloats = do pPrint tagged printToTerm $ runReprinter src defaultJSONPipeline tagged -kvMatcher :: forall fs ann term . +kvMatcher :: forall fs term . ( Literal.KeyValue :< fs , Literal.TextElement :< fs , term ~ Term (Sum fs) History @@ -142,7 +142,7 @@ kvMatcher name = matchM projectTerm target <* matchKey where changeKV :: ( Apply Functor syntax , Literal.Array :< syntax , Literal.Float :< syntax - , term ~ Term (Sum syntax) (Record (History : fields)) + , term ~ Term (Sum syntax) History ) => Rewrite (env, term) (Literal.KeyValue term) changeKV = do From 845d5892defd2fc82bc6658a8ac3a4755d0bc59e Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Fri, 5 Oct 2018 11:08:14 -0400 Subject: [PATCH 09/23] Remove otiose imports. 
--- src/Semantic/Util.hs | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/Semantic/Util.hs b/src/Semantic/Util.hs index eb8c4fc38..62f50ec87 100644 --- a/src/Semantic/Util.hs +++ b/src/Semantic/Util.hs @@ -33,9 +33,6 @@ import Semantic.Task import Semantic.Telemetry (LogQueue, StatQueue) import System.Exit (die) import System.FilePath.Posix (takeDirectory) -import Text.Show.Pretty (ppShow) -import qualified Semantic.Util.Rewriting as R - justEvaluating = runM From 53a7f19d9006091417c6a7a71d8ef50814e066f7 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Fri, 5 Oct 2018 11:23:42 -0400 Subject: [PATCH 10/23] fix lints --- src/Semantic/Util/Rewriting.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index e75361af3..4153fec4a 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -85,7 +85,7 @@ renameKey :: ( Literal.TextElement :< fs => Rewrite (env, term) (Literal.KeyValue term) renameKey = do Literal.KeyValue k v <- id - guard (projectTerm k == (Just (Literal.TextElement "\"foo\""))) + guard (projectTerm k == Just (Literal.TextElement "\"foo\"")) new <- modified (Literal.TextElement "\"fooA\"") pure (Literal.KeyValue new v) @@ -110,7 +110,7 @@ addKVPair = do Literal.Hash els <- id k <- modified $ Literal.TextElement "\"added\"" v <- modified $ Literal.Array [] - pair <- modified $ (Literal.KeyValue k v) + pair <- modified $ Literal.KeyValue k v pure (Literal.Hash (pair : els)) testAddKVPair = do From 5c3a65338affc225ff745a93e21ff2afd04926f0 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Fri, 5 Oct 2018 11:50:54 -0400 Subject: [PATCH 11/23] debug utility --- src/Semantic/Util/Rewriting.hs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 4153fec4a..20eb1907f 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -26,6 +26,15 @@ import Reprinting.Pipeline import Semantic.IO as IO import Semantic.Task +debugTree act = do + (src, tree') <- act + let tree = mark Unmodified tree' + putStrLn "*** Original file ***" + pPrint tree + putStrLn "\n\n*** Tokenizing ***" + pPrint (runTokenizing src tree) + putStrLn "\n\n*** Done ***" + testPythonFile = do let path = "test/fixtures/python/reprinting/function.py" src <- blobSource <$> readBlobFromPath (File path Language.Python) From 8549d70b779557f9eeb05abb496620542689a1c5 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 10 Oct 2018 12:17:10 -0400 Subject: [PATCH 12/23] Refactor: remove duplicate code in Analysis.Declaration The family of functions that extracted a meaningful "identifier" for a declaration contained a large amount of duplication, owing to type-specific pattern-matching. Encapsulating this pattern-matching in a Rule means we can go from six functions to one. --- src/Analysis/Declaration.hs | 126 ++++++++++++++++-------------------- 1 file changed, 55 insertions(+), 71 deletions(-) diff --git a/src/Analysis/Declaration.hs b/src/Analysis/Declaration.hs index 8d88eb7db..178a8176f 100644 --- a/src/Analysis/Declaration.hs +++ b/src/Analysis/Declaration.hs @@ -1,34 +1,37 @@ -{-# LANGUAGE TypeOperators, ScopedTypeVariables, TypeFamilies, UndecidableInstances #-} +{-# LANGUAGE RankNTypes, ScopedTypeVariables, TypeFamilies, TypeOperators, UndecidableInstances #-} module Analysis.Declaration ( Declaration(..) 
, HasDeclaration , declarationAlgebra ) where -import Data.Blob -import Data.Error (Error(..), showExpectation) -import Data.Language as Language -import Data.Range -import Data.Location -import Data.Source as Source -import Data.Sum +import Prologue hiding (first, project) + +import Control.Arrow hiding (first) +import qualified Data.Text as T + +import Control.Rewriting hiding (apply) +import Data.Blob +import Data.Error (Error (..), showExpectation) +import Data.Language as Language +import Data.Location +import Data.Range +import Data.Source as Source import qualified Data.Syntax as Syntax import qualified Data.Syntax.Declaration as Declaration -import Data.Term -import qualified Data.Text as T +import Data.Term import qualified Language.Markdown.Syntax as Markdown import qualified Language.Ruby.Syntax as Ruby.Syntax import qualified Language.TypeScript.Syntax as TypeScript.Syntax -import Prologue hiding (project) -- | A declaration’s identifier and type. data Declaration - = MethodDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language, declarationReceiver :: Maybe T.Text } - | ClassDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language } - | ModuleDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language } - | FunctionDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language } - | HeadingDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language, declarationLevel :: Int } - | ErrorDeclaration { declarationIdentifier :: T.Text, declarationText :: T.Text, declarationSpan :: Span, declarationLanguage :: Language } + = MethodDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language, declarationReceiver :: Maybe Text } + | ClassDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language } + | ModuleDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language } + | FunctionDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language } + | HeadingDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language, declarationLevel :: Int } + | ErrorDeclaration { declarationIdentifier :: Text, declarationText :: Text, declarationSpan :: Span, declarationLanguage :: Language } deriving (Eq, Generic, Show) @@ -96,37 +99,63 @@ instance CustomHasDeclaration whole Declaration.Function where -- Do not summarize anonymous functions | isEmpty identifierAnn = Nothing -- Named functions - | otherwise = Just $ FunctionDeclaration (getSource blobSource identifierAnn) (getFunctionSource blob (In ann decl)) (locationSpan ann) blobLanguage + | otherwise = Just $ FunctionDeclaration (getSource blobSource identifierAnn) functionSource (locationSpan ann) blobLanguage where isEmpty = (== 0) . rangeLength . locationByteRange + functionSource = getIdentifier (arr Declaration.functionBody) blob (In ann decl) -- | Produce a 'MethodDeclaration' for 'Declaration.Method' nodes. 
If the method’s receiver is non-empty (defined as having a non-empty 'Range'), the 'declarationIdentifier' will be formatted as 'receiver.method_name'; otherwise it will be simply 'method_name'. instance CustomHasDeclaration whole Declaration.Method where customToDeclaration blob@Blob{..} ann decl@(Declaration.Method _ (Term (In receiverAnn receiverF), _) (Term (In identifierAnn _), _) _ _) -- Methods without a receiver - | isEmpty receiverAnn = Just $ MethodDeclaration (getSource blobSource identifierAnn) (getMethodSource blob (In ann decl)) (locationSpan ann) blobLanguage Nothing + | isEmpty receiverAnn = Just $ MethodDeclaration (getSource blobSource identifierAnn) methodSource (locationSpan ann) blobLanguage Nothing -- Methods with a receiver type and an identifier (e.g. (a *Type) in Go). | blobLanguage == Go - , [ _, Term (In receiverType _) ] <- toList receiverF = Just $ MethodDeclaration (getSource blobSource identifierAnn) (getMethodSource blob (In ann decl)) (locationSpan ann) blobLanguage (Just (getSource blobSource receiverType)) + , [ _, Term (In receiverType _) ] <- toList receiverF = Just $ MethodDeclaration (getSource blobSource identifierAnn) methodSource (locationSpan ann) blobLanguage (Just (getSource blobSource receiverType)) -- Methods with a receiver (class methods) are formatted like `receiver.method_name` - | otherwise = Just $ MethodDeclaration (getSource blobSource identifierAnn) (getMethodSource blob (In ann decl)) (locationSpan ann) blobLanguage (Just (getSource blobSource receiverAnn)) - where isEmpty = (== 0) . rangeLength . locationByteRange + | otherwise = Just $ MethodDeclaration (getSource blobSource identifierAnn) methodSource (locationSpan ann) blobLanguage (Just (getSource blobSource receiverAnn)) + where + isEmpty = (== 0) . rangeLength . locationByteRange + methodSource = getIdentifier (arr Declaration.methodBody) blob (In ann decl) -- | Produce a 'ClassDeclaration' for 'Declaration.Class' nodes. 
instance CustomHasDeclaration whole Declaration.Class where customToDeclaration blob@Blob{..} ann decl@(Declaration.Class _ (Term (In identifierAnn _), _) _ _) - = Just $ ClassDeclaration (getSource blobSource identifierAnn) (getClassSource blob (In ann decl)) (locationSpan ann) blobLanguage + = Just $ ClassDeclaration (getSource blobSource identifierAnn) classSource (locationSpan ann) blobLanguage + where classSource = getIdentifier (arr Declaration.classBody) blob (In ann decl) instance CustomHasDeclaration whole Ruby.Syntax.Class where customToDeclaration blob@Blob{..} ann decl@(Ruby.Syntax.Class (Term (In identifierAnn _), _) _ _) - = Just $ ClassDeclaration (getSource blobSource identifierAnn) (getRubyClassSource blob (In ann decl)) (locationSpan ann) blobLanguage + = Just $ ClassDeclaration (getSource blobSource identifierAnn) rubyClassSource (locationSpan ann) blobLanguage + where rubyClassSource = getIdentifier (arr Ruby.Syntax.classBody) blob (In ann decl) instance CustomHasDeclaration whole Ruby.Syntax.Module where customToDeclaration blob@Blob{..} ann decl@(Ruby.Syntax.Module (Term (In identifierAnn _), _) _) - = Just $ ModuleDeclaration (getSource blobSource identifierAnn) (getRubyModuleSource blob (In ann decl)) (locationSpan ann) blobLanguage + = Just $ ModuleDeclaration (getSource blobSource identifierAnn) rubyModuleSource (locationSpan ann) blobLanguage + where rubyModuleSource = getIdentifier (arr Ruby.Syntax.moduleStatements >>> first) blob (In ann decl) instance CustomHasDeclaration whole TypeScript.Syntax.Module where customToDeclaration blob@Blob{..} ann decl@(TypeScript.Syntax.Module (Term (In identifierAnn _), _) _) - = Just $ ModuleDeclaration (getSource blobSource identifierAnn) (getTypeScriptModuleSource blob (In ann decl)) (locationSpan ann) blobLanguage + = Just $ ModuleDeclaration (getSource blobSource identifierAnn) tsModuleSource (locationSpan ann) blobLanguage + where tsModuleSource = getIdentifier (arr TypeScript.Syntax.moduleStatements >>> first) blob (In ann decl) + +-- When encountering a Declaration-annotated term, we need to extract a Text +-- for the resulting Declaration's 'declarationIdentifier' field. This text +-- is constructed by slicing out text from the original blob corresponding +-- to a location, which is found via the passed-in rule. +getIdentifier :: Functor m + => Rule () (m (Term syntax Location)) (Term syntax Location) + -> Blob + -> TermF m Location (Term syntax Location, a) + -> Text +getIdentifier finder Blob{..} (In a r) + = let declRange = locationByteRange a + bodyRange = locationByteRange <$> rewrite (finder >>^ annotation) () (fmap fst r) + -- Text-based gyrations to slice the identifier out of the provided blob source + sliceFrom = T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange + in either (const mempty) sliceFrom bodyRange + +first :: Rule env [a] a +first = target >>= maybeM (Prologue.fail "empty list") . listToMaybe getSource :: Source -> Location -> Text getSource blobSource = toText . flip Source.slice blobSource . locationByteRange @@ -171,48 +200,3 @@ instance HasDeclarationWithStrategy 'Default whole syntax where -- | The 'Custom' strategy delegates the selection of the strategy to the 'CustomHasDeclaration' instance for the type. 
instance CustomHasDeclaration whole syntax => HasDeclarationWithStrategy 'Custom whole syntax where toDeclarationWithStrategy _ = customToDeclaration - - -getMethodSource :: Blob -> TermF Declaration.Method Location (Term syntax Location, a) -> T.Text -getMethodSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - Declaration.Method _ _ _ _ (Term (In a' _), _) -> Just a' - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange) bodyRange - -getFunctionSource :: Blob -> TermF Declaration.Function Location (Term syntax Location, a) -> T.Text -getFunctionSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - Declaration.Function _ _ _ (Term (In a' _), _) -> Just a' - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange) bodyRange - -getClassSource :: Blob -> TermF Declaration.Class Location (Term syntax Location, a) -> T.Text -getClassSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - Declaration.Class _ _ _ (Term (In a' _), _) -> Just a' - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange) bodyRange - -getRubyClassSource :: Blob -> TermF Ruby.Syntax.Class Location (Term syntax Location, a) -> T.Text -getRubyClassSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - Ruby.Syntax.Class _ _ (Term (In a' _), _) -> Just a' - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange) bodyRange - -getRubyModuleSource :: Blob -> TermF Ruby.Syntax.Module Location (Term syntax Location, a) -> T.Text -getRubyModuleSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - Ruby.Syntax.Module _ [(Term (In a' _), _)] -> Just a' - _ -> Nothing - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . subtractRange declRange) bodyRange - -getTypeScriptModuleSource :: Blob -> TermF TypeScript.Syntax.Module Location (Term syntax Location, a) -> T.Text -getTypeScriptModuleSource Blob{..} (In a r) - = let declRange = locationByteRange a - bodyRange = locationByteRange <$> case r of - TypeScript.Syntax.Module _ [(Term (In a' _), _)] -> Just a' - _ -> Nothing - in maybe mempty (T.stripEnd . toText . flip Source.slice blobSource . 
subtractRange declRange) bodyRange From deb0956a8e4574f9b966d5689bf21b4e40afbf2e Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Wed, 10 Oct 2018 12:48:59 -0400 Subject: [PATCH 13/23] remove annoying duplication --- src/Semantic/Util/Rewriting.hs | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/src/Semantic/Util/Rewriting.hs b/src/Semantic/Util/Rewriting.hs index 20eb1907f..4d76136e6 100644 --- a/src/Semantic/Util/Rewriting.hs +++ b/src/Semantic/Util/Rewriting.hs @@ -26,19 +26,10 @@ import Reprinting.Pipeline import Semantic.IO as IO import Semantic.Task -debugTree act = do - (src, tree') <- act - let tree = mark Unmodified tree' - putStrLn "*** Original file ***" - pPrint tree - putStrLn "\n\n*** Tokenizing ***" - pPrint (runTokenizing src tree) - putStrLn "\n\n*** Done ***" - testPythonFile = do let path = "test/fixtures/python/reprinting/function.py" src <- blobSource <$> readBlobFromPath (File path Language.Python) - tree <- parseFile miniPythonParser path + tree <- parseFile' miniPythonParser path pure (src, tree) testPythonPipeline = do @@ -60,7 +51,7 @@ testPythonPipeline''' = do testRubyFile = do let path = "test/fixtures/ruby/reprinting/infix.rb" src <- blobSource <$> readBlobFromPath (File path Language.Ruby) - tree <- parseFile miniRubyParser path + tree <- parseFile' miniRubyParser path pure (src, tree) testRubyPipeline = do @@ -84,7 +75,7 @@ printToTerm = either (putStrLn . show) (BC.putStr . Source.sourceBytes) testJSONFile = do let path = "test/fixtures/javascript/reprinting/map.json" src <- blobSource <$> readBlobFromPath (File path Language.JSON) - tree <- parseFile jsonParser path + tree <- parseFile' jsonParser path pure (src, tree) renameKey :: ( Literal.TextElement :< fs @@ -166,5 +157,5 @@ testChangeKV = do let (Right tagged) = rewrite (somewhere' changeKV) () (mark Unmodified tree) printToTerm $ runReprinter src defaultJSONPipeline tagged -parseFile :: Parser term -> FilePath -> IO term -parseFile parser = runTask . (parse parser <=< readBlob . file) +parseFile' :: Parser term -> FilePath -> IO term +parseFile' parser = runTask . (parse parser <=< readBlob . file) From 5e52bede0ab2ea8d9348525bee2c2a07a849a610 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Mon, 15 Oct 2018 12:47:18 -0400 Subject: [PATCH 14/23] Fix the test suite. 
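The reprinting spec was still applying `increaseNumbers` as a plain function over a tree tagged `Refactored` up front; it now goes through the rewriting DSL (`Control.Rewriting`) like the rest of this series. The change amounts to the shape below, shown as an annotated sketch; `markRefactored` is assumed here to be the spec's helper that tags matched nodes as `Refactored` so the reprinter re-renders only those regions.

    -- Before: pre-tag everything Refactored and apply the rule directly.
    --   let tagged = increaseNumbers (mark Refactored tree)
    -- After: start from an Unmodified tree, traverse it with `somewhere`, and
    -- apply increaseNumbers wherever it matches; rewrite returns an Either,
    -- hence the Right pattern.
    let (Right tagged) = rewrite (somewhere increaseNumbers markRefactored) () (mark Unmodified tree)
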
--- test/Reprinting/Spec.hs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/Reprinting/Spec.hs b/test/Reprinting/Spec.hs index 5149f154d..b9a6ba573 100644 --- a/test/Reprinting/Spec.hs +++ b/test/Reprinting/Spec.hs @@ -8,11 +8,12 @@ import Data.Foldable import Data.Functor.Foldable (cata, embed) import qualified Data.Machine as Machine +import Control.Rewriting hiding (context) import Data.Algebra import Data.Blob import qualified Data.Language as Language -import Data.Reprinting.Token import Data.Reprinting.Scope +import Data.Reprinting.Token import Data.Sum import qualified Data.Syntax.Literal as Literal import Language.JSON.PrettyPrint @@ -60,7 +61,7 @@ spec = describe "reprinting" $ do printed `shouldBe` Right src it "should be able to parse the output of a refactor" $ do - let tagged = increaseNumbers (mark Refactored tree) + let (Right tagged) = rewrite (somewhere increaseNumbers markRefactored) () (mark Unmodified tree) let (Right printed) = runReprinter src defaultJSONPipeline tagged tree' <- runTask (parse jsonParser (Blob printed path Language.JSON)) length tree' `shouldSatisfy` (/= 0) From 4fc7bb59aebccbd0a001dfd7cecd102fc648f975 Mon Sep 17 00:00:00 2001 From: Timothy Clem Date: Tue, 16 Oct 2018 11:28:53 -0700 Subject: [PATCH 15/23] No need for double async (timeout already does this) --- src/Parsing/TreeSitter.hs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/Parsing/TreeSitter.hs b/src/Parsing/TreeSitter.hs index 7e4e3b55d..f712b7114 100644 --- a/src/Parsing/TreeSitter.hs +++ b/src/Parsing/TreeSitter.hs @@ -6,7 +6,6 @@ module Parsing.TreeSitter import Prologue hiding (bracket) -import Control.Concurrent.Async import qualified Control.Exception as Exc (bracket) import Control.Monad.Effect import Control.Monad.Effect.Exception @@ -65,10 +64,8 @@ parseToAST parseTimeout language Blob{..} = bracket TS.ts_parser_new TS.ts_parse trace $ "tree-sitter: beginning parsing " <> blobPath - parsing <- liftIO . async $ runParser parser blobSource - -- Kick the parser off asynchronously and wait according to the provided timeout. - res <- timeout parseTimeout $ liftIO (wait parsing) + res <- timeout parseTimeout $ liftIO (runParser parser blobSource) case res of Just Failed -> Nothing <$ trace ("tree-sitter: parsing failed " <> blobPath) From 0d762fba863fbc57a75f39e985fe32c1da9d066e Mon Sep 17 00:00:00 2001 From: Timothy Clem Date: Tue, 16 Oct 2018 11:40:26 -0700 Subject: [PATCH 16/23] Include blob language in output too --- src/Semantic/Parse.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Semantic/Parse.hs b/src/Semantic/Parse.hs index 14559b69d..67c395a13 100644 --- a/src/Semantic/Parse.hs +++ b/src/Semantic/Parse.hs @@ -38,7 +38,7 @@ runParse QuietTermRenderer = distributeFoldMap $ \blob -> where showTiming Blob{..} (res, duration) = let status = if isLeft res then "ERR" else "OK" - in stringUtf8 (status <> "\t" <> blobPath <> "\t" <> show duration <> " ms\n") + in stringUtf8 (status <> "\t" <> show blobLanguage <> "\t" <> blobPath <> "\t" <> show duration <> " ms\n") -- | For testing and running parse-examples. 
runParse' :: (Member (Exc SomeException) effs, Member Task effs) => Blob -> Eff effs Builder From 75bf696e478ed61cc62baf0bcba5e5430eb642ff Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 16:58:06 -0400 Subject: [PATCH 17/23] Fix crash/race associated with `bracket` (#2207) The bracket that I wrote inside effects does not properly handle asynchronous exceptions, as it has no way to call the mask function. As such, because the asynchronous exception is rethrown by both `bracket` and `wait`, the exception handler will trigger twice. This is what is causing the crash: `bracket` is passing the TSParser we create in parseToAST to ts_parser_delete twice. The best thing to do here is to create the new `Resource` effect, which is interpreted down to `Control.Exception.bracket`, which has the correct asynchronous-masking behavior, unlike the `bracket` in `Control.Monad.Effect.Exception`, which I propose to remove in a patch to `effects`. This also bumps haskell-tree-sitter so that the `ts_node_copy_child_nodes` function is considered `interruptible`. To test: 1. Download [this file](https://gist.ghe.io/tclem/c2ffe3d20b248fdac59588aa98f168ae) 2. Run `TREE_SITTER_PARSE_TIMEOUT=1000 stack exec semantic -- --log-level=debug parse lexer.rb` Before applying this patch, you will see a crash associated with a double-free; afterwards, it should time out normally. --- semantic.cabal | 1 + src/Parsing/TreeSitter.hs | 18 +++++++++++++++--- src/Semantic/REPL.hs | 3 ++- src/Semantic/Resource.hs | 35 +++++++++++++++++++++++++++++++++++ src/Semantic/Task.hs | 10 +++++++--- vendor/haskell-tree-sitter | 2 +- 6 files changed, 61 insertions(+), 8 deletions(-) create mode 100644 src/Semantic/Resource.hs diff --git a/semantic.cabal b/semantic.cabal index 89f73bbc5..a1fe5c2a6 100644 --- a/semantic.cabal +++ b/semantic.cabal @@ -189,6 +189,7 @@ library , Semantic.Parse , Semantic.REPL , Semantic.Resolution + , Semantic.Resource , Semantic.Task , Semantic.Telemetry , Semantic.Telemetry.AsyncQueue diff --git a/src/Parsing/TreeSitter.hs b/src/Parsing/TreeSitter.hs index 7e4e3b55d..22341e8d4 100644 --- a/src/Parsing/TreeSitter.hs +++ b/src/Parsing/TreeSitter.hs @@ -9,7 +9,7 @@ import Prologue hiding (bracket) import Control.Concurrent.Async import qualified Control.Exception as Exc (bracket) import Control.Monad.Effect -import Control.Monad.Effect.Exception +import Control.Monad.Effect.Exception hiding (bracket) import Control.Monad.Effect.Trace import Control.Monad.IO.Class import Data.ByteString.Unsafe (unsafeUseAsCStringLen) @@ -24,6 +24,7 @@ import Data.Location import Data.Source import Data.Span import Data.Term +import Semantic.Resource import Semantic.Timeout import qualified TreeSitter.Language as TS @@ -57,8 +58,19 @@ runParser parser blobSource = unsafeUseAsCStringLen (sourceBytes blobSource) $ -- | Parse 'Source' with the given 'TS.Language' and return its AST. -- Returns Nothing if the operation timed out. 
-parseToAST :: (Bounded grammar, Enum grammar, Member (Lift IO) effects, Member Timeout effects, Member Trace effects, PureEffects effects) => Duration -> Ptr TS.Language -> Blob -> Eff effects (Maybe (AST [] grammar)) -parseToAST parseTimeout language Blob{..} = bracket TS.ts_parser_new TS.ts_parser_delete $ \ parser -> do +parseToAST :: ( Bounded grammar + , Enum grammar + , Member (Lift IO) effects + , Member Resource effects + , Member Timeout effects + , Member Trace effects + , PureEffects effects + ) + => Duration + -> Ptr TS.Language + -> Blob + -> Eff effects (Maybe (AST [] grammar)) +parseToAST parseTimeout language Blob{..} = bracket (liftIO TS.ts_parser_new) (liftIO . TS.ts_parser_delete) $ \ parser -> do liftIO $ do TS.ts_parser_halt_on_error parser (CBool 1) TS.ts_parser_set_language parser language diff --git a/src/Semantic/REPL.hs b/src/Semantic/REPL.hs index 370140ca4..7e2e9aa39 100644 --- a/src/Semantic/REPL.hs +++ b/src/Semantic/REPL.hs @@ -31,6 +31,7 @@ import Semantic.Distribute import Semantic.Graph import Semantic.IO as IO import Semantic.Resolution +import Semantic.Resource import Semantic.Task hiding (Error) import Semantic.Telemetry import Semantic.Timeout @@ -70,7 +71,7 @@ runREPL prefs settings = interpret $ \case rubyREPL = repl (Proxy @'Language.Ruby) rubyParser -repl proxy parser paths = defaultConfig debugOptions >>= \ config -> runM . runDistribute . runTimeout (runM . runDistribute) . runError @_ @_ @SomeException . runTelemetryIgnoringStat (logOptionsFromConfig config) . runTraceInTelemetry . runReader config . IO.runFiles . runResolution . runTaskF $ do +repl proxy parser paths = defaultConfig debugOptions >>= \ config -> runM . runDistribute . runResource (runM . runDistribute) . runTimeout (runM . runDistribute . runResource (runM . runDistribute)) . runError @_ @_ @SomeException . runTelemetryIgnoringStat (logOptionsFromConfig config) . runTraceInTelemetry . runReader config . IO.runFiles . runResolution . runTaskF $ do blobs <- catMaybes <$> traverse IO.readFile (flip File (Language.reflect proxy) <$> paths) package <- fmap (fmap quieterm) <$> parsePackage parser (Project (takeDirectory (maybe "/" fst (uncons paths))) blobs (Language.reflect proxy) []) modules <- topologicalSort <$> runImportGraphToModules proxy (snd <$> package) diff --git a/src/Semantic/Resource.hs b/src/Semantic/Resource.hs new file mode 100644 index 000000000..c83f516ed --- /dev/null +++ b/src/Semantic/Resource.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE GADTs, TypeOperators, RankNTypes #-} + +module Semantic.Resource + ( Resource (..) + , bracket + , runResource + ) where + +import Control.Monad.Effect +import Control.Monad.IO.Class +import qualified Control.Exception as Exc + +data Resource m output where + Resource :: m res -> (res -> m any) -> (res -> m output) -> Resource m output + +instance PureEffect Resource +instance Effect Resource where + handleState c dist (Request (Resource fore aft go) k) + = Request (Resource (dist (fore <$ c)) (dist . fmap aft) (dist . fmap go)) (dist . fmap k) + +bracket :: (Member Resource effs, Effectful m) + => m effs res + -> (res -> m effs any) + -> (res -> m effs b) + -> m effs b +bracket fore aft go = send (Resource (lowerEff fore) (lowerEff . aft) (lowerEff . go)) + +runResource :: (Member (Lift IO) effects, PureEffects effects) + => (forall x . 
Eff effects x -> IO x) + -> Eff (Resource ': effects) a + -> Eff effects a +runResource handler = interpret (\(Resource fore aft go) + -> liftIO (Exc.bracket (handler (runResource handler fore)) + (\res -> handler (runResource handler (aft res))) + (\res -> handler (runResource handler (go res))))) diff --git a/src/Semantic/Task.hs b/src/Semantic/Task.hs index 7ef98f90e..f60c0c7bc 100644 --- a/src/Semantic/Task.hs +++ b/src/Semantic/Task.hs @@ -85,6 +85,7 @@ import Semantic.Distribute import Semantic.Timeout import qualified Semantic.IO as IO import Semantic.Resolution +import Semantic.Resource import Semantic.Telemetry import Serializing.Format hiding (Options) import System.Exit (die) @@ -98,6 +99,7 @@ type TaskEff = Eff '[ Task , Telemetry , Exc SomeException , Timeout + , Resource , Distribute , Lift IO ] @@ -151,7 +153,8 @@ runTaskWithConfig options logger statter task = do run = runM . runDistribute - . runTimeout (runM . runDistribute) + . runResource (runM . runDistribute) + . runTimeout (runM . runDistribute . runResource (runM . runDistribute)) . runError . runTelemetry logger statter . runTraceInTelemetry @@ -186,7 +189,7 @@ instance Effect Task where handleState c dist (Request (Serialize format input) k) = Request (Serialize format input) (dist . (<$ c) . k) -- | Run a 'Task' effect by performing the actions in 'IO'. -runTaskF :: (Member (Exc SomeException) effs, Member (Lift IO) effs, Member (Reader Config) effs, Member Telemetry effs, Member Timeout effs, Member Trace effs, PureEffects effs) => Eff (Task ': effs) a -> Eff effs a +runTaskF :: (Member (Exc SomeException) effs, Member (Lift IO) effs, Member (Reader Config) effs, Member Resource effs, Member Telemetry effs, Member Timeout effs, Member Trace effs, PureEffects effs) => Eff (Task ': effs) a -> Eff effs a runTaskF = interpret $ \ task -> case task of Parse parser blob -> runParser blob parser Analyze interpret analysis -> pure (interpret analysis) @@ -208,7 +211,7 @@ data ParserCancelled = ParserTimedOut FilePath Language | AssignmentTimedOut Fil instance Exception ParserCancelled -- | Parse a 'Blob' in 'IO'. 
-runParser :: (Member (Exc SomeException) effs, Member (Lift IO) effs, Member (Reader Config) effs, Member Telemetry effs, Member Timeout effs, Member Trace effs, PureEffects effs) => Blob -> Parser term -> Eff effs term +runParser :: (Member (Exc SomeException) effs, Member (Lift IO) effs, Member (Reader Config) effs, Member Resource effs, Member Telemetry effs, Member Timeout effs, Member Trace effs, PureEffects effs) => Blob -> Parser term -> Eff effs term runParser blob@Blob{..} parser = case parser of ASTParser language -> time "parse.tree_sitter_ast_parse" languageTag $ do @@ -238,6 +241,7 @@ runParser blob@Blob{..} parser = case parser of , Member Telemetry effs , Member Timeout effs , Member Trace effs + , Member Resource effs , PureEffects effs ) => (Source -> assignment (Term (Sum syntaxes) Assignment.Location) -> ast -> Either (Error.Error String) (Term (Sum syntaxes) Assignment.Location)) diff --git a/vendor/haskell-tree-sitter b/vendor/haskell-tree-sitter index 7ee860f41..9c28ccf49 160000 --- a/vendor/haskell-tree-sitter +++ b/vendor/haskell-tree-sitter @@ -1 +1 @@ -Subproject commit 7ee860f415959357ec031df93bc424b0f89dbe48 +Subproject commit 9c28ccf49be8bbc78635bb0927ae1ae43d2f580b From 1dfb4981ccc9767867580d0499d0f0afb29ef7b0 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 17:10:34 -0400 Subject: [PATCH 18/23] warnings --- src/Parsing/TreeSitter.hs | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/Parsing/TreeSitter.hs b/src/Parsing/TreeSitter.hs index 22341e8d4..892cc03de 100644 --- a/src/Parsing/TreeSitter.hs +++ b/src/Parsing/TreeSitter.hs @@ -9,7 +9,6 @@ import Prologue hiding (bracket) import Control.Concurrent.Async import qualified Control.Exception as Exc (bracket) import Control.Monad.Effect -import Control.Monad.Effect.Exception hiding (bracket) import Control.Monad.Effect.Trace import Control.Monad.IO.Class import Data.ByteString.Unsafe (unsafeUseAsCStringLen) @@ -64,7 +63,6 @@ parseToAST :: ( Bounded grammar , Member Resource effects , Member Timeout effects , Member Trace effects - , PureEffects effects ) => Duration -> Ptr TS.Language From 37522ddc355f6a8a301f5771c85f61d924d8790a Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 17:15:36 -0400 Subject: [PATCH 19/23] lints --- src/Semantic/Resource.hs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Semantic/Resource.hs b/src/Semantic/Resource.hs index c83f516ed..55c69e9ae 100644 --- a/src/Semantic/Resource.hs +++ b/src/Semantic/Resource.hs @@ -31,5 +31,5 @@ runResource :: (Member (Lift IO) effects, PureEffects effects) -> Eff effects a runResource handler = interpret (\(Resource fore aft go) -> liftIO (Exc.bracket (handler (runResource handler fore)) - (\res -> handler (runResource handler (aft res))) - (\res -> handler (runResource handler (go res))))) + (handler . runResource handler . aft) + (handler . runResource handler . 
go) From fb1806f3e9679373738496b090c72f2a6d4832b2 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 17:18:59 -0400 Subject: [PATCH 20/23] whoops --- src/Semantic/Resource.hs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/Semantic/Resource.hs b/src/Semantic/Resource.hs index 55c69e9ae..2672115fa 100644 --- a/src/Semantic/Resource.hs +++ b/src/Semantic/Resource.hs @@ -30,6 +30,7 @@ runResource :: (Member (Lift IO) effects, PureEffects effects) -> Eff (Resource ': effects) a -> Eff effects a runResource handler = interpret (\(Resource fore aft go) - -> liftIO (Exc.bracket (handler (runResource handler fore)) - (handler . runResource handler . aft) - (handler . runResource handler . go) + -> liftIO (Exc.bracket + (handler (runResource handler fore)) + (handler . runResource handler . aft) + (handler . runResource handler . go))) From 1de1f0b5c184acb9f2f05b702ab45ad96d6ba0f1 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 17:39:36 -0400 Subject: [PATCH 21/23] try catching ParserTimedOut exceptions in parse-examples --- test/Examples.hs | 1 + 1 file changed, 1 insertion(+) diff --git a/test/Examples.hs b/test/Examples.hs index 64f68a4f6..1b6d79980 100644 --- a/test/Examples.hs +++ b/test/Examples.hs @@ -50,6 +50,7 @@ main = withOptions opts $ \ config logger statter -> hspec . parallel $ do Left (SomeException e) -> case cast e of -- We have a number of known assignment timeouts, consider these pending specs instead of failing the build. Just (AssignmentTimedOut _ _) -> pendingWith $ show (displayException e) + Just (ParserTimedOut _ _) -> pendingWith $ show (displayException e) -- Other exceptions are true failures _ -> expectationFailure (show (displayException e)) _ -> if file `elem` knownFailures From d56badfb29d0319542320388c2401d95aae1ae57 Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 20:10:34 -0400 Subject: [PATCH 22/23] Use upstream Resource effect. 
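The upstream `effects` submodule now provides a Resource effect with the same interface as the local Semantic.Resource added earlier in this series, so the local module goes away and the imports move to Control.Monad.Effect.Resource; the call sites (`bracket` in Parsing.TreeSitter, `runResource` in Semantic.Task and the REPL) are unchanged. For reference, the call shape in Parsing.TreeSitter stays as it was; a sketch, where `withParser` is just an illustrative name and the parser-pointer type (written `Ptr TS.Parser`) is the one haskell-tree-sitter exposes:

    -- Acquire a tree-sitter parser, hand it to the body, and guarantee the
    -- delete runs even if the body times out or is interrupted asynchronously
    -- (runResource interprets this down to Control.Exception.bracket).
    withParser :: (Member Resource effs, Member (Lift IO) effs)
               => (Ptr TS.Parser -> Eff effs a)
               -> Eff effs a
    withParser = bracket (liftIO TS.ts_parser_new) (liftIO . TS.ts_parser_delete)
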
--- semantic.cabal | 1 - src/Parsing/TreeSitter.hs | 2 +- src/Semantic/REPL.hs | 2 +- src/Semantic/Resource.hs | 36 ------------------------------------ src/Semantic/Task.hs | 2 +- vendor/effects | 2 +- 6 files changed, 4 insertions(+), 41 deletions(-) delete mode 100644 src/Semantic/Resource.hs diff --git a/semantic.cabal b/semantic.cabal index a1fe5c2a6..89f73bbc5 100644 --- a/semantic.cabal +++ b/semantic.cabal @@ -189,7 +189,6 @@ library , Semantic.Parse , Semantic.REPL , Semantic.Resolution - , Semantic.Resource , Semantic.Task , Semantic.Telemetry , Semantic.Telemetry.AsyncQueue diff --git a/src/Parsing/TreeSitter.hs b/src/Parsing/TreeSitter.hs index 892cc03de..a3f75b083 100644 --- a/src/Parsing/TreeSitter.hs +++ b/src/Parsing/TreeSitter.hs @@ -9,6 +9,7 @@ import Prologue hiding (bracket) import Control.Concurrent.Async import qualified Control.Exception as Exc (bracket) import Control.Monad.Effect +import Control.Monad.Effect.Resource import Control.Monad.Effect.Trace import Control.Monad.IO.Class import Data.ByteString.Unsafe (unsafeUseAsCStringLen) @@ -23,7 +24,6 @@ import Data.Location import Data.Source import Data.Span import Data.Term -import Semantic.Resource import Semantic.Timeout import qualified TreeSitter.Language as TS diff --git a/src/Semantic/REPL.hs b/src/Semantic/REPL.hs index 7e2e9aa39..09ac3523e 100644 --- a/src/Semantic/REPL.hs +++ b/src/Semantic/REPL.hs @@ -5,6 +5,7 @@ module Semantic.REPL ) where import Control.Abstract hiding (Continue, List, string) +import Control.Monad.Effect.Resource import Control.Monad.IO.Class import Data.Abstract.Address.Precise as Precise import Data.Abstract.Environment as Env @@ -31,7 +32,6 @@ import Semantic.Distribute import Semantic.Graph import Semantic.IO as IO import Semantic.Resolution -import Semantic.Resource import Semantic.Task hiding (Error) import Semantic.Telemetry import Semantic.Timeout diff --git a/src/Semantic/Resource.hs b/src/Semantic/Resource.hs deleted file mode 100644 index 2672115fa..000000000 --- a/src/Semantic/Resource.hs +++ /dev/null @@ -1,36 +0,0 @@ -{-# LANGUAGE GADTs, TypeOperators, RankNTypes #-} - -module Semantic.Resource - ( Resource (..) - , bracket - , runResource - ) where - -import Control.Monad.Effect -import Control.Monad.IO.Class -import qualified Control.Exception as Exc - -data Resource m output where - Resource :: m res -> (res -> m any) -> (res -> m output) -> Resource m output - -instance PureEffect Resource -instance Effect Resource where - handleState c dist (Request (Resource fore aft go) k) - = Request (Resource (dist (fore <$ c)) (dist . fmap aft) (dist . fmap go)) (dist . fmap k) - -bracket :: (Member Resource effs, Effectful m) - => m effs res - -> (res -> m effs any) - -> (res -> m effs b) - -> m effs b -bracket fore aft go = send (Resource (lowerEff fore) (lowerEff . aft) (lowerEff . go)) - -runResource :: (Member (Lift IO) effects, PureEffects effects) - => (forall x . Eff effects x -> IO x) - -> Eff (Resource ': effects) a - -> Eff effects a -runResource handler = interpret (\(Resource fore aft go) - -> liftIO (Exc.bracket - (handler (runResource handler fore)) - (handler . runResource handler . aft) - (handler . runResource handler . 
go))) diff --git a/src/Semantic/Task.hs b/src/Semantic/Task.hs index f60c0c7bc..826280fe6 100644 --- a/src/Semantic/Task.hs +++ b/src/Semantic/Task.hs @@ -61,6 +61,7 @@ import Control.Monad import Control.Monad.Effect import Control.Monad.Effect.Exception import Control.Monad.Effect.Reader +import Control.Monad.Effect.Resource import Control.Monad.Effect.Trace import Data.Blob import Data.Bool @@ -85,7 +86,6 @@ import Semantic.Distribute import Semantic.Timeout import qualified Semantic.IO as IO import Semantic.Resolution -import Semantic.Resource import Semantic.Telemetry import Serializing.Format hiding (Options) import System.Exit (die) diff --git a/vendor/effects b/vendor/effects index 8ded4a641..e7858dacc 160000 --- a/vendor/effects +++ b/vendor/effects @@ -1 +1 @@ -Subproject commit 8ded4a64133ce77ddd2fc734f455753e62af0ad3 +Subproject commit e7858dacce6fbb43e76a49e4dbeff1f1815aa290 From d3db7540041f66a5a49a357c035daf3e3cd8be1a Mon Sep 17 00:00:00 2001 From: Patrick Thomson Date: Tue, 16 Oct 2018 23:04:39 -0400 Subject: [PATCH 23/23] weirdness in tests --- test/Rendering/TOC/Spec.hs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/Rendering/TOC/Spec.hs b/test/Rendering/TOC/Spec.hs index d6b4e2f14..0317fcb5f 100644 --- a/test/Rendering/TOC/Spec.hs +++ b/test/Rendering/TOC/Spec.hs @@ -16,7 +16,7 @@ import Data.Sum import Data.Term import Data.Text (Text) import Data.Text.Encoding (encodeUtf8) -import Data.Union +import Data.Union hiding (forAll) import Diffing.Algorithm import Diffing.Interpreter import Prelude