mirror of https://github.com/github/semantic.git synced 2024-12-30 02:14:20 +03:00

Merge branch 'master' into array-initializer-children

Timothy Clem 2018-09-13 12:54:50 -07:00 committed by GitHub
commit 18249a0a19
15 changed files with 324 additions and 278 deletions

View File

@@ -352,6 +352,7 @@ test-suite test
   , hspec-expectations-pretty-diff
   , HUnit
   , leancheck
+  , machines
   , mtl
   , network
   , proto3-suite

View File

@@ -1,3 +1,5 @@
+{-# LANGUAGE RankNTypes #-}
 module Data.Reprinting.Splice
 ( Fragment(..)
 , copy

@@ -5,17 +7,21 @@ module Data.Reprinting.Splice
 , defer
 , Splice(..)
 , emit
+, emitIf
 , layout
+, indent
 , layouts
 , space
 , Whitespace(..)
 , Indentation(..)
 ) where

-import Data.Reprinting.Token
-import Data.Sequence (singleton, fromList)
 import Prologue hiding (Element)
+import Data.Machine
+import Data.Reprinting.Token

 -- | An intermediate representation of concrete syntax in the reprinting pipeline.
 data Fragment
   = Verbatim Text

@@ -28,16 +34,16 @@ data Fragment
   deriving (Eq, Show)

 -- | Copy along some original, un-refactored 'Text'.
-copy :: Text -> Seq Fragment
-copy = singleton . Verbatim
+copy :: Text -> Plan k Fragment ()
+copy = yield . Verbatim

 -- | Insert some new 'Text'.
-insert :: Element -> [Context] -> Text -> Seq Fragment
-insert el c = singleton . New el c
+insert :: Element -> [Context] -> Text -> Plan k Fragment ()
+insert el c = yield . New el c

 -- | Defer processing an element to a later stage.
-defer :: Element -> [Context] -> Seq Fragment
-defer el = singleton . Defer el
+defer :: Element -> [Context] -> Plan k Fragment ()
+defer el = yield . Defer el

 -- | The final representation of concrete syntax in the reprinting pipeline.
 data Splice

@@ -46,20 +52,30 @@ data Splice
   deriving (Eq, Show)

 -- | Emit some 'Text' as a 'Splice'.
-emit :: Text -> Seq Splice
-emit = singleton . Emit
+emit :: Text -> Plan k Splice ()
+emit = yield . Emit
+
+-- | Emit the provided 'Text' if the given predicate is true.
+emitIf :: Bool -> Text -> Plan k Splice ()
+emitIf p = when p . emit

 -- | Construct a layout 'Splice'.
-layout :: Whitespace -> Seq Splice
-layout = singleton . Layout
+layout :: Whitespace -> Plan k Splice ()
+layout = yield . Layout
+
+-- | @indent w n@ emits @w@ 'Spaces' @n@ times.
+indent :: Int -> Int -> Plan k Splice ()
+indent width times
+  | times > 0 = replicateM_ times (layout (Indent width Spaces))
+  | otherwise = pure ()

 -- | Construct multiple layouts.
-layouts :: [Whitespace] -> Seq Splice
-layouts = fromList . fmap Layout
+layouts :: [Whitespace] -> Plan k Splice ()
+layouts = traverse_ (yield . Layout)

 -- | Single space.
-space :: Seq Splice
-space = layout Space
+space :: Plan k Splice ()
+space = yield (Layout Space)

 -- | Indentation, spacing, and other whitespace.
 data Whitespace
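
To see how the new Plan-based combinators compose, a small sketch follows (not part of the diff; openHash and closeHash are hypothetical names). Consecutive splices are now sequenced as Plan actions with *> or do-notation instead of being concatenated as Seqs:

    {-# LANGUAGE OverloadedStrings, RankNTypes #-}
    import Data.Machine (Plan)
    import Data.Reprinting.Splice

    -- Open a hash: emit the brace, then a hard wrap and a two-space indent.
    openHash :: Plan k Splice ()
    openHash = emit "{" *> layouts [HardWrap, Indent 2 Spaces]

    -- Close a hash at the given imperative depth: wrap, re-indent, emit the brace.
    closeHash :: Int -> Plan k Splice ()
    closeHash depth = layout HardWrap *> indent 2 depth *> emit "}"

Such plans are turned into running machines downstream with combinators like repeatedly or construct.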

View File

@@ -1,8 +1,12 @@
 module Data.Reprinting.Token
 ( Token (..)
+, isChunk
+, isControl
 , Element (..)
 , Control (..)
 , Context (..)
+, imperativeDepth
+, precedenceOf
 , Operator (..)
 ) where

@@ -17,12 +21,20 @@ data Token
   | TControl Control -- ^ AST's context.
   deriving (Show, Eq)

+isChunk :: Token -> Bool
+isChunk (Chunk _) = True
+isChunk _ = False
+
+isControl :: Token -> Bool
+isControl (TControl _) = True
+isControl _ = False
+
 -- | 'Element' tokens describe atomic pieces of source code to be
 -- output to a rendered document. These tokens are language-agnostic
 -- and are interpreted into language-specific representations at a
 -- later point in the reprinting pipeline.
 data Element
-  = Fragment Text -- ^ A literal chunk of text.
+  = Run Text      -- ^ A literal chunk of text.
   | Truth Bool    -- ^ A boolean value.
   | Nullity       -- ^ @null@ or @nil@ or some other zero value.
   | TSep          -- ^ Some sort of delimiter, interpreted in some 'Context'.

@@ -61,6 +73,18 @@ data Context
   | Imperative
   deriving (Show, Eq)

+precedenceOf :: [Context] -> Int
+precedenceOf cs = case filter isInfix cs of
+  (TInfixL _ n:_) -> n
+  _ -> 0
+  where isInfix (TInfixL _ _) = True
+        isInfix _ = False
+
+-- | Depth of imperative scope.
+imperativeDepth :: [Context] -> Int
+imperativeDepth = length . filter (== Imperative)
+
 -- | A sum type representing every concievable infix operator a
 -- language can define. These are handled by instances of 'Concrete'
 -- and given appropriate precedence.
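
As a quick illustration of the two new helpers (not part of the diff; the context stacks and precedence values below are made up):

    import Data.Reprinting.Token

    -- precedenceOf reads the nearest enclosing infix frame on the stack.
    examplePrecedence :: Int
    examplePrecedence = precedenceOf [TInfixL Multiply 8, TInfixL Add 6]  -- = 8

    -- imperativeDepth counts how many Imperative frames are on the stack.
    exampleDepth :: Int
    exampleDepth = imperativeDepth [Imperative, TFunction, Imperative]    -- = 2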

View File

@@ -167,7 +167,7 @@ instance Evaluatable Identifier where
   eval (Identifier name) = pure (LvalLocal name)

 instance Tokenize Identifier where
-  tokenize = yield . Fragment . formatName . Data.Syntax.name
+  tokenize = yield . Run . formatName . Data.Syntax.name

 instance FreeVariables1 Identifier where
   liftFreeVariables _ (Identifier x) = Set.singleton x

View File

@@ -21,7 +21,7 @@ instance Evaluatable Comment where
   eval _ = rvalBox unit

 instance Tokenize Comment where
-  tokenize = yield . Fragment . commentContent
+  tokenize = yield . Run . commentContent

 -- TODO: nested comment types
 -- TODO: documentation comment types

View File

@@ -52,7 +52,7 @@ instance Evaluatable Data.Syntax.Literal.Integer where
     rvalBox =<< (integer <$> either (const (throwEvalError (IntegerFormatError x))) pure (parseInteger x))

 instance Tokenize Data.Syntax.Literal.Integer where
-  tokenize = yield . Fragment . integerContent
+  tokenize = yield . Run . integerContent

 -- | A literal float of unspecified width.

@@ -68,7 +68,7 @@ instance Evaluatable Data.Syntax.Literal.Float where
     rvalBox =<< (float <$> either (const (throwEvalError (FloatFormatError s))) pure (parseScientific s))

 instance Tokenize Data.Syntax.Literal.Float where
-  tokenize = yield . Fragment . floatContent
+  tokenize = yield . Run . floatContent

 -- Rational literals e.g. `2/3r`
 newtype Rational a = Rational { value :: Text }

@@ -142,7 +142,7 @@ instance Evaluatable TextElement where
   eval (TextElement x) = rvalBox (string x)

 instance Tokenize TextElement where
-  tokenize = yield . Fragment . textElementContent
+  tokenize = yield . Run . textElementContent

 -- | A sequence of textual contents within a string literal.
 newtype EscapeSequence a = EscapeSequence { value :: Text }

View File

@@ -8,10 +8,11 @@ module Language.JSON.PrettyPrint
 import Prologue hiding (throwError)

-import Control.Arrow
 import Control.Monad.Effect
 import Control.Monad.Effect.Exception (Exc, throwError)
+import Control.Monad.Trans (lift)
 import Data.Machine
 import Data.Reprinting.Errors
 import Data.Reprinting.Splice
 import Data.Reprinting.Token

@@ -25,10 +26,9 @@ defaultJSONPipeline
 -- | Print JSON syntax.
 printingJSON :: Monad m => ProcessT m Fragment Fragment
-printingJSON = auto step ~> flattened where
-  step :: Fragment -> Seq Fragment
+printingJSON = repeatedly (await >>= step) where
   step s@(Defer el cs) =
-    let ins = insert el cs
+    let ins = yield . New el cs
     in case (el, listToMaybe cs) of
       (Truth True, _)  -> ins "true"
       (Truth False, _) -> ins "false"

@@ -43,9 +43,8 @@ printingJSON = auto step ~> flattened where
       (TSep, Just TPair) -> ins ":"
       (TSep, Just THash) -> ins ","
-      _ -> pure s
-  step x = pure x
+      _ -> yield s
+  step x = yield x

 -- TODO: Fill out and implement configurable options like indentation count,
 -- tabs vs. spaces, etc.

@@ -58,21 +57,21 @@ defaultBeautyOpts = JSONBeautyOpts 2 False
 -- | Produce JSON with configurable whitespace and layout.
 beautifyingJSON :: (Member (Exc TranslationError) effs)
                 => JSONBeautyOpts -> ProcessT (Eff effs) Fragment Splice
-beautifyingJSON _ = autoT (Kleisli step) ~> flattened where
-  step (Defer el cs)   = throwError (NoTranslation el cs)
-  step (Verbatim txt)  = pure $ emit txt
-  step (New el cs txt) = pure $ case (el, listToMaybe cs) of
-    (TOpen,  Just THash) -> emit txt <> layouts [HardWrap, Indent 2 Spaces]
-    (TClose, Just THash) -> layout HardWrap <> emit txt
-    (TSep,   Just TList) -> emit txt <> space
-    (TSep,   Just TPair) -> emit txt <> space
-    (TSep,   Just THash) -> emit txt <> layouts [HardWrap, Indent 2 Spaces]
+beautifyingJSON _ = repeatedly (await >>= step) where
+  step (Defer el cs)   = lift (throwError (NoTranslation el cs))
+  step (Verbatim txt)  = emit txt
+  step (New el cs txt) = case (el, listToMaybe cs) of
+    (TOpen,  Just THash) -> emit txt *> layouts [HardWrap, Indent 2 Spaces]
+    (TClose, Just THash) -> layout HardWrap *> emit txt
+    (TSep,   Just TList) -> emit txt *> space
+    (TSep,   Just TPair) -> emit txt *> space
+    (TSep,   Just THash) -> emit txt *> layouts [HardWrap, Indent 2 Spaces]
     _ -> emit txt

 -- | Produce whitespace minimal JSON.
 minimizingJSON :: (Member (Exc TranslationError) effs)
                => ProcessT (Eff effs) Fragment Splice
-minimizingJSON = autoT (Kleisli step) ~> flattened where
-  step (Defer el cs)  = throwError (NoTranslation el cs)
-  step (Verbatim txt) = pure $ emit txt
-  step (New _ _ txt)  = pure $ emit txt
+minimizingJSON = repeatedly (await >>= step) where
+  step (Defer el cs)  = lift (throwError (NoTranslation el cs))
+  step (Verbatim txt) = emit txt
+  step (New _ _ txt)  = emit txt
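
For orientation (an aside, not in the diff): where the old printers mapped each Fragment to a Seq of splices and flattened, the new ones are built with repeatedly, awaiting one input and yielding any number of outputs per step. A generic sketch of that machines pattern, using a hypothetical doubler process:

    import Data.Machine

    -- Re-emit each input followed by its double.
    doubler :: Monad m => ProcessT m Int Int
    doubler = repeatedly $ do
      x <- await          -- blocks until upstream yields a value
      yield x
      yield (x * 2)

    -- run (source [1, 2, 3] ~> doubler) == [1, 2, 2, 4, 3, 6]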

View File

@@ -1,84 +1,66 @@
+{-# LANGUAGE RankNTypes #-}
 module Language.Python.PrettyPrint ( printingPython ) where

-import Control.Arrow
 import Control.Monad.Effect
 import Control.Monad.Effect.Exception (Exc, throwError)
+import Control.Monad.Trans (lift)
 import Data.Machine
 import Data.Reprinting.Errors
 import Data.Reprinting.Splice
 import Data.Reprinting.Token as Token
-import Data.Semigroup (stimes)
-import Data.Sequence (Seq)

 -- | Print Python syntax.
 printingPython :: (Member (Exc TranslationError) effs) => ProcessT (Eff effs) Fragment Splice
-printingPython = autoT (Kleisli step) ~> flattened
+printingPython = repeatedly (await >>= step)

-step :: (Member (Exc TranslationError) effs) => Fragment -> Eff effs (Seq Splice)
-step (Verbatim txt) = pure $ emit txt
-step (New _ _ txt)  = pure $ emit txt
+step :: (Member (Exc TranslationError) effs) => Fragment -> PlanT k Splice (Eff effs) ()
+step (Verbatim txt) = emit txt
+step (New _ _ txt)  = emit txt
 step (Defer el cs) = case (el, cs) of
   -- Function declarations
-  (TOpen,  TFunction:_)         -> pure $ emit "def" <> space
-  (TOpen,  TParams:TFunction:_) -> pure $ emit "("
-  (TClose, TParams:TFunction:_) -> pure $ emit "):"
-  (TClose, TFunction:xs)        -> pure $ endContext (depth xs)
+  (TOpen,  TFunction:_)         -> emit "def" *> space
+  (TOpen,  TParams:TFunction:_) -> emit "("
+  (TClose, TParams:TFunction:_) -> emit "):"
+  (TClose, TFunction:xs)        -> endContext (imperativeDepth xs)
   -- Return statements
-  (TOpen,  TReturn:_)            -> pure $ emit "return" <> space
-  (TClose, TReturn:_)            -> pure mempty
-  (TOpen,  Imperative:TReturn:_) -> pure mempty
-  (TSep,   Imperative:TReturn:_) -> pure $ emit "," <> space
-  (TClose, Imperative:TReturn:_) -> pure mempty -- Don't hardwarp or indent for return statements
+  (TOpen,  TReturn:_)            -> emit "return" *> space
+  (TClose, TReturn:_)            -> pure ()
+  (TOpen,  Imperative:TReturn:_) -> pure ()
+  (TSep,   Imperative:TReturn:_) -> emit "," *> space
+  (TClose, Imperative:TReturn:_) -> pure () -- Don't hardwarp or indent for return statements
   -- If statements
-  (TOpen,  TIf:_)  -> pure $ emit "if" <> space
-  (TThen,  TIf:_)  -> pure $ emit ":"
-  (TElse,  TIf:xs) -> pure $ endContext (depth xs) <> emit "else:"
-  (TClose, TIf:_)  -> pure mempty
+  (TOpen,  TIf:_)  -> emit "if" *> space
+  (TThen,  TIf:_)  -> emit ":"
+  (TElse,  TIf:xs) -> endContext (imperativeDepth xs) *> emit "else:"
+  (TClose, TIf:_)  -> pure ()
   -- Booleans
-  (Truth True, _)  -> pure $ emit "True"
-  (Truth False, _) -> pure $ emit "False"
+  (Truth True, _)  -> emit "True"
+  (Truth False, _) -> emit "False"
   -- Infix binary operators
-  (TOpen,  TInfixL _ p:xs)       -> emitIf (p < prec xs) "("
-  (TSym,   TInfixL Add _:_)      -> pure $ space <> emit "+" <> space
-  (TSym,   TInfixL Multiply _:_) -> pure $ space <> emit "*" <> space
-  (TSym,   TInfixL Subtract _:_) -> pure $ space <> emit "-" <> space
-  (TClose, TInfixL _ p:xs)       -> emitIf (p < prec xs) ")"
+  (TOpen,  TInfixL _ p:xs)       -> emitIf (p < precedenceOf xs) "("
+  (TSym,   TInfixL Add _:_)      -> space *> emit "+" *> space
+  (TSym,   TInfixL Multiply _:_) -> space *> emit "*" *> space
+  (TSym,   TInfixL Subtract _:_) -> space *> emit "-" *> space
+  (TClose, TInfixL _ p:xs)       -> emitIf (p < precedenceOf xs) ")"
   -- General params handling
-  (TOpen,  TParams:_) -> pure $ emit "("
-  (TSep,   TParams:_) -> pure $ emit "," <> space
-  (TClose, TParams:_) -> pure $ emit ")"
+  (TOpen,  TParams:_) -> emit "("
+  (TSep,   TParams:_) -> emit "," *> space
+  (TClose, TParams:_) -> emit ")"
   -- Imperative context and whitespace handling
-  (TOpen,  [Imperative])  -> pure mempty -- Don't indent at the top-level imperative context...
-  (TClose, [Imperative])  -> pure $ layout HardWrap -- but end the program with a newline.
-  (TOpen,  Imperative:xs) -> pure $ layout HardWrap <> indent (depth xs)
-  (TSep,   Imperative:xs) -> pure $ layout HardWrap <> indent (depth xs)
-  (TClose, Imperative:_)  -> pure mempty
-  _ -> throwError (NoTranslation el cs)
+  (TOpen,  [Imperative])  -> pure () -- Don't indent at the top-level imperative context...
+  (TClose, [Imperative])  -> layout HardWrap -- but end the program with a newline.
+  (TOpen,  Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
+  (TSep,   Imperative:xs) -> layout HardWrap *> indent 4 (imperativeDepth xs)
+  (TClose, Imperative:_)  -> pure ()
+  _ -> lift (throwError (NoTranslation el cs))
   where
-    emitIf predicate txt = pure $ if predicate then emit txt else mempty
-    endContext times = layout HardWrap <> indent (pred times)
-
-prec :: [Context] -> Int
-prec cs = case filter isInfix cs of
-  (TInfixL _ n:_) -> n
-  _ -> 0
-  where isInfix (TInfixL _ _) = True
-        isInfix _ = False
-
--- | Depth of imperative scope.
-depth :: [Context] -> Int
-depth = length . filter (== Imperative)
-
--- | Indent n times.
-indent :: Integral b => b -> Seq Splice
-indent times
-  | times > 0 = stimes times (layout (Indent 4 Spaces))
-  | otherwise = mempty
+    endContext times = layout HardWrap *> indent 4 (pred times)
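
Reading the new indentation arithmetic (illustrative only, not in the diff): closing a block at imperative depth 3 calls endContext 3, i.e. a hard wrap followed by indent 4 2, so the enclosing scope gets two 4-space indents (the top-level imperative scope contributes none):

    {-# LANGUAGE RankNTypes #-}
    import Data.Machine (Plan)
    import Data.Reprinting.Splice

    -- Equivalent of endContext 3 in the printer above.
    closeAtDepth3 :: Plan k Splice ()
    closeAtDepth3 = layout HardWrap *> indent 4 2
    -- yields: Layout HardWrap, Layout (Indent 4 Spaces), Layout (Indent 4 Spaces)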

View File

@@ -7,7 +7,7 @@ module Language.Ruby.Assignment
 , Term
 ) where

-import Prologue hiding (for)
+import Prologue hiding (for, unless)
 import Assigning.Assignment hiding (Assignment, Error)
 import qualified Assigning.Assignment as Assignment

View File

@@ -1,70 +1,52 @@
+{-# LANGUAGE Rank2Types #-}
 module Language.Ruby.PrettyPrint ( printingRuby ) where

-import Control.Arrow
 import Control.Monad.Effect
 import Control.Monad.Effect.Exception (Exc, throwError)
+import Control.Monad.Trans (lift)
 import Data.Machine
-import Data.Sequence (Seq)
 import Data.Reprinting.Errors
 import Data.Reprinting.Splice
 import Data.Reprinting.Token as Token
-import Data.Semigroup (stimes)

 -- | Print Ruby syntax.
 printingRuby :: (Member (Exc TranslationError) effs) => ProcessT (Eff effs) Fragment Splice
-printingRuby = autoT (Kleisli step) ~> flattened
+printingRuby = repeatedly (await >>= step)

-step :: (Member (Exc TranslationError) effs) => Fragment -> Eff effs (Seq Splice)
-step (Verbatim txt) = pure $ emit txt
-step (New _ _ txt)  = pure $ emit txt
+step :: (Member (Exc TranslationError) effs) => Fragment -> PlanT k Splice (Eff effs) ()
+step (Verbatim txt) = emit txt
+step (New _ _ txt)  = emit txt
 step (Defer el cs) = case (el, cs) of
-  (TOpen,  TMethod:_)  -> pure $ emit "def" <> space
-  (TClose, TMethod:xs) -> pure $ endContext (depth xs) <> emit "end"
+  (TOpen,  TMethod:_)  -> emit "def" *> space
+  (TClose, TMethod:xs) -> endContext (imperativeDepth xs) *> emit "end"
   -- TODO: do..end vs {..} should be configurable.
-  (TOpen,  TFunction:_)         -> pure $ space <> emit "do" <> space
-  (TOpen,  TParams:TFunction:_) -> pure $ emit "|"
-  (TClose, TParams:TFunction:_) -> pure $ emit "|"
-  (TClose, TFunction:xs)        -> pure $ endContext (depth xs) <> emit "end"
+  (TOpen,  TFunction:_)         -> space *> emit "do" *> space
+  (TOpen,  TParams:TFunction:_) -> emit "|"
+  (TClose, TParams:TFunction:_) -> emit "|"
+  (TClose, TFunction:xs)        -> endContext (imperativeDepth xs) *> emit "end"
   -- TODO: Parens for calls are a style choice, make configurable.
-  (TOpen,  TParams:_)       -> pure $ emit "("
-  (TSep,   TParams:_)       -> pure $ emit "," <> space
-  (TClose, TParams:_)       -> pure $ emit ")"
-  (TOpen,  TInfixL _ p:xs)  -> emitIf (p < prec xs) "("
-  (TSym,   TInfixL Add _:_)      -> pure $ space <> emit "+" <> space
-  (TSym,   TInfixL Multiply _:_) -> pure $ space <> emit "*" <> space
-  (TSym,   TInfixL Subtract _:_) -> pure $ space <> emit "-" <> space
-  (TClose, TInfixL _ p:xs)  -> emitIf (p < prec xs) ")"
-  (TOpen,  [Imperative])    -> pure mempty
-  (TOpen,  Imperative:xs)   -> pure $ layout HardWrap <> indent (depth xs)
-  (TSep,   Imperative:xs)   -> pure $ layout HardWrap <> indent (depth xs)
-  (TClose, [Imperative])    -> pure $ layout HardWrap
-  (TClose, Imperative:xs)   -> pure $ indent (pred (depth xs))
-  (TSep,   TCall:_)         -> pure $ emit "."
-  _ -> throwError (NoTranslation el cs)
+  (TOpen,  TParams:_)       -> emit "("
+  (TSep,   TParams:_)       -> emit "," *> space
+  (TClose, TParams:_)       -> emit ")"
+  (TOpen,  TInfixL _ p:xs)  -> emitIf (p < precedenceOf xs) "("
+  (TSym,   TInfixL Add _:_)      -> space *> emit "+" *> space
+  (TSym,   TInfixL Multiply _:_) -> space *> emit "*" *> space
+  (TSym,   TInfixL Subtract _:_) -> space *> emit "-" *> space
+  (TClose, TInfixL _ p:xs)  -> emitIf (p < precedenceOf xs) ")"
+  (TOpen,  [Imperative])    -> pure ()
+  (TOpen,  Imperative:xs)   -> layout HardWrap *> indent 2 (imperativeDepth xs)
+  (TSep,   Imperative:xs)   -> layout HardWrap *> indent 2 (imperativeDepth xs)
+  (TClose, [Imperative])    -> layout HardWrap
+  (TClose, Imperative:xs)   -> indent 2 (pred (imperativeDepth xs))
+  (TSep,   TCall:_)         -> emit "."
+  _ -> lift (throwError (NoTranslation el cs))
   where
-    emitIf predicate txt = pure $ if predicate then emit txt else mempty
-    endContext times = layout HardWrap <> indent (pred times)
-
-prec :: [Context] -> Int
-prec cs = case filter isInfix cs of
-  (TInfixL _ n:_) -> n
-  _ -> 0
-  where isInfix (TInfixL _ _) = True
-        isInfix _ = False
-
--- | Depth of imperative scope.
-depth :: [Context] -> Int
-depth = length . filter (== Imperative)
-
--- | Indent n times.
-indent :: Integral b => b -> Seq Splice
-indent times
-  | times > 0 = stimes times (layout (Indent 2 Spaces))
-  | otherwise = mempty
+    endContext times = layout HardWrap *> indent 2 (pred times)

View File

@@ -32,7 +32,7 @@ import Control.Exception as X hiding (Handler (..), assert, evaluate, throw, thr
 -- Typeclasses
 import Control.Applicative as X
 import Control.Arrow as X ((&&&), (***))
-import Control.Monad as X hiding (fail, return, unless, when)
+import Control.Monad as X hiding (fail, return)
 import Control.Monad.Except as X (MonadError (..))
 import Control.Monad.Fail as X (MonadFail (..))
 import Data.Algebra as X

View File

@@ -6,7 +6,7 @@ module Rendering.Symbol
 , parseSymbolFields
 ) where

-import Prologue
+import Prologue hiding (when)
 import Analysis.Declaration
 import Data.Aeson
 import Data.Blob

View File

@@ -1,4 +1,4 @@
-{-# LANGUAGE GADTs, RankNTypes, TypeOperators, UndecidableInstances #-}
+{-# LANGUAGE GADTs, LambdaCase, RankNTypes, UndecidableInstances #-}
 module Reprinting.Tokenize
 ( module Data.Reprinting.Token
@@ -25,35 +25,153 @@ module Reprinting.Tokenize
 , tokenizing
 ) where

-import Prelude hiding (fail, log)
+import Prelude hiding (fail, log, filter)
-import Prologue hiding (hash, Element)
+import Prologue hiding (Element, hash)

-import Control.Monad.Effect
-import Control.Monad.Effect.Reader
-import Control.Monad.Effect.State
-import Control.Monad.Effect.Writer
 import Data.History
 import Data.List (intersperse)
+import qualified Data.Machine as Machine
 import Data.Range
 import Data.Record
 import Data.Reprinting.Token
-import Data.Sequence (singleton)
 import Data.Source
 import Data.Term

 -- | The 'Tokenizer' monad represents a context in which 'Control'
 -- tokens and 'Element' tokens can be sent to some downstream
 -- consumer. Its primary interface is through the 'Tokenize'
--- typeclass.
-type Tokenizer = Eff '[Reader RPContext, State RPState, Writer (Seq Token)]
+-- typeclass, and is compiled to a 'Data.Machine.Source' by
+-- 'tokenizing'.
+data Tokenizer a where
+  Pure :: a -> Tokenizer a
+  Bind :: Tokenizer a -> (a -> Tokenizer b) -> Tokenizer b
+  Tell :: Token -> Tokenizer ()
+  Get  :: Tokenizer State
+  Put  :: State -> Tokenizer ()
+
+-- Tokenizers are compiled into a Plan capable of being converted
+-- to a Source. Note that the state parameter is internal to the
+-- tokenizer being run: the invoker of 'tokenizing' doesn't need
+-- to keep track of it at all.
+compile :: State -> Tokenizer a -> Machine.Plan k Token (State, a)
+compile p = \case
+  Pure a   -> pure (p, a)
+  Bind a f -> compile p a >>= (\(new, v) -> compile new (f v))
+  Tell t   -> Machine.yield t $> (p, ())
+  Get      -> pure (p, p)
+  Put p'   -> pure (p', ())
+
+instance Functor Tokenizer where fmap = liftA
+
+instance Applicative Tokenizer where
+  pure  = Pure
+  (<*>) = ap
+
+instance Monad Tokenizer where (>>=) = Bind
+
+data Strategy
+  = Reprinting
+  | PrettyPrinting
+  deriving (Eq, Show)
+
+data Filter
+  = AllowAll
+  | ForbidData
+  deriving (Eq, Show)
+
+data State = State
+  { source   :: Source   -- We need to be able to slice
+  , history  :: History  -- What's the history of the term we're examining
+  , strategy :: Strategy -- What are we doing right now?
+  , cursor   :: Int      -- Where do we begin slices?
+  , filter   :: Filter   -- Should we ignore data tokens?
+  } deriving (Show, Eq)
+
+-- Builtins
+
 -- | Yield an 'Element' token in a 'Tokenizer' context.
 yield :: Element -> Tokenizer ()
-yield = tell . singleton . TElement
+yield e = do
+  on <- filter <$> Get
+  when (on == AllowAll) . Tell . TElement $ e

--- | Yield a 'Control' token in a 'Tokenizer' context.
+-- | Yield a 'Control' token.
 control :: Control -> Tokenizer ()
-control = tell . singleton . TControl
+control = Tell . TControl
+
+-- | Yield a 'Chunk' of some 'Source'.
+chunk :: Source -> Tokenizer ()
+chunk = Tell . Chunk
+
+-- | Ensures that the final chunk is emitted
+finish :: Tokenizer ()
+finish = do
+  crs <- asks cursor
+  log ("Finishing, cursor is " <> show crs)
+  src <- asks source
+  chunk (dropSource crs src)
+
+-- State handling
+
+asks :: (State -> a) -> Tokenizer a
+asks f = f <$> Get
+
+modify :: (State -> State) -> Tokenizer ()
+modify f = Get >>= \x -> Put . f $! x
+
+allowAll, forbidData :: Tokenizer ()
+allowAll   = modify (\x -> x { filter = AllowAll })
+forbidData = modify (\x -> x { filter = ForbidData })
+
+move :: Int -> Tokenizer ()
+move c = modify (\x -> x { cursor = c })
+
+withHistory :: (Annotated t (Record fields), HasField fields History)
+            => t
+            -> Tokenizer a
+            -> Tokenizer a
+withHistory t act = do
+  old <- asks history
+  modify (\x -> x { history = getField (annotation t)})
+  act <* modify (\x -> x { history = old })
+
+withStrategy :: Strategy -> Tokenizer a -> Tokenizer a
+withStrategy s act = do
+  old <- Get
+  Put (old { strategy = s })
+  res <- act
+  new <- Get
+  Put (new { strategy = strategy old })
+  pure res
+
+-- The reprinting algorithm.
+
+-- | A subterm algebra inspired by the /Scrap Your Reprinter/ algorithm.
+descend :: (Tokenize constr, HasField fields History) => SubtermAlgebra constr (Term a (Record fields)) (Tokenizer ())
+descend t = do
+  (State src hist strat crs _) <- asks id
+  let into s = withHistory (subterm s) (subtermRef s)
+  case (hist, strat) of
+    (Unmodified _, _) -> do
+      tokenize (fmap into t)
+      forbidData
+    (Refactored _, PrettyPrinting) -> do
+      allowAll
+      tokenize (fmap into t)
+    (Refactored r, Reprinting) -> do
+      allowAll
+      let delimiter = Range crs (start r)
+      unless (delimiter == Range 0 0) $ do
+        log ("slicing: " <> show delimiter)
+        chunk (slice delimiter src)
+      move (start r)
+      tokenize (fmap (withStrategy PrettyPrinting . into) t)
+      move (end r)
+
+-- Combinators
+
 -- | Emit a log message to the token stream. Useful for debugging.
 log :: String -> Tokenizer ()
@@ -106,6 +224,15 @@ class (Show1 constr, Traversable constr) => Tokenize constr where
   -- | Should emit control and data tokens.
   tokenize :: FAlgebra constr (Tokenizer ())

+tokenizing :: (Show (Record fields), Tokenize a, HasField fields History)
+           => Source
+           -> Term a (Record fields)
+           -> Machine.Source Token
+tokenizing src term = pipe
+  where pipe  = Machine.construct . fmap snd $ compile state go
+        state = State src (getField (termAnnotation term)) Reprinting 0 ForbidData
+        go    = forbidData *> foldSubterms descend term <* finish
+
 -- | Sums of reprintable terms are reprintable.
 instance (Apply Show1 fs, Apply Functor fs, Apply Foldable fs, Apply Traversable fs, Apply Tokenize fs) => Tokenize (Sum fs) where
   tokenize = apply @Tokenize tokenize
@@ -116,76 +243,3 @@ instance (HasField fields History, Show (Record fields), Tokenize a) => Tokenize
 instance Tokenize [] where
   tokenize = imperative
-
--- | The top-level function. Pass in a 'Source' and a 'Term' and
--- you'll get out a 'Seq' of 'Token's for later processing.
-tokenizing :: (Show (Record fields), Tokenize a, HasField fields History) => Source -> Term a (Record fields) -> Seq Token
-tokenizing s t = let h = getField (termAnnotation t) in
-  run
-  . fmap fst
-  . runWriter
-  . fmap snd
-  . runState (RPState 0)
-  . runReader (RPContext s h Reprinting)
-  $ foldSubterms descend t *> finish
-
--- Private interfaces
-
-newtype RPState = RPState
-  { _cursor :: Int -- from SYR, used to slice and dice a 'Source' (mutates)
-  } deriving (Show, Eq)
-
-setCursor :: Int -> RPState -> RPState
-setCursor c s = s { _cursor = c }
-
-data RPContext = RPContext
-  { _source   :: Source
-  , _history  :: History
-  , _strategy :: Strategy
-  } deriving (Show, Eq)
-
-data Strategy
-  = Reprinting
-  | PrettyPrinting
-  deriving (Eq, Show)
-
-setStrategy :: Strategy -> RPContext -> RPContext
-setStrategy s c = c { _strategy = s }
-
-setHistory :: History -> RPContext -> RPContext
-setHistory h c = c { _history = h }
-
-chunk :: Source -> Tokenizer ()
-chunk = tell . singleton . Chunk
-
-finish :: Tokenizer ()
-finish = do
-  crs <- gets _cursor
-  src <- asks _source
-  chunk (dropSource crs src)
-
-withHistory :: (Annotated t (Record fields), HasField fields History) => t -> Tokenizer a -> Tokenizer a
-withHistory x = local (setHistory (getField (annotation x)))
-
-withStrategy :: Strategy -> Tokenizer a -> Tokenizer a
-withStrategy x = local (setStrategy x)
-
--- | A subterm algebra inspired by the /Scrap Your Reprinter/ algorithm.
-descend :: (Tokenize constr, HasField fields History) => SubtermAlgebra constr (Term a (Record fields)) (Tokenizer ())
-descend t = do
-  -- log (showsPrec1 0 (() <$ t) "")
-  hist <- asks _history
-  strat <- asks _strategy
-  let into s = withHistory (subterm s) (subtermRef s)
-  case (hist, strat) of
-    (Unmodified _, _) -> traverse_ into t
-    (Refactored _, PrettyPrinting) -> tokenize (fmap into t)
-    (Refactored r, Reprinting) -> do
-      crs <- gets _cursor
-      src <- asks _source
-      let delimiter = Range crs (start r)
-      log ("slicing: " <> show delimiter)
-      chunk (slice delimiter src)
-      modify' (setCursor (start r))
-      tokenize (fmap (withStrategy PrettyPrinting . into) t)
-      modify' (setCursor (end r))
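
Because tokenizing now produces a machines Source rather than a Seq, the token stream can be forced to a list, as the updated tests later in this diff do, or piped onward with ~>. A sketch (allTokens is a hypothetical helper; it assumes the same imports and constraints as Reprinting.Tokenize):

    import qualified Data.Machine as Machine

    allTokens :: (Show (Record fields), Tokenize a, HasField fields History)
              => Source -> Term a (Record fields) -> [Token]
    allTokens src term = Machine.run (tokenizing src term)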

View File

@@ -1,55 +1,40 @@
-{-# LANGUAGE AllowAmbiguousTypes, OverloadedLists, ScopedTypeVariables, TypeFamilyDependencies, TypeOperators #-}
+{-# LANGUAGE LambdaCase #-}
 module Reprinting.Translate
 ( Translator
 , contextualizing
 ) where

-import Prologue hiding (Element)
-import Control.Arrow
+import Control.Monad
 import Control.Monad.Effect
 import Control.Monad.Effect.Exception (Exc)
 import qualified Control.Monad.Effect.Exception as Exc
 import Control.Monad.Effect.State
+import Control.Monad.Trans
 import Data.Machine
-import Data.Reprinting.Errors
 import Data.Reprinting.Splice
 import Data.Reprinting.Token
+import Data.Reprinting.Errors
 import qualified Data.Source as Source

 type Translator = Eff '[State [Context], Exc TranslationError]

--- | Prepare for language specific translation by contextualizing 'Token's to
--- 'Fragment's.
-contextualizing ::
-  ( Member (State [Context]) effs
-  , Member (Exc TranslationError) effs
-  )
-  => ProcessT (Eff effs) Token Fragment
-contextualizing = autoT (Kleisli step) ~> flattened where
-  step t = case t of
-    Chunk source -> pure $ copy (Source.toText source)
-    TElement el  -> toFragment el <$> get
-    TControl ctl -> case ctl of
-      Log _   -> pure mempty
-      Enter c -> enterContext c $> mempty
-      Exit c  -> exitContext c $> mempty
-  toFragment el cs = case el of
-    Fragment f -> insert el cs f
-    _          -> defer el cs
-
-enterContext :: (Member (State [Context]) effs) => Context -> Eff effs ()
-enterContext c = modify' (c :)
-
-exitContext ::
-  ( Member (State [Context]) effs
-  , Member (Exc TranslationError) effs
-  )
-  => Context -> Eff effs ()
-exitContext c = do
-  current <- get
-  case current of
-    (x:xs) | x == c -> modify' (const xs)
-    cs -> Exc.throwError (UnbalancedPair c cs)
+contextualizing :: ProcessT Translator Token Fragment
+contextualizing = repeatedly $ await >>= \case
+  Chunk source -> yield . Verbatim . Source.toText $ source
+  TElement t   -> case t of
+    Run f -> lift get >>= \c -> yield (New t c f)
+    _     -> lift get >>= yield . Defer t
+  TControl ctl -> case ctl of
+    Enter c -> enterContext c
+    Exit c  -> exitContext c
+    _       -> pure ()
+
+enterContext, exitContext :: Context -> PlanT k Fragment Translator ()
+
+enterContext c = lift (modify' (c :))
+
+exitContext c = lift get >>= \case
+  (x:xs) -> when (x == c) (lift (modify' (const xs)))
+  cs     -> lift (Exc.throwError (UnbalancedPair c cs))
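
A worked example of what contextualizing does to a token stream (not from the diff; src and the token values are hypothetical), tracking the [Context] state threaded through the Translator:

    -- Input token              State after    Output fragment
    -- TControl (Enter TList)   [TList]        (none)
    -- TElement TSep            [TList]        Defer TSep [TList]
    -- TElement (Run "x")       [TList]        New (Run "x") [TList] "x"
    -- TControl (Exit TList)    []             (none)
    -- Chunk src                []             Verbatim (Source.toText src)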

View File

@@ -18,6 +18,7 @@ import Data.Blob
 import Language.JSON.PrettyPrint
 import Language.Ruby.PrettyPrint
 import Language.Python.PrettyPrint
+import qualified Data.Machine as Machine

 spec :: Spec
 spec = describe "reprinting" $ do

@@ -32,11 +33,13 @@ spec = describe "reprinting" $ do
     it "should pass over a pristine tree" $ do
       let tagged = mark Unmodified tree
-      let toks = tokenizing src tagged
-      toks `shouldBe` [Chunk src]
+      let toks = Machine.run $ tokenizing src tagged
+      toks `shouldSatisfy` not . null
+      head toks `shouldSatisfy` isControl
+      last toks `shouldSatisfy` isChunk

     it "should emit control tokens but only 1 chunk for a wholly-modified tree" $ do
-      let toks = tokenizing src (mark Refactored tree)
+      let toks = Machine.run $ tokenizing src (mark Refactored tree)
       for_ @[] [TList, THash] $ \t -> do
         toks `shouldSatisfy` elem (TControl (Enter t))
         toks `shouldSatisfy` elem (TControl (Exit t))