mirror of https://github.com/github/semantic.git synced 2024-12-29 01:42:43 +03:00

Lints, formatting, docs, cleanup

Timothy Clem 2018-08-22 13:35:40 -07:00
parent b0dab3ddd9
commit 3e0a6b95cc
11 changed files with 47 additions and 38 deletions

View File

@@ -12,16 +12,26 @@ import Data.Range
-- | 'History' values, when attached to a given 'Term', describe the ways in
-- which that term was modified during a refactoring pass, if any.
data History
= Refactored Range -- ^ A 'Refactored' node was changed by a refactor but still has (possibly-inaccurate) position information.
| Unmodified Range -- ^ An 'Unmodified' node was not changed, but may have 'Refactored' children.
= Refactored Range
-- ^ A 'Refactored' node was changed by a refactor but still has
-- (possibly-inaccurate) position information.
| Unmodified Range
-- ^ An 'Unmodified' node was not changed, but may have 'Refactored'
-- children.
deriving (Show, Eq)
-- | Convert a 'Term' annotated with a 'Range' to one annotated with a 'History'.
mark :: Functor f => (Range -> History) -> f (Record (Range ': fields)) -> f (Record (History ': fields))
mark :: Functor f
=> (Range -> History)
-> f (Record (Range ': fields))
-> f (Record (History ': fields))
mark f = fmap go where go (r :. a) = f r :. a
-- | Change the 'History' annotation on a 'Term'.
remark :: Functor f => (Range -> History) -> f (Record (History ': fields)) -> f (Record (History ': fields))
remark :: Functor f
=> (Range -> History)
-> f (Record (History ': fields))
-> f (Record (History ': fields))
remark f = fmap go where
go (r :. a) = x :. a where
x = case r of
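
A minimal usage sketch (not part of this diff), assuming a Range-annotated term called tree as produced by a parser:

    historied  = mark Unmodified tree        -- stamp every node with its range, flagged Unmodified
    refactored = remark Refactored historied -- reuse each node's stored range, flag it Refactored

The spec change at the end of this commit relies on the same idea, calling mark Refactored tree to flag a whole tree as rewritten before tokenizing it.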

View File

@@ -1,6 +1,4 @@
module Data.Reprinting.Errors
( TranslationException (..)
) where
module Data.Reprinting.Errors ( TranslationException (..) ) where
import Data.Reprinting.Token

View File

@@ -5,12 +5,10 @@ module Data.Reprinting.Splice
, layouts
, space
, indent
, Datum(..)
, copy
, insert
, raw
, Whitespace(..)
) where

View File

@@ -2,7 +2,7 @@
{-# OPTIONS_GHC -Wno-missing-export-lists #-}
module Data.Syntax.Literal where
import Data.Abstract.Evaluatable hiding (Close)
import Data.Abstract.Evaluatable
import Data.JSON.Fields
import Data.Scientific.Exts
import qualified Data.Text as T

View File

@@ -45,6 +45,8 @@ import qualified Language.Ruby.Syntax as Ruby.Syntax
import Prologue hiding (for)
import Proto3.Suite (Named (..), Named1 (..))
-- TODO: Only needed for as long as we carry around a mini ruby syntax.
{-# ANN module ("HLint: ignore Reduce duplication" :: String) #-}
-- | Small version of Ruby syntax for testing the code rewriting pipeline.
type MiniSyntax = '[

View File

@@ -28,10 +28,10 @@ printingRuby = flattened <~ autoT (Kleisli step) where
(TSep, TParams:_) -> pure $ emit "," <> space
(TClose, TParams:_) -> pure $ emit ")"
(TOpen, Imperative:[]) -> pure $ mempty
(TOpen, [Imperative]) -> pure mempty
(TOpen, Imperative:xs) -> pure $ layout HardWrap <> indent (depth xs)
(TSep, Imperative:xs) -> pure $ layout HardWrap <> indent (depth xs)
(TClose, Imperative:[]) -> pure $ layout HardWrap
(TClose, [Imperative]) -> pure $ layout HardWrap
(TClose, Imperative:xs) -> pure $ indent (pred (depth xs))
(TSep, TCall:_) -> pure $ emit "."

View File

@@ -73,7 +73,7 @@ data SomeAnalysisParser typeclasses ann where
, HasPrelude lang
, HasPostlude lang
)
=> Parser (Term (Sum fs) ann) -- A parser.
=> Parser (Term (Sum fs) ann) -- ^ A parser.
-> Proxy lang
-> SomeAnalysisParser typeclasses ann
@@ -86,9 +86,9 @@ someAnalysisParser :: ( ApplyAll' typeclasses Go.Syntax
, ApplyAll' typeclasses TypeScript.Syntax
, ApplyAll' typeclasses Haskell.Syntax
)
=> proxy typeclasses -- A proxy for the list of typeclasses required, e.g. @(Proxy :: Proxy '[Show1])@.
-> Language -- The 'Language' to select.
-> SomeAnalysisParser typeclasses (Record Location) -- A 'SomeAnalysisParser' abstracting the syntax type to be produced.
=> proxy typeclasses -- ^ A proxy for the list of typeclasses required, e.g. @(Proxy :: Proxy '[Show1])@.
-> Language -- ^ The 'Language' to select.
-> SomeAnalysisParser typeclasses (Record Location) -- ^ A 'SomeAnalysisParser' abstracting the syntax type to be produced.
someAnalysisParser _ Go = SomeAnalysisParser goParser (Proxy :: Proxy 'Go)
someAnalysisParser _ Haskell = SomeAnalysisParser haskellParser (Proxy :: Proxy 'Haskell)
someAnalysisParser _ Java = SomeAnalysisParser javaParser (Proxy :: Proxy 'Java)
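
A hedged usage sketch (not part of this change), following the Haddock example above:

    someAnalysisParser (Proxy :: Proxy '[Show1]) Go
      -- yields a SomeAnalysisParser '[Show1] (Record Location) wrapping goParser

The proxy fixes which typeclasses the caller requires of the underlying syntax; the Language argument selects the concrete parser.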
@@ -106,9 +106,9 @@ data Parser term where
ASTParser :: (Bounded grammar, Enum grammar, Show grammar) => Ptr TS.Language -> Parser (AST [] grammar)
-- | A parser producing an à la carte term given an 'AST'-producing parser and an 'Assignment' onto 'Term's in some syntax type.
AssignmentParser :: (Enum grammar, Ix grammar, Show grammar, TS.Symbol grammar, Syntax.Error :< fs, Eq1 ast, Apply Foldable fs, Apply Functor fs, Foldable ast, Functor ast)
=> Parser (Term ast (Node grammar)) -- A parser producing AST.
-> Assignment ast grammar (Term (Sum fs) (Record Location)) -- An assignment from AST onto 'Term's.
-> Parser (Term (Sum fs) (Record Location)) -- A parser producing 'Term's.
=> Parser (Term ast (Node grammar)) -- ^ A parser producing AST.
-> Assignment ast grammar (Term (Sum fs) (Record Location)) -- ^ An assignment from AST onto 'Term's.
-> Parser (Term (Sum fs) (Record Location)) -- ^ A parser producing 'Term's.
DeterministicParser :: (Enum grammar, Ord grammar, Show grammar, Element Syntax.Error syntaxes, Apply Foldable syntaxes, Apply Functor syntaxes)
=> Parser (AST [] grammar)
-> Deterministic.Assignment grammar (Term (Sum syntaxes) (Record Location))

View File

@@ -73,7 +73,7 @@ stages of the pipeline follows:
specific style, formatting, and even post-processing (minimizers, etc).
(Language-specific, Project-specific)
|
| Seq Datum
| Seq Splice
|
v
[Typeset]

View File

@@ -23,28 +23,31 @@ type Translator = Eff '[State [Context], Exc TranslationException]
translating ::
( Member (State [Context]) effs
, Member (Exc TranslationException) effs
) => ProcessT (Eff effs) Token Datum
)
=> ProcessT (Eff effs) Token Datum
translating = flattened <~ autoT (Kleisli step) where
step t = case t of
Chunk source -> pure $ copy (Source.toText source)
TElement el -> get >>= pure . spliceFragments el
TElement el -> toDatum el <$> get
TControl ctl -> case ctl of
Log _ -> pure mempty
Enter c -> enterContext c $> mempty
Exit c -> exitContext c $> mempty
spliceFragments el cs = case el of
toDatum el cs = case el of
Fragment f -> insert el cs f
_ -> raw el cs
enterContext :: (Member (State [Context]) effs) => Context -> Eff effs ()
enterContext c = modify' (c :)
enterContext :: (Member (State [Context]) effs) => Context -> Eff effs ()
enterContext c = modify' (c :)
exitContext ::
( Member (State [Context]) effs
, Member (Exc TranslationException) effs
) => Context -> Eff effs ()
exitContext c = do
current <- get
case current of
(x:xs) | x == c -> modify' (const xs)
cs -> Exc.throwError (InvalidContext c cs)
exitContext ::
( Member (State [Context]) effs
, Member (Exc TranslationException) effs
)
=> Context -> Eff effs ()
exitContext c = do
current <- get
case current of
(x:xs) | x == c -> modify' (const xs)
cs -> Exc.throwError (InvalidContext c cs)
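
The enterContext/exitContext pair above keeps a stack of Contexts and rejects an Exit that does not match the most recent Enter. A standalone sketch of that discipline in plain Haskell (no Eff machinery; the names enter and exit are illustrative only):

    enter :: context -> [context] -> [context]
    enter c cs = c : cs

    exit :: Eq context => context -> [context] -> Either (context, [context]) [context]
    exit c (x:xs) | x == c = Right xs     -- balanced: pop the matching context
    exit c cs              = Left (c, cs) -- mismatch, analogous to InvalidContext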

View File

@@ -1,4 +1,3 @@
-- {-# LANGUAGE GeneralizedNewtypeDeriving, ScopedTypeVariables, TypeFamilies, TypeOperators #-}
{-# LANGUAGE ScopedTypeVariables, TypeFamilies, TypeOperators #-}
{-# OPTIONS_GHC -Wno-missing-signatures -Wno-missing-export-lists #-}
module Semantic.Util where
@@ -104,7 +103,6 @@ callGraphProject parser proxy opts paths = runTaskWithOptions opts $ do
callGraphRubyProject = callGraphProject rubyParser (Proxy @'Language.Ruby) debugOptions
-- Evaluate a project consisting of the listed paths.
evaluateProject proxy parser paths = withOptions debugOptions $ \ config logger statter ->
evaluateProject' (TaskConfig config logger statter) proxy parser paths
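
A hedged example of calling the helper above (the file path is made up; the proxy/parser pair mirrors the Ruby definitions earlier in this file):

    evaluateProject (Proxy @'Language.Ruby) rubyParser ["/tmp/example.rb"]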

View File

@@ -14,7 +14,7 @@ import Data.Sum
import Data.Foldable
import Semantic.IO
import Data.Blob
import Language.JSON.Translate
import Language.JSON.PrettyPrint
spec :: Spec
spec = describe "reprinting" $ do
@@ -34,7 +34,7 @@ spec = describe "reprinting" $ do
it "should emit control tokens but only 1 chunk for a wholly-modified tree" $ do
let toks = tokenizing src (mark Refactored tree)
for_ @[] [List, THash] $ \t -> do
for_ @[] [TList, THash] $ \t -> do
toks `shouldSatisfy` elem (TControl (Enter t))
toks `shouldSatisfy` elem (TControl (Exit t))