
Merge branch 'master' into 🔥-javascript

Rob Rix 2017-05-17 13:17:29 -04:00
commit 64551e1327
30 changed files with 18 additions and 1816 deletions

UI/.gitignore

@@ -1 +0,0 @@
*.html


@@ -1,165 +0,0 @@
table {
width: 100%;
table-layout: fixed;
border-collapse: separate;
border-spacing: 0;
box-sizing: border-box;
}
td {
vertical-align: top;
padding: 0;
}
.blob-num {
width: 1%;
min-width: 50px;
white-space: nowrap;
text-align: right;
font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace;
font-size: 12px;
line-height: 18px;
color: rgba(0, 0, 0, 0.3);
border: solid #eee;
border-width: 0 1px 0 0;
padding-left: 10px;
padding-right: 10px;
-webkit-user-select: none;
}
.blob-num-replacement {
background-color: #ffdddd;
border-color: #f1c0c0
}
.blob-code+.blob-num-replacement {
background-color: #dbffdb;
border-color: #c1e9c1
}
.blob-code-replacement {
background-color: #ffecec;
}
.blob-code-replacement:last-child {
background-color: #eaffea;
}
.blob-code {
padding-left: 10px;
padding-right: 10px;
font-family: Consolas, "Liberation Mono", Menlo, Courier, monospace;
font-size: 13px;
line-height: 18px;
color: rgb(51, 51, 51);
tab-size: 8;
}
.blob-code>* {
font-size: 12px;
line-height: 16px;
}
.empty-cell {
background-color: #fafafa;
border-right-color: #eee;
}
.blob-code+.blob-num {
border-left-width: 1px;
}
#before, #after {
width: 50%;
}
#before {
float: left;
}
#after {
float: right;
}
body {
margin: 0;
padding: 0;
font-family: monospace;
}
.blob-code:last-child .patch,
.blob-code:last-child .insert,
.blob-code:last-child .replace {
background-color: #a6f3a6;
}
.blob-code .patch,
.blob-code .delete,
.blob-code .replace {
background-color: #f8cbcb;
}
.diff div, .diff ul, .diff li, .diff dl, .diff dd, .diff span {
white-space: pre-wrap;
display: inline;
margin: 0;
padding: 0;
color: initial;
}
.diff dt {
display: none;
}
/* syntax highlighting */
.diff .category-regex,
.diff .category-string {
color: #183691;
}
.diff .category-false,
.diff .category-true,
.diff .category-null,
.diff .category-undefined,
.diff .category-number,
.diff .category-function_call>li>.category-identifier,
.diff .category-function_call>li>.category-argument_list>li>.category-identifier,
.diff .category-function_call>li>.category-member_access>.category-identifier,
.diff .category-member_access>:not(:first-child)>.category-identifier,
.diff .category-scope_resolution_expression>li>.category-identifier,
.diff .category-symbol {
color: #0086b3;
}
.diff .category-comment {
color: #969896;
}
.diff [class^="category-"][class$="_op"],
.diff .category-ternary,
.diff .category-conditional,
.diff .category-additive,
.diff .category-multiplicative,
.diff .category-complement,
.diff .category-var_declaration,
.diff .category-new_expression,
.diff .category-if_statement,
.diff .category-do_statement,
.diff .category-for_statement,
.diff .category-for_in_statement,
.diff .category-return_statement,
.diff .category-function,
.diff .category-assignment,
.diff .category-var_assignment,
.diff .category-method_declaration,
.diff .category-case_statement,
.diff .category-else_block,
.diff .category-when_block,
.diff .category-then_block,
.diff .category-class_declaration,
.diff .category-module_declaration {
color: #a71d5d;
}
.diff .category-function>li:first-child>.category-identifier,
.diff .category-method_declaration>li:first-child>.category-identifier,
.diff .category-new_expression>li>.category-function_call>li:first-child>.category-identifier,
.diff .category-pair>li:first-child>.category-identifier,
.diff .category-assignment>li:first-child>.category-member_access>:not(:first-child)>.category-identifier,
.diff .category-class_declaration>li>.category-identifier,
.diff .category-module_declaration>li>.category-identifier {
color: #795da3;
}


@@ -60,7 +60,6 @@ library
, Renderer
, Renderer.JSON
, Renderer.Patch
, Renderer.Summary
, Renderer.SExpression
, Renderer.TOC
, RWS
@@ -155,7 +154,6 @@ test-suite test
, DiffSpec
, SemanticSpec
, SemanticCmdLineSpec
, SummarySpec
, GitmonClientSpec
, InterpreterSpec
, PatchOutputSpec


@@ -46,9 +46,6 @@ patchDiff = DiffArguments PatchRenderer identityDecorator
jsonDiff :: DiffArguments'
jsonDiff = DiffArguments JSONDiffRenderer identityDecorator
summaryDiff :: DiffArguments'
summaryDiff = DiffArguments SummaryRenderer identityDecorator
sExpressionDiff :: DiffArguments'
sExpressionDiff = DiffArguments (SExpressionDiffRenderer TreeOnly) identityDecorator


@@ -25,7 +25,6 @@ import Prologue
import Renderer.JSON as R
import Renderer.Patch as R
import Renderer.SExpression as R
import Renderer.Summary as R
import Renderer.TOC as R
import Source (SourceBlob(..), Source)
import Syntax as S
@@ -35,7 +34,6 @@ import Term
data DiffRenderer fields output where
PatchRenderer :: HasField fields Range => DiffRenderer fields File
JSONDiffRenderer :: (ToJSONFields (Record fields), HasField fields Range) => DiffRenderer fields (Map Text Value)
SummaryRenderer :: HasDefaultFields fields => DiffRenderer fields Summaries
SExpressionDiffRenderer :: (HasField fields Category, HasField fields SourceSpan) => SExpressionFormat -> DiffRenderer fields ByteString
ToCRenderer :: (HasField fields Category, HasField fields (Maybe Declaration), HasField fields SourceSpan) => DiffRenderer fields Summaries
@@ -43,7 +41,6 @@ resolveDiffRenderer :: (Monoid output, StringConv output ByteString) => DiffRend
resolveDiffRenderer renderer = case renderer of
PatchRenderer -> (File .) . R.patch
JSONDiffRenderer -> R.json
SummaryRenderer -> R.summary
SExpressionDiffRenderer format -> R.sExpression format
ToCRenderer -> R.toc
@@ -102,7 +99,6 @@ instance StringConv File ByteString where
instance Show (DiffRenderer fields output) where
showsPrec _ PatchRenderer = showString "PatchRenderer"
showsPrec _ JSONDiffRenderer = showString "JSONDiffRenderer"
showsPrec _ SummaryRenderer = showString "SummaryRenderer"
showsPrec d (SExpressionDiffRenderer format) = showsUnaryWith showsPrec "SExpressionDiffRenderer" d format
showsPrec _ ToCRenderer = showString "ToCRenderer"


@@ -1,548 +0,0 @@
{-# LANGUAGE MultiParamTypeClasses, ScopedTypeVariables #-}
{-# OPTIONS_GHC -Wno-deprecations #-}
-- Disabling deprecation warnings due to pattern match against RescueModifier.
module Renderer.Summary (Summaries(..), summary, diffSummaries, DiffSummary(..), DiffInfo(..), diffToDiffSummaries, isBranchInfo, isErrorSummary, JSONSummary(..)) where
import Prologue
import Diff
import Patch
import Term
import Info (HasDefaultFields, category, byteRange)
import Range
import Syntax as S
import Category as C
import Data.Functor.Both hiding (fst, snd)
import qualified Data.Functor.Both as Both
import Data.Functor.Listable
import Data.List.NonEmpty (nonEmpty)
import qualified Data.Text as Text
import Data.Text.Listable
import Data.Record
import Data.These
import Text.PrettyPrint.Leijen.Text ((<+>), squotes, space, string, Doc, punctuate, pretty, hsep)
import qualified Text.PrettyPrint.Leijen.Text as P
import Data.Aeson
import SourceSpan
import Source hiding (null)
import qualified Data.Map as Map
import qualified Data.List as List
data Summaries = Summaries { changes, errors :: !(Map Text [Value]) }
deriving Show
instance Monoid Summaries where
mempty = Summaries mempty mempty
mappend (Summaries c1 e1) (Summaries c2 e2) = Summaries (Map.unionWith (<>) c1 c2) (Map.unionWith (<>) e1 e2)
instance StringConv Summaries ByteString where
strConv _ = toS . (<> "\n") . encode
instance ToJSON Summaries where
toJSON Summaries{..} = object [ "changes" .= changes, "errors" .= errors ]
data Annotatable a = Annotatable a | Unannotatable a
annotatable :: SyntaxTerm leaf fields -> Annotatable (SyntaxTerm leaf fields)
annotatable term = isAnnotatable (unwrap term) term
where isAnnotatable syntax = case syntax of
S.Class{} -> Annotatable
S.Method{} -> Annotatable
S.Function{} -> Annotatable
S.Module{} -> Annotatable
S.Namespace{} -> Annotatable
S.Interface{} -> Annotatable
_ -> Unannotatable
data Identifiable a = Identifiable a | Unidentifiable a
identifiable :: SyntaxTerm leaf fields -> Identifiable (SyntaxTerm leaf fields)
identifiable term = isIdentifiable (unwrap term) term
where isIdentifiable syntax = case syntax of
S.FunctionCall{} -> Identifiable
S.MethodCall{} -> Identifiable
S.Function{} -> Identifiable
S.Assignment{} -> Identifiable
S.OperatorAssignment{} -> Identifiable
S.VarAssignment{} -> Identifiable
S.SubscriptAccess{} -> Identifiable
S.Module{} -> Identifiable
S.Namespace{} -> Identifiable
S.Interface{} -> Identifiable
S.Class{} -> Identifiable
S.Method{} -> Identifiable
S.Leaf{} -> Identifiable
S.DoWhile{} -> Identifiable
S.Import{} -> Identifiable
S.Export{} -> Identifiable
S.Ternary{} -> Identifiable
S.If{} -> Identifiable
S.Try{} -> Identifiable
S.Switch{} -> Identifiable
S.Rescue{} -> Identifiable
S.Pair{} -> Identifiable
S.Array ty _ -> maybe Unidentifiable (const Identifiable) ty
S.Object ty _ -> maybe Unidentifiable (const Identifiable) ty
S.BlockStatement{} -> Identifiable
S.TypeDecl{} -> Identifiable
S.Ty{} -> Identifiable
_ -> Unidentifiable
data JSONSummary info span = JSONSummary { info :: info, span :: span }
| ErrorSummary { info :: info, span :: span }
deriving (Generic, Eq, Show)
instance (ToJSON info, ToJSON span) => ToJSON (JSONSummary info span) where
toJSON JSONSummary{..} = object [ "summary" .= info, "span" .= span ]
toJSON ErrorSummary{..} = object [ "summary" .= info, "span" .= span ]
isErrorSummary :: JSONSummary info span -> Bool
isErrorSummary ErrorSummary{} = True
isErrorSummary _ = False
data DiffInfo = LeafInfo { leafCategory :: Category, termName :: Text, sourceSpan :: SourceSpan }
| BranchInfo { branches :: [ DiffInfo ], branchCategory :: Category, branchType :: Branch }
| ErrorInfo { errorSpan :: SourceSpan, termName :: Text }
| HideInfo -- Hide/Strip from summary output entirely.
deriving (Eq, Show)
data Branch = BIndexed | BFixed | BCommented | BIf deriving (Show, Eq, Generic)
data DiffSummary a = DiffSummary {
diffSummaryPatch :: Patch a,
parentAnnotation :: [Either (Category, Text) (Category, Text)]
} deriving (Eq, Functor, Show, Generic)
summary :: HasDefaultFields fields => Both SourceBlob -> Diff (Syntax Text) (Record fields) -> Summaries
summary blobs diff = Summaries changes errors
where
changes = if null changes' then mempty else Map.singleton summaryKey (toJSON <$> changes')
errors = if null errors' then mempty else Map.singleton summaryKey (toJSON <$> errors')
(errors', changes') = List.partition isErrorSummary summaries
summaryKey = toSummaryKey (path <$> blobs)
summaries = diffSummaries blobs diff
-- Returns a key representing the filename. If the filenames are different,
-- return 'before -> after'.
toSummaryKey :: Both FilePath -> Text
toSummaryKey = runBothWith $ \before after ->
toS $ case (before, after) of
("", after) -> after
(before, "") -> before
(before, after) | before == after -> after
(before, after) | not (null before) && not (null after) -> before <> " -> " <> after
(_, _) -> mempty
-- Returns a list of diff summary texts given two source blobs and a diff.
diffSummaries :: (StringConv leaf Text, HasDefaultFields fields) => Both SourceBlob -> SyntaxDiff leaf fields -> [JSONSummary Text SourceSpans]
diffSummaries blobs diff = summaryToTexts =<< diffToDiffSummaries (source <$> blobs) diff
-- Takes a 'DiffSummary DiffInfo' and returns a list of JSON Summaries whose text summaries represent the LeafInfo summaries of the 'DiffSummary'.
summaryToTexts :: DiffSummary DiffInfo -> [JSONSummary Text SourceSpans]
summaryToTexts DiffSummary{..} = appendParentContexts <$> jsonDocSummaries diffSummaryPatch
where appendParentContexts jsonSummary =
jsonSummary { info = show $ info jsonSummary <+> parentContexts parentAnnotation }
-- Returns a list of 'DiffSummary' given two source blobs and a diff.
diffToDiffSummaries :: (StringConv leaf Text, HasDefaultFields fields) => Both Source -> SyntaxDiff leaf fields -> [DiffSummary DiffInfo]
diffToDiffSummaries sources = para $ \diff ->
let
diff' = free (Prologue.fst <$> diff)
annotateWithCategory :: [DiffSummary DiffInfo] -> [DiffSummary DiffInfo]
annotateWithCategory children = case (beforeTerm diff', afterTerm diff') of
(_, Just diff'') -> appendSummary (Both.snd sources) diff'' <$> children
(Just diff'', _) -> appendSummary (Both.fst sources) diff'' <$> children
(Nothing, Nothing) -> []
in case diff of
-- Skip comments and leaves since they don't have any changes
(Free (_ :< syntax)) -> annotateWithCategory (toList syntax >>= snd)
(Pure patch) -> [ DiffSummary (mapPatch (termToDiffInfo beforeSource) (termToDiffInfo afterSource) patch) [] ]
where
(beforeSource, afterSource) = runJoin sources
-- Flattens a patch of diff infos into a list of docs, one for every 'LeafInfo' or `ErrorInfo` it contains.
jsonDocSummaries :: Patch DiffInfo -> [JSONSummary Doc SourceSpans]
jsonDocSummaries patch = case patch of
Replace i1 i2 -> zipWith (\a b ->
JSONSummary
{
info = info (prefixWithPatch patch This a) <+> "with" <+> info b
, span = SourceSpans $ These (span a) (span b)
}) (toLeafInfos i1) (toLeafInfos i2)
Insert info -> prefixWithPatch patch That <$> toLeafInfos info
Delete info -> prefixWithPatch patch This <$> toLeafInfos info
-- Prefixes a given doc with the type of patch it represents.
prefixWithPatch :: Patch DiffInfo -> (SourceSpan -> These SourceSpan SourceSpan) -> JSONSummary Doc SourceSpan -> JSONSummary Doc SourceSpans
prefixWithPatch patch constructor = prefixWithThe (patchToPrefix patch)
where
prefixWithThe prefix jsonSummary = jsonSummary
{
info = prefix <+> info jsonSummary
, span = SourceSpans $ constructor (span jsonSummary)
}
patchToPrefix patch = case patch of
(Replace _ _) -> "Replaced"
(Insert _) -> "Added"
(Delete _) -> "Deleted"
toLeafInfos :: DiffInfo -> [JSONSummary Doc SourceSpan]
toLeafInfos err@ErrorInfo{..} = pure $ ErrorSummary (pretty err) errorSpan
toLeafInfos BranchInfo{..} = branches >>= toLeafInfos
toLeafInfos HideInfo = []
toLeafInfos LeafInfo{..} = pure $ JSONSummary (summary leafCategory termName) sourceSpan
where
summary :: Category -> Text -> Doc
summary category termName = case category of
C.NumberLiteral -> squotes $ toDoc termName
C.IntegerLiteral -> squotes $ toDoc termName
C.Boolean -> squotes $ toDoc termName
C.StringLiteral -> termAndCategoryName
C.Export -> termAndCategoryName
C.Import -> termAndCategoryName
C.Subshell -> termAndCategoryName
C.AnonymousFunction -> "an" <+> toDoc termName <+> "function"
C.Begin -> categoryName'
C.Select -> categoryName'
C.Else -> categoryName'
C.Ensure -> categoryName'
C.Break -> categoryName'
C.Continue -> categoryName'
C.BeginBlock -> categoryName'
C.EndBlock -> categoryName'
C.Yield | Text.null termName -> categoryName'
C.Return | Text.null termName -> categoryName'
C.Switch | Text.null termName -> categoryName'
_ -> "the" <+> squotes (toDoc termName) <+> toDoc categoryName
where
termAndCategoryName = "the" <+> toDoc termName <+> toDoc categoryName
categoryName = toCategoryName category
categoryName' = case categoryName of
name | startsWithVowel name -> "an" <+> toDoc name
| otherwise -> "a" <+> toDoc name
startsWithVowel text = getAny $ foldMap (Any . flip Text.isPrefixOf text) vowels
vowels = Text.singleton <$> ("aeiouAEIOU" :: [Char])
-- Returns a text representing a specific term given a source and a term.
toTermName :: forall leaf fields. (StringConv leaf Text, HasDefaultFields fields) => Source -> SyntaxTerm leaf fields -> Text
toTermName source term = case unwrap term of
S.Send _ _ -> termNameFromSource term
S.Ty _ -> termNameFromSource term
S.TypeDecl id _ -> toTermName' id
S.TypeAssertion _ _ -> termNameFromSource term
S.TypeConversion _ _ -> termNameFromSource term
S.Go expr -> toTermName' expr
S.Defer expr -> toTermName' expr
S.AnonymousFunction params _ -> "anonymous" <> paramsToArgNames params
S.Fixed children -> termNameFromChildren term children
S.Indexed children -> maybe "branch" sconcat (nonEmpty (intersperse ", " (toTermName' <$> children)))
Leaf leaf -> toS leaf
S.Assignment identifier _ -> toTermName' identifier
S.Function identifier _ _ -> toTermName' identifier
S.ParameterDecl _ _ -> termNameFromSource term
S.FunctionCall i _ args -> case unwrap i of
S.AnonymousFunction params _ ->
-- Omit a function call's arguments if its arguments match the underlying
-- anonymous function's arguments.
if (category . extract <$> args) == (category . extract <$> params)
then toTermName' i
else "(" <> toTermName' i <> ")" <> paramsToArgNames args
_ -> toTermName' i <> paramsToArgNames args
S.MemberAccess base property -> case (unwrap base, unwrap property) of
(S.FunctionCall{}, S.FunctionCall{}) -> toTermName' base <> "()." <> toTermName' property <> "()"
(S.FunctionCall{}, _) -> toTermName' base <> "()." <> toTermName' property
(_, S.FunctionCall{}) -> toTermName' base <> "." <> toTermName' property <> "()"
(_, _) -> toTermName' base <> "." <> toTermName' property
S.MethodCall targetId methodId _ methodParams -> toTermName' targetId <> sep <> toTermName' methodId <> paramsToArgNames methodParams
where sep = case unwrap targetId of
S.FunctionCall{} -> "()."
_ -> "."
S.SubscriptAccess base element -> case (unwrap base, unwrap element) of
(S.FunctionCall{}, S.FunctionCall{}) -> toTermName' base <> "()." <> toTermName' element <> "()"
(S.FunctionCall{}, _) -> toTermName' base <> "()." <> toTermName' element
(_, S.FunctionCall{}) -> toTermName' base <> "[" <> toTermName' element <> "()" <> "]"
(S.Indexed _, _) -> case category . extract $ base of
SliceTy -> termNameFromSource base <> toTermName' element
_ -> toTermName' base <> "[" <> toTermName' element <> "]"
(_, _) -> toTermName' base <> "[" <> toTermName' element <> "]"
S.VarAssignment varId _ -> termNameFromChildren term varId
S.VarDecl _ -> termNameFromSource term
-- TODO: We should remove Case from Syntax since I don't think we should ever
-- evaluate Case as a single toTermName Text - joshvera
S.Case expr _ -> termNameFromSource expr
S.Switch exprs _ -> maybe "" toTermName' (fmap snd (unsnoc exprs))
S.Ternary expr _ -> toTermName' expr
S.OperatorAssignment id _ -> toTermName' id
S.Operator _ -> termNameFromSource term
S.Object ty kvs -> maybe ("{ " <> Text.intercalate ", " (toTermName' <$> kvs) <> " }") termNameFromSource ty
S.Pair k v -> toKeyName k <> toArgName v
S.Return children -> Text.intercalate ", " (termNameFromSource <$> children)
S.Yield children -> Text.intercalate ", " (termNameFromSource <$> children)
S.ParseError _ -> termNameFromSource term
S.If expr _ -> termNameFromSource expr
S.For clauses _ -> termNameFromChildren term clauses
S.While expr _ -> toTermName' expr
S.DoWhile _ expr -> toTermName' expr
S.Throw expr -> termNameFromSource expr
S.Constructor expr -> toTermName' expr
S.Try clauses _ _ _ -> termNameFromChildren term clauses
S.Select clauses -> termNameFromChildren term clauses
S.Array ty _ -> maybe (termNameFromSource term) termNameFromSource ty
S.Class identifier _ _ -> toTermName' identifier
S.Method _ identifier (Just receiver) args _ -> termNameFromSource receiver <> "." <> toTermName' identifier <> paramsToArgNames args
S.Method _ identifier Nothing args _ -> toTermName' identifier <> paramsToArgNames args
S.Comment a -> toS a
S.Commented _ _ -> termNameFromChildren term (toList $ unwrap term)
S.Module identifier _ -> toTermName' identifier
S.Namespace identifier _ -> toTermName' identifier
S.Interface identifier _ _ -> toTermName' identifier
S.Import identifier [] -> termNameFromSource identifier
S.Import identifier exprs -> termNameFromChildren term exprs <> " from " <> toTermName' identifier
S.Export Nothing expr -> "{ " <> Text.intercalate ", " (termNameFromSource <$> expr) <> " }"
S.Export (Just identifier) [] -> "{ " <> toTermName' identifier <> " }"
S.Export (Just identifier) expr -> "{ " <> Text.intercalate ", " (termNameFromSource <$> expr) <> " }" <> " from " <> toTermName' identifier
S.Negate expr -> toTermName' expr
S.Struct ty _ -> maybe (termNameFromSource term) termNameFromSource ty
S.Rescue args _ -> Text.intercalate ", " $ toTermName' <$> args
S.Break expr -> maybe "" toTermName' expr
S.Continue expr -> maybe "" toTermName' expr
S.BlockStatement children -> termNameFromChildren term children
S.DefaultCase children -> termNameFromChildren term children
S.FieldDecl children -> termNameFromChildren term children
where toTermName' = toTermName source
termNameFromChildren term children = termNameFromRange (unionRangesFrom (range term) (range <$> children))
termNameFromSource term = termNameFromRange (range term)
termNameFromRange range = toText $ Source.slice range source
range = byteRange . extract
paramsToArgNames params = "(" <> Text.intercalate ", " (toArgName <$> params) <> ")"
toArgName :: SyntaxTerm leaf fields -> Text
toArgName arg = case identifiable arg of
Identifiable arg -> toTermName' arg
Unidentifiable _ -> ""
toKeyName key = case toTermName' key of
n | Text.head n == ':' -> n <> " => "
n -> n <> ": "
parentContexts :: [Either (Category, Text) (Category, Text)] -> Doc
parentContexts contexts = hsep $ either identifiableDoc annotatableDoc <$> contexts
where
identifiableDoc (c, t) = case c of
C.Assignment -> "in an" <+> catName c <+> "to" <+> termName t
C.Select -> "in a" <+> catName c
C.Begin -> "in a" <+> catName c
C.Else -> "in an" <+> catName c
C.Elsif -> "in the" <+> squotes (termName t) <+> catName c
C.Method -> "in the" <+> squotes (termName t) <+> catName c
C.SingletonMethod -> "in the" <+> squotes (termName t) <+> catName c
C.Ternary -> "in the" <+> squotes (termName t) <+> catName c
C.Ensure -> "in an" <+> catName c
C.Rescue -> case t of
"" -> "in a" <+> catName c
_ -> "in the" <+> squotes (termName t) <+> catName c
C.Modifier C.Rescue -> "in the" <+> squotes ("rescue" <+> termName t) <+> "modifier"
C.If -> "in the" <+> squotes (termName t) <+> catName c
C.Case -> "in the" <+> squotes (termName t) <+> catName c
C.Break -> case t of
"" -> "in a" <+> catName c
_ -> "in the" <+> squotes (termName t) <+> catName c
C.Continue -> case t of
"" -> "in a" <+> catName c
_ -> "in the" <+> squotes (termName t) <+> catName c
C.Switch -> case t of
"" -> "in a" <+> catName c
_ -> "in the" <+> squotes (termName t) <+> catName c
C.When -> "in a" <+> catName c
C.BeginBlock -> "in a" <+> catName c
C.EndBlock -> "in an" <+> catName c
C.DefaultCase -> "in a" <+> catName c
C.TypeDecl -> "in the" <+> squotes (termName t) <+> catName c
_ -> "in the" <+> termName t <+> catName c
annotatableDoc (c, t) = "of the" <+> squotes (termName t) <+> catName c
catName = toDoc . toCategoryName
termName = toDoc
toDoc :: Text -> Doc
toDoc = string . toS
termToDiffInfo :: (StringConv leaf Text, HasDefaultFields fields) => Source -> SyntaxTerm leaf fields -> DiffInfo
termToDiffInfo blob term = case unwrap term of
S.Indexed children -> BranchInfo (termToDiffInfo' <$> children) (category $ extract term) BIndexed
S.Fixed children -> BranchInfo (termToDiffInfo' <$> children) (category $ extract term) BFixed
S.AnonymousFunction _ _ -> LeafInfo C.AnonymousFunction (toTermName' term) (getField $ extract term)
S.Comment _ -> HideInfo
S.Commented cs leaf -> BranchInfo (termToDiffInfo' <$> cs <> maybeToList leaf) (category $ extract term) BCommented
S.ParseError _ -> ErrorInfo (getField $ extract term) (toTermName' term)
_ -> toLeafInfo term
where toTermName' = toTermName blob
termToDiffInfo' = termToDiffInfo blob
toLeafInfo term = LeafInfo (category $ extract term) (toTermName' term) (getField $ extract term)
-- | Append a parentAnnotation to the current DiffSummary instance.
-- | For a DiffSummary without a parentAnnotation, we append a parentAnnotation with the first identifiable term.
-- | For a DiffSummary with a parentAnnotation, we append the next annotatable term to the extant parentAnnotation.
-- | If a DiffSummary already has a parentAnnotation, and a (grand) parentAnnotation, then we return the summary without modification.
appendSummary :: (StringConv leaf Text, HasDefaultFields fields) => Source -> SyntaxTerm leaf fields -> DiffSummary DiffInfo -> DiffSummary DiffInfo
appendSummary source term summary =
case (parentAnnotation summary, identifiable term, annotatable term) of
([], Identifiable _, _) -> appendParentAnnotation Left
([_], _, Annotatable _) -> appendParentAnnotation Right
(_, _, _) -> summary
where
appendParentAnnotation constructor = summary
{ parentAnnotation = parentAnnotation summary <> [ constructor (category (extract term), toTermName source term) ] }
isBranchInfo :: DiffInfo -> Bool
isBranchInfo info = case info of
BranchInfo{} -> True
_ -> False
-- The user-facing category name of 'a'.
class HasCategory a where
toCategoryName :: a -> Text
-- Instances
instance HasCategory Text where
toCategoryName = identity
instance HasCategory Category where
toCategoryName category = case category of
C.Ty -> "type"
ArrayLiteral -> "array"
BooleanOperator -> "boolean operator"
MathOperator -> "math operator"
BitwiseOperator -> "bitwise operator"
RelationalOperator -> "relational operator"
Boolean -> "boolean"
DictionaryLiteral -> "dictionary"
C.Comment -> "comment"
C.ParseError -> "error"
ExpressionStatements -> "expression statements"
C.Assignment -> "assignment"
C.Function -> "function"
C.FunctionCall -> "function call"
C.MemberAccess -> "member access"
C.MethodCall -> "method call"
C.Args -> "arguments"
C.VarAssignment -> "var assignment"
C.VarDecl -> "variable"
C.Switch -> "switch statement"
C.Case -> "case statement"
C.SubscriptAccess -> "subscript access"
C.MathAssignment -> "math assignment"
C.Ternary -> "ternary expression"
C.Operator -> "operator"
Identifier -> "identifier"
IntegerLiteral -> "integer"
NumberLiteral -> "number"
FloatLiteral -> "float"
Other s -> s
C.Pair -> "pair"
C.Params -> "params"
Program -> "top level"
Regex -> "regex"
StringLiteral -> "string"
SymbolLiteral -> "symbol"
TemplateString -> "template string"
C.For -> "for statement"
C.While -> "while statement"
C.DoWhile -> "do/while statement"
C.Object -> "object"
C.Return -> "return statement"
C.Throw -> "throw statement"
C.Constructor -> "constructor"
C.Catch -> "catch statement"
C.Try -> "try statement"
C.Finally -> "finally statement"
C.Class -> "class"
C.Method -> "method"
C.If -> "if statement"
C.CommaOperator -> "comma operator"
C.Empty -> "empty statement"
C.Module -> "module"
C.Namespace -> "namespace"
C.Interface -> "interface"
C.Import -> "import statement"
C.Export -> "export statement"
C.AnonymousFunction -> "anonymous function"
C.Interpolation -> "interpolation"
C.Subshell -> "subshell command"
C.OperatorAssignment -> "operator assignment"
C.Yield -> "yield statement"
C.Until -> "until statement"
C.Unless -> "unless statement"
C.Begin -> "begin statement"
C.Else -> "else block"
C.Elsif -> "elsif block"
C.Ensure -> "ensure block"
C.Rescue -> "rescue block"
C.RescueModifier -> "rescue modifier"
C.When -> "when comparison"
C.RescuedException -> "last exception"
C.RescueArgs -> "arguments"
C.Negate -> "negate"
C.Select -> "select statement"
C.Go -> "go statement"
C.Slice -> "slice literal"
C.Defer -> "defer statement"
C.TypeAssertion -> "type assertion statement"
C.TypeConversion -> "type conversion expression"
C.ArgumentPair -> "argument"
C.KeywordParameter -> "parameter"
C.OptionalParameter -> "parameter"
C.SplatParameter -> "parameter"
C.HashSplatParameter -> "parameter"
C.BlockParameter -> "parameter"
C.ArrayTy -> "array type"
C.DictionaryTy -> "dictionary type"
C.StructTy -> "struct type"
C.Struct -> "struct"
C.Break -> "break statement"
C.Continue -> "continue statement"
C.Binary -> "binary statement"
C.Unary -> "unary statement"
C.Constant -> "constant"
C.Superclass -> "superclass"
C.SingletonClass -> "singleton class"
C.SingletonMethod -> "method"
C.RangeExpression -> "range"
C.ScopeOperator -> "scope operator"
C.BeginBlock -> "BEGIN block"
C.EndBlock -> "END block"
C.ParameterDecl -> "parameter declaration"
C.DefaultCase -> "default statement"
C.TypeDecl -> "type declaration"
C.PointerTy -> "pointer type"
C.FieldDecl -> "field declaration"
C.SliceTy -> "slice type"
C.Element -> "element"
C.Literal -> "literal"
C.ChannelTy -> "channel type"
C.Send -> "send statement"
C.IndexExpression -> "index expression"
C.FunctionTy -> "function type"
C.IncrementStatement -> "increment statement"
C.DecrementStatement -> "decrement statement"
C.QualifiedIdentifier -> "qualified identifier"
C.FieldDeclarations -> "field declarations"
C.RuneLiteral -> "rune literal"
C.Modifier C.Rescue -> "rescue modifier"
C.Modifier c -> toCategoryName c
instance HasField fields Category => HasCategory (SyntaxTerm leaf fields) where
toCategoryName = toCategoryName . category . extract
instance Listable Branch where
tiers = cons0 BIndexed \/ cons0 BFixed \/ cons0 BCommented \/ cons0 BIf
instance Listable1 DiffSummary where
liftTiers termTiers = liftCons2 (liftTiers termTiers) (liftTiers (eitherTiers (liftTiers (mapT unListableText tiers)))) DiffSummary
where eitherTiers tiers = liftTiers2 tiers tiers
instance Listable a => Listable (DiffSummary a) where
tiers = tiers1
instance P.Pretty DiffInfo where
pretty LeafInfo{..} = squotes (string $ toSL termName) <+> string (toSL (toCategoryName leafCategory))
pretty BranchInfo{..} = mconcat $ punctuate (string "," P.<> space) (pretty <$> branches)
pretty ErrorInfo{..} = squotes (string $ toSL termName) <+> "at" <+> (string . toSL $ displayStartEndPos errorSpan)
pretty HideInfo = ""


@@ -1,7 +1,8 @@
{-# LANGUAGE DeriveAnyClass, RankNTypes #-}
{-# LANGUAGE DeriveAnyClass, MultiParamTypeClasses, RankNTypes #-}
module Renderer.TOC
( toc
, diffTOC
, Summaries(..)
, JSONSummary(..)
, Summarizable(..)
, isValidSummary
@@ -28,13 +29,25 @@ import Diff
import Info
import Patch
import Prologue
import Renderer.Summary (Summaries(..))
import qualified Data.List as List
import qualified Data.Map as Map hiding (null)
import Source hiding (null)
import Syntax as S
import Term
data Summaries = Summaries { changes, errors :: !(Map Text [Value]) }
deriving Show
instance Monoid Summaries where
mempty = Summaries mempty mempty
mappend (Summaries c1 e1) (Summaries c2 e2) = Summaries (Map.unionWith (<>) c1 c2) (Map.unionWith (<>) e1 e2)
instance StringConv Summaries ByteString where
strConv _ = toS . (<> "\n") . encode
instance ToJSON Summaries where
toJSON Summaries{..} = object [ "changes" .= changes, "errors" .= errors ]
data JSONSummary = JSONSummary { info :: Summarizable }
| ErrorSummary { error :: Text, errorSpan :: SourceSpan }
deriving (Generic, Eq, Show)
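As a rough standalone illustration of what the Summaries Monoid instance above does when two Summaries are combined: per-path change lists that share a key are concatenated via Map.unionWith. Plain Strings stand in for Aeson Values here, purely for brevity.

```haskell
-- Illustrative only: mimics the merge performed by the Summaries Monoid
-- instance, outside the actual module.
import qualified Data.Map as Map

main :: IO ()
main = do
  let a = Map.fromList [("methods.rb", ["Added the 'foo()' method"])]
      b = Map.fromList [ ("methods.rb", ["Deleted the 'bar()' method"])
                       , ("other.rb",   ["Added the 'baz()' method"]) ]
  -- Lists under the same path are appended, just as mappend does with (<>).
  print (Map.unionWith (++) a b :: Map.Map String [String])
```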


@@ -76,9 +76,8 @@ arguments gitDir alternates = info (version <*> helper <*> argumentsParser) desc
diffArgumentsParser = Diff
<$> ( ( flag patchDiff patchDiff (long "patch" <> help "Output a patch(1)-compatible diff (default)")
<|> flag' jsonDiff (long "json" <> help "Output a json diff")
<|> flag' summaryDiff (long "summary" <> help "Output a diff summary")
<|> flag' sExpressionDiff (long "sexpression" <> help "Output an s-expression diff tree")
<|> flag' tocDiff (long "toc" <> help "Output a table of contents diff summary") )
<|> flag' tocDiff (long "toc" <> help "Output a table of contents for a diff") )
<*> ( DiffPaths
<$> argument filePathReader (metavar "FILE_A")
<*> argument filePathReader (metavar "FILE_B")


@@ -55,11 +55,6 @@ spec = parallel $ do
blobs `shouldBe` [both b b]
describe "fetchDiffs" $ do
it "generates diff summaries for two shas" $ do
(errors, summaries) <- fetchDiffsOutput summaryText "test/fixtures/git/examples/all-languages.git" "dfac8fd681b0749af137aebf3203e77a06fbafc2" "2e4144eb8c44f007463ec34cb66353f0041161fe" [("methods.rb", Just Ruby)] (const identity) Renderer.SummaryRenderer
errors `shouldBe` Just (fromList [])
summaries `shouldBe` Just (fromList [("methods.rb", ["Added the 'foo()' method"])])
it "generates toc summaries for two shas" $ do
(errors, summaries) <- fetchDiffsOutput termText "test/fixtures/git/examples/all-languages.git" "dfac8fd681b0749af137aebf3203e77a06fbafc2" "2e4144eb8c44f007463ec34cb66353f0041161fe" [("methods.rb", Just Ruby)] declarationDecorator Renderer.ToCRenderer
errors `shouldBe` Just (fromList [])
@@ -71,11 +66,11 @@ spec = parallel $ do
summaries `shouldBe` Just (fromList [("methods.rb", ["foo"])])
it "errors with bad shas" $
fetchDiffsOutput summaryText "test/fixtures/git/examples/all-languages.git" "dead" "beef" [("methods.rb", Just Ruby)] (const identity) Renderer.SummaryRenderer
fetchDiffsOutput termText "test/fixtures/git/examples/all-languages.git" "dead" "beef" [("methods.rb", Just Ruby)] declarationDecorator Renderer.ToCRenderer
`shouldThrow` (== Git.BackendError "Could not lookup dead: Object not found - no match for prefix (dead000000000000000000000000000000000000)")
it "errors with bad repo path" $
fetchDiffsOutput summaryText "test/fixtures/git/examples/not-a-repo.git" "dfac8fd681b0749af137aebf3203e77a06fbafc2" "2e4144eb8c44f007463ec34cb66353f0041161fe" [("methods.rb", Just Ruby)] (const identity) Renderer.SummaryRenderer
fetchDiffsOutput termText "test/fixtures/git/examples/not-a-repo.git" "dfac8fd681b0749af137aebf3203e77a06fbafc2" "2e4144eb8c44f007463ec34cb66353f0041161fe" [("methods.rb", Just Ruby)] declarationDecorator Renderer.ToCRenderer
`shouldThrow` errorCall "Could not open repository \"test/fixtures/git/examples/not-a-repo.git\""
where repoPath = "test/fixtures/git/examples/all-languages.git"
@@ -112,10 +107,6 @@ summaries f = parseMaybe $ \o -> do
pure (path, V.toList ys)
pure $ fromList xs
summaryText :: Object -> Text
summaryText o = fromMaybe (panic "key 'summary' not found") $
parseMaybe (.: "summary") o
termText :: Object -> Text
termText o = fromMaybe (panic "key 'term' not found") $
parseMaybe (.: "term") o


@@ -62,12 +62,10 @@ data DiffFixture = DiffFixture
instance Listable DiffFixture where
tiers = cons0 (DiffFixture (patchDiff pathMode "" []) patchOutput)
\/ cons0 (DiffFixture (jsonDiff pathMode "" []) jsonOutput)
\/ cons0 (DiffFixture (summaryDiff pathMode "" []) summaryOutput)
\/ cons0 (DiffFixture (sExpressionDiff pathMode "" []) sExpressionOutput)
\/ cons0 (DiffFixture (tocDiff pathMode "" []) tocOutput)
\/ cons0 (DiffFixture (patchDiff commitMode repo []) patchOutput')
\/ cons0 (DiffFixture (jsonDiff commitMode repo []) jsonOutput')
\/ cons0 (DiffFixture (summaryDiff commitMode repo []) summaryOutput')
\/ cons0 (DiffFixture (sExpressionDiff commitMode repo []) sExpressionOutput')
\/ cons0 (DiffFixture (tocDiff commitMode repo []) tocOutput')
@@ -77,8 +75,6 @@ instance Listable DiffFixture where
patchOutput = "diff --git a/test/fixtures/ruby/method-declaration.A.rb b/test/fixtures/ruby/method-declaration.B.rb\nindex 0000000000000000000000000000000000000000..0000000000000000000000000000000000000000 100644\n--- a/test/fixtures/ruby/method-declaration.A.rb\n+++ b/test/fixtures/ruby/method-declaration.B.rb\n@@ -1,3 +1,4 @@\n-def foo\n+def bar(a)\n+ baz\n end\n\n"
patchOutput' = "diff --git a/methods.rb b/methods.rb\nnew file mode 100644\nindex 0000000000000000000000000000000000000000..ff7bbbe9495f61d9e1e58c597502d152bab1761e\n--- /dev/null\n+++ b/methods.rb\n+def foo\n+end\n\n"
summaryOutput = "{\"changes\":{\"test/fixtures/ruby/method-declaration.A.rb -> test/fixtures/ruby/method-declaration.B.rb\":[{\"span\":{\"replace\":[{\"start\":[1,5],\"end\":[1,8]},{\"start\":[1,5],\"end\":[1,8]}]},\"summary\":\"Replaced the 'foo' identifier with the 'bar' identifier in the 'bar(\226\128\166)' method\"},{\"span\":{\"insert\":{\"start\":[1,9],\"end\":[1,10]}},\"summary\":\"Added the 'a' identifier in the 'bar(\226\128\166)' method\"},{\"span\":{\"insert\":{\"start\":[2,3],\"end\":[2,6]}},\"summary\":\"Added the 'baz' identifier in the 'bar(\226\128\166)' method\"}]},\"errors\":{}}\n"
summaryOutput' = "{\"changes\":{\"methods.rb\":[{\"span\":{\"insert\":{\"start\":[1,1],\"end\":[2,4]}},\"summary\":\"Added the 'foo()' method\"}]},\"errors\":{}}\n"
jsonOutput = "{\"oids\":[\"0000000000000000000000000000000000000000\",\"0000000000000000000000000000000000000000\"],\"paths\":[\"test/fixtures/ruby/method-declaration.A.rb\",\"test/fixtures/ruby/method-declaration.B.rb\"],\"rows\":[[{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[{\"replace\":{\"category\":\"Identifier\",\"children\":[],\"sourceRange\":[4,7],\"sourceSpan\":{\"start\":[1,5],\"end\":[1,8]}}}],\"sourceRange\":[0,8],\"sourceSpan\":{\"start\":[1,1],\"end\":[2,4]}}],\"sourceRange\":[0,8],\"number\":1,\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[{\"replace\":{\"category\":\"Identifier\",\"children\":[],\"sourceRange\":[4,7],\"sourceSpan\":{\"start\":[1,5],\"end\":[1,8]}}},{\"insert\":{\"category\":\"Params\",\"children\":[{\"category\":\"Identifier\",\"children\":[],\"sourceRange\":[8,9],\"sourceSpan\":{\"start\":[1,9],\"end\":[1,10]}}],\"sourceRange\":[7,11],\"sourceSpan\":{\"start\":[1,8],\"end\":[2,3]}}}],\"sourceRange\":[0,11],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,4]}}],\"sourceRange\":[0,11],\"number\":1,\"sourceSpan\":{\"start\":[1,1],\"end\":[4,1]}}],[{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[{\"insert\":{\"category\":\"Params\",\"children\":[],\"sourceRange\":[11,13],\"sourceSpan\":{\"start\":[1,8],\"end\":[2,3]}}},{\"insert\":{\"category\":\"Identifier\",\"children\":[],\"sourceRange\":[13,16],\"sourceSpan\":{\"start\":[2,3],\"end\":[2,6]}}}],\"sourceRange\":[11,17],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,4]}}],\"sourceRange\":[11,17],\"number\":2,\"sourceSpan\":{\"start\":[1,1],\"end\":[4,1]}}],[{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[],\"sourceRange\":[8,11],\"sourceSpan\":{\"start\":[1,1],\"end\":[2,4]}}],\"sourceRange\":[8,12],\"number\":2,\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[],\"sourceRange\":[17,20],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,4]}}],\"sourceRange\":[17,21],\"number\":3,\"sourceSpan\":{\"start\":[1,1],\"end\":[4,1]}}],[{\"category\":\"Program\",\"children\":[],\"sourceRange\":[12,12],\"number\":3,\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},{\"category\":\"Program\",\"children\":[],\"sourceRange\":[21,21],\"number\":4,\"sourceSpan\":{\"start\":[1,1],\"end\":[4,1]}}]]}\n"
jsonOutput' = "{\"oids\":[\"0000000000000000000000000000000000000000\",\"ff7bbbe9495f61d9e1e58c597502d152bab1761e\"],\"paths\":[\"methods.rb\",\"methods.rb\"],\"rows\":[[{\"insert\":{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[{\"category\":\"Identifier\",\"children\":[],\"sourceRange\":[4,7],\"sourceSpan\":{\"start\":[1,5],\"end\":[1,8]}}],\"sourceRange\":[0,8],\"sourceSpan\":{\"start\":[1,1],\"end\":[2,4]}}],\"sourceRange\":[0,8],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},\"number\":1}],[{\"insert\":{\"category\":\"Program\",\"children\":[{\"category\":\"Method\",\"children\":[],\"sourceRange\":[8,11],\"sourceSpan\":{\"start\":[1,1],\"end\":[2,4]}}],\"sourceRange\":[8,12],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},\"number\":2}],[{\"insert\":{\"category\":\"Program\",\"children\":[],\"sourceRange\":[12,12],\"sourceSpan\":{\"start\":[1,1],\"end\":[3,1]}},\"number\":3}]]}\n"


@@ -7,7 +7,6 @@ import qualified Data.Mergeable.Spec
import qualified Data.RandomWalkSimilarity.Spec
import qualified Data.Syntax.Assignment.Spec
import qualified DiffSpec
import qualified SummarySpec
import qualified GitmonClientSpec
import qualified InterpreterSpec
import qualified PatchOutputSpec
@@ -30,7 +29,6 @@ main = hspec $ do
describe "Data.RandomWalkSimilarity" Data.RandomWalkSimilarity.Spec.spec
describe "Data.Syntax.Assignment" Data.Syntax.Assignment.Spec.spec
describe "Diff" DiffSpec.spec
describe "Summary" SummarySpec.spec
describe "Interpreter" InterpreterSpec.spec
describe "PatchOutput" PatchOutputSpec.spec
describe "Range" RangeSpec.spec


@@ -1,102 +0,0 @@
{-# LANGUAGE DataKinds #-}
module SummarySpec where
import Category
import Data.Functor.Both
import Data.Functor.Listable
import Data.List (partition)
import RWS
import Data.Record
import Data.String
import Diff
import Language
import Renderer.Summary
import Info
import Interpreter
import Patch
import Prologue
import Source
import SpecHelpers
import Syntax
import Term
import Test.Hspec (Spec, describe, it, parallel)
import Test.Hspec.Expectations.Pretty
import Test.Hspec.LeanCheck
import Data.These
sourceSpanBetween :: (Int, Int) -> (Int, Int) -> SourceSpan
sourceSpanBetween (s1, e1) (s2, e2) = SourceSpan (SourcePos s1 e1) (SourcePos s2 e2)
arrayInfo :: Record '[Category, Range, SourceSpan]
arrayInfo = ArrayLiteral :. Range 0 3 :. sourceSpanBetween (1, 1) (1, 5) :. Nil
literalInfo :: Record '[Category, Range, SourceSpan]
literalInfo = StringLiteral :. Range 1 2 :. sourceSpanBetween (1, 2) (1, 4) :. Nil
testDiff :: Diff (Syntax Text) (Record '[Category, Range, SourceSpan])
testDiff = free $ Free (pure arrayInfo :< Indexed [ free $ Pure (Insert (cofree $ literalInfo :< Leaf "\"a\"")) ])
testSummary :: DiffSummary DiffInfo
testSummary = DiffSummary { diffSummaryPatch = Insert (LeafInfo StringLiteral "a" $ sourceSpanBetween (1,1) (1, 2)), parentAnnotation = [] }
replacementSummary :: DiffSummary DiffInfo
replacementSummary = DiffSummary { diffSummaryPatch = Replace (LeafInfo StringLiteral "a" $ sourceSpanBetween (1, 2) (1, 4)) (LeafInfo SymbolLiteral "b" $ sourceSpanBetween (1,1) (1, 2)), parentAnnotation = [Left (Info.FunctionCall, "foo")] }
blobs :: Both SourceBlob
blobs = both (SourceBlob (fromText "[]") nullOid "a.js" (Just defaultPlainBlob) (Just TypeScript)) (SourceBlob (fromText "[a]") nullOid "b.js" (Just defaultPlainBlob) (Just TypeScript))
spec :: Spec
spec = parallel $ do
describe "diffSummaries" $ do
it "outputs a diff summary" $
diffSummaries blobs testDiff `shouldBe` [ JSONSummary "Added the \"a\" string" (SourceSpans . That $ sourceSpanBetween (1, 2) (1, 4)) ]
prop "equal terms produce identity diffs" $
\ a -> let term = defaultFeatureVectorDecorator (category . headF) (unListableF a :: SyntaxTerm String '[Category, Range, SourceSpan]) in
diffSummaries blobs (diffTerms term term) `shouldBe` []
describe "DiffInfo" $ do
prop "patches in summaries match the patches in diffs" $
\a -> let
diff = unListableDiff a :: SyntaxDiff String '[Category, Range, SourceSpan]
summaries = diffToDiffSummaries (source <$> blobs) diff
patches = toList diff
in
case (partition isBranchNode (diffSummaryPatch <$> summaries), partition isIndexedOrFixed patches) of
((branchPatches, otherPatches), (branchDiffPatches, otherDiffPatches)) ->
(() <$ branchPatches, () <$ otherPatches) `shouldBe` (() <$ branchDiffPatches, () <$ otherDiffPatches)
prop "generates one LeafInfo for each child in an arbitrary branch patch" $
\a -> let
diff = unListableDiff a :: SyntaxDiff String '[Category, Range, SourceSpan]
diffInfoPatches = diffSummaryPatch <$> diffToDiffSummaries (source <$> blobs) diff
syntaxPatches = toList diff
extractLeaves :: DiffInfo -> [DiffInfo]
extractLeaves (BranchInfo children _ _) = join $ extractLeaves <$> children
extractLeaves leaf = [ leaf ]
extractDiffLeaves :: SyntaxTerm String '[Category, Range, SourceSpan] -> [ SyntaxTerm String '[Category, Range, SourceSpan] ]
extractDiffLeaves term = case unwrap term of
(Indexed children) -> join $ extractDiffLeaves <$> children
(Fixed children) -> join $ extractDiffLeaves <$> children
Commented children leaf -> children <> maybeToList leaf >>= extractDiffLeaves
_ -> [ term ]
in
case (partition isBranchNode diffInfoPatches, partition isIndexedOrFixed syntaxPatches) of
((branchPatches, _), (diffPatches, _)) ->
let listOfLeaves = foldMap extractLeaves (join $ toList <$> branchPatches)
listOfDiffLeaves = foldMap extractDiffLeaves (diffPatches >>= toList)
in
Prologue.length listOfLeaves `shouldBe` Prologue.length listOfDiffLeaves
isIndexedOrFixed :: Patch (Term (Syntax a) annotation) -> Bool
isIndexedOrFixed = any (isIndexedOrFixed' . unwrap)
isIndexedOrFixed' :: Syntax a f -> Bool
isIndexedOrFixed' syntax = case syntax of
(Indexed _) -> True
(Fixed _) -> True
(Commented _ _) -> True
_ -> False
isBranchNode :: Patch DiffInfo -> Bool
isBranchNode = any isBranchInfo


@@ -1,50 +0,0 @@
# May 16th, 2016
## What we were even doing here
This was our inaugural weekly.
We're sort of looking at this meeting as a retrospective on the previous week. We're aiming very roughly at ~10min, but since we've never done this before, and since Rick is starting this week, we'll see how it goes.
We went in first name alphabetical order, and shared three things from the last week:
1. What went well.
2. What went less well.
3. What we learned.
## Retrospective
@joshvera:
- Diff summaries! We now have a functional way of mapping diffs to their summaries. Tricky path to get there, but pretty simple once completed.
- Also worked on alignment stuff. Tricky stuff, possibly undecidable stuff.
- We've been learning about recursion schemes, and different ways to tear down and build up finite & infinite data structures in versatile & structured ways.
@rewinfrey (on his first week with us! :tada:):
- Last week was wrapping up some work on a tenant scoping problem. Handed that off to @bryanaknight.
- Ran through `semantic-diff` setup, and `stack test` is all green.
- Also spending some time configuring Atom for Haskell &c.
- Excited to be here! :tada: (**Ed:** And we're excited to have you! :heart:)
@robrix:
- Diff summaries went well.
- Working on alignment also went well, thanks to :pear:ing w/ @joshvera.
- I was reminded that at its best, :pear:ing is a “greater than the sum of its parts” sort of thing.
- Forgot to note that syncing up w/ @jbarnette on the meta-discussion around alignment was incredibly valuable. I spent a lot of time Writing Things Down last week, and I'm very glad I did.
The above took us 8min. Nice!
## Metaretrospective
Since this was our inaugural weekly chat, we also did a retrospective on the retrospective:
- @joshvera observed that even if he's not working closely with @rewinfrey & @robrix, he'll have some idea of what we were working on, but less of an idea of what we learned; it's both more interesting & harder to discover.
- @rewinfrey pointed out that this can help us discover unknown unknowns; “I learned x” gives others a chance to say “have you heard of y, which supersedes x?”
- @rewinfrey further noted that this is sort of a "what would be worth learning?" question, which sets us up nicely for this week. It's a good chance to confirm that goals for the week are useful!
- @robrix was very glad to get the above feedback; this metaretrospective was super valuable.
- @robrix later realized he forgot to set down what the goals of this meeting are (in his opinion), which would be worth talking about.


@@ -1,119 +0,0 @@
# May 24th, 2016
NB: On Tuesday this week since Monday was Victoria Day.
## Agenda
1. @robrix to describe a couple of tweaks he'd like to make to this meeting, followed by discussion of same:
- Adding “what did you enjoy” question.
- Rotate facilitator & note-taking tasks weekly.
- Going through the retro questions point by point instead of person by person.
2. Retrospective on last week:
- What went well?
- What was challenging?
- What did you learn?
- What did you enjoy? (If we didn't decide not to do this.)
3. (Meta)retrospective on the format &c.
## Tweaks to the format
- :+1: to trying it out.
## What went well?
@joshvera:
- Diff summary meeting on Wed.
- Finished up a lot of the skeleton of diff summaries.
- Yesterday paired w/ @rewinfrey & shared context about summaries.
@rewinfrey:
- Pairing w/ @robrix last week.
- Asking questions!
- Learned a lot.
- Came out of the week feeling hopeful rather than defeated.
- Pairing w/ @joshvera yesterday on diff summaries. Got context about shapes, helped solidify intuitions re: the datatypes &c.
@robrix:
- Pairing!
- Made a lot of progress on the alignment stuff, which was a secondary goal, so it's a pretty great bonus.
- Particularly interesting building the infrastructure to do property tests around alignment stuff.
## What was challenging?
@joshvera:
- Stubbing out parts of diff summaries that we don't have good answers for. E.g. productions, what type of info we'll get out of parent annotations/contexts when constructing summaries.
@rewinfrey:
- Newness to Haskell & the project. Trying to solidify understanding of Haskell at the same time as the shapes of the data, especially in contexts like the Alignment problem. Challenge is diminishing over time.
@robrix:
- Pairing as much as we did was exhausting, just because I have to be “on” for extended periods of time; worried that I tired @rewinfrey out too. Feel like this will be ameliorated organically what with long weekends and schedule tweaks and @joshvera being freed up a bit. Also just getting used to it will help.
- There are some question marks about parts of the alignment stuff that are making me a bit nervous. Adding test cases for a couple of those will definitely help!
## What did you learn?
@joshvera:
- There's a ton more stuff to do to get the types of diff summaries that we actually want as opposed to the ones that our current system produces, e.g. w.r.t. `tree-sitter`.
@rewinfrey:
- Learned a lot.
- The transition from thinking about function application to thinking about function composition was an eye-opener.
- Understanding a lot more about why certain shapes are advantageous.
@robrix:
- The halting problem applies to human analysis of programs too; sometimes you still have to go through a problem step by step in your head to understand it.
- Analyzing infinite loops in terms of general recursion/primitive recursion is pretty clarifying; e.g. "why isn't this primitive recursive? Can this be strictly positive?"
- Reminded that I don't _really_ understand a thing 'til I've articulated it clearly.
## What did you enjoy?
@joshvera:
- Pairing w/ @rewinfrey yesterday.
- Giving the demo Wednesday.
- Voicing how I think about the structures we use.
@rewinfrey:
- > I feel like everything is pretty awesome.
- Getting paid to do Haskell is a dream come true.
- > Haskell honeymoon.
- Learning.
- Working w/ @joshvera & @robrix. Patience; willingness to share; egolessness. **Ed:** 💟
@robrix:
- Pairing w/ @rewinfrey.
- @rewinfreys questions!
- Articulating approaches taken; e.g. the meta-level of the function application to function composition transition. I hadn't really been conscious of that transition until @rewinfrey asked why I used pointfree style in one part of the code, or how I decide whether to write a function tacitly or not. Being forced to articulate it helped me understand that this had happened, which in turn sheds light on the difference between “can” and “should” for this particular problem.
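A minimal sketch of that application-to-composition shift, with toy functions standing in for anything from the codebase:

```haskell
import Data.Char (toUpper)

-- Thinking in terms of application: name the argument and apply each step to it.
shoutApplied :: String -> String
shoutApplied s = map toUpper (reverse s)

-- Thinking in terms of composition: build the pipeline point-free.
shoutComposed :: String -> String
shoutComposed = map toUpper . reverse

main :: IO ()
main = mapM_ putStrLn [shoutApplied "semantic", shoutComposed "semantic"]
```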
## Metaretro
- @joshvera: Some of the questions felt like I was repeating thoughts by rephrasing through a different lens.
- @rewinfrey: I think I might keep these questions in mind throughout the week.
- @robrix: All of the above.
In response, going to drop the “What did you enjoy?” question, & have the facilitator/note-taker for the next meeting open the agenda PR for it the week before (i.e. today).


@@ -1,81 +0,0 @@
# May 31st, 2016
NB: On Tuesday this week since Monday was Memorial Day.
## Agenda
1. Retrospective on last week:
- What went well?
- What was challenging?
- What did you learn?
## What went well?
@joshvera:
- Diff summaries merged.
- Introduced a new prelude.
- Pairing with Rick.
- Alignment!
@rewinfrey:
- Pairing with Josh.
- Was at MoonConf and enjoyed the conference.
- Almost finished with Haskell Tic Tac Toe.
@robrix:
- Alignment resolved!
- Getting diff summaries merged.
## What was challenging?
@joshvera:
- Didn't make as much progress on the structure of diff summaries as desired.
@rewinfrey:
- Continuing to adjust to the project.
- Hit an edge case in Minimax that is tricky.
@robrix:
- Not sure why the line approach in alignment solved the problem.
- In stack 1.1 you cannot rely on it rebuilding internal packages (must clean and rebuild semantic diff tool)
## What did you learn?
@joshvera:
- Learned about the different types of preludes.
- Learned about an extension in GHC 8.0 that introduces Applicative Do syntax, but is tricky to use in parallel computations because of the order of execution.
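A minimal sketch of the ApplicativeDo idea, with made-up lookups: when the statements in a do block don't depend on one another, GHC 8.0's ApplicativeDo desugars them to Applicative combinators, which is exactly where evaluation order gets subtle in parallel settings.

```haskell
{-# LANGUAGE ApplicativeDo #-}

-- With ApplicativeDo, this desugars to roughly
-- (\name age -> (name, age)) <$> lookupName <*> lookupAge,
-- because neither statement uses the other's result.
pair :: Maybe (String, Int)
pair = do
  name <- lookupName
  age  <- lookupAge
  pure (name, age)
  where
    lookupName = Just "rick"
    lookupAge  = Just 34

main :: IO ()
main = print pair
```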
@rewinfrey:
- Learned about different recursion schemes (e.g. zygomorphism).
- The transition from thinking about function application to thinking about function composition was an eye-opener.
- Understanding a lot more about why certain shapes are advantageous.
- http://livestream.com/accounts/16500216/events/5467460 (first video)
- Greg Pfeil's talk: 02:24 Recursion Where FP Hits Bottom
- Amar Shah's talk: 04:24 Point Free or Die
- Ashley Powell's talk: 05:57 Negotiating Salary for Women in Tech
- Patrick Thomson's talk: 06:25 Bracer: Transforming Real-World Languages with Coproducts and Recursion Schemes
@robrix:
- Learned a lot about GHCi debugging.
- Remembered to question assumptions, specifically about alignment property tests that were throwing results off.
- Using Arbitrary for purposes other than QuickCheck.
## Other Items
@robrix:
- Planning this week about the timeline for staffshipping diff summaries.
- Planning for a possible mini-summit in late June.


@@ -1,35 +0,0 @@
# Planning Meeting June 1, 2016
## Observations
@robrix:
- Since Alignment Diff PR was merged, it offers a good chance to step back and look at what would be most valuable in support of Diff Summaries and the 90 day goals.
- Want to triage Diff Summaries and decide whether we should reprioritize staff shipping Semantic Diff now that Alignment is merged.
@joshvera:
- Reviewed open issues to prepare for planning meeting while evaluating how they fit into our milestones.
- Thinking about how we can ensure resilience if Semantic Diff times out or fails, and having a fallback to traditional diffing.
- Also thinking about how to ensure Semantic Diff can render correctly.
## Focus Points
- Update semantic diff sha in github/github so semantic diff can be updated in github local development environments [PR](https://github.com/github/github/pull/56240)
- Improving Diff Summaries (performance and structure)
- Profiling and algorithm improvements to SES (Performance label)
- Fix Ruby Parser
- Unicorn Timeouts (algorithmic improvement, better fallback (should probably be driven by workflow tools))
- Benchmarking
- Other Features: detecting and rendering moves (as part of DotCom milestone)
## Plan
@robrix & @joshvera:
- Rob to set aside Benchmarking for now, focus on profiling (pair with Rick).
- Josh and Rick to pair on Diff Summary performance while Rob is out.
- Have Rick take a look at the Unicorn timeouts (involving some profiling to identify where things are slowest).
- Rob on vacation starting Tuesday June 7th (for 10 days)
- Schedule mini-summit for week of June 20th.
- Longer term planning including solidifying a road map will be held at the mini-summit.


@@ -1,63 +0,0 @@
# May 31st, 2016
## Agenda
1. Retrospective on last week:
- What went well?
- What was challenging?
- What did you learn?
## What went well?
@robrix:
- As of this morning we're pushing data to graphite.
- There's a 2-hour window for when data is pulled into graphite.
- We have a much more thorough understanding of the shape of the SES problems.
@rewinfrey:
- Came away with a much more thorough understanding of SES.
- Pairing with Rob on the profiling issues.
@joshvera:
- Exploring the current productions of tree-sitter output.
- Got an airbnb for the Mini-Summit.
## What was challenging?
@robrix:
- SES performance has been a problem. We were briefly fooled into thinking we had solved it. We're profiling semantic-diff-tool and running the tests on semantic-diff. If we don't clean semantic-diff-tool then it doesn't know that semantic-diff has been rebuilt and it doesn't try to relink it. Happens specifically when changing branches.
- SES performance depends on an O(n) cost function.
@rewinfrey:
- Heartbreaking to discover that a huge performance win was a bad build.
- Felt like it was hard to contribute to the deployment and build process.
- Do we have a fallback in place in case S3 fails?
@joshvera:
- Error productions from tree-sitter are difficult to debug and obscure diff summary output.
- Understanding and communicating how our deployment process works to other people. Maybe this means we need better documentation?
## What did you learn?
@robrix:
- Learning about parallelism because we have large asymptotic factors in SES.
- Developed a stronger intuition for why cost has to be linear with respect to the size of the diff tree.
@rewinfrey:
- Learned a lot about profiling in Haskell.
- Learned how to use Profiteur to visualize the space and time costs for a given computation.
- Trying to use the Eval monad to parallelize the Minimax algorithm (see the sketch after this list).
@joshvera:
- Learned about designing CRISPR proteins that can be edited into bacteria to defend against viruses and plasmids.
- Read up on GHC's Core language in order to understand some of the optimizations GHC performs.
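As an aside, here is a minimal sketch of the Eval-monad pattern from `Control.Parallel.Strategies` mentioned above; the `score` function stands in for a minimax evaluation and is not code from this repository:

```haskell
import Control.Parallel.Strategies (runEval, rpar, rseq)

-- Evaluate two subtree scores in parallel: spark the first, evaluate the
-- second on the current thread, then wait for the spark before returning.
scoreBoth :: (a -> Int) -> a -> a -> (Int, Int)
scoreBoth score l r = runEval $ do
  a <- rpar (score l)
  b <- rseq (score r)
  _ <- rseq a
  return (a, b)
```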
## Other Items
- Mini-Summit plans set for the week of June 20th.
- Rob on vacation starting Tuesday June 7th! :sunglasses:

View File

@ -1,74 +0,0 @@
# Semantic Diff Problems (Mini-Summit)
### Performance (most significant problem)
- SES / Alignment are biggest time / space consumers.
- Profiling small subsets of code paths rather than the full context.
- Adding more criterion benchmarks for code paths not currently profiled (like Diff Summaries).
##### Alignment performance
- Has to visit each child of each remaining line.
##### [SES](https://github.com/github/semantic-diff/files/22485/An.O.ND.Difference.Algorithm.and.its.Variations.pdf) Performance
- O(n^3) in the size of the tree.
- Can try bounded SES (looks ahead by a fixed size of nodes).
- Identify more comparisons we can skip (i.e. don't compare functions with array literals).
- Does not look like there are more easy wins here (the algorithm is already implemented to prevent unnecessary comparisons).
- In some cases, the diffing is expensive because we don't have more fine-grained identifiers for certain diffs (e.g. a test file with 100 statement expressions).
- Diffing against identifiers (use the edit distance to determine whether to compare terms with SES or not; see the sketch after this list).
- This could result in us missing a function rename though.
- Not a catchall, but it can help increase performance in a larger number of cases.
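A minimal sketch of the identifier-gating idea (the names and the threshold are hypothetical, not the actual semantic-diff implementation): compare identifiers by edit distance and only fall through to the expensive SES comparison when they are close.

```haskell
import Data.Array (array, listArray, (!))

-- Hypothetical gate: only run full SES when the two terms' identifiers are
-- within a small edit distance. As noted above, a rename can slip past this.
shouldCompareBySES :: String -> String -> Bool
shouldCompareBySES a b = levenshtein a b <= threshold
  where threshold = 2  -- illustrative value only

-- Textbook dynamic-programming Levenshtein distance.
levenshtein :: String -> String -> Int
levenshtein xs ys = table ! (m, n)
  where
    (m, n) = (length xs, length ys)
    xa = listArray (1, m) xs
    ya = listArray (1, n) ys
    table = array ((0, 0), (m, n))
              [ ((i, j), go i j) | i <- [0 .. m], j <- [0 .. n] ]
    go 0 j = j
    go i 0 = i
    go i j
      | xa ! i == ya ! j = table ! (i - 1, j - 1)
      | otherwise        = 1 + minimum [ table ! (i - 1, j)
                                       , table ! (i, j - 1)
                                       , table ! (i - 1, j - 1) ]
```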
##### [RWS](https://github.com/github/semantic-diff/files/325837/RWS-Diff.Flexible.and.Efficient.Change.Detection.in.Hierarchical.Data.pdf) Performance
- Random Walk Similarity.
- Computes an approximation to the minimal edit script (see the sketch after this list).
- O(n log n) rather than O(n^3).
- RWS does not rely on identifiers.
- RWS solves our performance problem in the general form.
- Can allow us to diff patches of patches (something we cannot do currently with our implementation of SES).
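To give a flavour of how RWS sidesteps identifiers, here is a deliberately simplified sketch: summarise each subtree as a bag of labels (standing in for the paper's p,q-grams) and match terms whose vectors are close. The real algorithm hashes grams into fixed-width feature vectors and uses approximate nearest-neighbour search, so treat this only as an illustration.

```haskell
import qualified Data.Map.Strict as Map

type FeatureVector = Map.Map String Double

-- Count label occurrences in a subtree (labels stand in for p,q-grams).
featureVector :: [String] -> FeatureVector
featureVector = foldr (\label -> Map.insertWith (+) label 1) Map.empty

-- Euclidean distance between two sparse vectors; terms whose vectors are
-- close are candidates for being matched to one another.
distance :: FeatureVector -> FeatureVector -> Double
distance u v = sqrt (sum (Map.elems (Map.map (^ 2) (Map.unionWith (-) u v))))
```

Matching then pairs each term with its nearest neighbour under this distance, rather than relying on shared identifiers.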
##### Diff summaries performance
- Performance of DS is dependent on diffing (Diff Terms, Interpreter, cost functions)
### Failing too hard
- Request is not completing if Semantic Diff fails.
- How can we fail better on dotcom?
- How can we fail better when parsing? (both in Semantic Diff and dotcom)
### Responsiveness
- Async fetch diff summaries / diffs / progressive diffs or diff summaries
### Improving grammars
- Fix Ruby parser.
- Testing and verifying other grammars.
### Measure effectiveness of grammars
### Tooling
- Why isn't parallelization of SES having the expected effect?
- Should focus on low hanging fruit but we're not going to write a debugger.
### Time limitations with respect to solutions and team
### Ramp up time is extremely variable.
### Onboarding
- Pairing has been fantastic.
- SES algorithm requires some context and background to understand the code at the general / macro level.
- Plan a bit before pairing to gain context.
### Pre-launch Ideas
- Test on a couple file server nodes and run semantic diff on javascript repos.
- Collect repos, files, and SHAs that contain error nodes to estimate error rates and expose errors in tree-sitter grammars.
- If sources have errors, can we use a parser that validates the source is correct?
- Configure a script that is as language independent as possible that can automate the error collection process but allows us to specify an independent validating parser for each language.

View File

@ -1,36 +0,0 @@
# June 27th, 2016 weekly
## What went well?
@joshvera: Pairing, minisummitting, RWS discussions.
@rewinfrey: Pairing, context on recursion schemes, started independent work on the project, minisummitting. Defined what to work on next
@robrix: Minisummit: got to know both of you better & really enjoyed that. Before that I was on vacation but you both did a great job!
## What went less well?
@joshvera: Lots more problems turned up. Lots of stuff that has taken on more importance as we've thought about it more. Feel like I could've made more progress on diff summaries by now. Some of that has been the minisummit; some of that has been that every time we do more work on it there seem to be new layers peeling off, exposing other issues & more work needing to be done.
@rewinfrey: Maybe I'm overly optimistic but I don't have anything to point to that I didn't think went well. The challenges we identified during minisummit felt like a good sign of the project moving forward.
@robrix: Ditto. May end up being a bit distracted over the next couple of weeks figuring out some stuff re: summit & my attendance of it.
## What did you learn?
@joshvera: Type ∋ Type isn't as easy as it sounds. Learned about you both too!
@rewinfrey: Learned about you both. Recursion schemes! Relationships between algebras & projections, coalgebras & embeddings, and recursion-schemes `Base` type family. Further explored some other morphisms. RWS (albeit misreading some of it).
@robrix: Learned about you both! RWS. Some stuff about derivative-parsing. Learned a lot about communication too.
## Anything else?
@joshvera: Out Thursday/Friday.
@rewinfrey: Josh, how did the blue suit fit? (“Really well.”)
@robrix: Canada Day on Friday. You're both invited to celebrate it as well, by being as Canadian as possible.

View File

@ -1,29 +0,0 @@
# July 12th, 2016 weekly
## What went well?
@joshvera: Glad to see objects working, and the test harness was helpful for that.
@robrix: RWS work went well, specifically about ordering. Also confirming that RWS does not conflict with moves, although we do not support moves yet. Also generics with zipping turned out well!
@rewinfrey: The move went well, more or less.
## What went less well?
@joshvera: Was sick and would have liked to contribute to test harness earlier.
@robrix: Scrap your boilerplate style of generic programming in Haskell is tough.
@rewinfrey: Adding test cases manually is tough. Want to find a way to automate that process.
## What did you learn?
@joshvera: Learned a lot about RWS, and generics in Haskell.
@robrix: Learned about natural transformations. Understood the RWS ordering problem. How to metaprogram in Haskell without headaches! Wrote a catamorphism that annotates every element in the structure with the result of the fold up to that element (a sketch of that idea follows below).
@rewinfrey: Learned how to install curtains, and visiting Ikea is ideal on the 4th of July.
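A small sketch of that annotating catamorphism, using recursion-schemes and Cofree (the helper name is made up; this is the idea rather than the code that was written):

```haskell
import Control.Comonad (extract)
import Control.Comonad.Cofree (Cofree (..))
import Data.Functor.Foldable (Fix, cata)

-- Annotate every node with the result of folding the subtree rooted there:
-- fold the already-annotated children, then pair the result with them.
annotating :: Functor f => (f a -> a) -> Fix f -> Cofree f a
annotating alg = cata (\layer -> alg (extract <$> layer) :< layer)
```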
## Anything else?
@robrix: Reminder about Q3 goals; they will be the focus of 1:1s for the next couple of weeks. Check-in on how the weekly format is going (@joshvera & @rewinfrey: no complaints). Also, see everyone at Summit!

View File

@ -1,30 +0,0 @@
# July 26, 2016 weekly
Last week was Summit.
## What went well?
@rewinfrey - Pairing w/ @joshvera. Made significant progress on diff summary property tests. And the PR got merged!
@joshvera - Pairing. Summit!
@robrix - Summit! @joshvera & @rewinfrey really came together on the diff summary stuff, too ❤️
## What was challenging?
@rewinfrey - How to add more test cases in a non-manual way.
@joshvera - Figuring out how much the JS parser covers. If we improved our error handling to cover more cases that might get us partway to staff shipping.
@robrix - **After the fact:** I don't remember what I said now, nor what was challenging that week.
## What did you learn?
@rewinfrey - All the things at Summit! Learned a lot about property testing while pairing with @joshvera.
@joshvera - Learned about property tests. Been reading about probabilities through this machine learning book. Onto continuous probability.
@robrix - Some stuff about derivative parsers, probably?

View File

@ -1,45 +0,0 @@
## What went well?
@rewinfrey
* Made a lot of progress on auto generation of test cases.
@robrix
* Mergeable PR went well. A pretty big step for maintenance costs.
* Self-assessment forms are really streamlined compared to last time.
@joshvera
* Adding remaining cases to Syntax is going pretty well.
## What went less well?
@rewinfrey
* Being rotated onto platform support was initially confusing. By the end of the week we knocked out a production bug that was affecting customers.
@robrix
* Though the self-assessment training was more streamlined, there was a lot of training involved.
@joshvera
* Realized mapping C into Syntax will be more trouble than anticipated.
## What did you learn?
@rewinfrey
* Learned a lot about Haskell's shell interactions.
* Specifically creating processes and how Haskell abstracts over communicating with the shell.
* Learned about Yesod to do web programming in Haskell.
@robrix
* Reinforced knowledge of generic programming and property tests for laws governing new type classes. In the case of Mergeable, it's difficult to describe the powerset behavior Mergeable has, and property tests helped with that.
@joshvera
* Learned about probability distributions and recalled that `suchThat` is kind of like filter for Arbitrary types in QuickCheck.
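For reference, a tiny QuickCheck illustration of that `suchThat` observation (not taken from the codebase):

```haskell
import Test.QuickCheck

-- `suchThat` keeps regenerating until the predicate holds,
-- much like filtering a list of candidate values.
evens :: Gen Int
evens = arbitrary `suchThat` even

prop_allEven :: Property
prop_allEven = forAll evens even
```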

View File

@ -1,37 +0,0 @@
### What went well?
@robrix: Interpreter stuff was effective in cleaning up / clarifying some previously messy code. The diffing algorithm is now a little scripting language.
@joshvera: Javascript syntaxes went well. Self assessments were more streamlined.
@rewinfrey: Self assessments were simple. Pairing with Rob went well (first time really driving).
### What didn't go well?
@robrix: Ancillary tasks not going so well (patent process).
@joshvera: Self assessments took time away from working on minor diff summary tasks. Getting C into syntax is going to be difficult.
@rewinfrey: Recognizing that there is a better way to handle the effects in the test generator at the type level, but not quite there skill-wise to implement it (but also recognize I will get there :) ).
### What did you learn?
@robrix: Cemented understanding of free monads and interpreters (a sketch follows below). Learned about type families and type classes for derivative parsers. Keep things in a single module and break out as needed to avoid orphan instances.
@joshvera: Learned about the performance tradeoffs of the free monads we currently use, and learned more about effects. Over the weekend, read about comonads as pointed spaces.
@rewinfrey: Learned about how to model effects in the type system (via free and cofree), and learned foundational Yesod (routing, controllers, models / migrations, etc.).
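A minimal sketch of the free-monad-as-scripting-language idea mentioned above; the `DiffF` instruction set and interpreter are illustrative stand-ins, not the project's actual diffing functor:

```haskell
{-# LANGUAGE DeriveFunctor #-}
import Control.Monad.Free (Free (..), liftF)

-- A toy instruction set for a diffing "script".
data DiffF next
  = Compare String String (Bool -> next)
  | Emit String next
  deriving Functor

compareTerms :: String -> String -> Free DiffF Bool
compareTerms a b = liftF (Compare a b id)

emit :: String -> Free DiffF ()
emit msg = liftF (Emit msg ())

-- One interpreter for the script; another could log, batch, or parallelise
-- the same program without changing it.
run :: Free DiffF a -> IO a
run (Pure a)               = pure a
run (Free (Compare a b k)) = run (k (a == b))
run (Free (Emit msg next)) = putStrLn msg >> run next
```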
### Other things?
@robrix: Re: Staff shipping in two weeks -- how do we feel? Our performance issues are still a concern, but responsiveness is being looked at outside the team.
@joshvera: Feeling okay with staff shipping, but performance should be addressed before staff shipping. This seems possible.
@robrix: Please keep in mind the following:
- If we can't meet the deadline, can we punt?
- No death marches please.
@joshvera: Let's reconvene later this week and update status of deliverables.
@robrix: Goal is to get to smaller, more regular releases. Right now that is hard, because the initial staffship of Diff Summaries frontloads a lot of functionality that future releases will benefit from.

View File

@ -1,27 +0,0 @@
# August 15th, 2016 weekly
## What went well?
@rewinfrey: Test generation branch got merged in. Completion of the syntax for JS! Progress on diff summaries <3
@joshvera: Deploying `semantic-diff` went pretty well. Added a bunch of issues for the remaining tasks. It's a manageable chunk of work for this week.
@robrix: The _p_,_q_-gram precomputation branch got merged. Also resolved RWS bias, and got a chance to bounce that off @jbarnette ❤️
## What went …less well?
@rewinfrey: We aren't producing replacement patches, which is a bit of a question mark.
@joshvera: Some `nil`s in Ruby were causing exceptions (though they were easy to resolve). Almost botched a deploy; that could've gone better.
@robrix: Running into #683 at the last mile of the precomputing _p_,_q_-grams branch was frankly a bit of a shock.
## What did we learn?
@rewinfrey: Learned a lot about ambiguity & contradiction in Haskell's typechecking & type inference. Very cool seeing how unification is able to resolve types at the very last moment. Did some pairing on coercing a monadic value into a monadic wrapper.
@joshvera: Composing effects in `freer`. Also that we rotate into platform support on Sunday, as I did, so I'll be doing that this week.
@robrix: Discovered #683! Also learned that the RWS-Diff paper's implementation suffers this problem, but they didn't notice (or at least didn't note it), probably due to some ameliorating factors (they match up equalities in an earlier pass which we lack, meaning that the most obvious effects of this wouldn't be felt). This was also a good reminder that some of our correct behaviour is only incidentally captured in our tests (if at all), partly because it's often difficult to extract a precise property to test given an intuitive grasp of what a correct diff should look like. **Edit:** Bonus round: `.jbarnette me` exists!

View File

@ -1,42 +0,0 @@
# August 22nd, 2016 weekly
## What Went Well?
@joshvera: A lot of pull requests, a lot of changes. Hard to remember everything we covered, but covered a lot of bug fixes, a lot of testing and improving term productions in preparation for staff shipping diff summaries.
@robrix: We covered a lot of ground especially in RWS.
@rewinfrey: Great to dive into the meat of RWS, and also make so much progress on diff summaries. Good week of work.
## What Went Less Well?
@joshvera: Would have liked to get error reporting into diff summaries. Also on platform support the week before staff shipping so in hindsight, timing could have been better.
@robrix: Some concern that changes to RWS have overfitted it to our fixtures rather than maintaining general applicability. I have changes in mind, but those changes are on the magnitude of weeks, not days. Also apologies for pushing back reviews.
@rewinfrey: Programming by coincidence to improve our RWS output.
## What did we learn?
@joshvera: Learned more about RWS. Also, updates to RWS and the test output allow us to verify whether our models are correct, in a way loosely analogous to machine learning. The pruning step before running RWS was interesting to learn about.
@robrix: Learned about biases RWS has and that our implementation has, and why the paper didn't run into them or describe them as much. Want to introduce a proportional feature vector rather than a stochastic feature vector so we can make comparisons more meaningful between tree nodes.
@rewinfrey: Learned about xml diff subscription and querying.
## Other items
Staff Ship Plan:
- Write blog post
- Test github/github locally
- Unless something major happens, we'll staff ship this afternoon (probably around 2pm EST)
- Follow up with people in #javascript for repo suggestions for diff summaries
- Reprioritize / add issues in the milestone for post staffship
- Look at improving error messages and refining the diff summary productions

View File

@ -1,100 +0,0 @@
# September 9th, 2016
- Hack week was last week and we skipped the weekly.
- We moved the weekly to the end of the week to try to cut down on the “wait, what _did_ happen last week?” thing.
- This ended up being @tclem's first weekly 👋
#### What went well?
@joshvera:
Hack week:
- Made a lot of progress on the TypeScript parser.
- May be able to use that as the basis for a more rigorous JavaScript parser as well.
This week:
- Understand RWS & some other diffing algos a lot better than before.
@rewinfrey:
- Pairing w/ @tclem.
- Updating test cases is much more efficient.
- Modelling effects in Free.
@tclem:
- Little fixes.
- Static linking of ICU in dev.
- statsd client.
@robrix:
- semantic-diffd
- Docker
- kubes
- Markdown
#### What were the challenges?
@joshvera:
- Integrating a pass before RWS. Using constant-time (per-subtree) SES before RWS to match up equal things. There are some ordering problems with the result.
- Ambiguities in the TypeScript grammar. Possibly due to JS actually being context-sensitive. @maxbrunsfeld advises parsing a superset of the language… but which superset?
@rewinfrey:
- Trying to get an effect system in Free. Got it, but tricky.
- Also picked up an issue with template strings which defies debugging.
@tclem:
- > My head hurts by the end of the day.
- Being challenged by some of the new concepts, the vernacular &c.
- Hard to know when to jump down the rabbit hole and learn a thing or when to gloss over it.
- Have a queue of things to read.
@robrix:
- Converting between line/column ranges and character ranges.
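For context, a sketch of the kind of conversion involved (the helper is hypothetical; real code has to agree with the parser on line endings and character encoding):

```haskell
-- Character offset of a zero-indexed (line, column) position, assuming
-- '\n' line endings and that columns count characters rather than bytes.
toOffset :: [String] -> (Int, Int) -> Int
toOffset srcLines (line, column) =
  sum (map ((+ 1) . length) (take line srcLines)) + column
```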
#### What did you learn?
@joshvera:
- > I became one with the TypeScript grammar.
- > Maybe we should find a way to have the grammars write themselves.
- Working on this `effects` package (formerly called `freer`) & its `Eff` type. It uses an open union data structure, which `Data.Record` is sort of an approximation of. `Eff` has this detail where its list of function types is “type-aligned”: every item in the list is a function type, and they all chain together, a -> b, b -> c, etc. Adding, removing, & replacing effects is constant-time. (A sketch of the type-aligned idea follows at the end of this section.)
@rewinfrey:
- Free, and how to model effects in Free.
- Straight-up category theory stuff. Compositions of natural transformations &c.
@tclem:
- Post about functors, applicatives, and monads shown in pictures.
- How this manages to be a bridge between pure & stateful functions.
- Helped explain the optparse-applicative syntax.
@robrix:
- CMark exists, which is cool.
- Parsing/term ingestion had been producing errors for weirdly-formatted for loops &c. Not only were we hitting this on a regular basis, it was also causing confusing/poor change summaries.
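A rough sketch of what “type-aligned” means in the `Eff` description above (names are hypothetical; the real structure in `freer`/`effects` is a more efficient queue, not a plain list):

```haskell
{-# LANGUAGE GADTs #-}

-- Each element is a Kleisli arrow, and adjacent elements line up:
-- a -> m x, x -> m y, ..., so the whole sequence composes to a -> m b.
data TList m a b where
  Leaf :: (a -> m b) -> TList m a b
  Node :: TList m a x -> TList m x b -> TList m a b

apply :: Monad m => TList m a b -> a -> m b
apply (Leaf k)   a = k a
apply (Node l r) a = apply l a >>= apply r
```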
#### Meta
“What were the challenges?” can be a bit redundant with “what did you learn?” Should we focus this on challenges that we need help with? Gonna give that a try.
Next week: @joshvera, @rewinfrey, & @robrix are off to ICFP. @tclem may or may not hold weekly solo at his discretion.

View File

@ -1,64 +0,0 @@
# 09-30-2016
**What went well?**
@tclem: Quiet week with everyone in Japan. Was heads down in parsing issues, identifying determiners. Tackled some other harder issues (ongoing now around tooling and command line parsing).
@joshvera: ICFP was spectacular. There's a lot to write down, and ideas that could potentially apply to future features or techniques in general that would be useful. Testing RWS has gone well. Issue came up with small RWS / SES diff ordering.
@rewinfrey: ICFP also amazing. This week: small issues, and started looking at keeping parent contexts for nested contexts (and have an RFC out).
@pengwynn: Good to interface with everyone, and to see if we can provide further support with platform and integrating with dotcom.
**What were the challenges?**
@tclem: Challenging being totally on my own a couple weeks into the project. Some issues came up that required some heavy investigation (like monad transformers).
@joshvera: Coming back, it has been surprisingly difficult to keep normal hours (going to Japan was easy). The RWS bug with ordering terms is concerning, but should be manageable. Some PRs on master fail, and I think they are problems that come up only rarely in our property tests (we should look back and get those seeds so we can reproduce them). We have to make our diffs commute, so that if we're producing the same diff terms they should always be in the same order. Sometimes surrounding context will change or alter diffs.
@rewinfrey: I'm not looking at RWS so things are going well :p
**What did you learn?**
@tclem: Studying folds and algebraic data types. Learning about binary tree operations. Understanding of the entire architecture stack.
@joshvera: Learned a lot at ICFP - specifically about effects and coeffects (reifying as types in a language). Effects do something to a context, coeffects reach into and look at a context. For example, a high security program that communicates with a low security program could allow the low security program to use a coeffect to understand something about what happened in the high security program, but wouldn't reveal the sensitive information.
@rewinfrey: Learned a lot about freer monad and effects in general. Another big take away was homotopy type theory.
----
@pengwynn: So I'd like to ask what are we missing on the team?
@joshvera: The help we get from dotcom (mclark and brianmario) means we don't have to spend as much time diving into dotcom. We haven't received a lot of design attention; there are a couple of technical reasons why semantic diff is currently disabled. Would like to follow up with Fabian on design next week. Would like to find people with more Haskell experience, or with a stronger background in diffing in general outside of the context of version control. One of the bigger technical areas of work we will have to do is writing parsers and ensuring they are correct.
@pengwynn: How do we leverage data to make autonomous coding a reality? As we move closer to 2017 what's the team make up look like?
@joshvera: We're getting to a point where semantic diffing is stable. Figuring out a way to generate a corpus from the information we have would be a really hard technical solution, but would pay back in dividends. Things like semantic merging. Also Rob has ideas about how we would compare structures across different languages, and code navigation (semantic click through to definition). What are the most useful features for people we could develop immediately? It would be good to have a stronger interface with product people than we currently do.
@tclem: We could have a dedicated effort on parsing.
@pengwynn: Is that a polyglot or Haskeller?
@joshvera: That's someone like Max (Max Brunsfeld) at the moment -- but in general that means a polyglot.
@tclem: I would +1 for fabian or product design. Having someone that can help us understand the context of the future of PR's, and where that general set of features are going.
@pengwynn: What would it take once we launch diff summaries to go from 2 languages to 25? If that's one hire we can make that happen. Want to poll the group to determine what our holes are and what we should look for when hiring for the next teammate.
@tclem: Looking at parsers is a good thing.
@joshvera: Getting someone to be able to write parsers from a specification (which is researched). Taking existing stuff and putting it into a system that works for us. Also integrating with existing compilers, so we don't have to manually update our parsers every time a language change would be very beneficial. Automating that problem away would be beneficial, too. Hooking into different compilers would be something that would save us effort and energy in the future.
@pengwynn: This is awesome, and spurs some conversations with JD's team.
----
@rewinfrey: Where are we with getting diff summaries staff shipped and also out to gen pop? Would it be good to have a conversation about that early next week to clarify the direction for prioritization?
@joshvera: RWS and platform interface issues are the two main things preventing us from staff shipping again. Also want to talk with product design to take existing feedback and adjust accordingly. There are a few things in the summer eyes milestone that I think would be most useful, but maybe those aren't what product thinks is most useful. There could be more tiny bugs that only come up in certain contexts that we might not be aware of since we only do 100 property tests.
@tclem: It feels like we're close, and that it's really product and design and incorporating diff summaries into the new focus on code review.
@joshvera: I'm planning to talk with Fabian early next week to sync up on design.

View File

@ -1,48 +0,0 @@
# Oct 7th, 2016
## What went well?
@tclem
- productive week! Fixed lots of little bugs, project reorg finished
- Sorted out a variety of dotcom issues
- Tracking latest tree-sitter again
@rewinfrey
- getting out annotated nested parent context. Thanks @joshvera!
- import/export syntax (new stuff, personally rewarding)
@joshvera
- happy with all the PRs that got merged in this week
- pairing went well this week (time well spent to unblock people)
## What were the challenges?
@tclem
- not getting to do Haskell book (too heads down on other stuff)
- dotcom deploys hard with env var changes (need work here)
@rewinfrey
- translating tree sitter productions for import/export syntax. How to de-structure stuff correctly.
@joshvera
- not a lot to report that didn't go well!
- would like to have more design conversations at beginning of week
- lost track a little bit on where progressive diff and related projects are
all
- a bit hard to track various teams
## What did you learn?
@tclem
- how tree-sitter works. the base library, the node compiler, each of the grammars.
@rewinfrey
- api difference between named nodes in tree-sitter
- better understanding of how we tie into tree-sitter API via the FFI
@joshvera
- tree-sitter APIs that we aren't using
- split diff output is slow!
- about measures of probability distributions that are multi-dimensional
- difference between variance and volatility

View File

@ -1,27 +0,0 @@
# What went well
Rick - A lot got accomplished this week. Took apart the structure when we're parsing terms in the JavaScript module. Pairing with Tim and Josh. Getting a bit more confidence manipulating cofree structures.
Tim - Feel like we're in a good place for the next staff-ship. Knowing that there will be a few more design iterations. Fixed a number of remaining issues. A bit of UI polish on the .com side. Felt like it was a good week for pairing.
Josh - Pairing, performance improvements were straightforward and pointed the way to further improvements.
# What didnt go well
Rick - Hitting a weird source slicing error when adding import statements. Feels like navigating the project or adding features is sometimes a back-and-forth with my current mental model.
Tim - Felt like it was a slow week, out Monday and feeling sick. Would appreciate a concrete list of things to dive into next.
Josh - Sick for the first half of the week. Wished there was lower-hanging fruit in terms of performance.
# What did we learn
Rick - Reading a book called Thinking like a Mathematician. An overview of how to think in proofs. Understanding universal quantification has shed light on equational reasoning. How simple the hspec-expectations module is, and what improvements could be made.
Tim - From pairing with Rick, learned about extracting and unwrapping data structures. Dealing with and extracting things from cofree. Learning about Ripper and s-expressions.
Josh - The curse of dimensionality will end us.
# Other things
It's probably more important to get a Ruby parser now, whether it's in Haskell, JavaScript, or Ripper output, than whether or not users can contribute to it.

View File

@ -1,63 +0,0 @@
January 6, 2017
Welcome back @robrix!
In place of our usual format we had an informal conversation identifying status of projects, concerns, and potential projects for the future. Also @joshvera gave us an excellent introduction to finding persistent homologies.
Below are the main talking points of the conversation (speakers are not identified):
----
Want to identify a milestone for what to do next for diff summaries.
- What's the minimal work necessary to get this in front of customers?
- Dependents feature shipped only with Ruby support, so shipping only with JavaScript, Markdown, Ruby (?!) should be fine.
- Possible rubric for evaluating a summary statement's value: can a screen reader easily understand the statement?
- Consider limiting diff summary statements to only those statements with significant meaning.
- Create heuristics to determine what "significant" means in the context of diff summaries.
- View diff summaries through other lenses (i.e. security). Example: does a change to this regex represent a security concern?
- Pare the feature down to the essential 2 or 3 aspects that drive the most value for the customer. Use this to drive conversations with product going forward.
What project or work can we identify and prioritize next for data-science?
Current approach to adding parsers is not scalable. Can we generate mappings somehow programmatically? @robrix possibly to look into this problem starting next week.
----
Persistent Homologies
Motivating problem:
**The problem with all machine learning models is that error correction must be done via a separate error-correcting step. Even then, bias and skewing occur as a natural by-product of the data samples used to train and test the models.**
Motivating questions:
**Is there a way to find significant dimensions in a high dimensional data set that do not rely on probability?**
**Can we use linear functions and stochastic properties to determine what those significant features are?**
Homologies:
Given a data set, a homology is a cycle or loop of points in that data set that are significant given a significance function (e.g. distance function).
With these data points, or topological invariants, we can use filtration to further refine the cycles or loops to find the most significant cycles. These are the strongest, or longest living, cycles. This helps us see what points in the n-dimensional data set are most important in relation to one another (rather than in relation to an assumption that is reified through a probabilistic test or approach).
How does one measure how "good" a persistent homology is? One can use a stability distance function to measure the stability of the distance function used to identify cycles.
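As a concrete anchor for the zero-dimensional case only, here is a small single-linkage sketch (illustrative; higher-dimensional cycles need actual boundary-matrix reduction): every point is born at scale 0, a component dies when it merges into another as the distance threshold grows, and the longest-lived components are the persistent features.

```haskell
import Data.List (sortOn)
import qualified Data.Map.Strict as Map

-- Death times of components under single-linkage merging; long-lived
-- components (large death times) are the "significant" clusters.
persistence0 :: (a -> a -> Double) -> [a] -> [Double]
persistence0 dist pts = go parents edges []
  where
    n       = length pts
    parents = Map.fromList [(i, i) | i <- [0 .. n - 1]]
    edges   = sortOn snd [ ((i, j), dist (pts !! i) (pts !! j))
                         | i <- [0 .. n - 1], j <- [i + 1 .. n - 1] ]
    root p i = let q = p Map.! i in if q == i then i else root p q
    go _ [] deaths = reverse deaths
    go p (((i, j), d) : rest) deaths
      | ri == rj  = go p rest deaths
      | otherwise = go (Map.insert ri rj p) rest (d : deaths)
      where
        ri = root p i
        rj = root p j
```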
----
Future project ideas
Semantic index: a semantically structured index of a GitHub project. Like ctags, the goal would be to create a link-based index of symbols, names or definitions of classes, methods, functions and vars.
This would allow a GitHub user to browse a project based on its semantic index to understand the project's layout, including its classes, methods contained within a class, and from where methods are invoked.
Other possible uses of this could be:
1. Visualize the weight of a class or method name based on its frequency of invocation in a project.
2. Visualize the churn weight of a class or method based on its frequency of change over time.
3. Security implications / risk assessment for the health and stability of a project: given that we know the most important classes and methods of a project (based on point 1), those that are most significant and change the most (point 2) represent a risk to the stability of the project. Projects with main code paths with high degree of churn represent projects that may not provide production level stability and safety for end users. Or indicate that they are still "works in progress" and not ready to be used in a production environment.
4. Indexed project would link to the source code (e.g. method name links to current master's method definition).