Decaf language-gfm

confused-Techie 2023-07-08 18:12:07 -07:00
parent d51de56216
commit db8ea2e5f1
2 changed files with 55 additions and 957 deletions
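"Decaf" here is decaffeinate: the CoffeeScript spec below is deleted in favor of a plain JavaScript version. As a rough illustration of the conversion (a sketch only, not the commit's actual added code), one of the removed tests translates to Jasmine-style JavaScript along these lines:

    // Hypothetical decaffeinated form of one of the specs removed below.
    describe("GitHub Flavored Markdown grammar", () => {
      let grammar = null;

      beforeEach(() => {
        waitsForPromise(() => atom.packages.activatePackage("language-gfm"));
        runs(() => { grammar = atom.grammars.grammarForScopeName("source.gfm"); });
      });

      it("tokenizes horizontal rules", () => {
        // CoffeeScript's implicit object literals become explicit in JS.
        const {tokens} = grammar.tokenizeLine("***");
        expect(tokens[0]).toEqual({value: "***", scopes: ["source.gfm", "comment.hr.gfm"]});
      });
    });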


@@ -1,897 +0,0 @@
describe "GitHub Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-gfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes spaces", ->
{tokens} = grammar.tokenizeLine(" ")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
it "tokenizes horizontal rules", ->
{tokens} = grammar.tokenizeLine("***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("---")
expect(tokens[0]).toEqual value: "---", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "comment.hr.gfm"]
it "tokenizes escaped characters", ->
{tokens} = grammar.tokenizeLine("\\*")
expect(tokens[0]).toEqual value: "\\*", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\\\")
expect(tokens[0]).toEqual value: "\\\\", scopes: ["source.gfm", "constant.character.escape.gfm"]
{tokens} = grammar.tokenizeLine("\\abc")
expect(tokens[0]).toEqual value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]
expect(tokens[1]).toEqual value: "bc", scopes: ["source.gfm"]
it "tokenizes ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("this is ***bold italic*** text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ***bold\nitalic***!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes ___bold italic___ text", ->
{tokens} = grammar.tokenizeLine("this is ___bold italic___ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[2]).toEqual value: "bold italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[3]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ___bold\nitalic___!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(firstLineTokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[1]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
it "tokenizes **bold** text", ->
{tokens} = grammar.tokenizeLine("**bold**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is **not\nbold**!")
expect(firstLineTokens[0]).toEqual value: "this is **not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "bold**!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not**bold**")
expect(tokens[0]).toEqual value: "not", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[3]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes __bold__ text", ->
{tokens} = grammar.tokenizeLine("____")
expect(tokens[0]).toEqual value: "____", scopes: ["source.gfm", "comment.hr.gfm"]
{tokens} = grammar.tokenizeLine("__bold__")
expect(tokens[0]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[1]).toEqual value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[2]).toEqual value: "__", scopes: [ 'source.gfm', 'markup.bold.gfm', 'punctuation.definition.entity.gfm' ]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is __not\nbold__!")
expect(firstLineTokens[0]).toEqual value: "this is __not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "bold__!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not__bold__")
expect(tokens[0]).toEqual value: "not__bold__", scopes: ["source.gfm"]
it "tokenizes *italic* text", ->
{tokens} = grammar.tokenizeLine("**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is *italic* text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("is*italic*")
expect(tokens[0]).toEqual value: "is", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "*", scopes: [ "source.gfm", "markup.italic.gfm", "punctuation.definition.entity.gfm" ]
{tokens} = grammar.tokenizeLine("* not italic")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "not italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is *not\nitalic*!")
expect(firstLineTokens[0]).toEqual value: "this is *not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic*!", scopes: ["source.gfm"]
it "tokenizes _italic_ text", ->
{tokens} = grammar.tokenizeLine("__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("this is _italic_ text")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[2]).toEqual value: "italic", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[3]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[4]).toEqual value: " text", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not_italic_")
expect(tokens[0]).toEqual value: "not_italic_", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not x^{a}_m y^{b}_n italic")
expect(tokens[0]).toEqual value: "not x^{a}_m y^{b}_n italic", scopes: ["source.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is _not\nitalic_!")
expect(firstLineTokens[0]).toEqual value: "this is _not", scopes: ["source.gfm"]
expect(secondLineTokens[0]).toEqual value: "italic_!", scopes: ["source.gfm"]
it "tokenizes ~~strike~~ text", ->
{tokens} = grammar.tokenizeLine("~~strike~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "strike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[2]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
[firstLineTokens, secondLineTokens] = grammar.tokenizeLines("this is ~~str\nike~~!")
expect(firstLineTokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(firstLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(firstLineTokens[2]).toEqual value: "str", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[0]).toEqual value: "ike", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[1]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(secondLineTokens[2]).toEqual value: "!", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("not~~strike~~")
expect(tokens[0]).toEqual value: "not~~strike~~", scopes: ["source.gfm"]
it "tokenizes headings", ->
{tokens} = grammar.tokenizeLine("# Heading 1")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 1", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
{tokens} = grammar.tokenizeLine("## Heading 2")
expect(tokens[0]).toEqual value: "##", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-2.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 2", scopes: ["source.gfm", "markup.heading.heading-2.gfm"]
{tokens} = grammar.tokenizeLine("### Heading 3")
expect(tokens[0]).toEqual value: "###", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-3.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 3", scopes: ["source.gfm", "markup.heading.heading-3.gfm"]
{tokens} = grammar.tokenizeLine("#### Heading 4")
expect(tokens[0]).toEqual value: "####", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-4.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 4", scopes: ["source.gfm", "markup.heading.heading-4.gfm"]
{tokens} = grammar.tokenizeLine("##### Heading 5")
expect(tokens[0]).toEqual value: "#####", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-5.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 5", scopes: ["source.gfm", "markup.heading.heading-5.gfm"]
{tokens} = grammar.tokenizeLine("###### Heading 6")
expect(tokens[0]).toEqual value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]
it "tokenizes matches inside of headers", ->
{tokens} = grammar.tokenizeLine("# Heading :one:")
expect(tokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(tokens[2]).toEqual value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[4]).toEqual value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[5]).toEqual value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
it "tokenizes an :emoji:", ->
{tokens} = grammar.tokenizeLine("this is :no_good:")
expect(tokens[0]).toEqual value: "this is ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]
expect(tokens[2]).toEqual value: "no_good", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]
{tokens} = grammar.tokenizeLine("this is :no good:")
expect(tokens[0]).toEqual value: "this is :no good:", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("http://localhost:8080")
expect(tokens[0]).toEqual value: "http://localhost:8080", scopes: ["source.gfm"]
it "tokenizes a ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~", ruleStack)
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "doesn't tokenise ~`~ as a code block", ->
{tokens} = grammar.tokenizeLine("~`~")
expect(tokens[0]).toEqual value: '~', scopes: ['source.gfm']
expect(tokens[1]).toEqual value: '`', scopes: ['source.gfm', 'markup.raw.gfm']
expect(tokens[2]).toEqual value: '~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenises code-blocks with borders of differing lengths", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~\nfoo bar\n~~~~~~~")
expect(firstLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~")
expect(firstLineTokens[0]).toEqual value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']
expect(secondLineTokens[0]).toEqual value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']
expect(thirdLineTokens[0]).toEqual value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']
it "tokenizes a ``` code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```")
expect(tokens[0]).toEqual value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenizes a ~~~ code block with trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~")
expect(tokens[0]).toEqual value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]
it "tokenises a ``` code block with an unknown language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` myLanguage")
expect(tokens[0]).toEqual value: '``` myLanguage', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
{tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack)
expect(tokens[0]).toEqual value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']
{tokens} = grammar.tokenizeLine("```", ruleStack)
expect(tokens[0]).toEqual value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']
it "tokenizes a ``` code block with a known language", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
expect(tokens[0]).toEqual value: "``` bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
expect(tokens[0]).toEqual value: "```js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```JS ")
expect(tokens[0]).toEqual value: "```JS ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("```r ")
expect(tokens[0]).toEqual value: "```r ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```properties ")
expect(tokens[0]).toEqual value: "```properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes a Rmarkdown ``` code block", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r}")
expect(tokens[0]).toEqual value: "```{r}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r,eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r,eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}")
expect(tokens[0]).toEqual value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a Rmarkdown ``` code block with whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("```{r }")
expect(tokens[0]).toEqual value: "```{r }", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{R } ")
expect(tokens[0]).toEqual value: "```{R } ", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
{tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}")
expect(tokens[0]).toEqual value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.r"
it "tokenizes a ~~~ code block with a language", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
expect(tokens[0]).toEqual value: "~~~ bash", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
expect(tokens[0]).toEqual value: "~~~js ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("~~~properties ")
expect(tokens[0]).toEqual value: "~~~properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes a ``` code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("``` bash")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("```js ")
{tokens} = grammar.tokenizeLine("``` ", ruleStack)
expect(tokens[0]).toEqual value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
it "tokenizes a ~~~ code block with a language and trailing whitespace", ->
{tokens, ruleStack} = grammar.tokenizeLine("~~~ bash")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.shell.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.shell"
{tokens, ruleStack} = grammar.tokenizeLine("~~~js ")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.js"
{tokens, ruleStack} = grammar.tokenizeLine("~~~ properties ")
{tokens} = grammar.tokenizeLine("~~~ ", ruleStack)
expect(tokens[0]).toEqual value: "~~~ ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]
expect(ruleStack[1].contentScopeName).toBe "source.embedded.git-config"
it "tokenizes inline `code` blocks", ->
{tokens} = grammar.tokenizeLine("`this` is `code`")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "this", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[3]).toEqual value: " is ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[5]).toEqual value: "code", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[6]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``")
expect(tokens[0]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "`", scopes: ["source.gfm", "markup.raw.gfm"]
{tokens} = grammar.tokenizeLine("``a\\`b``")
expect(tokens[0]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[1]).toEqual value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]
expect(tokens[2]).toEqual value: "``", scopes: ["source.gfm", "markup.raw.gfm"]
it "tokenizes [links](links)", ->
{tokens} = grammar.tokenizeLine("please click [this link](website)")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes reference [links][links]", ->
{tokens} = grammar.tokenizeLine("please click [this link][website]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes id-less reference [links][]", ->
{tokens} = grammar.tokenizeLine("please click [this link][]")
expect(tokens[0]).toEqual value: "please click ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [link]: footers", ->
{tokens} = grammar.tokenizeLine("[aLink]: http://website")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "link"]
expect(tokens[5]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
it "tokenizes [link]: <footers>", ->
{tokens} = grammar.tokenizeLine("[aLink]: <http://website>")
expect(tokens[0]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "aLink", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[2]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[3]).toEqual value: ": <", scopes: ["source.gfm", "link"]
expect(tokens[4]).toEqual value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[5]).toEqual value: ">", scopes: ["source.gfm", "link"]
it "tokenizes [![links](links)](links)", ->
{tokens} = grammar.tokenizeLine("[![title](image)](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links](links)][links]", ->
{tokens} = grammar.tokenizeLine("[![title](image)][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]](links)", ->
{tokens} = grammar.tokenizeLine("[![title][image]](link)")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes [![links][links]][links]", ->
{tokens} = grammar.tokenizeLine("[![title][image]][link]")
expect(tokens[0]).toEqual value: "[!", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[1]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[2]).toEqual value: "title", scopes: ["source.gfm", "link", "entity.gfm"]
expect(tokens[3]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[4]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[5]).toEqual value: "image", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[6]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[7]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
expect(tokens[8]).toEqual value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]
expect(tokens[9]).toEqual value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]
expect(tokens[10]).toEqual value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[18]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[19]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[20]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like you@domain.com shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like you@domain.com shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
it "tokenizes issue numbers", ->
{tokens} = grammar.tokenizeLine("sentence with no space before#12 ")
expect(tokens[0]).toEqual value: "sentence with no space before#12 ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #101 '#101' #101's #101. #101, (#101) [#101]")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[5]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[6]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[7]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[8]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[9]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[10]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[11]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[12]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[13]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[14]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[15]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[16]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[17]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[18]).toEqual value: ") [", scopes: ["source.gfm"]
expect(tokens[19]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[20]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[21]).toEqual value: "]", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"#101"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "101", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123i and a character after")
expect(tokens[0]).toEqual value: "sentence with a space before #123i and a character after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before #123 that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" #123's")
expect(tokens[1]).toEqual value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]
expect(tokens[2]).toEqual value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]
expect(tokens[3]).toEqual value: "'s", scopes: ["source.gfm"]
it "tokenizes unordered lists", ->
{tokens} = grammar.tokenizeLine("*Item 1")
expect(tokens[0]).not.toEqual value: "*Item 1", scopes: ["source.gfm", "variable.unordered.list.gfm"]
{tokens} = grammar.tokenizeLine(" * Item 1")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 1", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" + Item 2")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "+", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 2", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" - Item 3")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Item 3", scopes: ["source.gfm"]
it "tokenizes ordered lists", ->
{tokens} = grammar.tokenizeLine("1.First Item")
expect(tokens[0]).toEqual value: "1.First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 1. First Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "1.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "First Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 10. Tenth Item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "10.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Tenth Item", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine(" 111. Hundred and eleventh item")
expect(tokens[0]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]
expect(tokens[2]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "Hundred and eleventh item", scopes: ["source.gfm"]
it "tokenizes > quoted text", ->
{tokens} = grammar.tokenizeLine("> Quotation :+1:")
expect(tokens[0]).toEqual value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]
expect(tokens[1]).toEqual value: " Quotation :+1:", scopes: ["source.gfm", "comment.quote.gfm"]
it "tokenizes HTML entities", ->
{tokens} = grammar.tokenizeLine("&trade; &#8482; &a1; &#xb3;")
expect(tokens[0]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "trade", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[3]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[4]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[5]).toEqual value: "#8482", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[6]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[7]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[8]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[9]).toEqual value: "a1", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[10]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[11]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[13]).toEqual value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]
expect(tokens[14]).toEqual value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes HTML entities in *italic* text", ->
{tokens} = grammar.tokenizeLine("*&trade; &#8482; &#xb3;*")
expect(tokens[0]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "*", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
{tokens} = grammar.tokenizeLine("_&trade; &#8482; &#xb3;_")
expect(tokens[0]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]
it "tokenizes HTML entities in **bold** text", ->
{tokens} = grammar.tokenizeLine("**&trade; &#8482; &#xb3;**")
expect(tokens[0]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
{tokens} = grammar.tokenizeLine("__&trade; &#8482; &#xb3;__")
expect(tokens[0]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]
it "tokenizes HTML entities in ***bold italic*** text", ->
{tokens} = grammar.tokenizeLine("***&trade; &#8482; &#xb3;***")
expect(tokens[0]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: [ "source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: [ "source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm" ]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[12]).toEqual value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]
{tokens} = grammar.tokenizeLine("___&trade; &#8482; &#xb3;___")
expect(tokens[0]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.bold.italic.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]
expect(tokens[12]).toEqual value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]
it "tokenizes HTML entities in strikethrough text", ->
{tokens} = grammar.tokenizeLine("~~&trade; &#8482; &#xb3;~~")
expect(tokens[0]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[1]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[2]).toEqual value: "trade", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[3]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[4]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[5]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[6]).toEqual value: "#8482", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[7]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[8]).toEqual value: " ", scopes: ["source.gfm", "markup.strike.gfm"]
expect(tokens[9]).toEqual value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[10]).toEqual value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]
expect(tokens[11]).toEqual value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]
expect(tokens[12]).toEqual value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]
it "tokenizes HTML comments", ->
{tokens} = grammar.tokenizeLine("<!-- a comment -->")
expect(tokens[0]).toEqual value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
expect(tokens[1]).toEqual value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"]
expect(tokens[2]).toEqual value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]
it "tokenizes YAML front matter", ->
[firstLineTokens, secondLineTokens, thirdLineTokens] = grammar.tokenizeLines """
---
front: matter
---
"""
expect(firstLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
expect(secondLineTokens[0]).toEqual value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]
expect(thirdLineTokens[0]).toEqual value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]
it "tokenizes linebreaks", ->
{tokens} = grammar.tokenizeLine("line ")
expect(tokens[0]).toEqual value: "line", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm", "linebreak.gfm"]
it "tokenizes tables", ->
[headerTokens, alignTokens, contentTokens] = grammar.tokenizeLines """
| Column 1 | Column 2 |
|:----------|:---------:|
| Content 1 | Content 2 |
"""
# Header line
expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(headerTokens[3]).toEqual value: " Column 2 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
# Alignment line
expect(alignTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(alignTokens[1]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[2]).toEqual value: "----------", scopes: ["source.gfm", "table.gfm", "border.header"]
expect(alignTokens[3]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(alignTokens[4]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[5]).toEqual value: "---------", scopes: ["source.gfm", "table.gfm", "border.header"]
expect(alignTokens[6]).toEqual value: ":", scopes: ["source.gfm", "table.gfm", "border.alignment"]
expect(alignTokens[7]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
# Content line
expect(contentTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(contentTokens[1]).toEqual value: " Content 1 ", scopes: ["source.gfm", "table.gfm"]
expect(contentTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(contentTokens[3]).toEqual value: " Content 2 ", scopes: ["source.gfm", "table.gfm"]
expect(contentTokens[4]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
[headerTokens, emptyLineTokens, headingTokens] = grammar.tokenizeLines """
| Column 1 | Column 2\t

# Heading
"""
expect(headerTokens[0]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.outer"]
expect(headerTokens[1]).toEqual value: " Column 1 ", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[2]).toEqual value: "|", scopes: ["source.gfm", "table.gfm", "border.pipe.inner"]
expect(headerTokens[3]).toEqual value: " Column 2", scopes: ["source.gfm", "table.gfm"]
expect(headerTokens[4]).toEqual value: "\t", scopes: ["source.gfm", "table.gfm"]
expect(headingTokens[0]).toEqual value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]
expect(headingTokens[1]).toEqual value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]
expect(headingTokens[2]).toEqual value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]
it "tokenizes criticmarkup", ->
[addToken, delToken, hlToken, subToken] = grammar.tokenizeLines """
Add{++ some text++}
Delete{-- some text--}
Highlight {==some text==}{>>with comment<<}
Replace {~~this~>by that~~}
"""
# Addition
expect(addToken[0]).toEqual value: "Add", scopes: ["source.gfm"]
expect(addToken[1]).toEqual value: "{++", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition", "punctuation.definition.inserted.critic.gfm.addition.marker"]
expect(addToken[2]).toEqual value: " some text", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition"]
expect(addToken[3]).toEqual value: "++}", scopes: ["source.gfm", "markup.inserted.critic.gfm.addition", "punctuation.definition.inserted.critic.gfm.addition.marker"]
# Deletion
expect(delToken[0]).toEqual value: "Delete", scopes: ["source.gfm"]
expect(delToken[1]).toEqual value: "{--", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion", "punctuation.definition.deleted.critic.gfm.deletion.marker"]
expect(delToken[2]).toEqual value: " some text", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion"]
expect(delToken[3]).toEqual value: "--}", scopes: ["source.gfm", "markup.deleted.critic.gfm.deletion", "punctuation.definition.deleted.critic.gfm.deletion.marker"]
# Comment and highlight
expect(hlToken[0]).toEqual value: "Highlight ", scopes: ["source.gfm"]
expect(hlToken[1]).toEqual value: "{==", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
expect(hlToken[2]).toEqual value: "some text", scopes: ["source.gfm", "critic.gfm.highlight"]
expect(hlToken[3]).toEqual value: "==}", scopes: ["source.gfm", "critic.gfm.highlight", "critic.gfm.highlight.marker"]
expect(hlToken[4]).toEqual value: "{>>", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
expect(hlToken[5]).toEqual value: "with comment", scopes: ["source.gfm", "critic.gfm.comment"]
expect(hlToken[6]).toEqual value: "<<}", scopes: ["source.gfm", "critic.gfm.comment", "critic.gfm.comment.marker"]
# Replace
expect(subToken[0]).toEqual value: "Replace ", scopes: ["source.gfm"]
expect(subToken[1]).toEqual value: "{~~", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"]
expect(subToken[2]).toEqual value: "this", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"]
expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.operator"]
expect(subToken[4]).toEqual value: "by that", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"]
expect(subToken[5]).toEqual value: "~~}", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"]
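For readers comparing the two files in this commit: a minimal sketch, not part of the commit itself, of how decaffeinate renders one CoffeeScript expectation from the spec above in JavaScript — paren-less calls gain explicit parentheses, and the implicit object after toEqual gains braces.

// CoffeeScript input (from the spec above):
//   expect(subToken[3]).toEqual value: "~>", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.operator"]
// decaffeinate output, as seen in the converted file below:
expect(subToken[3]).toEqual({value: "~>", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.operator"]});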
View File
@@ -1,26 +1,21 @@
/*
* decaffeinate suggestions:
* DS101: Remove unnecessary use of Array.from
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/main/docs/suggestions.md
*/
describe("GitHub Flavored Markdown grammar", function() {
let grammar = null;
beforeEach(function() {
waitsForPromise(() => atom.packages.activatePackage("language-gfm"));
-return runs(() => grammar = atom.grammars.grammarForScopeName("source.gfm"));
+runs(() => grammar = atom.grammars.grammarForScopeName("source.gfm"));
});
it("parses the grammar", function() {
expect(grammar).toBeDefined();
-return expect(grammar.scopeName).toBe("source.gfm");
+expect(grammar.scopeName).toBe("source.gfm");
});
it("tokenizes spaces", function() {
const {tokens} = grammar.tokenizeLine(" ");
return expect(tokens[0]).toEqual({value: " ", scopes: ["source.gfm"]});
expect(tokens[0]).toEqual({value: " ", scopes: ["source.gfm"]});
});
it("tokenizes horizontal rules", function() {
@@ -31,7 +26,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: "---", scopes: ["source.gfm", "comment.hr.gfm"]});
({tokens} = grammar.tokenizeLine("___"));
return expect(tokens[0]).toEqual({value: "___", scopes: ["source.gfm", "comment.hr.gfm"]});
expect(tokens[0]).toEqual({value: "___", scopes: ["source.gfm", "comment.hr.gfm"]});
});
it("tokenizes escaped characters", function() {
@@ -43,7 +38,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens} = grammar.tokenizeLine("\\abc"));
expect(tokens[0]).toEqual({value: "\\a", scopes: ["source.gfm", "constant.character.escape.gfm"]});
return expect(tokens[1]).toEqual({value: "bc", scopes: ["source.gfm"]});
expect(tokens[1]).toEqual({value: "bc", scopes: ["source.gfm"]});
});
it("tokenizes ***bold italic*** text", function() {
@@ -60,7 +55,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(firstLineTokens[2]).toEqual({value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
expect(secondLineTokens[0]).toEqual({value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
expect(secondLineTokens[1]).toEqual({value: "***", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
-return expect(secondLineTokens[2]).toEqual({value: "!", scopes: ["source.gfm"]});
+expect(secondLineTokens[2]).toEqual({value: "!", scopes: ["source.gfm"]});
});
it("tokenizes ___bold italic___ text", function() {
@@ -77,7 +72,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(firstLineTokens[2]).toEqual({value: "bold", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
expect(secondLineTokens[0]).toEqual({value: "italic", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
expect(secondLineTokens[1]).toEqual({value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
-return expect(secondLineTokens[2]).toEqual({value: "!", scopes: ["source.gfm"]});
+expect(secondLineTokens[2]).toEqual({value: "!", scopes: ["source.gfm"]});
});
it("tokenizes **bold** text", function() {
@@ -94,7 +89,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: "not", scopes: ["source.gfm"]});
expect(tokens[1]).toEqual({value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[2]).toEqual({value: "bold", scopes: ["source.gfm", "markup.bold.gfm"]});
return expect(tokens[3]).toEqual({value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[3]).toEqual({value: "**", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]});
});
it("tokenizes __bold__ text", function() {
@@ -111,7 +106,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(secondLineTokens[0]).toEqual({value: "bold__!", scopes: ["source.gfm"]});
({tokens} = grammar.tokenizeLine("not__bold__"));
return expect(tokens[0]).toEqual({value: "not__bold__", scopes: ["source.gfm"]});
expect(tokens[0]).toEqual({value: "not__bold__", scopes: ["source.gfm"]});
});
it("tokenizes *italic* text", function() {
@@ -138,7 +133,7 @@ describe("GitHub Flavored Markdown grammar", function() {
const [firstLineTokens, secondLineTokens] = Array.from(grammar.tokenizeLines("this is *not\nitalic*!"));
expect(firstLineTokens[0]).toEqual({value: "this is *not", scopes: ["source.gfm"]});
return expect(secondLineTokens[0]).toEqual({value: "italic*!", scopes: ["source.gfm"]});
expect(secondLineTokens[0]).toEqual({value: "italic*!", scopes: ["source.gfm"]});
});
it("tokenizes _italic_ text", function() {
@@ -160,7 +155,7 @@ describe("GitHub Flavored Markdown grammar", function() {
const [firstLineTokens, secondLineTokens] = Array.from(grammar.tokenizeLines("this is _not\nitalic_!"));
expect(firstLineTokens[0]).toEqual({value: "this is _not", scopes: ["source.gfm"]});
return expect(secondLineTokens[0]).toEqual({value: "italic_!", scopes: ["source.gfm"]});
expect(secondLineTokens[0]).toEqual({value: "italic_!", scopes: ["source.gfm"]});
});
it("tokenizes ~~strike~~ text", function() {
@@ -178,7 +173,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(secondLineTokens[2]).toEqual({value: "!", scopes: ["source.gfm"]});
({tokens} = grammar.tokenizeLine("not~~strike~~"));
return expect(tokens[0]).toEqual({value: "not~~strike~~", scopes: ["source.gfm"]});
expect(tokens[0]).toEqual({value: "not~~strike~~", scopes: ["source.gfm"]});
});
it("tokenizes headings", function() {
@@ -210,7 +205,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens} = grammar.tokenizeLine("###### Heading 6"));
expect(tokens[0]).toEqual({value: "######", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.marker.gfm"]});
expect(tokens[1]).toEqual({value: " ", scopes: ["source.gfm", "markup.heading.heading-6.gfm", "markup.heading.space.gfm"]});
return expect(tokens[2]).toEqual({value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]});
expect(tokens[2]).toEqual({value: "Heading 6", scopes: ["source.gfm", "markup.heading.heading-6.gfm"]});
});
it("tokenizes matches inside of headers", function() {
@@ -220,7 +215,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[2]).toEqual({value: "Heading ", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]});
expect(tokens[3]).toEqual({value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.start.gfm"]});
expect(tokens[4]).toEqual({value: "one", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.word.gfm"]});
return expect(tokens[5]).toEqual({value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]});
expect(tokens[5]).toEqual({value: ":", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "string.emoji.gfm", "string.emoji.end.gfm"]});
});
it("tokenizes an :emoji:", function() {
@@ -234,7 +229,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: "this is :no good:", scopes: ["source.gfm"]});
({tokens} = grammar.tokenizeLine("http://localhost:8080"));
return expect(tokens[0]).toEqual({value: "http://localhost:8080", scopes: ["source.gfm"]});
expect(tokens[0]).toEqual({value: "http://localhost:8080", scopes: ["source.gfm"]});
});
it("tokenizes a ``` code block", function() {
@@ -243,7 +238,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack));
expect(tokens[0]).toEqual({value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]});
({tokens} = grammar.tokenizeLine("```", ruleStack));
return expect(tokens[0]).toEqual({value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
expect(tokens[0]).toEqual({value: "```", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
});
it("tokenizes a ~~~ code block", function() {
@@ -252,14 +247,14 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack));
expect(tokens[0]).toEqual({value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]});
({tokens} = grammar.tokenizeLine("~~~", ruleStack));
return expect(tokens[0]).toEqual({value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
expect(tokens[0]).toEqual({value: "~~~", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
});
it("doesn't tokenise ~`~ as a code block", function() {
const {tokens} = grammar.tokenizeLine("~`~");
expect(tokens[0]).toEqual({value: '~', scopes: ['source.gfm']});
expect(tokens[1]).toEqual({value: '`', scopes: ['source.gfm', 'markup.raw.gfm']});
-return expect(tokens[2]).toEqual({value: '~', scopes: ['source.gfm', 'markup.raw.gfm']});
+expect(tokens[2]).toEqual({value: '~', scopes: ['source.gfm', 'markup.raw.gfm']});
});
it("tokenises code-blocks with borders of differing lengths", function() {
@@ -271,7 +266,7 @@ describe("GitHub Flavored Markdown grammar", function() {
[firstLineTokens, secondLineTokens, thirdLineTokens] = Array.from(grammar.tokenizeLines("~~~~~~~\nfoo bar\n~~~"));
expect(firstLineTokens[0]).toEqual({value: '~~~~~~~', scopes: ['source.gfm', 'markup.raw.gfm', 'support.gfm']});
expect(secondLineTokens[0]).toEqual({value: 'foo bar', scopes: ['source.gfm', 'markup.raw.gfm']});
-return expect(thirdLineTokens[0]).toEqual({value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']});
+expect(thirdLineTokens[0]).toEqual({value: '~~~', scopes: ['source.gfm', 'markup.raw.gfm']});
});
it("tokenizes a ``` code block with trailing whitespace", function() {
@@ -280,7 +275,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack));
expect(tokens[0]).toEqual({value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]});
({tokens} = grammar.tokenizeLine("``` ", ruleStack));
return expect(tokens[0]).toEqual({value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
expect(tokens[0]).toEqual({value: "``` ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
});
it("tokenizes a ~~~ code block with trailing whitespace", function() {
@@ -289,7 +284,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("-> 'hello'", ruleStack));
expect(tokens[0]).toEqual({value: "-> 'hello'", scopes: ["source.gfm", "markup.raw.gfm"]});
({tokens} = grammar.tokenizeLine("~~~ ", ruleStack));
return expect(tokens[0]).toEqual({value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
expect(tokens[0]).toEqual({value: "~~~ ", scopes: ["source.gfm", "markup.raw.gfm", "support.gfm"]});
});
it("tokenises a ``` code block with an unknown language", function() {
@@ -300,7 +295,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: "-> 'hello'", scopes: ['source.gfm', 'markup.code.other.gfm', 'source.embedded.mylanguage']});
({tokens} = grammar.tokenizeLine("```", ruleStack));
-return expect(tokens[0]).toEqual({value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']});
+expect(tokens[0]).toEqual({value: '```', scopes: ['source.gfm', 'markup.code.other.gfm', 'support.gfm']});
});
it("tokenizes a ``` code block with a known language", function() {
@@ -322,7 +317,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("```properties "));
expect(tokens[0]).toEqual({value: "```properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
});
it("tokenizes a Rmarkdown ``` code block", function() {
@@ -336,7 +331,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("```{r eval=TRUE,cache=FALSE}"));
expect(tokens[0]).toEqual({value: "```{r eval=TRUE,cache=FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.r");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.r");
});
it("tokenizes a Rmarkdown ``` code block with whitespace", function() {
@@ -350,7 +345,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("```{r eval = TRUE, cache = FALSE}"));
expect(tokens[0]).toEqual({value: "```{r eval = TRUE, cache = FALSE}", scopes: ["source.gfm", "markup.code.r.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.r");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.r");
});
it("tokenizes a ~~~ code block with a language", function() {
@@ -364,7 +359,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("~~~properties "));
expect(tokens[0]).toEqual({value: "~~~properties ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
});
it("tokenizes a ``` code block with a language and trailing whitespace", function() {
@@ -376,7 +371,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("```js "));
({tokens} = grammar.tokenizeLine("``` ", ruleStack));
expect(tokens[0]).toEqual({value: "``` ", scopes: ["source.gfm", "markup.code.js.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.js");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.js");
});
it("tokenizes a ~~~ code block with a language and trailing whitespace", function() {
@@ -393,7 +388,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens, ruleStack} = grammar.tokenizeLine("~~~ properties "));
({tokens} = grammar.tokenizeLine("~~~ ", ruleStack));
expect(tokens[0]).toEqual({value: "~~~ ", scopes: ["source.gfm", "markup.code.git-config.gfm", "support.gfm"]});
-return expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
+expect(ruleStack[1].contentScopeName).toBe("source.embedded.git-config");
});
it("tokenizes inline `code` blocks", function() {
@@ -413,7 +408,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens} = grammar.tokenizeLine("``a\\`b``"));
expect(tokens[0]).toEqual({value: "``", scopes: ["source.gfm", "markup.raw.gfm"]});
expect(tokens[1]).toEqual({value: "a\\`b", scopes: ["source.gfm", "markup.raw.gfm"]});
return expect(tokens[2]).toEqual({value: "``", scopes: ["source.gfm", "markup.raw.gfm"]});
expect(tokens[2]).toEqual({value: "``", scopes: ["source.gfm", "markup.raw.gfm"]});
});
it("tokenizes [links](links)", function() {
@@ -424,7 +419,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[3]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[4]).toEqual({value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[5]).toEqual({value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[6]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[6]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes reference [links][links]", function() {
@@ -435,7 +430,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[3]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[4]).toEqual({value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[5]).toEqual({value: "website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[6]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[6]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes id-less reference [links][]", function() {
@@ -445,7 +440,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[2]).toEqual({value: "this link", scopes: ["source.gfm", "link", "entity.gfm"]});
expect(tokens[3]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[4]).toEqual({value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
return expect(tokens[5]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[5]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes [link]: footers", function() {
@@ -455,7 +450,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[2]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[3]).toEqual({value: ":", scopes: ["source.gfm", "link", "punctuation.separator.key-value.gfm"]});
expect(tokens[4]).toEqual({value: " ", scopes: ["source.gfm", "link"]});
return expect(tokens[5]).toEqual({value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
expect(tokens[5]).toEqual({value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
});
it("tokenizes [link]: <footers>", function() {
@@ -465,7 +460,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[2]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[3]).toEqual({value: ": <", scopes: ["source.gfm", "link"]});
expect(tokens[4]).toEqual({value: "http://website", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[5]).toEqual({value: ">", scopes: ["source.gfm", "link"]});
expect(tokens[5]).toEqual({value: ">", scopes: ["source.gfm", "link"]});
});
it("tokenizes [![links](links)](links)", function() {
@@ -480,7 +475,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[7]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[8]).toEqual({value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[9]).toEqual({value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[10]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[10]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes [![links](links)][links]", function() {
@@ -495,7 +490,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[7]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[8]).toEqual({value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[9]).toEqual({value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[10]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[10]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes [![links][links]](links)", function() {
@@ -510,7 +505,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[7]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[8]).toEqual({value: "(", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[9]).toEqual({value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[10]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[10]).toEqual({value: ")", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes [![links][links]][links]", function() {
@@ -525,7 +520,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[7]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[8]).toEqual({value: "[", scopes: ["source.gfm", "link", "punctuation.definition.begin.gfm"]});
expect(tokens[9]).toEqual({value: "link", scopes: ["source.gfm", "link", "markup.underline.link.gfm"]});
return expect(tokens[10]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
expect(tokens[10]).toEqual({value: "]", scopes: ["source.gfm", "link", "punctuation.definition.end.gfm"]});
});
it("tokenizes mentions", function() {
@@ -605,7 +600,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens} = grammar.tokenizeLine("@person;"));
expect(tokens[0]).toEqual({value: "@", scopes: ["source.gfm", "variable.mention.gfm"]});
expect(tokens[1]).toEqual({value: "person", scopes: ["source.gfm", "string.username.gfm"]});
return expect(tokens[2]).toEqual({value: ";", scopes: ["source.gfm"]});
expect(tokens[2]).toEqual({value: ";", scopes: ["source.gfm"]});
});
it("tokenizes issue numbers", function() {
@@ -653,7 +648,7 @@ describe("GitHub Flavored Markdown grammar", function() {
({tokens} = grammar.tokenizeLine(" #123's"));
expect(tokens[1]).toEqual({value: "#", scopes: ["source.gfm", "variable.issue.tag.gfm"]});
expect(tokens[2]).toEqual({value: "123", scopes: ["source.gfm", "string.issue.number.gfm"]});
return expect(tokens[3]).toEqual({value: "'s", scopes: ["source.gfm"]});
expect(tokens[3]).toEqual({value: "'s", scopes: ["source.gfm"]});
});
it("tokenizes unordered lists", function() {
@@ -676,7 +671,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: " ", scopes: ["source.gfm"]});
expect(tokens[1]).toEqual({value: "-", scopes: ["source.gfm", "variable.unordered.list.gfm"]});
expect(tokens[2]).toEqual({value: " ", scopes: ["source.gfm"]});
return expect(tokens[3]).toEqual({value: "Item 3", scopes: ["source.gfm"]});
expect(tokens[3]).toEqual({value: "Item 3", scopes: ["source.gfm"]});
});
it("tokenizes ordered lists", function() {
@@ -699,13 +694,13 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[0]).toEqual({value: " ", scopes: ["source.gfm"]});
expect(tokens[1]).toEqual({value: "111.", scopes: ["source.gfm", "variable.ordered.list.gfm"]});
expect(tokens[2]).toEqual({value: " ", scopes: ["source.gfm"]});
return expect(tokens[3]).toEqual({value: "Hundred and eleventh item", scopes: ["source.gfm"]});
expect(tokens[3]).toEqual({value: "Hundred and eleventh item", scopes: ["source.gfm"]});
});
it("tokenizes > quoted text", function() {
const {tokens} = grammar.tokenizeLine("> Quotation :+1:");
expect(tokens[0]).toEqual({value: ">", scopes: ["source.gfm", "comment.quote.gfm", "support.quote.gfm"]});
return expect(tokens[1]).toEqual({value: " Quotation :+1:", scopes: ["source.gfm", "comment.quote.gfm"]});
expect(tokens[1]).toEqual({value: " Quotation :+1:", scopes: ["source.gfm", "comment.quote.gfm"]});
});
it("tokenizes HTML entities", function() {
@@ -730,7 +725,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[12]).toEqual({value: "&", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[13]).toEqual({value: "#xb3", scopes: ["source.gfm", "constant.character.entity.gfm"]});
return expect(tokens[14]).toEqual({value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[14]).toEqual({value: ";", scopes: ["source.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
});
it("tokenizes HTML entities in *italic* text", function() {
@@ -762,7 +757,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[9]).toEqual({value: "&", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[10]).toEqual({value: "#xb3", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm"]});
expect(tokens[11]).toEqual({value: ";", scopes: ["source.gfm", "markup.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
return expect(tokens[12]).toEqual({value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]});
expect(tokens[12]).toEqual({value: "_", scopes: [ 'source.gfm', 'markup.italic.gfm', 'punctuation.definition.entity.gfm' ]});
});
it("tokenizes HTML entities in **bold** text", function() {
@@ -794,7 +789,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[9]).toEqual({value: "&", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[10]).toEqual({value: "#xb3", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm"]});
expect(tokens[11]).toEqual({value: ";", scopes: ["source.gfm", "markup.bold.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
return expect(tokens[12]).toEqual({value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[12]).toEqual({value: "__", scopes: ["source.gfm", "markup.bold.gfm", "punctuation.definition.entity.gfm"]});
});
it("tokenizes HTML entities in ***bold italic*** text", function() {
@@ -826,7 +821,7 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[9]).toEqual({value: "&", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[10]).toEqual({value: "#xb3", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[11]).toEqual({value: ";", scopes: ["source.gfm", "markup.bold.italic.gfm", "constant.character.entity.gfm"]});
return expect(tokens[12]).toEqual({value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
expect(tokens[12]).toEqual({value: "___", scopes: ["source.gfm", "markup.bold.italic.gfm"]});
});
it("tokenizes HTML entities in strikethrough text", function() {
@@ -843,14 +838,14 @@ describe("GitHub Flavored Markdown grammar", function() {
expect(tokens[9]).toEqual({value: "&", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
expect(tokens[10]).toEqual({value: "#xb3", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm"]});
expect(tokens[11]).toEqual({value: ";", scopes: ["source.gfm", "markup.strike.gfm", "constant.character.entity.gfm", "punctuation.definition.entity.gfm"]});
return expect(tokens[12]).toEqual({value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]});
expect(tokens[12]).toEqual({value: "~~", scopes: ["source.gfm", "markup.strike.gfm"]});
});
it("tokenizes HTML comments", function() {
const {tokens} = grammar.tokenizeLine("<!-- a comment -->");
expect(tokens[0]).toEqual({value: "<!--", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]});
expect(tokens[1]).toEqual({value: " a comment ", scopes: ["source.gfm", "comment.block.gfm"]});
return expect(tokens[2]).toEqual({value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]});
expect(tokens[2]).toEqual({value: "-->", scopes: ["source.gfm", "comment.block.gfm", "punctuation.definition.comment.gfm"]});
});
it("tokenizes YAML front matter", function() {
@@ -863,13 +858,13 @@ front: matter
expect(firstLineTokens[0]).toEqual({value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]});
expect(secondLineTokens[0]).toEqual({value: "front: matter", scopes: ["source.gfm", "front-matter.yaml.gfm"]});
return expect(thirdLineTokens[0]).toEqual({value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]});
expect(thirdLineTokens[0]).toEqual({value: "---", scopes: ["source.gfm", "front-matter.yaml.gfm", "comment.hr.gfm"]});
});
it("tokenizes linebreaks", function() {
const {tokens} = grammar.tokenizeLine("line ");
expect(tokens[0]).toEqual({value: "line", scopes: ["source.gfm"]});
return expect(tokens[1]).toEqual({value: " ", scopes: ["source.gfm", "linebreak.gfm"]});
expect(tokens[1]).toEqual({value: " ", scopes: ["source.gfm", "linebreak.gfm"]});
});
it("tokenizes tables", function() {
@@ -920,10 +915,10 @@ front: matter
expect(headingTokens[0]).toEqual({value: "#", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.marker.gfm"]});
expect(headingTokens[1]).toEqual({value: " ", scopes: ["source.gfm", "markup.heading.heading-1.gfm", "markup.heading.space.gfm"]});
-return expect(headingTokens[2]).toEqual({value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]});
+expect(headingTokens[2]).toEqual({value: "Heading", scopes: ["source.gfm", "markup.heading.heading-1.gfm"]});
});
-return it("tokenizes criticmarkup", function() {
+it("tokenizes criticmarkup", function() {
const [addToken, delToken, hlToken, subToken] = Array.from(grammar.tokenizeLines(`\
Add{++ some text++}
Delete{-- some text--}
@@ -955,6 +950,6 @@ Replace {~~this~>by that~~}\
expect(subToken[2]).toEqual({value: "this", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"]});
expect(subToken[3]).toEqual({value: "~>", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.operator"]});
expect(subToken[4]).toEqual({value: "by that", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution"]});
return expect(subToken[5]).toEqual({value: "~~}", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"]});
expect(subToken[5]).toEqual({value: "~~}", scopes: ["source.gfm", "markup.changed.critic.gfm.substitution", "punctuation.definition.changed.critic.gfm.substitution.marker"]});
});
});
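The DS101/DS102 notes in the decaffeinate header comment above describe the translator's conservative output. This commit resolves DS102 (the `return` wrappers removed throughout the diff), while the Array.from guards flagged by DS101 remain in the file. A minimal sketch of what each suggestion refers to, assuming a Jasmine-style Atom spec like the one above; the test name and input string are illustrative, only `grammar` and `tokenizeLines` come from the spec itself:

// Before cleanup: decaffeinate emulates CoffeeScript's implicit returns (DS102)
// and guards destructuring with Array.from (DS101), even where neither matters.
it("tokenizes a line", function() {
  const [first, second] = Array.from(grammar.tokenizeLines("a\nb")); // DS101
  expect(first[0]).toEqual({value: "a", scopes: ["source.gfm"]});
  return expect(second[0]).toEqual({value: "b", scopes: ["source.gfm"]}); // DS102
});

// After cleanup: Jasmine ignores the callback's return value, and
// tokenizeLines already returns a plain array, so both wrappers can go.
it("tokenizes a line", function() {
  const [first, second] = grammar.tokenizeLines("a\nb");
  expect(first[0]).toEqual({value: "a", scopes: ["source.gfm"]});
  expect(second[0]).toEqual({value: "b", scopes: ["source.gfm"]});
});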