Manual decaf language-python spec

confused-Techie 2023-08-23 01:07:56 -07:00
parent 068e5c4f31
commit 420df26093
6 changed files with 97 additions and 1007 deletions
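For readers skimming the diff below: the manual decaffeination applied across these spec files follows one repeated pattern, namely dropping the decaffeinate suggestion banner, converting function() callbacks to arrow functions, and removing the return statements that decaffeinate inserted to mimic CoffeeScript's implicit returns. The before/after fragment below is an illustrative sketch of that pattern only; the test description is a placeholder and is not taken verbatim from any one file.

// Decaffeinate output (before manual cleanup):
it('matches a line', function() {
  return expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy();
});

// After manual cleanup:
it('matches a line', () => {
  expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy();
});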

View File

@@ -1,84 +0,0 @@
describe 'Python settings', ->
[editor, languageMode] = []
afterEach ->
editor.destroy()
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.workspace.open().then (o) ->
editor = o
languageMode = editor.languageMode
waitsForPromise ->
atom.packages.activatePackage('language-python')
it 'matches lines correctly using the increaseIndentPattern', ->
increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python'])
expect(increaseIndentRegex.findNextMatchSync('for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async for i in range(n):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' class TheClass(Object):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async def f(x):')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' if this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('else:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('async with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' async with open("filename") as f:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync(' while True:')).toBeTruthy()
expect(increaseIndentRegex.findNextMatchSync('\t\t while True:')).toBeTruthy()
it 'does not match lines incorrectly using the increaseIndentPattern', ->
increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python'])
expect(increaseIndentRegex.findNextMatchSync('for i in range(n)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('def f(x)')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var')).toBeFalsy()
expect(increaseIndentRegex.findNextMatchSync('"for i in range(n):"')).toBeFalsy()
it 'matches lines correctly using the decreaseIndentPattern', ->
decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python'])
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('else:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' else:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('except Exception:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' except Exception:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('except Exception as e:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy()
expect(decreaseIndentRegex.findNextMatchSync('\t\t finally:')).toBeTruthy()
it 'does not match lines incorrectly using the decreaseIndentPattern', ->
decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python'])
# NOTE! This first one is different from most other rote tests here.
expect(decreaseIndentRegex.findNextMatchSync('else: expression()')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync('else')).toBeFalsy()
expect(decreaseIndentRegex.findNextMatchSync(' "finally:"')).toBeFalsy()

View File

@@ -1,26 +1,22 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/main/docs/suggestions.md
*/
describe('Python settings', function() {
describe('Python settings', () => {
let [editor, languageMode] = [];
afterEach(() => editor.destroy());
beforeEach(function() {
beforeEach(() => {
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise(() => atom.workspace.open().then(function(o) {
editor = o;
return languageMode = editor.languageMode;
languageMode = editor.languageMode;
}));
return waitsForPromise(() => atom.packages.activatePackage('language-python'));
waitsForPromise(() => atom.packages.activatePackage('language-python'));
});
it('matches lines correctly using the increaseIndentPattern', function() {
it('matches lines correctly using the increaseIndentPattern', () => {
const increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']);
expect(increaseIndentRegex.findNextMatchSync('for i in range(n):')).toBeTruthy();
@@ -51,20 +47,20 @@ describe('Python settings', function() {
expect(increaseIndentRegex.findNextMatchSync(' async with open("filename") as f:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('while True:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync(' while True:')).toBeTruthy();
return expect(increaseIndentRegex.findNextMatchSync('\t\t while True:')).toBeTruthy();
expect(increaseIndentRegex.findNextMatchSync('\t\t while True:')).toBeTruthy();
});
it('does not match lines incorrectly using the increaseIndentPattern', function() {
it('does not match lines incorrectly using the increaseIndentPattern', () => {
const increaseIndentRegex = languageMode.increaseIndentRegexForScopeDescriptor(['source.python']);
expect(increaseIndentRegex.findNextMatchSync('for i in range(n)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('class TheClass(Object)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('def f(x)')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('if this_var == that_var')).toBeFalsy();
return expect(increaseIndentRegex.findNextMatchSync('"for i in range(n):"')).toBeFalsy();
expect(increaseIndentRegex.findNextMatchSync('"for i in range(n):"')).toBeFalsy();
});
it('matches lines correctly using the decreaseIndentPattern', function() {
it('matches lines correctly using the decreaseIndentPattern', () => {
const decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']);
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var:')).toBeTruthy();
@@ -77,10 +73,10 @@ describe('Python settings', function() {
expect(decreaseIndentRegex.findNextMatchSync(' except Exception as e:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('finally:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync(' finally:')).toBeTruthy();
return expect(decreaseIndentRegex.findNextMatchSync('\t\t finally:')).toBeTruthy();
expect(decreaseIndentRegex.findNextMatchSync('\t\t finally:')).toBeTruthy();
});
return it('does not match lines incorrectly using the decreaseIndentPattern', function() {
it('does not match lines incorrectly using the decreaseIndentPattern', () => {
const decreaseIndentRegex = languageMode.decreaseIndentRegexForScopeDescriptor(['source.python']);
// NOTE! This first one is different from most other rote tests here.
@@ -88,6 +84,6 @@ describe('Python settings', function() {
expect(decreaseIndentRegex.findNextMatchSync('elif this_var == that_var')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync(' elif this_var == that_var')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync('else')).toBeFalsy();
return expect(decreaseIndentRegex.findNextMatchSync(' "finally:"')).toBeFalsy();
expect(decreaseIndentRegex.findNextMatchSync(' "finally:"')).toBeFalsy();
});
});

View File

@@ -1,53 +0,0 @@
describe 'Python regular expression grammar', ->
grammar = null
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage('language-python')
runs ->
grammar = atom.grammars.grammarForScopeName('source.regexp.python')
describe 'character classes', ->
it 'does not recursively match character classes', ->
{tokens} = grammar.tokenizeLine '[.:[\\]@]'
expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[1]).toEqual value: '.:[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
expect(tokens[2]).toEqual value: '\\]', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp']
expect(tokens[3]).toEqual value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
expect(tokens[4]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']
it 'does not end the character class early if the first character is a ]', ->
{tokens} = grammar.tokenizeLine '[][]'
expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[1]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
expect(tokens[2]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']
{tokens} = grammar.tokenizeLine '[^][]'
expect(tokens[0]).toEqual value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[1]).toEqual value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp']
expect(tokens[2]).toEqual value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']
expect(tokens[3]).toEqual value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']
it 'escapes the character following any backslash', ->
{tokens} = grammar.tokenizeLine '''\\q\\(\\[\\'\\"\\?\\^\\-\\*\\.\\#'''
expect(tokens[0]).toEqual value: '\\q', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[2]).toEqual value: '\\[', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[3]).toEqual value: '\\\'', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[4]).toEqual value: '\\"', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[5]).toEqual value: '\\?', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[6]).toEqual value: '\\^', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[7]).toEqual value: '\\-', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[8]).toEqual value: '\\*', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[9]).toEqual value: '\\.', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
expect(tokens[10]).toEqual value: '\\#', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']
{tokens} = grammar.tokenizeLine '''(\\()\\)'''
expect(tokens[0]).toEqual value: '(', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[1]).toEqual value: '\\(', scopes: ['source.regexp.python', 'meta.group.regexp', 'constant.character.escape.backslash.regexp']
expect(tokens[2]).toEqual value: ')', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[3]).toEqual value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']

View File

@@ -1,31 +1,27 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/main/docs/suggestions.md
*/
describe('Python regular expression grammar', function() {
describe('Python regular expression grammar', () => {
let grammar = null;
beforeEach(function() {
beforeEach(() => {
atom.config.set('core.useTreeSitterParsers', false);
waitsForPromise(() => atom.packages.activatePackage('language-python'));
return runs(() => grammar = atom.grammars.grammarForScopeName('source.regexp.python'));
runs(() => grammar = atom.grammars.grammarForScopeName('source.regexp.python'));
});
return describe('character classes', function() {
it('does not recursively match character classes', function() {
describe('character classes', () => {
it('does not recursively match character classes', () => {
const {tokens} = grammar.tokenizeLine('[.:[\\]@]');
expect(tokens[0]).toEqual({value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']});
expect(tokens[1]).toEqual({value: '.:[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']});
expect(tokens[2]).toEqual({value: '\\]', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'constant.character.escape.backslash.regexp']});
expect(tokens[3]).toEqual({value: '@', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']});
return expect(tokens[4]).toEqual({value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']});
expect(tokens[4]).toEqual({value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']});
});
it('does not end the character class early if the first character is a ]', function() {
it('does not end the character class early if the first character is a ]', () => {
let {tokens} = grammar.tokenizeLine('[][]');
expect(tokens[0]).toEqual({value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']});
expect(tokens[1]).toEqual({value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']});
@@ -35,10 +31,10 @@ describe('Python regular expression grammar', function() {
expect(tokens[0]).toEqual({value: '[', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']});
expect(tokens[1]).toEqual({value: '^', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'keyword.operator.negation.regexp']});
expect(tokens[2]).toEqual({value: '][', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp']});
return expect(tokens[3]).toEqual({value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']});
expect(tokens[3]).toEqual({value: ']', scopes: ['source.regexp.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.end.regexp']});
});
return it('escapes the character following any backslash', function() {
it('escapes the character following any backslash', () => {
let {tokens} = grammar.tokenizeLine('\\q\\(\\[\\\'\\"\\?\\^\\-\\*\\.\\#');
expect(tokens[0]).toEqual({value: '\\q', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']});
expect(tokens[1]).toEqual({value: '\\(', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']});
@@ -56,7 +52,7 @@ describe('Python regular expression grammar', function() {
expect(tokens[0]).toEqual({value: '(', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']});
expect(tokens[1]).toEqual({value: '\\(', scopes: ['source.regexp.python', 'meta.group.regexp', 'constant.character.escape.backslash.regexp']});
expect(tokens[2]).toEqual({value: ')', scopes: ['source.regexp.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']});
return expect(tokens[3]).toEqual({value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']});
expect(tokens[3]).toEqual({value: '\\)', scopes: ['source.regexp.python', 'constant.character.escape.backslash.regexp']});
});
});
});

View File

@@ -1,760 +0,0 @@
path = require 'path'
grammarTest = require 'atom-grammar-test'
describe "Python grammar", ->
grammar = null
beforeEach ->
atom.config.set 'core.useTreeSitterParsers', false
waitsForPromise ->
atom.packages.activatePackage("language-python")
runs ->
grammar = atom.grammars.grammarForScopeName("source.python")
it "recognises shebang on firstline", ->
expect(grammar.firstLineRegex.findNextMatchSync("#!/usr/bin/env python")).not.toBeNull()
expect(grammar.firstLineRegex.findNextMatchSync("#! /usr/bin/env python")).not.toBeNull()
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.python"
it "tokenizes `yield`", ->
{tokens} = grammar.tokenizeLine 'yield v'
expect(tokens[0]).toEqual value: 'yield', scopes: ['source.python', 'keyword.control.statement.python']
it "tokenizes `yield from`", ->
{tokens} = grammar.tokenizeLine 'yield from v'
expect(tokens[0]).toEqual value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python']
it "tokenizes multi-line strings", ->
tokens = grammar.tokenizeLines('"1\\\n2"')
# Line 0
expect(tokens[0][0].value).toBe '"'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][1].value).toBe '1'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python']
expect(tokens[0][2].value).toBe '\\'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.newline.python']
expect(tokens[0][3]).not.toBeDefined()
# Line 1
expect(tokens[1][0].value).toBe '2'
expect(tokens[1][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python']
expect(tokens[1][1].value).toBe '"'
expect(tokens[1][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
expect(tokens[1][2]).not.toBeDefined()
it "terminates a single-quoted raw string containing opening parenthesis at closing quote", ->
tokens = grammar.tokenizeLines("r'%d(' #foo")
expect(tokens[0][0].value).toBe 'r'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe "'"
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '('
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[0][4].value).toBe "'"
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a single-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines("r'%d[' #foo")
expect(tokens[0][0].value).toBe 'r'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe "'"
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe "'"
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a double-quoted raw string containing opening parenthesis at closing quote", ->
tokens = grammar.tokenizeLines('r"%d(" #foo')
expect(tokens[0][0].value).toBe 'r'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '('
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a double-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines('r"%d[" #foo')
expect(tokens[0][0].value).toBe 'r'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", ->
tokens = grammar.tokenizeLines("ur'%d(' #foo")
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe "'"
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '('
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[0][4].value).toBe "'"
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode single-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines("ur'%d[' #foo")
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe "'"
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe "'"
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.single.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", ->
tokens = grammar.tokenizeLines('ur"%d(" #foo')
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '('
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'meta.group.regexp', 'punctuation.definition.group.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates a unicode double-quoted raw string containing opening bracket at closing quote", ->
tokens = grammar.tokenizeLines('ur"%d[" #foo')
expect(tokens[0][0].value).toBe 'ur'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'storage.type.string.python']
expect(tokens[0][1].value).toBe '"'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][2].value).toBe '%d'
expect(tokens[0][2].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.placeholder.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'constant.other.character-class.set.regexp', 'punctuation.definition.character-class.begin.regexp']
expect(tokens[0][4].value).toBe '"'
expect(tokens[0][4].scopes).toEqual ['source.python', 'string.quoted.double.single-line.unicode-raw-regex.python', 'punctuation.definition.string.end.python']
expect(tokens[0][5].value).toBe ' '
expect(tokens[0][5].scopes).toEqual ['source.python']
expect(tokens[0][6].value).toBe '#'
expect(tokens[0][6].scopes).toEqual ['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']
expect(tokens[0][7].value).toBe 'foo'
expect(tokens[0][7].scopes).toEqual ['source.python', 'comment.line.number-sign.python']
it "terminates referencing an item in a list variable after a sequence of a closing and opening bracket", ->
tokens = grammar.tokenizeLines('foo[i[0]][j[0]]')
expect(tokens[0][0].value).toBe 'foo'
expect(tokens[0][0].scopes).toEqual ['source.python', 'meta.item-access.python']
expect(tokens[0][1].value).toBe '['
expect(tokens[0][1].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
expect(tokens[0][2].value).toBe 'i'
expect(tokens[0][2].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python']
expect(tokens[0][3].value).toBe '['
expect(tokens[0][3].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
expect(tokens[0][4].value).toBe '0'
expect(tokens[0][4].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']
expect(tokens[0][5].value).toBe ']'
expect(tokens[0][5].scopes).toEqual ['source.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
expect(tokens[0][6].value).toBe ']'
expect(tokens[0][6].scopes).toEqual ['source.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
expect(tokens[0][7].value).toBe '['
expect(tokens[0][7].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.begin.python']
expect(tokens[0][8].value).toBe 'j'
expect(tokens[0][8].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python']
expect(tokens[0][9].value).toBe '['
expect(tokens[0][9].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.begin.python']
expect(tokens[0][10].value).toBe '0'
expect(tokens[0][10].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'meta.item-access.arguments.python', 'constant.numeric.integer.decimal.python']
expect(tokens[0][11].value).toBe ']'
expect(tokens[0][11].scopes).toEqual ['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']
expect(tokens[0][12].value).toBe ']'
expect(tokens[0][12].scopes).toEqual ['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python']
it "tokenizes a hex escape inside a string", ->
tokens = grammar.tokenizeLines('"\\x5A"')
expect(tokens[0][0].value).toBe '"'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][1].value).toBe '\\x5A'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']
tokens = grammar.tokenizeLines('"\\x9f"')
expect(tokens[0][0].value).toBe '"'
expect(tokens[0][0].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[0][1].value).toBe '\\x9f'
expect(tokens[0][1].scopes).toEqual ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']
describe "f-strings", ->
it "tokenizes them", ->
{tokens} = grammar.tokenizeLine "f'hello'"
expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']
it "tokenizes {{ and }} as escape characters", ->
{tokens} = grammar.tokenizeLine "f'he}}l{{lo'"
expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']
expect(tokens[4]).toEqual value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[5]).toEqual value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']
expect(tokens[6]).toEqual value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[7]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']
it "tokenizes unmatched closing curly brackets as invalid", ->
{tokens} = grammar.tokenizeLine "f'he}llo'"
expect(tokens[0]).toEqual value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']
expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']
expect(tokens[2]).toEqual value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[3]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python']
expect(tokens[4]).toEqual value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]
expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']
describe "in expressions", ->
it "tokenizes variables", ->
{tokens} = grammar.tokenizeLine "f'{abc}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
expect(tokens[4]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes arithmetic", ->
{tokens} = grammar.tokenizeLine "f'{5 - 3}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']
expect(tokens[5]).toEqual value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python']
expect(tokens[7]).toEqual value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']
expect(tokens[8]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes function and method calls", ->
{tokens} = grammar.tokenizeLine "f'{name.decode(\"utf-8\").lower()}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'name', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'variable.other.object.python']
expect(tokens[4]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']
expect(tokens[5]).toEqual value: 'decode', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']
expect(tokens[6]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']
expect(tokens[7]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.begin.python']
expect(tokens[8]).toEqual value: 'utf-8', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python"]
expect(tokens[9]).toEqual value: '"', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'meta.method-call.arguments.python', "string.quoted.double.single-line.python", 'punctuation.definition.string.end.python']
expect(tokens[10]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']
expect(tokens[11]).toEqual value: '.', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.separator.method.period.python']
expect(tokens[12]).toEqual value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']
expect(tokens[13]).toEqual value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']
expect(tokens[14]).toEqual value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']
expect(tokens[15]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes conversion flags", ->
{tokens} = grammar.tokenizeLine "f'{abc!r}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
expect(tokens[4]).toEqual value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes format specifiers", ->
{tokens} = grammar.tokenizeLine "f'{abc:^d}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
expect(tokens[4]).toEqual value: ':^d', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
expect(tokens[5]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes nested replacement fields in top-level format specifiers", ->
{tokens} = grammar.tokenizeLine "f'{abc:{align}d}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
expect(tokens[4]).toEqual value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
expect(tokens[5]).toEqual value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']
expect(tokens[6]).toEqual value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']
expect(tokens[7]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
it "tokenizes backslashes as invalid", ->
{tokens} = grammar.tokenizeLine "f'{ab\\n}'"
expect(tokens[2]).toEqual value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']
expect(tokens[3]).toEqual value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']
expect(tokens[4]).toEqual value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python']
expect(tokens[6]).toEqual value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']
describe "binary strings", ->
it "tokenizes them", ->
{tokens} = grammar.tokenizeLine "b'test'"
expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python']
expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python']
expect(tokens[2]).toEqual value: 'test', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]
expect(tokens[3]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']
it "tokenizes invalid characters", ->
{tokens} = grammar.tokenizeLine "b'tést'"
expect(tokens[0]).toEqual value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python']
expect(tokens[1]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python']
expect(tokens[2]).toEqual value: 't', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]
expect(tokens[3]).toEqual value: 'é', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'invalid.illegal.character-out-of-range.python']
expect(tokens[4]).toEqual value: 'st', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]
expect(tokens[5]).toEqual value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']
describe "docstrings", ->
it "tokenizes them", ->
lines = grammar.tokenizeLines '''
"""
Bla bla bla "wow" what's this?
"""
'''
expect(lines[0][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.begin.python']
expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.double.block.python']
expect(lines[2][0]).toEqual value: '"""', scopes: ['source.python', 'string.quoted.double.block.python', 'punctuation.definition.string.end.python']
lines = grammar.tokenizeLines """
'''
Bla bla bla "wow" what's this?
'''
"""
expect(lines[0][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python']
expect(lines[1][0]).toEqual value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python']
expect(lines[2][0]).toEqual value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python']
describe "string formatting", ->
describe "%-style formatting", ->
it "tokenizes the conversion type", ->
{tokens} = grammar.tokenizeLine '"%d"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%d', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes an optional mapping key", ->
{tokens} = grammar.tokenizeLine '"%(key)x"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%(key)x', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes an optional conversion flag", ->
{tokens} = grammar.tokenizeLine '"% F"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '% F', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes an optional field width", ->
{tokens} = grammar.tokenizeLine '"%11s"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%11s', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes * as the optional field width", ->
{tokens} = grammar.tokenizeLine '"%*g"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%*g', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes an optional precision", ->
{tokens} = grammar.tokenizeLine '"%.4r"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%.4r', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes * as the optional precision", ->
{tokens} = grammar.tokenizeLine '"%.*%"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%.*%', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes an optional length modifier", ->
{tokens} = grammar.tokenizeLine '"%Lo"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%Lo', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes complex formats", ->
{tokens} = grammar.tokenizeLine '"%(key)#5.*hc"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '%(key)#5.*hc', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
describe "{}-style formatting", ->
it "tokenizes the empty replacement field", ->
{tokens} = grammar.tokenizeLine '"{}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes a number as the field name", ->
{tokens} = grammar.tokenizeLine '"{1}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{1}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes a variable name as the field name", ->
{tokens} = grammar.tokenizeLine '"{key}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{key}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes field name attributes", ->
{tokens} = grammar.tokenizeLine '"{key.length}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{key.length}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
{tokens} = grammar.tokenizeLine '"{4.width}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{4.width}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
{tokens} = grammar.tokenizeLine '"{python2[\'3\']}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{python2[\'3\']}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
{tokens} = grammar.tokenizeLine '"{2[4]}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{2[4]}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes multiple field name attributes", ->
{tokens} = grammar.tokenizeLine '"{nested.a[2][\'val\'].value}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{nested.a[2][\'val\'].value}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes conversions", ->
{tokens} = grammar.tokenizeLine '"{!r}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{!r}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
describe "format specifiers", ->
it "tokenizes alignment", ->
{tokens} = grammar.tokenizeLine '"{:<}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:<}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
{tokens} = grammar.tokenizeLine '"{:a^}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:a^}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes signs", ->
{tokens} = grammar.tokenizeLine '"{:+}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:+}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
{tokens} = grammar.tokenizeLine '"{: }"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{: }', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes the alternate form indicator", ->
{tokens} = grammar.tokenizeLine '"{:#}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:#}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes 0", ->
{tokens} = grammar.tokenizeLine '"{:0}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:0}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes the width", ->
{tokens} = grammar.tokenizeLine '"{:34}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:34}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes the grouping option", ->
{tokens} = grammar.tokenizeLine '"{:,}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:,}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes the precision", ->
{tokens} = grammar.tokenizeLine '"{:.5}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:.5}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes the type", ->
{tokens} = grammar.tokenizeLine '"{:b}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:b}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes nested replacement fields", ->
{tokens} = grammar.tokenizeLine '"{:{align}-.{precision}%}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{:', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '{align}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']
expect(tokens[3]).toEqual value: '-.', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[4]).toEqual value: '{precision}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']
expect(tokens[5]).toEqual value: '%}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[6]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes complex formats", ->
{tokens} = grammar.tokenizeLine '"{0.players[2]!a:2>-#01_.3d}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{0.players[2]!a:2>-#01_.3d}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes {{ and }} as escape characters and not formatters", ->
{tokens} = grammar.tokenizeLine '"{{hello}}"'
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: '{{', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python']
expect(tokens[2]).toEqual value: 'hello', scopes: ['source.python', 'string.quoted.double.single-line.python']
expect(tokens[3]).toEqual value: '}}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python']
expect(tokens[4]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
it "tokenizes properties of self as self-type variables", ->
tokens = grammar.tokenizeLines('self.foo')
expect(tokens[0][0]).toEqual value: 'self', scopes: ['source.python', 'variable.language.self.python']
expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']
expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python']
it "tokenizes cls as a self-type variable", ->
tokens = grammar.tokenizeLines('cls.foo')
expect(tokens[0][0]).toEqual value: 'cls', scopes: ['source.python', 'variable.language.self.python']
expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']
expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python']
it "tokenizes properties of a variable as variables", ->
tokens = grammar.tokenizeLines('bar.foo')
expect(tokens[0][0]).toEqual value: 'bar', scopes: ['source.python', 'variable.other.object.python']
expect(tokens[0][1]).toEqual value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']
expect(tokens[0][2]).toEqual value: 'foo', scopes: ['source.python', 'variable.other.property.python']
# Add the grammar test fixtures
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_functions.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_lambdas.py')
grammarTest path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py')
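# For reference, a rough sketch of the kind of assertion those fixture files
# contain (assuming the comment-based "<-" / "^" convention of
# atom-grammar-test; the real assertions live in the files referenced above):
#
#   def foo():
#   # <- storage.type.function.python
#   #   ^ entity.name.function.python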
describe "SQL highlighting", ->
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage('language-sql')
it "tokenizes SQL inline highlighting on blocks", ->
delimsByScope =
"string.quoted.double.block.sql.python": '"""'
"string.quoted.single.block.sql.python": "'''"
for scope, delim of delimsByScope
tokens = grammar.tokenizeLines("""
#{delim}
SELECT bar
FROM foo
#{delim}
""")
expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']
expect(tokens[1][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[1][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[2][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[2][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[3][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']
it "tokenizes SQL inline highlighting on blocks with a CTE", ->
# Note that these scopes do not contain .sql because we can't definitively tell
# if the string contains SQL or not
delimsByScope =
"string.quoted.double.block.python": '"""'
"string.quoted.single.block.python": "'''"
for scope, delim of delimsByScope
tokens = grammar.tokenizeLines("""
#{delim}
WITH example_cte AS (
SELECT bar
FROM foo
GROUP BY bar
)
SELECT COUNT(*)
FROM example_cte
#{delim}
""")
expect(tokens[0][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.begin.python']
expect(tokens[1][0]).toEqual value: 'WITH', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[1][1]).toEqual value: ' example_cte ', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[1][2]).toEqual value: 'AS', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.alias.sql']
expect(tokens[1][3]).toEqual value: ' ', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[1][4]).toEqual value: '(', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql']
expect(tokens[2][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[2][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[3][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[3][1]).toEqual value: ' foo', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[4][0]).toEqual value: 'GROUP BY', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[4][1]).toEqual value: ' bar', scopes: ['source.python', scope, 'meta.embedded.sql']
expect(tokens[5][0]).toEqual value: ')', scopes: ['source.python', scope, 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']
expect(tokens[7][0]).toEqual value: 'SELECT', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[8][0]).toEqual value: 'FROM', scopes: ['source.python', scope, 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[9][0]).toEqual value: delim, scopes: ['source.python', scope, 'punctuation.definition.string.end.python']
it "tokenizes SQL inline highlighting on single line with a CTE", ->
{tokens} = grammar.tokenizeLine('\'WITH example_cte AS (SELECT bar FROM foo) SELECT COUNT(*) FROM example_cte\'')
expect(tokens[0]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: 'WITH', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[2]).toEqual value: ' example_cte ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[3]).toEqual value: 'AS', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.alias.sql']
expect(tokens[4]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[5]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql']
expect(tokens[6]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[7]).toEqual value: ' bar ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[8]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[9]).toEqual value: ' foo', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[10]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']
expect(tokens[11]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[12]).toEqual value: 'SELECT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[13]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[14]).toEqual value: 'COUNT', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'support.function.aggregate.sql']
expect(tokens[15]).toEqual value: '(', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.begin.sql']
expect(tokens[16]).toEqual value: '*', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.operator.star.sql']
expect(tokens[17]).toEqual value: ')', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']
expect(tokens[18]).toEqual value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[19]).toEqual value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']
expect(tokens[20]).toEqual value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']
expect(tokens[21]).toEqual value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.end.python']
it "tokenizes Python escape characters and formatting specifiers in SQL strings", ->
{tokens} = grammar.tokenizeLine('"INSERT INTO url (image_uri) VALUES (\\\'%s\\\');" % values')
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.begin.python']
expect(tokens[10]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python']
expect(tokens[11]).toEqual value: '%s', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.other.placeholder.python']
expect(tokens[12]).toEqual value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python']
expect(tokens[13]).toEqual value: ')', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']
expect(tokens[15]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.end.python']
expect(tokens[17]).toEqual value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python']
it "recognizes DELETE as an HTTP method", ->
{tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"')
expect(tokens[0]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']
expect(tokens[1]).toEqual value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.double.single-line.python']
expect(tokens[2]).toEqual value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']
View File
@ -1,9 +1,4 @@
/*
* decaffeinate suggestions:
* DS102: Remove unnecessary code created because of implicit returns
* DS205: Consider reworking code to avoid use of IIFEs
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/main/docs/suggestions.md
*/
const path = require('path');
const grammarTest = require('atom-grammar-test');
@ -16,29 +11,29 @@ describe("Python grammar", function() {
waitsForPromise(() => atom.packages.activatePackage("language-python"));
return runs(() => grammar = atom.grammars.grammarForScopeName("source.python"));
runs(() => grammar = atom.grammars.grammarForScopeName("source.python"));
});
it("recognises shebang on firstline", function() {
expect(grammar.firstLineRegex.findNextMatchSync("#!/usr/bin/env python")).not.toBeNull();
return expect(grammar.firstLineRegex.findNextMatchSync("#! /usr/bin/env python")).not.toBeNull();
expect(grammar.firstLineRegex.findNextMatchSync("#! /usr/bin/env python")).not.toBeNull();
});
it("parses the grammar", function() {
expect(grammar).toBeDefined();
return expect(grammar.scopeName).toBe("source.python");
expect(grammar.scopeName).toBe("source.python");
});
it("tokenizes `yield`", function() {
const {tokens} = grammar.tokenizeLine('yield v');
return expect(tokens[0]).toEqual({value: 'yield', scopes: ['source.python', 'keyword.control.statement.python']});
expect(tokens[0]).toEqual({value: 'yield', scopes: ['source.python', 'keyword.control.statement.python']});
});
it("tokenizes `yield from`", function() {
const {tokens} = grammar.tokenizeLine('yield from v');
return expect(tokens[0]).toEqual({value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python']});
expect(tokens[0]).toEqual({value: 'yield from', scopes: ['source.python', 'keyword.control.statement.python']});
});
it("tokenizes multi-line strings", function() {
@ -63,7 +58,7 @@ describe("Python grammar", function() {
expect(tokens[1][1].value).toBe('"');
expect(tokens[1][1].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']);
return expect(tokens[1][2]).not.toBeDefined();
expect(tokens[1][2]).not.toBeDefined();
});
it("terminates a single-quoted raw string containing opening parenthesis at closing quote", function() {
@ -84,7 +79,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a single-quoted raw string containing opening bracket at closing quote", function() {
@ -105,7 +100,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a double-quoted raw string containing opening parenthesis at closing quote", function() {
@ -126,7 +121,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a double-quoted raw string containing opening bracket at closing quote", function() {
@ -147,7 +142,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a unicode single-quoted raw string containing opening parenthesis at closing quote", function() {
@ -168,7 +163,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a unicode single-quoted raw string containing opening bracket at closing quote", function() {
@ -189,7 +184,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a unicode double-quoted raw string containing opening parenthesis at closing quote", function() {
@ -210,7 +205,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates a unicode double-quoted raw string containing opening bracket at closing quote", function() {
@ -231,7 +226,7 @@ describe("Python grammar", function() {
expect(tokens[0][6].value).toBe('#');
expect(tokens[0][6].scopes).toEqual(['source.python', 'comment.line.number-sign.python', 'punctuation.definition.comment.python']);
expect(tokens[0][7].value).toBe('foo');
return expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
expect(tokens[0][7].scopes).toEqual(['source.python', 'comment.line.number-sign.python']);
});
it("terminates referencing an item in a list variable after a sequence of a closing and opening bracket", function() {
@ -262,7 +257,7 @@ describe("Python grammar", function() {
expect(tokens[0][11].value).toBe(']');
expect(tokens[0][11].scopes).toEqual(['source.python', 'meta.structure.list.python', 'meta.structure.list.item.python', 'meta.item-access.python', 'punctuation.definition.arguments.end.python']);
expect(tokens[0][12].value).toBe(']');
return expect(tokens[0][12].scopes).toEqual(['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python']);
expect(tokens[0][12].scopes).toEqual(['source.python', 'meta.structure.list.python', 'punctuation.definition.list.end.python']);
});
it("tokenizes a hex escape inside a string", function() {
@ -278,7 +273,7 @@ describe("Python grammar", function() {
expect(tokens[0][0].value).toBe('"');
expect(tokens[0][0].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']);
expect(tokens[0][1].value).toBe('\\x9f');
return expect(tokens[0][1].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']);
expect(tokens[0][1].scopes).toEqual(['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.hex.python']);
});
describe("f-strings", function() {
@ -288,7 +283,7 @@ describe("Python grammar", function() {
expect(tokens[0]).toEqual({value: 'f', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'storage.type.string.python']});
expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.begin.python']});
expect(tokens[2]).toEqual({value: 'hello', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
return expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
it("tokenizes {{ and }} as escape characters", function() {
@ -301,7 +296,7 @@ describe("Python grammar", function() {
expect(tokens[4]).toEqual({value: 'l', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[5]).toEqual({value: '{{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'constant.character.escape.curly-bracket.python']});
expect(tokens[6]).toEqual({value: 'lo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
return expect(tokens[7]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
expect(tokens[7]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
it("tokenizes unmatched closing curly brackets as invalid", function() {
@ -312,16 +307,16 @@ describe("Python grammar", function() {
expect(tokens[2]).toEqual({value: 'he', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
expect(tokens[3]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'invalid.illegal.closing-curly-bracket.python']});
expect(tokens[4]).toEqual({value: 'llo', scopes: ['source.python', "string.quoted.single.single-line.format.python"]});
return expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.format.python", 'punctuation.definition.string.end.python']});
});
return describe("in expressions", function() {
describe("in expressions", function() {
it("tokenizes variables", function() {
const {tokens} = grammar.tokenizeLine("f'{abc}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
return expect(tokens[4]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[4]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes arithmetic", function() {
@ -331,7 +326,7 @@ describe("Python grammar", function() {
expect(tokens[3]).toEqual({value: '5', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']});
expect(tokens[5]).toEqual({value: '-', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'keyword.operator.arithmetic.python']});
expect(tokens[7]).toEqual({value: '3', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'constant.numeric.integer.decimal.python']});
return expect(tokens[8]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[8]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes function and method calls", function() {
@ -350,7 +345,7 @@ describe("Python grammar", function() {
expect(tokens[12]).toEqual({value: 'lower', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'entity.name.function.python']});
expect(tokens[13]).toEqual({value: '(', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.begin.bracket.round.python']});
expect(tokens[14]).toEqual({value: ')', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'meta.method-call.python', 'punctuation.definition.arguments.end.bracket.round.python']});
return expect(tokens[15]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[15]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes conversion flags", function() {
@ -359,7 +354,7 @@ describe("Python grammar", function() {
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: '!r', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
return expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes format specifiers", function() {
@ -368,7 +363,7 @@ describe("Python grammar", function() {
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'abc', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: ':^d', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
return expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[5]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
it("tokenizes nested replacement fields in top-level format specifiers", function() {
@ -379,16 +374,16 @@ describe("Python grammar", function() {
expect(tokens[4]).toEqual({value: ':', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
expect(tokens[5]).toEqual({value: '{align}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']});
expect(tokens[6]).toEqual({value: 'd', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'constant.other.placeholder.python']});
return expect(tokens[7]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[7]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
return it("tokenizes backslashes as invalid", function() {
it("tokenizes backslashes as invalid", function() {
const {tokens} = grammar.tokenizeLine("f'{ab\\n}'");
expect(tokens[2]).toEqual({value: '{', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.begin.bracket.curly.python']});
expect(tokens[3]).toEqual({value: 'ab', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python']});
expect(tokens[4]).toEqual({value: '\\', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'meta.embedded.python', 'invalid.illegal.backslash.python']});
return expect(tokens[6]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
expect(tokens[6]).toEqual({value: '}', scopes: ['source.python', "string.quoted.single.single-line.format.python", 'meta.interpolation.python', 'punctuation.definition.interpolation.end.bracket.curly.python']});
});
});
});
@ -400,10 +395,10 @@ describe("Python grammar", function() {
expect(tokens[0]).toEqual({value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python']});
expect(tokens[1]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.begin.python']});
expect(tokens[2]).toEqual({value: 'test', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]});
return expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']});
expect(tokens[3]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']});
});
return it("tokenizes invalid characters", function() {
it("tokenizes invalid characters", function() {
const {tokens} = grammar.tokenizeLine("b'tést'");
expect(tokens[0]).toEqual({value: 'b', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'storage.type.string.python']});
@ -411,14 +406,14 @@ describe("Python grammar", function() {
expect(tokens[2]).toEqual({value: 't', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]});
expect(tokens[3]).toEqual({value: 'é', scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'invalid.illegal.character-out-of-range.python']});
expect(tokens[4]).toEqual({value: 'st', scopes: ['source.python', "string.quoted.single.single-line.binary.python"]});
return expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']});
expect(tokens[5]).toEqual({value: "'", scopes: ['source.python', "string.quoted.single.single-line.binary.python", 'punctuation.definition.string.end.python']});
});
});
describe("docstrings", () => it("tokenizes them", function() {
let lines = grammar.tokenizeLines(`\
"""
Bla bla bla "wow" what's this?
Bla bla bla "wow" what's this?
"""\
`
);
@ -429,14 +424,14 @@ Bla bla bla "wow" what's this?
lines = grammar.tokenizeLines(`\
'''
Bla bla bla "wow" what's this?
Bla bla bla "wow" what's this?
'''\
`
);
expect(lines[0][0]).toEqual({value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.begin.python']});
expect(lines[1][0]).toEqual({value: ' Bla bla bla "wow" what\'s this?', scopes: ['source.python', 'string.quoted.single.block.python']});
return expect(lines[2][0]).toEqual({value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python']});
expect(lines[2][0]).toEqual({value: "'''", scopes: ['source.python', 'string.quoted.single.block.python', 'punctuation.definition.string.end.python']});
}));
@ -447,7 +442,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%d', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes an optional mapping key", function() {
@ -455,7 +450,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%(key)x', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes an optional conversion flag", function() {
@ -463,7 +458,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '% F', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes an optional field width", function() {
@ -471,7 +466,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%11s', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes * as the optional field width", function() {
@ -479,7 +474,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%*g', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes an optional precision", function() {
@ -487,7 +482,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%.4r', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes * as the optional precision", function() {
@ -495,7 +490,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%.*%', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes an optional length modifier", function() {
@ -503,25 +498,25 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%Lo', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
return it("tokenizes complex formats", function() {
it("tokenizes complex formats", function() {
const {tokens} = grammar.tokenizeLine('"%(key)#5.*hc"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '%(key)#5.*hc', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
});
return describe("{}-style formatting", function() {
describe("{}-style formatting", function() {
it("tokenizes the empty replacement field", function() {
const {tokens} = grammar.tokenizeLine('"{}"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes a number as the field name", function() {
@ -529,7 +524,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{1}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes a variable name as the field name", function() {
@ -537,7 +532,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{key}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes field name attributes", function() {
@ -563,7 +558,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{2[4]}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes multiple field name attributes", function() {
@ -571,7 +566,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{nested.a[2][\'val\'].value}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes conversions", function() {
@ -579,7 +574,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{!r}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
describe("format specifiers", function() {
@ -594,7 +589,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:a^}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes signs", function() {
@ -608,7 +603,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{: }', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes the alternate form indicator", function() {
@ -616,7 +611,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:#}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes 0", function() {
@ -624,7 +619,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:0}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes the width", function() {
@@ -632,7 +627,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:34}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes the grouping option", function() {
@@ -640,7 +635,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:,}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes the precision", function() {
@@ -648,7 +643,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:.5}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes the type", function() {
@@ -656,10 +651,10 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{:b}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
return it("tokenizes nested replacement fields", function() {
it("tokenizes nested replacement fields", function() {
const {tokens} = grammar.tokenizeLine('"{:{align}-.{precision}%}"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
@@ -668,7 +663,7 @@ Bla bla bla "wow" what's this?
expect(tokens[3]).toEqual({value: '-.', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
expect(tokens[4]).toEqual({value: '{precision}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python', 'constant.other.placeholder.python']});
expect(tokens[5]).toEqual({value: '%}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[6]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[6]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
});
@@ -677,17 +672,17 @@ Bla bla bla "wow" what's this?
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{0.players[2]!a:2>-#01_.3d}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.other.placeholder.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
return it("tokenizes {{ and }} as escape characters and not formatters", function() {
it("tokenizes {{ and }} as escape characters and not formatters", function() {
const {tokens} = grammar.tokenizeLine('"{{hello}}"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: '{{', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python']});
expect(tokens[2]).toEqual({value: 'hello', scopes: ['source.python', 'string.quoted.double.single-line.python']});
expect(tokens[3]).toEqual({value: '}}', scopes: ['source.python', 'string.quoted.double.single-line.python', 'constant.character.escape.curly-bracket.python']});
-return expect(tokens[4]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[4]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
});
});
@@ -697,7 +692,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0][0]).toEqual({value: 'self', scopes: ['source.python', 'variable.language.self.python']});
expect(tokens[0][1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
-return expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
+expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
it("tokenizes cls as a self-type variable", function() {
@@ -705,7 +700,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0][0]).toEqual({value: 'cls', scopes: ['source.python', 'variable.language.self.python']});
expect(tokens[0][1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
-return expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
+expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
it("tokenizes properties of a variable as variables", function() {
@@ -713,7 +708,7 @@ Bla bla bla "wow" what's this?
expect(tokens[0][0]).toEqual({value: 'bar', scopes: ['source.python', 'variable.other.object.python']});
expect(tokens[0][1]).toEqual({value: '.', scopes: ['source.python', 'punctuation.separator.property.period.python']});
-return expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
+expect(tokens[0][2]).toEqual({value: 'foo', scopes: ['source.python', 'variable.other.property.python']});
});
// Add the grammar test fixtures
@@ -722,7 +717,7 @@ Bla bla bla "wow" what's this?
grammarTest(path.join(__dirname, 'fixtures/grammar/syntax_test_python_lambdas.py'));
grammarTest(path.join(__dirname, 'fixtures/grammar/syntax_test_python_typing.py'));
return describe("SQL highlighting", function() {
describe("SQL highlighting", function() {
beforeEach(() => waitsForPromise(() => atom.packages.activatePackage('language-sql')));
it("tokenizes SQL inline highlighting on blocks", function() {
@@ -823,7 +818,7 @@ ${delim}\
expect(tokens[18]).toEqual({value: ' ', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']});
expect(tokens[19]).toEqual({value: 'FROM', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql', 'keyword.other.DML.sql']});
expect(tokens[20]).toEqual({value: ' example_cte', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'meta.embedded.sql']});
-return expect(tokens[21]).toEqual({value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.end.python']});
+expect(tokens[21]).toEqual({value: '\'', scopes: ['source.python', 'string.quoted.single.single-line.sql.python', 'punctuation.definition.string.end.python']});
});
it("tokenizes Python escape characters and formatting specifiers in SQL strings", function() {
@@ -835,15 +830,15 @@ ${delim}\
expect(tokens[12]).toEqual({value: '\\\'', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'constant.character.escape.single-quote.python']});
expect(tokens[13]).toEqual({value: ')', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'meta.embedded.sql', 'punctuation.definition.section.bracket.round.end.sql']});
expect(tokens[15]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.sql.python', 'punctuation.definition.string.end.python']});
-return expect(tokens[17]).toEqual({value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python']});
+expect(tokens[17]).toEqual({value: '%', scopes: ['source.python', 'keyword.operator.arithmetic.python']});
});
return it("recognizes DELETE as an HTTP method", function() {
it("recognizes DELETE as an HTTP method", function() {
const {tokens} = grammar.tokenizeLine('"DELETE /api/v1/endpoint"');
expect(tokens[0]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.begin.python']});
expect(tokens[1]).toEqual({value: 'DELETE /api/v1/endpoint', scopes: ['source.python', 'string.quoted.double.single-line.python']});
-return expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
+expect(tokens[2]).toEqual({value: '"', scopes: ['source.python', 'string.quoted.double.single-line.python', 'punctuation.definition.string.end.python']});
});
});
});