TokenizedBuffer = require 'tokenized-buffer'
LanguageMode = require 'language-mode'
Buffer = require 'text-buffer'
Range = require 'range'
_ = require 'underscore'

describe "TokenizedBuffer", ->
  [editSession, tokenizedBuffer, buffer, changeHandler] = []

  beforeEach ->
    atom.activatePackage('javascript.tmbundle', sync: true)
    # enable async tokenization
    TokenizedBuffer.prototype.chunkSize = 5
    jasmine.unspy(TokenizedBuffer.prototype, 'tokenizeInBackground')
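
  # Helper: drives the mock clock until no rows remain invalid, then clears any
  # change events recorded along the way so specs start from a clean spy.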
  fullyTokenize = (tokenizedBuffer) ->
    advanceClock() while tokenizedBuffer.firstInvalidRow()?
    changeHandler?.reset()

  describe "when the buffer contains soft-tabs", ->
    beforeEach ->
      editSession = project.buildEditSession('sample.js', autoIndent: false)
      buffer = editSession.buffer
      tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
      editSession.setVisible(true)
      changeHandler = jasmine.createSpy('changeHandler')
      tokenizedBuffer.on "changed", changeHandler

    afterEach ->
      editSession.destroy()
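
    # chunkSize is 5 (see the top-level beforeEach), so each advanceClock()
    # call tokenizes at most five rows in the background.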
describe "on construction", ->
|
|
|
|
it "initially creates un-tokenized screen lines, then tokenizes lines chunk at a time in the background", ->
|
|
|
|
line0 = tokenizedBuffer.lineForScreenRow(0)
|
|
|
|
expect(line0.tokens.length).toBe 1
|
|
|
|
expect(line0.tokens[0]).toEqual(value: line0.text, scopes: ['source.js'])
|
|
|
|
|
|
|
|
line11 = tokenizedBuffer.lineForScreenRow(11)
|
|
|
|
expect(line11.tokens.length).toBe 2
|
|
|
|
expect(line11.tokens[0]).toEqual(value: " ", scopes: ['source.js'], isAtomic: true)
|
|
|
|
expect(line11.tokens[1]).toEqual(value: "return sort(Array.apply(this, arguments));", scopes: ['source.js'])
|
|
|
|
|
|
|
|
# background tokenization has not begun
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).ruleStack).toBeUndefined()
|
|
|
|
|
|
|
|
# tokenize chunk 1
|
|
|
|
advanceClock()
|
2012-11-20 03:50:49 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).ruleStack?).toBeFalsy()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 0, end: 4, delta: 0)
|
|
|
|
changeHandler.reset()
|
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
# tokenize chunk 2
|
|
|
|
advanceClock()
|
2012-11-20 03:50:49 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(9).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(10).ruleStack?).toBeFalsy()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 5, end: 9, delta: 0)
|
|
|
|
changeHandler.reset()
|
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
# tokenize last chunk
|
|
|
|
advanceClock()
|
2012-11-20 03:50:49 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(10).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(12).ruleStack?).toBeTruthy()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 10, end: 12, delta: 0)
|
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
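
    # Only the first chunk (rows 0-4) has been tokenized here, so row 5 is the
    # first invalid row in every spec below.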
describe "when the buffer is partially tokenized", ->
|
|
|
|
beforeEach ->
|
|
|
|
# tokenize chunk 1 only
|
|
|
|
advanceClock()
|
2012-11-22 21:07:24 +04:00
|
|
|
changeHandler.reset()
|
2012-11-21 20:09:08 +04:00
|
|
|
|
2012-11-22 21:07:24 +04:00
|
|
|
describe "when there is a buffer change inside the tokenized region", ->
|
|
|
|
describe "when lines are added", ->
|
|
|
|
it "pushes the invalid rows down", ->
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 5
|
|
|
|
buffer.insert([1, 0], '\n\n')
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 7
|
|
|
|
advanceClock()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 7, end: 11, delta: 0)
|
|
|
|
|
|
|
|
describe "when lines are removed", ->
|
|
|
|
it "pulls the invalid rows up", ->
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 5
|
|
|
|
buffer.delete([[1, 0], [3, 0]])
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 3
|
|
|
|
advanceClock()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 3, end: 7, delta: 0)
|
|
|
|
|
|
|
|
describe "when the change invalidates all the lines before the current invalid region", ->
|
|
|
|
it "retokenizes the invalidated lines and continues into the valid region", ->
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 5
|
|
|
|
buffer.insert([2, 0], '/*')
|
|
|
|
changeHandler.reset()
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 3
|
|
|
|
|
|
|
|
advanceClock()
|
|
|
|
expect(changeHandler).toHaveBeenCalledWith(start: 3, end: 7, delta: 0)
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 8
|
2012-11-21 20:09:08 +04:00
|
|
|
|
|
|
|
describe "when there is a buffer change surrounding an invalid row", ->
|
2012-11-22 22:02:14 +04:00
|
|
|
it "pushes the invalid row to the end of the change", ->
|
|
|
|
buffer.change([[4, 0], [6, 0]], "\n\n\n")
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 8
|
|
|
|
advanceClock()
|
2012-11-21 20:09:08 +04:00
|
|
|
|
|
|
|
describe "when there is a buffer change inside an invalid region", ->
|
2012-11-22 22:02:14 +04:00
|
|
|
it "does not attempt to tokenize the lines in the change, and preserves the existing invalid row", ->
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 5
|
|
|
|
buffer.change([[6, 0], [7, 0]], "\n\n\n")
|
|
|
|
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(6).ruleStack?).toBeFalsy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(7).ruleStack?).toBeFalsy()
|
|
|
|
|
|
|
|
changeHandler.reset()
|
|
|
|
expect(tokenizedBuffer.firstInvalidRow()).toBe 5
|
2012-08-28 02:36:36 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
describe "when the buffer is fully tokenized", ->
|
|
|
|
beforeEach ->
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
describe "when there is a buffer change that is smaller than the chunk size", ->
|
2012-11-20 04:11:20 +04:00
|
|
|
describe "when lines are updated, but none are added or removed", ->
|
2012-11-21 20:09:08 +04:00
|
|
|
it "updates tokens to reflect the change", ->
|
|
|
|
buffer.change([[0, 0], [2, 0]], "foo()\n7\n")
|
2012-11-20 04:11:20 +04:00
|
|
|
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).tokens[1]).toEqual(value: '(', scopes: ['source.js', 'meta.brace.round.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).tokens[0]).toEqual(value: '7', scopes: ['source.js', 'constant.numeric.js'])
|
|
|
|
# line 2 is unchanged
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).tokens[2]).toEqual(value: 'if', scopes: ['source.js', 'keyword.control.js'])
|
|
|
|
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 0, end: 2, delta: 0)
|
|
|
|
|
2012-11-21 21:02:52 +04:00
|
|
|
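
          # Inserting '/*' opens a block comment, which changes the parse state
          # at the end of the changed line and so invalidates the rows below it.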
describe "when the change invalidates the tokenization of subsequent lines", ->
|
|
|
|
it "schedules the invalidated lines to be tokenized in the background", ->
|
|
|
|
buffer.insert([5, 30], '/* */')
|
|
|
|
changeHandler.reset()
|
|
|
|
buffer.insert([2, 0], '/*')
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0].scopes).toEqual ['source.js']
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 2, end: 2, delta: 0)
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
advanceClock()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 3, end: 5, delta: 0)
|
2012-11-20 04:11:20 +04:00
|
|
|
|
|
|
|
it "resumes highlighting with the state of the previous line", ->
|
|
|
|
buffer.insert([0, 0], '/*')
|
|
|
|
buffer.insert([5, 0], '*/')
|
|
|
|
|
|
|
|
buffer.insert([1, 0], 'var ')
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
|
|
|
|
describe "when lines are both updated and removed", ->
|
2012-11-22 05:38:39 +04:00
|
|
|
it "updates tokens to reflect the change", ->
|
|
|
|
buffer.change([[1, 0], [3, 0]], "foo()")
|
2012-11-20 04:11:20 +04:00
|
|
|
|
|
|
|
# previous line 0 remains
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).tokens[0]).toEqual(value: 'var', scopes: ['source.js', 'storage.modifier.js'])
|
|
|
|
|
|
|
|
# previous line 3 should be combined with input to form line 1
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).tokens[0]).toEqual(value: 'foo', scopes: ['source.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).tokens[6]).toEqual(value: '=', scopes: ['source.js', 'keyword.operator.js'])
|
|
|
|
|
|
|
|
# lines below deleted regions should be shifted upward
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).tokens[2]).toEqual(value: 'while', scopes: ['source.js', 'keyword.control.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[4]).toEqual(value: '=', scopes: ['source.js', 'keyword.operator.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[4]).toEqual(value: '<', scopes: ['source.js', 'keyword.operator.js'])
|
|
|
|
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 1, end: 3, delta: -2)
|
|
|
|
|
2012-11-22 05:38:39 +04:00
|
|
|
describe "when the change invalidates the tokenization of subsequent lines", ->
|
|
|
|
it "schedules the invalidated lines to be tokenized in the background", ->
|
2012-11-20 04:11:20 +04:00
|
|
|
buffer.insert([5, 30], '/* */')
|
|
|
|
changeHandler.reset()
|
|
|
|
|
2012-11-22 05:38:39 +04:00
|
|
|
buffer.change([[2, 0], [3, 0]], '/*')
|
2012-11-20 04:11:20 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).tokens[0].scopes).toEqual ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
|
2012-11-22 05:38:39 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0].scopes).toEqual ['source.js']
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 2, end: 3, delta: -1)
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
advanceClock()
|
2012-11-20 04:11:20 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
2012-11-22 05:38:39 +04:00
|
|
|
expect(event).toEqual(start: 3, end: 4, delta: 0)
|
2012-11-20 04:11:20 +04:00
|
|
|
|
|
|
|
describe "when lines are both updated and inserted", ->
|
2012-11-22 05:38:39 +04:00
|
|
|
it "updates tokens to reflect the change", ->
|
|
|
|
buffer.change([[1, 0], [2, 0]], "foo()\nbar()\nbaz()\nquux()")
|
2012-11-20 04:11:20 +04:00
|
|
|
|
|
|
|
# previous line 0 remains
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).tokens[0]).toEqual( value: 'var', scopes: ['source.js', 'storage.modifier.js'])
|
|
|
|
|
|
|
|
# 3 new lines inserted
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).tokens[0]).toEqual(value: 'foo', scopes: ['source.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).tokens[0]).toEqual(value: 'bar', scopes: ['source.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0]).toEqual(value: 'baz', scopes: ['source.js'])
|
|
|
|
|
|
|
|
# previous line 2 is joined with quux() on line 4
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[0]).toEqual(value: 'quux', scopes: ['source.js'])
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[4]).toEqual(value: 'if', scopes: ['source.js', 'keyword.control.js'])
|
|
|
|
|
|
|
|
# previous line 3 is pushed down to become line 5
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[4]).toEqual(value: '=', scopes: ['source.js', 'keyword.operator.js'])
|
|
|
|
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 1, end: 2, delta: 2)
|
|
|
|
|
2012-11-22 05:38:39 +04:00
|
|
|
describe "when the change invalidates the tokenization of subsequent lines", ->
|
|
|
|
it "schedules the invalidated lines to be tokenized in the background", ->
|
2012-11-20 04:11:20 +04:00
|
|
|
buffer.insert([5, 30], '/* */')
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
buffer.insert([2, 0], '/*\nabcde\nabcder')
|
2012-11-22 05:38:39 +04:00
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
|
|
|
expect(event).toEqual(start: 2, end: 2, delta: 2)
|
2012-11-20 04:11:20 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).tokens[0].scopes).toEqual ['source.js', 'comment.block.js', 'punctuation.definition.comment.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
2012-11-22 05:38:39 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].scopes).toEqual ['source.js']
|
|
|
|
changeHandler.reset()
|
|
|
|
|
|
|
|
advanceClock() # tokenize invalidated lines in background
|
2012-11-20 04:11:20 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(6).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(7).tokens[0].scopes).toEqual ['source.js', 'comment.block.js']
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(8).tokens[0].scopes).not.toBe ['source.js', 'comment.block.js']
|
|
|
|
|
|
|
|
expect(changeHandler).toHaveBeenCalled()
|
|
|
|
[event] = changeHandler.argsForCall[0]
|
|
|
|
delete event.bufferChange
|
2012-11-22 05:38:39 +04:00
|
|
|
expect(event).toEqual(start: 5, end: 7, delta: 0)
|
2012-11-20 04:11:20 +04:00
|
|
|
|
2012-12-12 04:51:05 +04:00
|
|
|
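
      # chunkSize + 2 comment lines are inserted, so only the first chunk is
      # retokenized synchronously; the rest is picked up on the next clock tick.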
describe "when there is an insertion that is larger than the chunk size", ->
|
|
|
|
it "tokenizes the initial chunk synchronously, then tokenizes the remaining lines in the background", ->
|
|
|
|
commentBlock = _.multiplyString("// a comment\n", tokenizedBuffer.chunkSize + 2)
|
|
|
|
buffer.insert([0,0], commentBlock)
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(4).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).ruleStack?).toBeFalsy()
|
|
|
|
|
|
|
|
advanceClock()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).ruleStack?).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(6).ruleStack?).toBeTruthy()
|
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
describe ".findOpeningBracket(closingBufferPosition)", ->
|
|
|
|
it "returns the position of the matching bracket, skipping any nested brackets", ->
|
|
|
|
expect(tokenizedBuffer.findOpeningBracket([9, 2])).toEqual [1, 29]
|
2012-11-20 04:11:20 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
describe ".findClosingBracket(startBufferPosition)", ->
|
|
|
|
it "returns the position of the matching bracket, skipping any nested brackets", ->
|
|
|
|
expect(tokenizedBuffer.findClosingBracket([1, 29])).toEqual [9, 2]
|
2012-11-20 04:11:20 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
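
      # With a tab length of 2, the leading whitespace of row 5 is split into
      # 2-space atomic tokens; after setTabLength(4) a single 4-space atomic
      # token is produced and the leftover spaces fold into the next token.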
it "tokenizes leading whitespace based on the new tab length", ->
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].isAtomic).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].value).toBe " "
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[1].isAtomic).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[1].value).toBe " "
|
2012-11-20 04:11:20 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
tokenizedBuffer.setTabLength(4)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
2012-11-20 04:11:20 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].isAtomic).toBeTruthy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[0].value).toBe " "
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[1].isAtomic).toBeFalsy()
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(5).tokens[1].value).toBe " current "
|
2012-11-16 19:57:46 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
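
  # sample-with-tabs.coffee indents with hard tab characters; on screen each
  # tab is rendered as a single atomic token of tabLength spaces.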
describe "when the buffer contains hard-tabs", ->
|
|
|
|
beforeEach ->
|
2013-03-26 03:31:01 +04:00
|
|
|
atom.activatePackage('coffee-script-tmbundle', sync: true)
|
2012-11-21 20:09:08 +04:00
|
|
|
tabLength = 2
|
2013-02-28 21:11:06 +04:00
|
|
|
editSession = project.buildEditSession('sample-with-tabs.coffee', { tabLength })
|
2012-11-21 20:09:08 +04:00
|
|
|
buffer = editSession.buffer
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
2012-11-23 23:14:46 +04:00
|
|
|
editSession.setVisible(true)
|
2012-08-28 02:36:36 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
afterEach ->
|
|
|
|
editSession.destroy()
|
2012-08-28 02:36:36 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
describe "when the buffer is fully tokenized", ->
|
2012-08-28 02:36:36 +04:00
|
|
|
beforeEach ->
|
2012-11-21 20:09:08 +04:00
|
|
|
fullyTokenize(tokenizedBuffer)
|
2012-08-28 02:36:36 +04:00
|
|
|
|
2012-11-21 20:09:08 +04:00
|
|
|
it "renders each tab as its own atomic token with a value of size tabLength", ->
|
|
|
|
tabAsSpaces = _.multiplyString(' ', editSession.getTabLength())
|
2012-08-28 02:36:36 +04:00
|
|
|
screenLine0 = tokenizedBuffer.lineForScreenRow(0)
|
2012-10-26 22:21:20 +04:00
|
|
|
expect(screenLine0.text).toBe "# Econ 101#{tabAsSpaces}"
|
2012-08-28 02:36:36 +04:00
|
|
|
{ tokens } = screenLine0
|
2012-11-06 21:03:54 +04:00
|
|
|
|
2012-10-18 22:43:17 +04:00
|
|
|
expect(tokens.length).toBe 3
|
2012-08-28 02:36:36 +04:00
|
|
|
expect(tokens[0].value).toBe "#"
|
2012-10-18 22:43:17 +04:00
|
|
|
expect(tokens[1].value).toBe " Econ 101"
|
2012-10-26 22:21:20 +04:00
|
|
|
expect(tokens[2].value).toBe tabAsSpaces
|
2012-10-18 22:43:17 +04:00
|
|
|
expect(tokens[2].scopes).toEqual tokens[1].scopes
|
|
|
|
expect(tokens[2].isAtomic).toBeTruthy()
|
2012-08-28 02:36:36 +04:00
|
|
|
|
2012-10-26 22:21:20 +04:00
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).text).toBe "#{tabAsSpaces} buy()#{tabAsSpaces}while supply > demand"
|
2012-11-06 22:44:50 +04:00
|
|
|
|
2013-04-22 04:28:58 +04:00
|
|
|
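
  # The ERB grammar includes the HTML grammar. Until html.tmbundle is
  # activated, the markup is tokenized as plain text.html.ruby; activating it
  # emits 'grammar-updated', which should trigger a full retokenization.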
describe "when the language mode emits a 'grammar-updated' event based on an included grammar being activated", ->
|
|
|
|
it "retokenizes the buffer", ->
|
|
|
|
atom.activatePackage('ruby.tmbundle', sync: true)
|
|
|
|
atom.activatePackage('ruby-on-rails-tmbundle', sync: true)
|
|
|
|
|
|
|
|
editSession = project.buildEditSession()
|
|
|
|
editSession.setVisible(true)
|
|
|
|
editSession.setGrammar(syntax.selectGrammar('test.erb'))
|
|
|
|
editSession.buffer.setText("<div class='name'><%= User.find(2).full_name %></div>")
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
{tokens} = tokenizedBuffer.lineForScreenRow(0)
|
|
|
|
expect(tokens[0]).toEqual value: "<div class='name'>", scopes: ["text.html.ruby"]
|
|
|
|
|
|
|
|
atom.activatePackage('html.tmbundle', sync: true)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
{tokens} = tokenizedBuffer.lineForScreenRow(0)
|
|
|
|
expect(tokens[0]).toEqual value: '<', scopes: ["text.html.ruby","meta.tag.block.any.html","punctuation.definition.tag.begin.html"]
|
|
|
|
|
2012-12-20 09:44:21 +04:00
|
|
|
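
  # The Git grammar flags an over-long summary line with an
  # invalid.deprecated.line-too-long scope.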
describe "when a Git commit message file is tokenized", ->
|
|
|
|
beforeEach ->
|
2013-03-26 03:31:01 +04:00
|
|
|
atom.activatePackage('git.tmbundle', sync: true)
|
2013-02-28 21:11:06 +04:00
|
|
|
editSession = project.buildEditSession('COMMIT_EDITMSG', autoIndent: false)
|
2012-12-20 09:44:21 +04:00
|
|
|
buffer = editSession.buffer
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
|
|
|
editSession.setVisible(true)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
afterEach ->
|
|
|
|
editSession.destroy()
|
|
|
|
|
2012-12-21 09:08:09 +04:00
|
|
|
it "correctly parses a long line", ->
|
|
|
|
longLine = tokenizedBuffer.lineForScreenRow(0)
|
|
|
|
expect(longLine.text).toBe "longggggggggggggggggggggggggggggggggggggggggggggggg"
|
|
|
|
{ tokens } = longLine
|
|
|
|
|
|
|
|
expect(tokens[0].value).toBe "longggggggggggggggggggggggggggggggggggggggggggggggg"
|
|
|
|
expect(tokens[0].scopes).toEqual ["text.git-commit", "meta.scope.message.git-commit", "invalid.deprecated.line-too-long.git-commit"]
|
|
|
|
|
2012-12-20 09:44:21 +04:00
|
|
|
it "correctly parses the number sign of the first comment line", ->
|
|
|
|
commentLine = tokenizedBuffer.lineForScreenRow(1)
|
|
|
|
expect(commentLine.text).toBe "# Please enter the commit message for your changes. Lines starting"
|
|
|
|
{ tokens } = commentLine
|
|
|
|
|
|
|
|
expect(tokens[0].value).toBe "#"
|
|
|
|
expect(tokens[0].scopes).toEqual ["text.git-commit", "meta.scope.metadata.git-commit", "comment.line.number-sign.git-commit", "punctuation.definition.comment.git-commit"]
|
2012-12-21 21:52:45 +04:00
|
|
|
|
|
|
|
describe "when a C++ source file is tokenized", ->
|
|
|
|
beforeEach ->
|
2013-03-26 03:31:01 +04:00
|
|
|
atom.activatePackage('c.tmbundle', sync: true)
|
2013-02-28 21:11:06 +04:00
|
|
|
editSession = project.buildEditSession('includes.cc', autoIndent: false)
|
2012-12-21 21:52:45 +04:00
|
|
|
buffer = editSession.buffer
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
|
|
|
editSession.setVisible(true)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
afterEach ->
|
|
|
|
editSession.destroy()
|
|
|
|
|
|
|
|
it "correctly parses the first include line", ->
|
|
|
|
longLine = tokenizedBuffer.lineForScreenRow(0)
|
|
|
|
expect(longLine.text).toBe '#include "a.h"'
|
|
|
|
{ tokens } = longLine
|
|
|
|
|
|
|
|
expect(tokens[0].value).toBe "#"
|
|
|
|
expect(tokens[0].scopes).toEqual ["source.c++", "meta.preprocessor.c.include"]
|
|
|
|
expect(tokens[1].value).toBe 'include'
|
|
|
|
expect(tokens[1].scopes).toEqual ["source.c++", "meta.preprocessor.c.include", "keyword.control.import.include.c"]
|
|
|
|
|
|
|
|
it "correctly parses the second include line", ->
|
|
|
|
commentLine = tokenizedBuffer.lineForScreenRow(1)
|
|
|
|
expect(commentLine.text).toBe '#include "b.h"'
|
|
|
|
{ tokens } = commentLine
|
|
|
|
|
|
|
|
expect(tokens[0].value).toBe "#"
|
|
|
|
expect(tokens[0].scopes).toEqual ["source.c++", "meta.preprocessor.c.include"]
|
|
|
|
expect(tokens[1].value).toBe 'include'
|
|
|
|
expect(tokens[1].scopes).toEqual ["source.c++", "meta.preprocessor.c.include", "keyword.control.import.include.c"]
|
2012-12-26 23:41:50 +04:00
|
|
|
|
|
|
|
describe "when a Ruby source file is tokenized", ->
|
|
|
|
beforeEach ->
|
2013-02-28 21:11:06 +04:00
|
|
|
editSession = project.buildEditSession('hello.rb', autoIndent: false)
|
2012-12-26 23:41:50 +04:00
|
|
|
buffer = editSession.buffer
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
|
|
|
editSession.setVisible(true)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
afterEach ->
|
|
|
|
editSession.destroy()
|
|
|
|
|
|
|
|
it "doesn't loop infinitely (regression)", ->
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(0).text).toBe 'a = {'
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(1).text).toBe ' "b" => "c",'
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(2).text).toBe '}'
|
|
|
|
expect(tokenizedBuffer.lineForScreenRow(3).text).toBe ''
|
2012-12-22 02:58:33 +04:00
|
|
|
|
2013-01-08 22:50:23 +04:00
|
|
|
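
  # function.mm is matched to the Objective-C++ grammar (source.objc++), which
  # relies on both the C and Objective-C bundles being active.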
describe "when an Objective-C source file is tokenized", ->
|
2012-12-22 02:58:33 +04:00
|
|
|
beforeEach ->
|
2013-03-26 03:31:01 +04:00
|
|
|
atom.activatePackage('c.tmbundle', sync: true)
|
|
|
|
atom.activatePackage('objective-c.tmbundle', sync: true)
|
2013-02-28 21:11:06 +04:00
|
|
|
editSession = project.buildEditSession('function.mm', autoIndent: false)
|
2012-12-22 02:58:33 +04:00
|
|
|
buffer = editSession.buffer
|
|
|
|
tokenizedBuffer = editSession.displayBuffer.tokenizedBuffer
|
|
|
|
editSession.setVisible(true)
|
|
|
|
fullyTokenize(tokenizedBuffer)
|
|
|
|
|
|
|
|
afterEach ->
|
|
|
|
editSession.destroy()
|
|
|
|
|
|
|
|
it "correctly parses variable type when it is a built-in Cocoa class", ->
|
|
|
|
commentLine = tokenizedBuffer.lineForScreenRow(1)
|
|
|
|
expect(commentLine.text).toBe 'NSString *a = @"a\\nb";'
|
|
|
|
{ tokens } = commentLine
|
|
|
|
|
|
|
|
expect(tokens[0].value).toBe "NSString"
|
|
|
|
expect(tokens[0].scopes).toEqual ["source.objc++", "meta.function.c", "meta.block.c", "support.class.cocoa"]
|
2012-12-23 00:40:47 +04:00
|
|
|
|
|
|
|
it "correctly parses the semicolon at the end of the line", ->
|
|
|
|
commentLine = tokenizedBuffer.lineForScreenRow(1)
|
|
|
|
expect(commentLine.text).toBe 'NSString *a = @"a\\nb";'
|
|
|
|
{ tokens } = commentLine
|
|
|
|
|
|
|
|
lastToken = tokens.length - 1
|
|
|
|
expect(lastToken).toBeGreaterThan 0
|
|
|
|
expect(tokens[lastToken].value).toBe ";"
|
|
|
|
expect(tokens[lastToken].scopes).toEqual ["source.objc++", "meta.function.c", "meta.block.c"]
|
2012-12-23 01:04:51 +04:00
|
|
|
|
|
|
|
it "correctly parses the string characters before the escaped character", ->
|
|
|
|
commentLine = tokenizedBuffer.lineForScreenRow(1)
|
|
|
|
expect(commentLine.text).toBe 'NSString *a = @"a\\nb";'
|
|
|
|
{ tokens } = commentLine
|
|
|
|
|
|
|
|
expect(tokens[2].value).toBe '@"'
|
|
|
|
expect(tokens[2].scopes).toEqual ["source.objc++", "meta.function.c", "meta.block.c", "string.quoted.double.objc", "punctuation.definition.string.begin.objc"]
|