fix bad merge

This commit is contained in:
Jason Poon 2015-11-17 02:49:56 -08:00
parent 797dcbd801
commit c49c99d298
2 changed files with 52 additions and 93 deletions

View File

@@ -1,5 +1,3 @@
// For documentation on the test framework, see https://mochajs.org/.
// The module 'assert' provides assertion methods from node
import * as assert from 'assert';
@@ -97,4 +95,4 @@ suite("Cmd line tests - lexing", () => {
assert.equal(tokens[1].content, new Token(TokenType.LineNumber, ",").content);
assert.equal(tokens[2].content, new Token(TokenType.LineNumber, "30").content);
});
});
});

View File

@@ -4,98 +4,59 @@ import * as assert from 'assert';
// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import * as vscode from 'vscode';
import * as lexer from '../src/cmd_line/lexer'
import * as token from '../src/cmd_line/token'
import * as myExtension from '../extension';
import * as lexerState from '../src/cmd_line/scanner'
suite("Cmd line tests - lexing", () => {
suite("Cmd line tests - lexer state", () => {
test("can lex empty string", () => {
var tokens = lexer.scan("");
assert.equal(tokens.length, 0);
});
test("can init lexer state", () => {
var state = new lexerState.Scanner("dog");
assert.equal(state.input, "dog");
});
test("can detect EOF with empty input", () => {
var state = new lexerState.Scanner("");
assert.ok(state.isAtEof);
});
test("can lex comma", () => {
var tokens = lexer.scan(",");
assert.equal(tokens[0].content, new token.TokenComma().content);
});
test("next() returns EOF at EOF", () => {
var state = new lexerState.Scanner("");
assert.equal(state.next(), lexerState.Scanner.EOF);
assert.equal(state.next(), lexerState.Scanner.EOF);
assert.equal(state.next(), lexerState.Scanner.EOF);
});
test("can lex percent", () => {
var tokens = lexer.scan("%");
assert.equal(tokens[0].content, new token.TokenPercent().content);
});
test("next() can scan", () => {
var state = new lexerState.Scanner("dog");
assert.equal(state.next(), "d");
assert.equal(state.next(), "o");
assert.equal(state.next(), "g")
assert.equal(state.next(), lexerState.Scanner.EOF);
});
test("can emit", () => {
var state = new lexerState.Scanner("dog cat");
state.next();
state.next();
state.next();
assert.equal(state.emit(), "dog");
state.next();
state.next();
state.next();
state.next();
assert.equal(state.emit(), " cat");
});
test("can lex dollar", () => {
var tokens = lexer.scan("$");
assert.equal(tokens[0].content, new token.TokenDollar().content);
});
test("can lex dot", () => {
var tokens = lexer.scan(".");
assert.equal(tokens[0].content, new token.TokenDot().content);
});
test("can lex one number", () => {
var tokens = lexer.scan("1");
assert.equal(tokens[0].content, new token.TokenLineNumber("1").content);
});
test("can lex longer number", () => {
var tokens = lexer.scan("100");
assert.equal(tokens[0].content, new token.TokenLineNumber("100").content);
});
test("can lex plus", () => {
var tokens = lexer.scan("+");
assert.equal(tokens[0].content, new token.TokenPlus().content);
});
test("can lex minus", () => {
var tokens = lexer.scan("-");
assert.equal(tokens[0].content, new token.TokenMinus().content);
});
test("can lex forward search", () => {
var tokens = lexer.scan("/horses/");
assert.equal(tokens[0].content, new token.TokenSlashSearch("horses").content);
});
test("can lex forward search escaping", () => {
var tokens = lexer.scan("/hor\\/ses/");
assert.equal(tokens[0].content, new token.TokenSlashSearch("hor/ses").content);
});
test("can lex reverse search", () => {
var tokens = lexer.scan("?worms?");
assert.equal(tokens[0].content, new token.TokenQuestionMarkSearch("worms").content);
});
test("can lex reverse search escaping", () => {
var tokens = lexer.scan("?wor\\?ms?");
assert.equal(tokens[0].content, new token.TokenQuestionMarkSearch("wor?ms").content);
});
test("can lex command name", () => {
var tokens = lexer.scan("w");
assert.equal(tokens[0].content, new token.TokenCommandName("w").content);
});
test("can lex command args", () => {
var tokens = lexer.scan("w something");
assert.equal(tokens[0].content, new token.TokenCommandName("w").content);
assert.equal(tokens[1].content, new token.TokenCommandArgs("something").content);
});
test("can lex long command name and args", () => {
var tokens = lexer.scan("write12 something here");
assert.equal(tokens[0].content, new token.TokenCommandName("write").content);
assert.equal(tokens[1].content, new token.TokenCommandArgs("12 something here").content);
});
test("can lex left and right line refs", () => {
var tokens = lexer.scan("20,30");
assert.equal(tokens[0].content, new token.TokenLineNumber("20").content);
assert.equal(tokens[1].content, new token.TokenLineNumber(",").content);
assert.equal(tokens[2].content, new token.TokenLineNumber("30").content);
});
});
>>>>>>> code cleanup
test("can ignore", () => {
var state = new lexerState.Scanner("dog cat");
state.next();
state.next();
state.next();
state.next();
state.ignore();
state.next();
state.next();
state.next();
assert.equal(state.emit(), "cat");
});
});