cleanups to lex syntax highlighter thing

Felix Angell 2018-04-21 18:58:21 +01:00
parent 33cf82bb0c
commit 159346ab91
5 changed files with 38 additions and 13 deletions
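
In short: the syntax highlighter used to rebuild a keyword-lookup map for every line it highlighted. This commit precomputes that set once per syntax rule at config-load time (the new MatchList field on SyntaxCriteria), logs a warning when a rule lists the same keyword twice, and drops the per-line rebuild. The hot-path change, using the names from the diff below:

// Before: a fresh map was built for every highlighted line.
matchList := make(map[string]bool, len(syntax.Match))
for _, val := range syntax.Match {
	matchList[val] = true
}
lexFindMatches(&matches, currLine, matchList, background, foreground)

// After: the set is built once in configureAndValidate and reused.
lexFindMatches(&matches, currLine, syntax.MatchList, background, foreground)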

View File

@@ -36,6 +36,8 @@ type SyntaxCriteria struct {
 	Colour  int      `toml:"colouring"`
 	Match   []string `toml:"match"`
 	Pattern string   `toml:"pattern"`
+
+	MatchList map[string]bool
 }
 
 type Command struct {

View File

@@ -120,17 +120,38 @@ func configureAndValidate(conf *TomlConfig) {
 	log.Println("Syntax Highlighting")
 	{
+		syntaxSet := []*LanguageSyntaxConfig{}
 		conf.associations = map[string]*LanguageSyntaxConfig{}
 
 		for lang, extSet := range conf.Associations {
 			log.Println(lang, "=>", extSet.Extensions)
 			languageConfig := loadSyntaxDef(lang)
+			syntaxSet = append(syntaxSet, languageConfig)
 
 			for _, ext := range extSet.Extensions {
 				log.Println("registering", ext, "as", lang)
 				conf.associations[ext] = languageConfig
 			}
 		}
 
+		// go through each language
+		// and store the matched keywords
+		// as a hashmap for faster lookup
+		for _, language := range syntaxSet {
+			for _, syn := range language.Syntax {
+				syn.MatchList = map[string]bool{}
+				for _, item := range syn.Match {
+					if _, ok := syn.MatchList[item]; ok {
+						log.Println("Warning: duplicate match item '" + item + "'")
+						continue
+					}
+					syn.MatchList[item] = true
+				}
+			}
+		}
 	}
 }
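
One detail worth noting: the loop above writes to syn.MatchList while ranging over language.Syntax, which only sticks because the following file changes Syntax from map[string]SyntaxCriteria to map[string]*SyntaxCriteria. Ranging over a map of struct values yields copies, so the assignment would otherwise be lost. A toy illustration of the difference:

package main

import "fmt"

type criteria struct{ set map[string]bool }

func main() {
	byValue := map[string]criteria{"kw": {}}
	for _, c := range byValue {
		c.set = map[string]bool{"if": true} // mutates a copy; lost
	}
	fmt.Println(byValue["kw"].set == nil) // true

	byPtr := map[string]*criteria{"kw": {}}
	for _, c := range byPtr {
		c.set = map[string]bool{"if": true} // mutates the stored value
	}
	fmt.Println(byPtr["kw"].set["if"]) // true
}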

View File

@@ -1,7 +1,7 @@
 package cfg
 
 type LanguageSyntaxConfig struct {
-	Syntax map[string]SyntaxCriteria `toml:"syntax"`
+	Syntax map[string]*SyntaxCriteria `toml:"syntax"`
 }
 
 type DefaultSyntax map[string]string
@@ -28,10 +28,10 @@ match = [
 colouring = 0xf0a400
 match = [
     "for", "break", "if", "else", "continue", "return",
-    "goto", "static", "extern", "const", "typedef",
+    "goto", "extern", "const", "typedef",
     "struct", "union", "register", "enum",
     "do", "static", "sizeof", "volatile", "unsigned",
-    "switch", "case", "goto", "default"
+    "switch", "case", "default"
 ]
 
 [syntax.string_literal]
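
The removed "static" and "goto" entries were duplicates of keywords already elsewhere in the list, exactly the case the new load-time warning catches. Below is a self-contained sketch of how such a definition becomes a MatchList; the BurntSushi/toml package is an assumption inferred from the struct tags, and this is not phi's actual loader:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml" // assumption: guessed from the `toml:"..."` tags
)

type SyntaxCriteria struct {
	Colour  int      `toml:"colouring"`
	Match   []string `toml:"match"`
	Pattern string   `toml:"pattern"`

	MatchList map[string]bool // filled in after decoding, not by TOML
}

const def = `
[syntax.keyword]
colouring = 0xf0a400
match = ["for", "for", "break"]  # note the duplicate
`

func main() {
	var conf struct {
		Syntax map[string]*SyntaxCriteria `toml:"syntax"`
	}
	if _, err := toml.Decode(def, &conf); err != nil {
		log.Fatal(err)
	}
	kw := conf.Syntax["keyword"]
	kw.MatchList = map[string]bool{}
	for _, item := range kw.Match {
		if kw.MatchList[item] {
			log.Println("Warning: duplicate match item '" + item + "'")
			continue
		}
		kw.MatchList[item] = true
	}
	fmt.Println(len(kw.MatchList)) // 2
}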

View File

@@ -780,7 +780,10 @@ func lexFindMatches(matches *map[int]syntaxRuneInfo, currLine string, toMatch ma
 	// start up a lexer instance and
 	// lex the line.
 	lexer := lex.New(currLine)
 
-	for _, tok := range lexer.Tokenize() {
+	tokenStream := lexer.Tokenize()
+	for _, tok := range tokenStream {
 		if _, ok := toMatch[tok.Lexeme]; ok {
 			(*matches)[tok.Start] = syntaxRuneInfo{bg, -1, len(tok.Lexeme)}
 		}
@@ -790,7 +793,7 @@
 func (b *Buffer) syntaxHighlightLine(currLine string) map[int]syntaxRuneInfo {
 	matches := map[int]syntaxRuneInfo{}
 
-	subjects := make([]cfg.SyntaxCriteria, len(b.languageInfo.Syntax))
+	subjects := make([]*cfg.SyntaxCriteria, len(b.languageInfo.Syntax))
 	colours := make([]int, len(b.languageInfo.Syntax))
 
 	idx := 0
@@ -833,16 +836,9 @@ func (b *Buffer) syntaxHighlightLine(currLine string) map[int]syntaxRuneInfo {
 				}
 			}
 		} else {
-			// FIXME bit of cleanup is due here!
-			matchList := make(map[string]bool, len(syntax.Match))
-			for _, val := range syntax.Match {
-				matchList[val] = true
-			}
-
 			background := colours[syntaxIndex]
 			foreground := 0
-			lexFindMatches(&matches, currLine, matchList, background, foreground)
+			lexFindMatches(&matches, currLine, syntax.MatchList, background, foreground)
 		}
 	}
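
The payoff is that syntaxHighlightLine no longer allocates and fills a map per rule per line; each token is now a single set lookup. A hypothetical micro-benchmark comparing the two approaches (all names invented; put it in a _test.go file and run `go test -bench .`):

package main

import "testing"

var keywords = []string{"for", "break", "if", "else", "continue", "return"}
var tokens = []string{"x", "for", "y", "if", "z"}

// old approach: build the set on every line
func BenchmarkBuildPerLine(b *testing.B) {
	for i := 0; i < b.N; i++ {
		set := make(map[string]bool, len(keywords))
		for _, k := range keywords {
			set[k] = true
		}
		for _, t := range tokens {
			_ = set[t]
		}
	}
}

// new approach: set built once, only lookups per line
func BenchmarkPrecomputed(b *testing.B) {
	set := make(map[string]bool, len(keywords))
	for _, k := range keywords {
		set[k] = true
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		for _, t := range tokens {
			_ = set[t]
		}
	}
}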

View File

@@ -1,5 +1,7 @@
 package lex
 
+import "fmt"
+
 type TokenType uint
 
 const (
@@ -14,4 +16,8 @@ type Token struct {
 func NewToken(lexeme string, kind TokenType, start int) *Token {
 	return &Token{lexeme, kind, start}
 }
+
+func (t *Token) String() string {
+	return fmt.Sprintf("lexeme: %s, type %d, at pos %d", t.Lexeme, t.Type, t.Start)
+}
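
With the new String method, tokens print readably while debugging the lexer. A hypothetical usage; the import path and token values are guesses:

package main

import (
	"fmt"

	"github.com/felixangell/phi/lex" // assumption: guessed from the repo
)

func main() {
	// token values are invented for illustration
	tok := lex.NewToken("if", 0, 14)
	fmt.Println(tok) // lexeme: if, type 0, at pos 14
}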