The command palette now has a hand-written lexer

This means that we can 'type check' command palette arguments, e.g. whether something should be a string or not.
It uses the same lexer that is used for syntax highlighting ... let's see how long that lasts.
This commit is contained in:
Felix Angell 2019-03-02 20:54:01 +00:00
parent 1633194c22
commit 8687f612d4
14 changed files with 187 additions and 63 deletions

0
"this is a test.c" Normal file
View File

View File

@ -4,15 +4,17 @@ import (
"log"
"strconv"
"strings"
"github.com/felixangell/phi/lex"
)
type BufferAction struct {
name string
proc func(*BufferView, []string) bool
proc func(*BufferView, []*lex.Token) bool
showInPalette bool
}
func NewBufferAction(name string, proc func(*BufferView, []string) bool) BufferAction {
func NewBufferAction(name string, proc func(*BufferView, []*lex.Token) bool) BufferAction {
return BufferAction{
name: name,
proc: proc,
@ -20,7 +22,7 @@ func NewBufferAction(name string, proc func(*BufferView, []string) bool) BufferA
}
}
func OpenFile(v *BufferView, commands []string) bool {
func OpenFile(v *BufferView, commands []*lex.Token) bool {
path := ""
if path == "" {
panic("unimplemented")
@ -42,12 +44,20 @@ func OpenFile(v *BufferView, commands []string) bool {
return false
}
func NewFile(v *BufferView, commands []string) bool {
func NewFile(v *BufferView, commands []*lex.Token) bool {
// TODO some nice error stuff
// have an error roll thing in the view?
if !commands[0].IsType(lex.String) {
return false
}
fileName := commands[0].Lexeme
// strip out the quotes (1...n-1)
fileName = fileName[1 : len(fileName)-1]
buff := v.AddBuffer()
buff.OpenFile(commands[0])
buff.OpenFile(fileName)
buff.SetFocus(true)
v.focusedBuff = buff.index
@ -55,12 +65,16 @@ func NewFile(v *BufferView, commands []string) bool {
return false
}
func GotoLine(v *BufferView, commands []string) bool {
func GotoLine(v *BufferView, commands []*lex.Token) bool {
if len(commands) == 0 {
return false
}
lineNum, err := strconv.ParseInt(commands[0], 10, 64)
if !commands[0].IsType(lex.Number) {
return false
}
lineNum, err := strconv.ParseInt(commands[0].Lexeme, 10, 64)
if err != nil {
log.Println("goto line invalid argument ", err.Error())
return false
@ -75,7 +89,7 @@ func GotoLine(v *BufferView, commands []string) bool {
return false
}
func focusLeft(v *BufferView, commands []string) bool {
func focusLeft(v *BufferView, commands []*lex.Token) bool {
if v == nil {
return false
}
@ -83,7 +97,7 @@ func focusLeft(v *BufferView, commands []string) bool {
return false
}
func focusRight(v *BufferView, commands []string) bool {
func focusRight(v *BufferView, commands []*lex.Token) bool {
if v == nil {
return false
}
@ -91,7 +105,7 @@ func focusRight(v *BufferView, commands []string) bool {
return false
}
func pageDown(v *BufferView, commands []string) bool {
func pageDown(v *BufferView, commands []*lex.Token) bool {
if v == nil {
return false
}
@ -107,7 +121,7 @@ func pageDown(v *BufferView, commands []string) bool {
return false
}
func pageUp(v *BufferView, commands []string) bool {
func pageUp(v *BufferView, commands []*lex.Token) bool {
if v == nil {
return false
}

View File

@ -476,7 +476,7 @@ func (b *Buffer) processTextInput(r rune) bool {
actionName, actionExists := source[key]
if actionExists {
if action, ok := register[actionName]; ok {
return action.proc(b.parent, []string{})
return action.proc(b.parent, []*lex.Token{})
}
} else {
log.Println("warning, unimplemented shortcut", shortcutName, "+", unicode.ToLower(r), "#", int(r), actionName)

View File

@ -2,9 +2,11 @@ package buff
import (
"fmt"
"github.com/felixangell/phi/lex"
)
func CloseBuffer(v *BufferView, commands []string) bool {
func CloseBuffer(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
if b == nil {
return false

View File

@ -1,6 +1,8 @@
package buff
func DeleteLine(v *BufferView, commands []string) bool {
import "github.com/felixangell/phi/lex"
func DeleteLine(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
if b == nil {
return false

View File

@ -3,15 +3,17 @@ package buff
import (
"log"
"os"
"github.com/felixangell/phi/lex"
)
func ExitPhi(v *BufferView, commands []string) bool {
func ExitPhi(v *BufferView, commands []*lex.Token) bool {
// todo this probably wont work...
// would also be nice to have a thing
// that asks if we want to save all buffers
// rather than going thru each one specifically?
for i, _ := range v.buffers {
CloseBuffer(v, []string{})
for i := range v.buffers {
CloseBuffer(v, []*lex.Token{})
log.Println("Closing buffer ", i)
}

4
buff/lex.go Normal file
View File

@ -0,0 +1,4 @@
package buff
// lexer is an empty placeholder type introduced with this commit.
// NOTE(review): it has no fields or methods yet and nothing in the
// visible code references it — presumably a stub for a future
// buff-local lexer; confirm intent or remove.
type lexer struct {
}

View File

@ -1,12 +1,13 @@
package buff
import (
"log"
"fmt"
"strings"
"github.com/felixangell/fuzzysearch/fuzzy"
"github.com/felixangell/phi/cfg"
"github.com/felixangell/phi/gui"
"github.com/felixangell/phi/lex"
"github.com/felixangell/strife"
"github.com/veandco/go-sdl2/sdl"
)
@ -140,33 +141,27 @@ func NewCommandPalette(conf cfg.TomlConfig, view *BufferView) *CommandPalette {
func (b *CommandPalette) processCommand() {
input := b.buff.table.Lines[0].String()
input = strings.TrimSpace(input)
tokenizedLine := strings.Split(input, " ")
fmt.Println("raw comand palette input is ", input)
// command
if strings.Compare(tokenizedLine[0], "!") == 0 {
tokens := lex.New(input).Tokenize()
fmt.Println("tokenized to", tokens)
// no commands to process, just the
// bang.
if len(tokenizedLine) == 1 {
return
}
// slice off the command token
tokenizedLine := strings.Split(input, " ")[1:]
log.Println("Command Entered: '", input, "', ", tokenizedLine)
command := tokenizedLine[0]
log.Println("command palette: ", tokenizedLine)
// FIXME
if len(tokens) <= 1 {
return
}
if tokens[0].Equals("!") && tokens[1].IsType(lex.Word) {
command := tokens[1].Lexeme
action, exists := register[command]
if !exists {
fmt.Println("No such action ", command)
return
}
action.proc(b.parent, tokenizedLine[1:])
return
args := tokens[2:]
fmt.Println("executing action", command, "with arguments", args)
action.proc(b.parent, args)
}
if index, ok := b.pathToIndex[input]; ok {

View File

@ -10,16 +10,17 @@ import (
"path/filepath"
"github.com/atotto/clipboard"
"github.com/felixangell/phi/lex"
)
func ShowPalette(v *BufferView, commands []string) bool {
func ShowPalette(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
v.UnfocusBuffers()
v.focusPalette(b)
return true
}
func Paste(v *BufferView, commands []string) bool {
func Paste(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
if b == nil {
return false
@ -37,7 +38,7 @@ func Paste(v *BufferView, commands []string) bool {
return false
}
func Undo(v *BufferView, commands []string) bool {
func Undo(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
if b == nil {
return false
@ -46,7 +47,7 @@ func Undo(v *BufferView, commands []string) bool {
return false
}
func Redo(v *BufferView, commands []string) bool {
func Redo(v *BufferView, commands []*lex.Token) bool {
b := v.getCurrentBuff()
if b == nil {
return false
@ -67,7 +68,7 @@ func genFileName(dir, prefix, suffix string) string {
// if the buffer is modified it will be
// re-rendered.
func Save(v *BufferView, commands []string) bool {
func Save(v *BufferView, commands []*lex.Token) bool {
// TODO Config option for this.
atomicFileSave := true

View File

@ -8,6 +8,7 @@ import (
"github.com/felixangell/phi/cfg"
"github.com/felixangell/phi/gui"
"github.com/felixangell/phi/lex"
"github.com/felixangell/strife"
"github.com/fsnotify/fsnotify"
"github.com/veandco/go-sdl2/sdl"
@ -308,7 +309,7 @@ func (n *BufferView) OnUpdate() bool {
if actionExists {
if action, ok := register[actionName]; ok {
log.Println("Executing action '" + actionName + "'")
return action.proc(n, []string{})
return action.proc(n, []*lex.Token{})
}
} else {
log.Println("view: unimplemented shortcut", shortcutName, "+", string(unicode.ToLower(r)), "#", int(r), actionName, key)

0
foo Normal file
View File

0
foo bar baz Normal file
View File

View File

@ -1,14 +1,21 @@
package lex
import (
"fmt"
"unicode"
)
type Lexer struct {
pos int
input []rune
startingPos int
pos int
input []rune
}
func New(input string) *Lexer {
return &Lexer{
pos: 0,
input: []rune(input),
startingPos: 0,
pos: 0,
input: []rune(input),
}
}
@ -18,6 +25,17 @@ func (l *Lexer) consume() rune {
return consumed
}
// expect consumes and returns the next rune when it equals c.
// At end of input, or when the next rune does not match, it leaves
// the input position untouched and reports false.
//
// Previously a mismatching rune was consumed anyway and reported as
// success (the old "TODO, fail?" path), which silently swallowed
// input; callers that probe for optional quotes now lose nothing on
// a mismatch.
func (l *Lexer) expect(c rune) (rune, bool) {
	if !l.hasNext() {
		return rune(0), false
	}
	if l.peek() == c {
		return l.consume(), true
	}
	return rune(0), false
}
func (l *Lexer) next(offs int) rune {
return l.input[l.pos+offs]
}
@ -30,6 +48,64 @@ func (l *Lexer) hasNext() bool {
return l.pos < len(l.input)
}
// recognizeString scans a double-quoted string literal starting at the
// current position and returns it (quotes included) as a String token.
// An unterminated string simply runs to the end of the input.
func (l *Lexer) recognizeString() *Token {
	// opening quote
	l.expect('"')
	for l.hasNext() {
		if l.peek() == '"' {
			break
		}
		l.consume()
	}
	// closing quote, if the literal was terminated
	l.expect('"')
	return NewToken(l.captureLexeme(), String, l.startingPos)
}
// recognizeCharacter scans a single-quoted literal starting at the
// current position and returns it (quotes included) as a Character
// token. An unterminated literal simply runs to the end of the input.
func (l *Lexer) recognizeCharacter() *Token {
	// opening quote
	l.expect('\'')
	for l.hasNext() {
		if l.peek() == '\'' {
			break
		}
		l.consume()
	}
	// closing quote, if the literal was terminated
	l.expect('\'')
	return NewToken(l.captureLexeme(), Character, l.startingPos)
}
// recognizeNumber scans an integer or decimal literal (digits, with an
// optional '.' followed by more digits) and returns it as a Number token.
func (l *Lexer) recognizeNumber() *Token {
	digits := func() {
		for l.hasNext() && unicode.IsDigit(l.peek()) {
			l.consume()
		}
	}
	// integer part
	digits()
	// optional fractional part
	if l.hasNext() && l.peek() == '.' {
		l.consume()
		digits()
	}
	return NewToken(l.captureLexeme(), Number, l.startingPos)
}
// recognizeSymbol consumes exactly one rune and returns it as a
// Symbol token (the catch-all for graphic characters that are not
// letters, digits, or quotes).
func (l *Lexer) recognizeSymbol() *Token {
	l.consume()
	return NewToken(l.captureLexeme(), Symbol, l.startingPos)
}
// recognizeWord scans an identifier-like word made of letters and
// digits, optionally joined by '_' or '-' separators, and returns it
// as a Word token.
//
// The previous version only allowed a single separator, so a command
// such as "foo_bar_baz" lexed as the word "foo_bar", the symbol "_",
// and the word "baz". Accepting any number of separators is a strict
// superset: every word the old code produced is still produced.
func (l *Lexer) recognizeWord() *Token {
	for l.hasNext() {
		curr := l.peek()
		if unicode.IsLetter(curr) || unicode.IsDigit(curr) || curr == '_' || curr == '-' {
			l.consume()
			continue
		}
		break
	}
	return NewToken(l.captureLexeme(), Word, l.startingPos)
}
// captureLexeme returns the slice of input between the position where
// the current token started (startingPos, set by Tokenize) and the
// current position, as a string.
func (l *Lexer) captureLexeme() string {
	return string(l.input[l.startingPos:l.pos])
}
func (l *Lexer) Tokenize() []*Token {
var result []*Token
for l.hasNext() {
@ -46,22 +122,34 @@ func (l *Lexer) Tokenize() []*Token {
l.consume()
}
startPos := l.pos
for l.hasNext() {
// we run into a layout character
if l.peek() <= ' ' {
break
l.startingPos = l.pos
if token := func() *Token {
if !l.hasNext() {
return nil
}
l.consume()
curr := l.peek()
switch {
case curr == '"':
return l.recognizeString()
case curr == '\'':
return l.recognizeCharacter()
case unicode.IsLetter(curr):
return l.recognizeWord()
case unicode.IsDigit(curr):
return l.recognizeNumber()
case unicode.IsGraphic(curr):
return l.recognizeSymbol()
case curr == ' ':
return nil
}
panic(fmt.Sprintln("unhandled input! ", string(curr)))
}(); token != nil {
result = append(result, token)
}
// this should be a recognized
// token i think?
lexeme := string(l.input[startPos:l.pos])
tok := NewToken(lexeme, Word, startPos)
result = append(result, tok)
}
return result
}

View File

@ -1,11 +1,18 @@
package lex
import "fmt"
import (
"fmt"
"strings"
)
type TokenType uint
type TokenType string
const (
Word TokenType = iota
Word TokenType = "word"
Symbol = "sym"
Character = "char"
String = "string"
Number = "num"
)
type Token struct {
@ -14,10 +21,18 @@ type Token struct {
Start int
}
// Equals reports whether this token's lexeme is exactly str.
// NOTE(review): `str == t.Lexeme` is the idiomatic form here
// (strings.Compare's docs say as much), but switching would leave
// the file's "strings" import unused — change both together.
func (t *Token) Equals(str string) bool {
	return strings.Compare(str, t.Lexeme) == 0
}
// IsType reports whether this token is of the given token type
// (e.g. lex.String, lex.Number, lex.Word).
func (t *Token) IsType(typ TokenType) bool {
	return t.Type == typ
}
// NewToken constructs a Token from the given lexeme, token type, and
// starting position within the lexer's input.
func NewToken(lexeme string, kind TokenType, start int) *Token {
	tok := Token{
		Lexeme: lexeme,
		Type:   kind,
		Start:  start,
	}
	return &tok
}
func (t *Token) String() string {
return fmt.Sprintf("lexeme: %s, type %s, at pos %d", t.Lexeme, string(t.Type), t.Start)
return fmt.Sprintf("{ %s = %s }", t.Lexeme, string(t.Type))
}