chore: initial gomark plugin (#1678)

chore: initial gomark folder
boojack committed 2023-05-18 21:33:18 +08:00
parent a07d5d38d6
commit 88799d469c
11 changed files with 174 additions and 0 deletions

plugin/gomark/README.md (new file)

@@ -0,0 +1,3 @@
# gomark

A Markdown parser for memos. Work in progress.

plugin/gomark/ast/ast.go (new file)

@@ -0,0 +1,19 @@
package ast

// Node is a single element of the parsed markdown tree.
type Node struct {
	Type     string
	Text     string
	Children []*Node
}

// Document is the root of a parsed markdown tree.
type Document struct {
	Nodes []*Node
}

// NewDocument creates an empty document.
func NewDocument() *Document {
	return &Document{}
}

// AddNode appends a top-level node to the document.
func (d *Document) AddNode(node *Node) {
	d.Nodes = append(d.Nodes, node)
}

plugin/gomark/ast/node.go (new file)

@@ -0,0 +1,12 @@
package ast

// NewNode creates a node of the given type with the given text.
func NewNode(tp, text string) *Node {
	return &Node{
		Type: tp,
		Text: text,
	}
}

// AddChild appends a child node to n.
func (n *Node) AddChild(child *Node) {
	n.Children = append(n.Children, child)
}
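
A minimal usage sketch for the ast package (illustration only, not part of this commit; the "text" child node type is a made-up example value):

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

func main() {
	doc := ast.NewDocument()
	heading := ast.NewNode("h1", "Hello")
	heading.AddChild(ast.NewNode("text", "world")) // "text" is a hypothetical node type
	doc.AddNode(heading)
	fmt.Println(len(doc.Nodes), heading.Children[0].Text) // 1 world
}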

plugin/gomark/gomark.go (new file)

@@ -0,0 +1 @@
package gomark

@@ -0,0 +1,41 @@
package parser

import (
	"fmt"
	"strings"

	"github.com/usememos/memos/plugin/gomark/ast"
)

type HeadingTokenizer struct {
}

func NewHeadingTokenizer() *HeadingTokenizer {
	return &HeadingTokenizer{}
}

// Trigger returns the bytes that can open a heading block.
func (*HeadingTokenizer) Trigger() []byte {
	return []byte{'#'}
}

// Parse parses an ATX heading ("# ..." through "###### ...") from block.
// It returns nil if block is not a valid heading.
func (*HeadingTokenizer) Parse(parent *ast.Node, block string) *ast.Node {
	line := block
	level := 0
	for _, c := range line {
		if c == '#' {
			level++
		} else if c == ' ' {
			break
		} else {
			return nil
		}
	}
	// A heading needs one to six leading '#' followed by a space; the
	// length guard also prevents slicing past the end of the line.
	if level == 0 || level > 6 || len(line) <= level {
		return nil
	}
	text := strings.TrimSpace(line[level:])
	// The node type encodes the heading level: "h1" through "h6".
	node := ast.NewNode(fmt.Sprintf("h%d", level), text)
	if parent != nil {
		parent.AddChild(node)
	}
	return node
}
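
A sketch of how the heading tokenizer behaves (illustration only, not part of this commit; assumes the package lives at plugin/gomark/parser):

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser"
)

func main() {
	p := parser.NewHeadingTokenizer()
	node := p.Parse(nil, "## Hello world")
	fmt.Println(node.Type, node.Text)      // h2 Hello world
	fmt.Println(p.Parse(nil, "#no-space")) // <nil>: a heading needs a space after '#'
}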

@@ -0,0 +1 @@
package parser

@@ -0,0 +1 @@
package parser

@@ -0,0 +1,27 @@
package tokenizer

type TokenType = string

// Single-character delimiter token types; each uses its literal as its type.
const (
	Underline TokenType = "_"
	Star      TokenType = "*"
	Newline   TokenType = "\n"
	Hash      TokenType = "#"
	Space     TokenType = " "
)

// Text is the catch-all type for runs of ordinary characters.
const (
	Text TokenType = ""
)

// Token is a single lexical unit produced by the tokenizer.
type Token struct {
	Type  TokenType
	Value string
}

// NewToken creates a token of the given type holding the given value.
func NewToken(tp, text string) *Token {
	return &Token{
		Type:  tp,
		Value: text,
	}
}
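
Because delimiters carry their literal as their type while Text uses the empty string, token kinds can be distinguished with a plain comparison. A hedged sketch of a helper built on that convention (hypothetical, not part of this commit):

package tokenizer

// isDelimiter reports whether t is a single-character delimiter token
// rather than a run of literal text. Hypothetical helper for illustration.
func isDelimiter(t *Token) bool {
	return t.Type != Text
}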

@@ -0,0 +1,28 @@
package tokenizer

// tokenize splits text into a flat token stream: known delimiter
// characters become their own tokens, while consecutive ordinary
// characters are merged into a single Text token.
func tokenize(text string) []*Token {
	tokens := []*Token{}
	for _, c := range text {
		switch c {
		case '_':
			tokens = append(tokens, NewToken(Underline, "_"))
		case '*':
			tokens = append(tokens, NewToken(Star, "*"))
		case '\n':
			tokens = append(tokens, NewToken(Newline, "\n"))
		case '#':
			// '#' gets its own Hash token so block parsers can detect headings.
			tokens = append(tokens, NewToken(Hash, "#"))
		case ' ':
			tokens = append(tokens, NewToken(Space, " "))
		default:
			var lastToken *Token
			if len(tokens) > 0 {
				lastToken = tokens[len(tokens)-1]
			}
			// Start a new Text token or extend the previous one.
			if lastToken == nil || lastToken.Type != Text {
				tokens = append(tokens, NewToken(Text, string(c)))
			} else {
				lastToken.Value += string(c)
			}
		}
	}
	return tokens
}
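
Since tokenize is unexported, a usage sketch has to live inside the package. A hedged example test (not part of this commit) showing the token stream for "# Hello":

package tokenizer

import "fmt"

func Example_tokenize() {
	for _, token := range tokenize("# Hello") {
		fmt.Printf("%q %q\n", token.Type, token.Value)
	}
	// Output:
	// "#" "#"
	// " " " "
	// "" "Hello"
}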

@@ -0,0 +1,40 @@
package tokenizer

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestTokenize(t *testing.T) {
	tests := []struct {
		text   string
		tokens []*Token
	}{
		{
			text: "*Hello world!",
			tokens: []*Token{
				{
					Type:  Star,
					Value: "*",
				},
				{
					Type:  Text,
					Value: "Hello",
				},
				{
					Type:  Space,
					Value: " ",
				},
				{
					Type:  Text,
					Value: "world!",
				},
			},
		},
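		// A second case: '\n' splits adjacent Text runs into separate tokens.
		{
			text: "Hello\nworld",
			tokens: []*Token{
				{
					Type:  Text,
					Value: "Hello",
				},
				{
					Type:  Newline,
					Value: "\n",
				},
				{
					Type:  Text,
					Value: "world",
				},
			},
		},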
	}
	for _, test := range tests {
		result := tokenize(test.text)
		require.Equal(t, test.tokens, result)
	}
}

@@ -0,0 +1 @@
package renderer