diff --git a/plugin/gomark/README.md b/plugin/gomark/README.md
new file mode 100644
index 00000000..2a6f1480
--- /dev/null
+++ b/plugin/gomark/README.md
@@ -0,0 +1,3 @@
+# gomark
+
+A markdown parser for memos. WIP
diff --git a/plugin/gomark/ast/ast.go b/plugin/gomark/ast/ast.go
new file mode 100644
index 00000000..e2ba029c
--- /dev/null
+++ b/plugin/gomark/ast/ast.go
@@ -0,0 +1,19 @@
+package ast
+
+type Node struct {
+	Type     string
+	Text     string
+	Children []*Node
+}
+
+type Document struct {
+	Nodes []*Node
+}
+
+func NewDocument() *Document {
+	return &Document{}
+}
+
+func (d *Document) AddNode(node *Node) {
+	d.Nodes = append(d.Nodes, node)
+}
diff --git a/plugin/gomark/ast/node.go b/plugin/gomark/ast/node.go
new file mode 100644
index 00000000..0ef0259d
--- /dev/null
+++ b/plugin/gomark/ast/node.go
@@ -0,0 +1,12 @@
+package ast
+
+func NewNode(tp, text string) *Node {
+	return &Node{
+		Type: tp,
+		Text: text,
+	}
+}
+
+func (n *Node) AddChild(child *Node) {
+	n.Children = append(n.Children, child)
+}
diff --git a/plugin/gomark/gomark.go b/plugin/gomark/gomark.go
new file mode 100644
index 00000000..b7f941d5
--- /dev/null
+++ b/plugin/gomark/gomark.go
@@ -0,0 +1 @@
+package gomark
diff --git a/plugin/gomark/parser/heading.go b/plugin/gomark/parser/heading.go
new file mode 100644
index 00000000..5b420eca
--- /dev/null
+++ b/plugin/gomark/parser/heading.go
@@ -0,0 +1,50 @@
+package parser
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/usememos/memos/plugin/gomark/ast"
+)
+
+// HeadingTokenizer parses ATX-style markdown headings ("# ..." to "###### ...").
+type HeadingTokenizer struct {
+}
+
+// NewHeadingTokenizer creates a heading tokenizer.
+func NewHeadingTokenizer() *HeadingTokenizer {
+	return &HeadingTokenizer{}
+}
+
+// Trigger returns the characters that may start a heading block.
+func (*HeadingTokenizer) Trigger() []byte {
+	return []byte{'#'}
+}
+
+// Parse parses a heading from block. It returns the new node (appended to
+// parent when parent is non-nil), or nil when block is not a valid heading.
+func (*HeadingTokenizer) Parse(parent *ast.Node, block string) *ast.Node {
+	line := block
+	level := 0
+	for _, c := range line {
+		if c == '#' {
+			level++
+		} else if c == ' ' {
+			break
+		} else {
+			return nil
+		}
+	}
+	if level == 0 || level > 6 {
+		return nil
+	}
+	// Slice from level (not level+1) so a hash-only line cannot index out of
+	// range; TrimSpace drops the separating space when present.
+	text := strings.TrimSpace(line[level:])
+	// Use the counted level ("h1".."h6") instead of a hard-coded "h1".
+	node := ast.NewNode(fmt.Sprintf("h%d", level), text)
+	if parent != nil {
+		parent.AddChild(node)
+	}
+	return node
+}
diff --git a/plugin/gomark/parser/heading_test.go b/plugin/gomark/parser/heading_test.go
new file mode 100644
index 00000000..0bfe2c25
--- /dev/null
+++ b/plugin/gomark/parser/heading_test.go
@@ -0,0 +1 @@
+package parser
diff --git a/plugin/gomark/parser/parser.go b/plugin/gomark/parser/parser.go
new file mode 100644
index 00000000..0bfe2c25
--- /dev/null
+++ b/plugin/gomark/parser/parser.go
@@ -0,0 +1 @@
+package parser
diff --git a/plugin/gomark/parser/tokenizer/token.go b/plugin/gomark/parser/tokenizer/token.go
new file mode 100644
index 00000000..207f57ee
--- /dev/null
+++ b/plugin/gomark/parser/tokenizer/token.go
@@ -0,0 +1,27 @@
+package tokenizer
+
+type TokenType = string
+
+const (
+	Underline TokenType = "_"
+	Star      TokenType = "*"
+	Newline   TokenType = "\n"
+	Hash      TokenType = "#"
+	Space     TokenType = " "
+)
+
+const (
+	Text TokenType = ""
+)
+
+type Token struct {
+	Type  TokenType
+	Value string
+}
+
+func NewToken(tp, text string) *Token {
+	return &Token{
+		Type:  tp,
+		Value: text,
+	}
+}
diff --git a/plugin/gomark/parser/tokenizer/tokenizer.go b/plugin/gomark/parser/tokenizer/tokenizer.go
new file mode 100644
index 00000000..3f026fb0
--- /dev/null
+++ b/plugin/gomark/parser/tokenizer/tokenizer.go
@@ -0,0 +1,28 @@
+package tokenizer
+
+func tokenize(text string) []*Token {
+	tokens := []*Token{}
+	for _, c := range text {
+		switch c {
+		case '_':
+			tokens = append(tokens, NewToken(Underline, "_"))
+		case '*':
+			tokens = append(tokens, NewToken(Star, "*"))
+		case '\n':
+			tokens = append(tokens, NewToken(Newline, "\n"))
+		case ' ':
+			tokens = append(tokens, NewToken(Space, " "))
+		default:
+			var lastToken *Token
+			if len(tokens) > 0 {
+				lastToken = tokens[len(tokens)-1]
+			}
+			if lastToken == nil || lastToken.Type != Text {
+				tokens = append(tokens, NewToken(Text, string(c)))
+			} else {
+				lastToken.Value += string(c)
+			}
+		}
+	}
+	return tokens
+}
diff --git a/plugin/gomark/parser/tokenizer/tokenizer_test.go b/plugin/gomark/parser/tokenizer/tokenizer_test.go
new file mode 100644
index 00000000..580ebdef
--- /dev/null
+++ b/plugin/gomark/parser/tokenizer/tokenizer_test.go
@@ -0,0 +1,40 @@
+package tokenizer
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestTokenize(t *testing.T) {
+	tests := []struct {
+		text   string
+		tokens []*Token
+	}{
+		{
+			text: "*Hello world!",
+			tokens: []*Token{
+				{
+					Type:  Star,
+					Value: "*",
+				},
+				{
+					Type:  Text,
+					Value: "Hello",
+				},
+				{
+					Type:  Space,
+					Value: " ",
+				},
+				{
+					Type:  Text,
+					Value: "world!",
+				},
+			},
+		},
+	}
+	for _, test := range tests {
+		result := tokenize(test.text)
+		require.Equal(t, test.tokens, result)
+	}
+}
diff --git a/plugin/gomark/renderer/renderer.go b/plugin/gomark/renderer/renderer.go
new file mode 100644
index 00000000..35803f64
--- /dev/null
+++ b/plugin/gomark/renderer/renderer.go
@@ -0,0 +1 @@
+package renderer