feat(gomark): add bold parser (#1724)

Author: boojack, 2023-05-23 20:49:32 +08:00, committed by GitHub
Parent: fa53a2550a
Commit: 8c34be92a6
4 changed files with 146 additions and 11 deletions

New file: bold parser (plugin/gomark/parser)

@@ -0,0 +1,46 @@
package parser

import (
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

type BoldParser struct {
	ContentTokens []*tokenizer.Token
}

func NewBoldParser() *BoldParser {
	return &BoldParser{}
}

func (*BoldParser) Match(tokens []*tokenizer.Token) *BoldParser {
	// A bold span needs at least five tokens: two opening delimiters,
	// at least one content token, and two closing delimiters.
	if len(tokens) < 5 {
		return nil
	}

	// The first two tokens must be delimiters of the same type.
	prefixTokens := tokens[:2]
	if len(prefixTokens) != 2 || prefixTokens[0].Type != prefixTokens[1].Type {
		return nil
	}
	prefixTokenType := prefixTokens[0].Type

	// Collect content until a newline or the closing delimiter pair.
	contentTokens := []*tokenizer.Token{}
	cursor := 2
	for ; cursor < len(tokens)-1; cursor++ {
		token, nextToken := tokens[cursor], tokens[cursor+1]
		if token.Type == tokenizer.Newline || nextToken.Type == tokenizer.Newline {
			break
		}
		if token.Type == prefixTokenType && nextToken.Type == prefixTokenType {
			break
		}
		contentTokens = append(contentTokens, token)
	}

	// The closing pair must be the final two tokens of the stream;
	// otherwise the span is unterminated or broken by a newline.
	if cursor != len(tokens)-2 {
		return nil
	}

	return &BoldParser{
		ContentTokens: contentTokens,
	}
}
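Taken together, Match succeeds only when the token stream opens with a doubled delimiter, contains no newline, and closes with that same doubled delimiter as its final two tokens. A minimal usage sketch, not part of this commit, assuming the parser package lives at plugin/gomark/parser alongside the tokenizer package imported above:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	// Tokenize a bold span and hand the tokens to the parser.
	tokens := tokenizer.Tokenize("**Hello**")
	if bold := parser.NewBoldParser().Match(tokens); bold != nil {
		// ContentTokens holds everything between the ** pairs,
		// here a single Text token with value "Hello".
		for _, t := range bold.ContentTokens {
			fmt.Println(t.Type, t.Value)
		}
	}
}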

New file: bold parser tests

@@ -0,0 +1,88 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func TestBoldParser(t *testing.T) {
	tests := []struct {
		text string
		bold *BoldParser
	}{
		{
			text: "*Hello world!",
			bold: nil,
		},
		{
			text: "**Hello**",
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
				},
			},
		},
		{
			text: "** Hello **",
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
		{
			text: "** Hello * *",
			bold: nil,
		},
		{
			text: "* * Hello **",
			bold: nil,
		},
		{
			text: `** Hello
**`,
			bold: nil,
		},
		{
			text: `**Hello \n**`,
			bold: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: `\n`,
					},
				},
			},
		},
	}
	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		bold := NewBoldParser()
		require.Equal(t, test.bold, bold.Match(tokens))
	}
}
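The last two cases pin down the newline rule: a real newline inside a backtick raw string breaks the span, since bold content is single-line, while the two-character sequence \n is tokenized as plain text and still matches.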

Changed file: heading parser (HeadingTokenizer renamed to HeadingParser)

@@ -4,16 +4,16 @@ import (
 	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
 )
 
-type HeadingTokenizer struct {
+type HeadingParser struct {
 	Level         int
 	ContentTokens []*tokenizer.Token
 }
 
-func NewHeadingTokenizer() *HeadingTokenizer {
-	return &HeadingTokenizer{}
+func NewHeadingParser() *HeadingParser {
+	return &HeadingParser{}
 }
 
-func (*HeadingTokenizer) Match(tokens []*tokenizer.Token) *HeadingTokenizer {
+func (*HeadingParser) Match(tokens []*tokenizer.Token) *HeadingParser {
 	cursor := 0
 	for _, token := range tokens {
 		if token.Type == tokenizer.Hash {
@@ -40,12 +40,13 @@ func (*HeadingTokenizer) Match(tokens []*tokenizer.Token) *HeadingTokenizer {
 			break
 		}
 		contentTokens = append(contentTokens, token)
+		cursor++
 	}
 	if len(contentTokens) == 0 {
 		return nil
 	}
-	return &HeadingTokenizer{
+	return &HeadingParser{
 		Level:         level,
 		ContentTokens: contentTokens,
 	}
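Besides the rename, the second hunk adds cursor++ so the cursor keeps counting consumed tokens while heading content is collected. Most of the loop body is elided in the diff, so the following is only a sketch of the assumed shape of that loop, not the commit's exact code:

// Sketch (assumed shape of the elided loop): collect heading content
// while keeping cursor in sync with how many tokens were consumed.
for _, token := range tokens[cursor:] {
	if token.Type == tokenizer.Newline {
		break
	}
	contentTokens = append(contentTokens, token)
	cursor++ // the added line: without it, cursor stops tracking consumption
}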

Changed file: heading parser tests

@@ -10,7 +10,7 @@ import (
 func TestHeadingParser(t *testing.T) {
 	tests := []struct {
 		text    string
-		heading *HeadingTokenizer
+		heading *HeadingParser
 	}{
 		{
 			text: "*Hello world!",
@@ -18,7 +18,7 @@ func TestHeadingParser(t *testing.T) {
 		},
 		{
 			text: "## Hello World!",
-			heading: &HeadingTokenizer{
+			heading: &HeadingParser{
 				Level: 2,
 				ContentTokens: []*tokenizer.Token{
 					{
@@ -38,7 +38,7 @@ func TestHeadingParser(t *testing.T) {
 		},
 		{
 			text: "# # Hello World",
-			heading: &HeadingTokenizer{
+			heading: &HeadingParser{
 				Level: 1,
 				ContentTokens: []*tokenizer.Token{
 					{
@@ -71,7 +71,7 @@ func TestHeadingParser(t *testing.T) {
 		{
 			text: `# 123
Hello World!`,
-			heading: &HeadingTokenizer{
+			heading: &HeadingParser{
 				Level: 1,
 				ContentTokens: []*tokenizer.Token{
 					{
@@ -89,7 +89,7 @@ Hello World!`,
 	for _, test := range tests {
 		tokens := tokenizer.Tokenize(test.text)
-		headingTokenizer := NewHeadingTokenizer()
-		require.Equal(t, test.heading, headingTokenizer.Match(tokens))
+		heading := NewHeadingParser()
+		require.Equal(t, test.heading, heading.Match(tokens))
 	}
 }