memos/plugin/gomark/parser/heading_test.go

package parser
import (
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

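// TestHeadingParser runs HeadingParser.Match over a table of inputs,
// comparing the result with either a populated *HeadingParser or nil
// when the text is not a heading.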
func TestHeadingParser(t *testing.T) {
	tests := []struct {
		text    string
		heading *HeadingParser
	}{
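		// text that does not start with a hash is not a heading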
		{
			text:    "*Hello world!",
			heading: nil,
		},
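		// a plain level-2 heading: the "## " prefix sets the level and the
		// remaining text becomes content tokens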
		{
			text: "## Hello World!",
			heading: &HeadingParser{
				Level: 2,
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "World!",
					},
				},
			},
		},
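		// a hash inside the heading body is kept as a literal Hash token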
		{
			text: "# # Hello World",
			heading: &HeadingParser{
				Level: 1,
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Hash,
						Value: "#",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "World",
					},
				},
			},
		},
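		// leading whitespace disqualifies the line as a heading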
		{
			text:    " # 123123 Hello World!",
			heading: nil,
		},
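		// multi-line input: only the first line is parsed; note the trailing
		// space after "123", which becomes the final Space token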
		{
			text: `# 123 
Hello World!`,
			heading: &HeadingParser{
				Level: 1,
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "123",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
	}

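	// tokenize each input and compare the parser's Match result with the expected value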
	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		heading := NewHeadingParser()
		require.Equal(t, test.heading, heading.Match(tokens))
	}
}