Skip to content

Commit

Permalink
feat(gomark): add bold parser (#1724)
Browse files Browse the repository at this point in the history
  • Loading branch information
boojack authored May 23, 2023
1 parent fa53a25 commit 8c34be9
Show file tree
Hide file tree
Showing 4 changed files with 146 additions and 11 deletions.
46 changes: 46 additions & 0 deletions plugin/gomark/parser/bold.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
package parser

import (
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

// BoldParser matches a bold text span (e.g. **Hello**) at the start of a
// token stream.
type BoldParser struct {
	// ContentTokens holds the tokens found between the two-token opening
	// prefix and the two-token closing suffix.
	ContentTokens []*tokenizer.Token
}

// NewBoldParser returns an empty BoldParser ready for use with Match.
func NewBoldParser() *BoldParser {
	parser := BoldParser{}
	return &parser
}

// Match checks whether tokens form a bold span: a two-token prefix of a
// single repeated token type (e.g. **), at least one content token with no
// newline, and a matching two-token suffix that must be the final two tokens.
// On success it returns a BoldParser carrying the content tokens; otherwise
// it returns nil.
//
// NOTE(review): the prefix is accepted as ANY repeated token type, not just
// asterisks or underscores — e.g. "##text##" would also match. Confirm this
// is intentional.
func (*BoldParser) Match(tokens []*tokenizer.Token) *BoldParser {
	// Minimum shape: 2 prefix + 1 content + 2 suffix tokens.
	if len(tokens) < 5 {
		return nil
	}

	// The two opening tokens must share one type. (The previous
	// len(prefixTokens) != 2 check was dead code: slicing a slice of
	// length >= 5 with [:2] always yields length 2.)
	if tokens[0].Type != tokens[1].Type {
		return nil
	}
	prefixTokenType := tokens[0].Type

	contentTokens := []*tokenizer.Token{}
	cursor := 2
	for ; cursor < len(tokens)-1; cursor++ {
		token, nextToken := tokens[cursor], tokens[cursor+1]

		// A bold span cannot contain a line break.
		if token.Type == tokenizer.Newline || nextToken.Type == tokenizer.Newline {
			break
		}
		// Stop at the closing pair.
		if token.Type == prefixTokenType && nextToken.Type == prefixTokenType {
			break
		}
		contentTokens = append(contentTokens, token)
	}
	// The closing pair must be exactly the last two tokens; anything else
	// (newline break, trailing garbage, unterminated span) is a non-match.
	if cursor != len(tokens)-2 {
		return nil
	}

	return &BoldParser{
		ContentTokens: contentTokens,
	}
}
88 changes: 88 additions & 0 deletions plugin/gomark/parser/bold_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
package parser

import (
"testing"

"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

// TestBoldParser table-tests BoldParser.Match against matching and
// non-matching inputs, comparing the full parser result.
func TestBoldParser(t *testing.T) {
	type testCase struct {
		input string
		want  *BoldParser
	}

	testCases := []testCase{
		{
			input: "*Hello world!",
			want:  nil,
		},
		{
			input: "**Hello**",
			want: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
				},
			},
		},
		{
			input: "** Hello **",
			want: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
				},
			},
		},
		{
			input: "** Hello * *",
			want:  nil,
		},
		{
			input: "* * Hello **",
			want:  nil,
		},
		{
			// Newline inside the span must not match.
			input: `** Hello
**`,
			want: nil,
		},
		{
			// A literal backslash-n is plain text, not a newline.
			input: `**Hello \n**`,
			want: &BoldParser{
				ContentTokens: []*tokenizer.Token{
					{
						Type:  tokenizer.Text,
						Value: "Hello",
					},
					{
						Type:  tokenizer.Space,
						Value: " ",
					},
					{
						Type:  tokenizer.Text,
						Value: `\n`,
					},
				},
			},
		},
	}

	for _, tc := range testCases {
		got := NewBoldParser().Match(tokenizer.Tokenize(tc.input))
		require.Equal(t, tc.want, got)
	}
}
11 changes: 6 additions & 5 deletions plugin/gomark/parser/heading.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,16 +4,16 @@ import (
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

type HeadingTokenizer struct {
type HeadingParser struct {
Level int
ContentTokens []*tokenizer.Token
}

func NewHeadingTokenizer() *HeadingTokenizer {
return &HeadingTokenizer{}
func NewHeadingParser() *HeadingParser {
return &HeadingParser{}
}

func (*HeadingTokenizer) Match(tokens []*tokenizer.Token) *HeadingTokenizer {
func (*HeadingParser) Match(tokens []*tokenizer.Token) *HeadingParser {
cursor := 0
for _, token := range tokens {
if token.Type == tokenizer.Hash {
Expand All @@ -40,12 +40,13 @@ func (*HeadingTokenizer) Match(tokens []*tokenizer.Token) *HeadingTokenizer {
break
}
contentTokens = append(contentTokens, token)
cursor++
}
if len(contentTokens) == 0 {
return nil
}

return &HeadingTokenizer{
return &HeadingParser{
Level: level,
ContentTokens: contentTokens,
}
Expand Down
12 changes: 6 additions & 6 deletions plugin/gomark/parser/heading_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,15 @@ import (
func TestHeadingParser(t *testing.T) {
tests := []struct {
text string
heading *HeadingTokenizer
heading *HeadingParser
}{
{
text: "*Hello world!",
heading: nil,
},
{
text: "## Hello World!",
heading: &HeadingTokenizer{
heading: &HeadingParser{
Level: 2,
ContentTokens: []*tokenizer.Token{
{
Expand All @@ -38,7 +38,7 @@ func TestHeadingParser(t *testing.T) {
},
{
text: "# # Hello World",
heading: &HeadingTokenizer{
heading: &HeadingParser{
Level: 1,
ContentTokens: []*tokenizer.Token{
{
Expand Down Expand Up @@ -71,7 +71,7 @@ func TestHeadingParser(t *testing.T) {
{
text: `# 123
Hello World!`,
heading: &HeadingTokenizer{
heading: &HeadingParser{
Level: 1,
ContentTokens: []*tokenizer.Token{
{
Expand All @@ -89,7 +89,7 @@ Hello World!`,

for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
headingTokenizer := NewHeadingTokenizer()
require.Equal(t, test.heading, headingTokenizer.Match(tokens))
heading := NewHeadingParser()
require.Equal(t, test.heading, heading.Match(tokens))
}
}

0 comments on commit 8c34be9

Please sign in to comment.