@@ -1,6 +1,8 @@
 package parser
 
 import (
+	"slices"
+
 	"github.com/usememos/memos/plugin/gomark/ast"
 	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
 )
@@ -15,51 +17,56 @@ func NewCodeBlockParser() *CodeBlockParser {
 }
 
 func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
-	if len(tokens) < 9 {
+	rows := tokenizer.Split(tokens, tokenizer.Newline)
+	if len(rows) < 3 {
 		return nil, 0
 	}
 
-	if tokens[0].Type != tokenizer.Backtick || tokens[1].Type != tokenizer.Backtick || tokens[2].Type != tokenizer.Backtick {
+	firstRow := rows[0]
+	if len(firstRow) < 3 {
 		return nil, 0
 	}
-	if tokens[3].Type != tokenizer.Newline && tokens[4].Type != tokenizer.Newline {
+	if firstRow[0].Type != tokenizer.Backtick || firstRow[1].Type != tokenizer.Backtick || firstRow[2].Type != tokenizer.Backtick {
 		return nil, 0
 	}
-	cursor := 4
-	if tokens[3].Type != tokenizer.Newline {
-		cursor = 5
+	languageTokens := []*tokenizer.Token{}
+	if len(firstRow) > 3 {
+		languageTokens = firstRow[3:]
+		// Check if language is valid.
+		availableLanguageTokenTypes := []tokenizer.TokenType{tokenizer.Text, tokenizer.Number, tokenizer.Underscore}
+		for _, token := range languageTokens {
+			if !slices.Contains(availableLanguageTokenTypes, token.Type) {
+				return nil, 0
+			}
+		}
 	}
 
+	contentRows := [][]*tokenizer.Token{}
 	matched := false
-	for ; cursor < len(tokens)-3; cursor++ {
-		if tokens[cursor].Type == tokenizer.Newline && tokens[cursor+1].Type == tokenizer.Backtick && tokens[cursor+2].Type == tokenizer.Backtick && tokens[cursor+3].Type == tokenizer.Backtick {
-			if cursor+3 == len(tokens)-1 {
-				cursor += 4
-				matched = true
-				break
-			} else if tokens[cursor+4].Type == tokenizer.Newline {
-				cursor += 4
-				matched = true
-				break
-			}
+	for _, row := range rows[1:] {
+		if len(row) == 3 && row[0].Type == tokenizer.Backtick && row[1].Type == tokenizer.Backtick && row[2].Type == tokenizer.Backtick {
+			matched = true
+			break
 		}
+		contentRows = append(contentRows, row)
 	}
 	if !matched {
 		return nil, 0
 	}
 
-	languageToken := tokens[3]
-	contentStart, contentEnd := 5, cursor-4
-	if languageToken.Type == tokenizer.Newline {
-		languageToken = nil
-		contentStart = 4
+	contentTokens := []*tokenizer.Token{}
+	for index, row := range contentRows {
+		contentTokens = append(contentTokens, row...)
+		if index != len(contentRows)-1 {
+			contentTokens = append(contentTokens, &tokenizer.Token{
+				Type:  tokenizer.Newline,
+				Value: "\n",
+			})
+		}
 	}
 
-	codeBlock := &ast.CodeBlock{
-		Content: tokenizer.Stringify(tokens[contentStart:contentEnd]),
-	}
-	if languageToken != nil {
-		codeBlock.Language = languageToken.String()
-	}
-	return codeBlock, cursor
+	return &ast.CodeBlock{
+		Content:  tokenizer.Stringify(contentTokens),
+		Language: tokenizer.Stringify(languageTokens),
+	}, 4 + len(languageTokens) + len(contentTokens) + 4
 }
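
For reference, a minimal sketch of how the rewritten parser might be exercised. It assumes tokenizer.Tokenize is the entry point that turns raw text into the []*tokenizer.Token stream consumed by Match, and that the parser package lives at github.com/usememos/memos/plugin/gomark/parser; only NewCodeBlockParser, Match, and the ast.CodeBlock fields are taken from the change above.

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	// Assumption: tokenizer.Tokenize produces the token stream fed to Match.
	tokens := tokenizer.Tokenize("```go\nvar a int\n```")

	node, size := parser.NewCodeBlockParser().Match(tokens)
	if node == nil {
		fmt.Println("not a code block")
		return
	}

	codeBlock := node.(*ast.CodeBlock)
	fmt.Println(codeBlock.Language) // "go"
	fmt.Println(codeBlock.Content)  // "var a int"
	fmt.Println(size)               // number of tokens consumed by the match
}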