chore: fix deprecation
commit e2fddcf681
parent 370dbbc579
6 changed files with 36 additions and 7 deletions
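The goldmark Text(source) helper on AST nodes is deprecated; this commit moves to the replacements visible in the hunks below: Value(source) on concrete *ast.Text nodes, Lines().Value(source) for headings, and a new util.Text helper that concatenates the text of a node's children. The following is a minimal, self-contained sketch of the non-deprecated pattern (assuming goldmark v1.7+; the sample document and the nodeText helper are illustrative only, not code from this commit):

// Sketch (not part of this commit): extract the plain text of a heading
// without the deprecated Node.Text helper.
package main

import (
	"bytes"
	"fmt"

	"github.com/yuin/goldmark"
	"github.com/yuin/goldmark/ast"
	"github.com/yuin/goldmark/text"
)

// nodeText mirrors the approach taken in this commit: walk the children and
// concatenate the Value of every *ast.Text leaf.
func nodeText(n ast.Node, src []byte) []byte {
	var b bytes.Buffer
	var walk func(ast.Node)
	walk = func(n ast.Node) {
		for c := n.FirstChild(); c != nil; c = c.NextSibling() {
			if t, ok := c.(*ast.Text); ok {
				b.Write(t.Value(src)) // replacement for the deprecated t.Text(src)
			} else {
				walk(c)
			}
		}
	}
	walk(n)
	return b.Bytes()
}

func main() {
	src := []byte("# Hello *world*\n")
	doc := goldmark.DefaultParser().Parse(text.NewReader(src))
	heading := doc.FirstChild() // the heading node of this one-line document
	fmt.Println(string(nodeText(heading, src))) // prints "Hello world"
}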
@@ -50,7 +50,7 @@ func (g *GitHubCalloutTransformer) Transform(node *ast.Document, reader text.Rea
 		return ast.WalkContinue, nil
 	}
 	firstTextNode, ok := firstParagraph.FirstChild().(*ast.Text)
-	if !ok || string(firstTextNode.Text(reader.Source())) != "[" {
+	if !ok || string(firstTextNode.Value(reader.Source())) != "[" {
 		return ast.WalkContinue, nil
 	}
 	secondTextNode, ok := firstTextNode.NextSibling().(*ast.Text)
@@ -59,14 +59,14 @@ func (g *GitHubCalloutTransformer) Transform(node *ast.Document, reader text.Rea
 	}
 	// If the second node's text isn't one of the supported attention
 	// types, continue walking.
-	secondTextNodeText := secondTextNode.Text(reader.Source())
+	secondTextNodeText := secondTextNode.Value(reader.Source())
 	attentionType := strings.ToLower(strings.TrimPrefix(string(secondTextNodeText), "!"))
 	if _, has := supportedAttentionTypes[attentionType]; !has {
 		return ast.WalkContinue, nil
 	}

 	thirdTextNode, ok := secondTextNode.NextSibling().(*ast.Text)
-	if !ok || string(thirdTextNode.Text(reader.Source())) != "]" {
+	if !ok || string(thirdTextNode.Value(reader.Source())) != "]" {
 		return ast.WalkContinue, nil
 	}

@@ -7,6 +7,8 @@ package callout
 import (
 	"strings"

+	"code.gitea.io/gitea/modules/markup/markdown/util"
+
 	"github.com/yuin/goldmark/ast"
 	"github.com/yuin/goldmark/parser"
 	"github.com/yuin/goldmark/text"
@@ -40,7 +42,7 @@ func (g *GitHubLegacyCalloutTransformer) Transform(node *ast.Document, reader te
 	if !ok {
 		return ast.WalkContinue, nil
 	}
-	calloutText := string(calloutNode.Text(reader.Source()))
+	calloutText := string(util.Text(calloutNode, reader.Source()))
 	calloutType := strings.ToLower(calloutText)
 	// We only support "Note" and "Warning" callouts in legacy mode,
 	// match only those.
@@ -9,6 +9,7 @@ import (
 	"strings"

 	"code.gitea.io/gitea/modules/markup"
+	mdutil "code.gitea.io/gitea/modules/markup/markdown/util"

 	"github.com/yuin/goldmark/ast"
 	"github.com/yuin/goldmark/renderer/html"
@@ -49,7 +50,7 @@ func (r *HTMLRenderer) renderCodeSpan(w util.BufWriter, source []byte, n ast.Nod
 }

 func (g *ASTTransformer) transformCodeSpan(_ *markup.RenderContext, v *ast.CodeSpan, reader text.Reader) {
-	colorContent := v.Text(reader.Source())
+	colorContent := mdutil.Text(v, reader.Source())
 	if matchColor(strings.ToLower(string(colorContent))) {
 		v.AppendChild(v, NewColorPreview(colorContent))
 	}
@@ -19,7 +19,7 @@ func (g *ASTTransformer) transformHeading(_ *markup.RenderContext, v *ast.Headin
 			v.SetAttribute(attr.Name, []byte(fmt.Sprintf("%v", attr.Value)))
 		}
 	}
-	txt := v.Text(reader.Source())
+	txt := v.Lines().Value(reader.Source())
 	header := markup.Header{
 		Text: util.UnsafeBytesToString(txt),
 		Level: v.Level,
modules/markup/markdown/util/text.go (new file, 26 lines)
@@ -0,0 +1,26 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package util
+
+import (
+	"bytes"
+
+	"github.com/yuin/goldmark/ast"
+)
+
+func textOfChildren(n ast.Node, src []byte, b *bytes.Buffer) {
+	for c := n.FirstChild(); c != nil; c = c.NextSibling() {
+		if t, ok := c.(*ast.Text); ok {
+			b.Write(t.Value(src))
+		} else {
+			textOfChildren(c, src, b)
+		}
+	}
+}
+
+func Text(n ast.Node, src []byte) []byte {
+	var b bytes.Buffer
+	textOfChildren(n, src, &b)
+	return b.Bytes()
+}
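The new util.Text helper presumably reproduces what the deprecated Node-level Text(source) did for composite nodes: it walks the children depth-first and concatenates the Value of every *ast.Text leaf. That is why calloutNode.Text(reader.Source()) and v.Text(reader.Source()) in the hunks above can be swapped for util.Text(...) without changing the rendered output, while plain *ast.Text nodes only need the one-to-one Value(source) replacement.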
@@ -46,7 +46,7 @@ func (r *stripRenderer) Render(w io.Writer, source []byte, doc ast.Node) error {
 		coalesce := prevSibIsText
 		r.processString(
 			w,
-			v.Text(source),
+			v.Value(source),
 			coalesce)
 		if v.SoftLineBreak() {
 			r.doubleSpace(w)