Mirror of https://github.com/gohugoio/hugo.git
content adapter: Handle <!--more--> separator in content.value
Closes #12556
Commit 0221ddb39e (parent 74b9b8a229)
6 changed files with 75 additions and 37 deletions
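
The hunks below add a NoSummaryDivider option to pageparser.Config, seed the summary divider in newPageLexer instead of lexIntroSection, register the divider section handler only when it is not disabled, and drop ParseBytesMain; together this lets content that carries no front matter, such as a content adapter's content.value, have its <!--more--> divider lexed. A minimal caller-side sketch of the behaviour this enables (not part of the commit, and assuming, as the RenderString hunk suggests, that Config.NoFrontMatter makes ParseBytes start at the main section):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/parser/pageparser"
)

func main() {
	// A content value as a content adapter might supply it: no front matter,
	// but a summary divider in the middle.
	src := []byte("aaa <!--more--> bbb")

	// NoFrontMatter starts the lexer at the main section; after this commit
	// the <!--more--> divider is still recognized there.
	items, err := pageparser.ParseBytes(src, pageparser.Config{NoFrontMatter: true})
	if err != nil {
		panic(err)
	}

	for i, it := range items {
		if it.Type == pageparser.TypeLeadSummaryDivider {
			fmt.Printf("summary divider found at item %d of %d\n", i, len(items))
		}
	}
}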
@@ -307,7 +307,10 @@ func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (te
 	if pageparser.HasShortcode(contentToRender) {
 		contentToRenderb := []byte(contentToRender)
 		// String contains a shortcode.
-		parseInfo.itemsStep1, err = pageparser.ParseBytesMain(contentToRenderb, pageparser.Config{})
+		parseInfo.itemsStep1, err = pageparser.ParseBytes(contentToRenderb, pageparser.Config{
+			NoFrontMatter:    true,
+			NoSummaryDivider: true,
+		})
 		if err != nil {
 			return "", err
 		}
@@ -643,3 +643,29 @@ Footer: {{ range index site.Menus.footer }}{{ .Name }}|{{ end }}|
 		"Footer: Footer|p2||",
 	)
 }
+
+func TestPagesFromGoTmplMore(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- hugo.toml --
+disableKinds = ['home','rss','section','sitemap','taxonomy','term']
+[markup.goldmark.renderer]
+unsafe = true
+-- content/s1/_content.gotmpl --
+{{ $page := dict
+  "content" (dict "mediaType" "text/markdown" "value" "aaa <!--more--> bbb")
+  "title" "p1"
+  "path" "p1"
+}}
+{{ .AddPage $page }}
+-- layouts/_default/single.html --
+summary: {{ .Summary }}|content: {{ .Content}}
+`
+
+	b := hugolib.Test(t, files)
+
+	b.AssertFileContent("public/s1/p1/index.html",
+		"<p>aaa</p>|content: <p>aaa</p>\n<p>bbb</p>",
+	)
+}
@@ -63,16 +63,18 @@ func (l *pageLexer) Input() []byte {
 }
 
 type Config struct {
-	NoFrontMatter bool
+	NoFrontMatter    bool
+	NoSummaryDivider bool
 }
 
 // note: the input position here is normally 0 (start), but
 // can be set if position of first shortcode is known
 func newPageLexer(input []byte, stateStart stateFunc, cfg Config) *pageLexer {
 	lexer := &pageLexer{
-		input:      input,
-		stateStart: stateStart,
-		cfg:        cfg,
+		input:          input,
+		stateStart:     stateStart,
+		summaryDivider: summaryDivider,
+		cfg:            cfg,
 		lexerShortcodeState: lexerShortcodeState{
 			currLeftDelimItem:  tLeftDelimScNoMarkup,
 			currRightDelimItem: tRightDelimScNoMarkup,
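
The Config change above is the opt-out half of the fix: summaryDivider is now always set on the lexer, and NoSummaryDivider lets callers such as RenderString turn divider handling off. A hypothetical sketch of the difference as seen through the public API; the helper name and the expected values are illustrative, not taken from the commit:

package main

import "github.com/gohugoio/hugo/parser/pageparser"

// summaryDividerHandled reports whether parsing src under cfg emits a
// TypeLeadSummaryDivider item. Illustrative helper only, not from the commit.
func summaryDividerHandled(src string, cfg pageparser.Config) bool {
	items, err := pageparser.ParseBytes([]byte(src), cfg)
	if err != nil {
		return false
	}
	for _, it := range items {
		if it.Type == pageparser.TypeLeadSummaryDivider {
			return true
		}
	}
	return false
}

func main() {
	src := "aaa <!--more--> bbb"

	// Default: the summary-divider section handler is registered, so the
	// marker becomes its own item (expected: true).
	_ = summaryDividerHandled(src, pageparser.Config{NoFrontMatter: true})

	// NoSummaryDivider: the handler is never added, so <!--more--> stays
	// plain text (expected: false).
	_ = summaryDividerHandled(src, pageparser.Config{NoFrontMatter: true, NoSummaryDivider: true})
}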
@@ -297,6 +299,8 @@ func (s *sectionHandlers) skip() int {
 }
 
 func createSectionHandlers(l *pageLexer) *sectionHandlers {
+	handlers := make([]*sectionHandler, 0, 2)
+
 	shortCodeHandler := &sectionHandler{
 		l: l,
 		skipFunc: func(l *pageLexer) int {
@@ -332,31 +336,36 @@ func createSectionHandlers(l *pageLexer) *sectionHandlers {
 		},
 	}
 
-	summaryDividerHandler := &sectionHandler{
-		l: l,
-		skipFunc: func(l *pageLexer) int {
-			if l.summaryDividerChecked || l.summaryDivider == nil {
-				return -1
-			}
-			return l.index(l.summaryDivider)
-		},
-		lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
-			if !l.hasPrefix(l.summaryDivider) {
-				return origin, false
-			}
-
-			l.summaryDividerChecked = true
-			l.pos += len(l.summaryDivider)
-			// This makes it a little easier to reason about later.
-			l.consumeSpace()
-			l.emit(TypeLeadSummaryDivider)
-
-			return origin, true
-		},
-	}
-
-	handlers := []*sectionHandler{shortCodeHandler, summaryDividerHandler}
+	handlers = append(handlers, shortCodeHandler)
+
+	if !l.cfg.NoSummaryDivider {
+		summaryDividerHandler := &sectionHandler{
+			l: l,
+			skipFunc: func(l *pageLexer) int {
+				if l.summaryDividerChecked {
+					return -1
+				}
+				return l.index(l.summaryDivider)
+			},
+			lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
+				if !l.hasPrefix(l.summaryDivider) {
+					return origin, false
+				}
+
+				l.summaryDividerChecked = true
+				l.pos += len(l.summaryDivider)
+				// This makes it a little easier to reason about later.
+				l.consumeSpace()
+				l.emit(TypeLeadSummaryDivider)
+
+				return origin, true
+			},
+		}
+
+		handlers = append(handlers, summaryDividerHandler)
+	}
 
 	return &sectionHandlers{
 		l:        l,
 		handlers: handlers,
@@ -14,8 +14,6 @@
 package pageparser
 
 func lexIntroSection(l *pageLexer) stateFunc {
-	l.summaryDivider = summaryDivider
-
 LOOP:
 	for {
 		r := l.next()
@@ -47,15 +47,6 @@ func ParseBytes(b []byte, cfg Config) (Items, error) {
 	return l.items, l.err
 }
 
-// ParseBytesMain parses b starting with the main section.
-func ParseBytesMain(b []byte, cfg Config) (Items, error) {
-	l, err := parseBytes(b, cfg, lexMainSection)
-	if err != nil {
-		return nil, err
-	}
-	return l.items, l.err
-}
-
 type ContentFrontMatter struct {
 	Content     []byte
 	FrontMatter map[string]any
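
With ParseBytesMain removed above, its in-tree caller (the RenderString hunk at the top of this diff) moves to ParseBytes with the config flags set explicitly. A hedged sketch of that migration; parseMain is a hypothetical wrapper, not code from the commit:

package main

import "github.com/gohugoio/hugo/parser/pageparser"

// parseMain mirrors the replacement made in RenderString above:
//
//	before: pageparser.ParseBytesMain(b, pageparser.Config{})
//	after:  pageparser.ParseBytes(b, pageparser.Config{NoFrontMatter: true, NoSummaryDivider: true})
//
// Hypothetical wrapper, shown only to make the migration explicit.
func parseMain(b []byte) (pageparser.Items, error) {
	return pageparser.ParseBytes(b, pageparser.Config{
		NoFrontMatter:    true,
		NoSummaryDivider: true,
	})
}

func main() {
	_, _ = parseMain([]byte("aaa <!--more--> bbb"))
}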
@@ -101,3 +101,14 @@ func BenchmarkHasShortcode(b *testing.B) {
 		}
 	})
 }
+
+func TestSummaryDividerStartingFromMain(t *testing.T) {
+	c := qt.New(t)
+
+	input := `aaa <!--more--> bbb`
+	items, err := collectStringMain(input)
+	c.Assert(err, qt.IsNil)
+
+	c.Assert(items, qt.HasLen, 4)
+	c.Assert(items[1].Type, qt.Equals, TypeLeadSummaryDivider)
+}