hugolib: Make RawContent raw again
This was a regression introduced in Hugo 0.17. Fixes #2601
parent: 971d1baf13
commit: 2f026ab3f3

5 changed files with 53 additions and 22 deletions
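The gist of the fix, summarized here for orientation (a paraphrase of the diff below, not extra code in the commit): reading a content file now leaves p.rawContent untouched so that .RawContent in templates stays truly raw, and all further processing (shortcodes, emoji, the summary divider, markdown rendering) happens on a separate p.workContent buffer that starts as a byte-for-byte copy:

	// Sketch of the pattern this commit introduces.
	// rawContent: the bytes as read from the content file; backs .RawContent.
	// workContent: the buffer that shortcode handling, Emojify and
	// renderContent are allowed to mutate during the build.
	func (p *Page) createWorkContentCopy() {
		p.workContent = make([]byte, len(p.rawContent))
		copy(p.workContent, p.rawContent)
	}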
@@ -72,6 +72,9 @@ func (h htmlHandler) PageConvert(p *Page, t tpl.Template) HandledResult {
 		panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
 	}
 
+	// Work on a copy of the raw content from now on.
+	p.createWorkContentCopy()
+
 	p.ProcessShortcodes(t)
 
 	return HandledResult{err: nil}
@@ -109,19 +112,22 @@ func commonConvert(p *Page, t tpl.Template) HandledResult {
 		panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
 	}
 
+	// Work on a copy of the raw content from now on.
+	p.createWorkContentCopy()
+
 	p.ProcessShortcodes(t)
 
 	// TODO(bep) these page handlers need to be re-evaluated, as it is hard to
 	// process a page in isolation. See the new preRender func.
 	if viper.GetBool("enableEmoji") {
-		p.rawContent = helpers.Emojify(p.rawContent)
+		p.workContent = helpers.Emojify(p.workContent)
 	}
 
 	// We have to replace the <!--more--> with something that survives all the
 	// rendering engines.
 	// TODO(bep) inline replace
-	p.rawContent = bytes.Replace(p.rawContent, []byte(helpers.SummaryDivider), internalSummaryDivider, 1)
-	p.rawContent = p.renderContent(p.rawContent)
+	p.workContent = bytes.Replace(p.workContent, []byte(helpers.SummaryDivider), internalSummaryDivider, 1)
+	p.workContent = p.renderContent(p.workContent)
 
 	return HandledResult{err: nil}
 }
@@ -368,49 +368,49 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) {
 			// If in watch mode, we need to keep the original so we can
 			// repeat this process on rebuild.
-			var rawContentCopy []byte
+			var workContentCopy []byte
 			if cfg.Watching {
-				rawContentCopy = make([]byte, len(p.rawContent))
-				copy(rawContentCopy, p.rawContent)
+				workContentCopy = make([]byte, len(p.workContent))
+				copy(workContentCopy, p.workContent)
 			} else {
 				// Just reuse the same slice.
-				rawContentCopy = p.rawContent
+				workContentCopy = p.workContent
 			}
 
 			if p.Markup == "markdown" {
-				tmpContent, tmpTableOfContents := helpers.ExtractTOC(rawContentCopy)
+				tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
 				p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
-				rawContentCopy = tmpContent
+				workContentCopy = tmpContent
 			}
 
 			var err error
-			if rawContentCopy, err = handleShortcodes(p, s.owner.tmpl, rawContentCopy); err != nil {
+			if workContentCopy, err = handleShortcodes(p, s.owner.tmpl, workContentCopy); err != nil {
 				jww.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
 			}
 
 			if p.Markup != "html" {
 
 				// Now we know enough to create a summary of the page and count some words
-				summaryContent, err := p.setUserDefinedSummaryIfProvided(rawContentCopy)
+				summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)
 
 				if err != nil {
 					jww.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
 				} else if summaryContent != nil {
-					rawContentCopy = summaryContent.content
+					workContentCopy = summaryContent.content
 				}
 
-				p.Content = helpers.BytesToHTML(rawContentCopy)
+				p.Content = helpers.BytesToHTML(workContentCopy)
 
 				if summaryContent == nil {
 					p.setAutoSummary()
 				}
 
 			} else {
-				p.Content = helpers.BytesToHTML(rawContentCopy)
+				p.Content = helpers.BytesToHTML(workContentCopy)
 			}
 
 			// no need for this anymore
-			rawContentCopy = nil
+			workContentCopy = nil
 
 			//analyze for raw stats
 			p.analyzePage()
@@ -560,7 +560,7 @@ func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
 	for _, p := range s.rawAllPages {
 		// No HTML when not processed
-		require.Equal(t, p.shouldBuild(), bytes.Contains(p.rawContent, []byte("</")), p.BaseFileName()+": "+string(p.rawContent))
+		require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
 		require.Equal(t, p.shouldBuild(), p.Content != "", p.BaseFileName())
 
 		require.Equal(t, p.shouldBuild(), p.Content != "", p.BaseFileName())
 
@@ -129,12 +129,12 @@ type Page struct {
 	frontmatter []byte
 
-	// rawContent isn't "raw" as in the same as in the content file.
-	// Hugo cares about memory consumption, so we make changes to it to do
-	// markdown rendering etc., but it is "raw enough" so we can do rebuilds
-	// when shortcode changes etc.
+	// rawContent is the raw content read from the content file.
 	rawContent []byte
 
+	// workContent is a copy of rawContent that may be mutated during site build.
+	workContent []byte
+
 	// state telling if this is a "new page" or if we have rendered it previously.
 	rendered bool
 
@@ -292,6 +292,11 @@ func (ps Pages) FindPagePos(page *Page) int {
 	return -1
 }
 
+func (p *Page) createWorkContentCopy() {
+	p.workContent = make([]byte, len(p.rawContent))
+	copy(p.workContent, p.rawContent)
+}
+
 func (p *Page) Plain() string {
 	p.initPlain()
 	return p.plain
@@ -1389,8 +1394,8 @@ func (p *Page) SaveSource() error {
 }
 
 func (p *Page) ProcessShortcodes(t tpl.Template) {
-	tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.rawContent), p, t)
-	p.rawContent = []byte(tmpContent)
+	tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.workContent), p, t)
+	p.workContent = []byte(tmpContent)
 	p.contentShortCodes = tmpContentShortCodes
 }
 
@@ -753,6 +753,26 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
 	}
 }
 
+// Issue #2601
+func TestPageRawContent(t *testing.T) {
+	s := newSiteFromSources("raw.md", `---
+title: Raw
+---
+**Raw**`)
+
+	writeSource(t, filepath.Join("layouts", "_default", "single.html"), `{{ .RawContent }}`)
+
+	if err := buildSiteSkipRender(s); err != nil {
+		t.Fatalf("Failed to build site: %s", err)
+	}
+
+	require.Len(t, s.RegularPages, 1)
+	p := s.RegularPages[0]
+
+	require.Contains(t, p.RawContent(), "**Raw**")
+
+}
+
 func TestPageWithShortCodeInSummary(t *testing.T) {
 
 	assertFunc := func(t *testing.T, ext string, pages Pages) {