Create pages from _content.gotmpl

Closes #12427
Closes #12485
Closes #6310
Closes #5074
This commit is contained in:
Bjørn Erik Pedersen 2024-03-17 11:12:33 +01:00
parent 55dea41c1a
commit e2d66e3218
No known key found for this signature in database
60 changed files with 2391 additions and 438 deletions

View file

@ -854,7 +854,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
h.BaseFs.SourceFilesystems, h.BaseFs.SourceFilesystems,
dynamicEvents) dynamicEvents)
onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents) onePageName := pickOneWriteOrCreatePath(h.Conf.ContentTypes(), partitionedEvents.ContentEvents)
c.printChangeDetected("") c.printChangeDetected("")
c.changeDetector.PrepareNew() c.changeDetector.PrepareNew()

View file

@ -46,12 +46,12 @@ import (
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/common/urls"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/hugolib/filesystems" "github.com/gohugoio/hugo/hugolib/filesystems"
"github.com/gohugoio/hugo/livereload" "github.com/gohugoio/hugo/livereload"
@ -1188,16 +1188,16 @@ func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fs
return return
} }
func pickOneWriteOrCreatePath(events []fsnotify.Event) string { func pickOneWriteOrCreatePath(contentTypes config.ContentTypesProvider, events []fsnotify.Event) string {
name := "" name := ""
for _, ev := range events { for _, ev := range events {
if ev.Op&fsnotify.Write == fsnotify.Write || ev.Op&fsnotify.Create == fsnotify.Create { if ev.Op&fsnotify.Write == fsnotify.Write || ev.Op&fsnotify.Create == fsnotify.Create {
if files.IsIndexContentFile(ev.Name) { if contentTypes.IsIndexContentFile(ev.Name) {
return ev.Name return ev.Name
} }
if files.IsContentFile(ev.Name) { if contentTypes.IsContentFile(ev.Name) {
name = ev.Name name = ev.Name
} }

View file

@ -27,7 +27,12 @@ func NewCache[K comparable, T any]() *Cache[K, T] {
} }
// Delete deletes the given key from the cache. // Delete deletes the given key from the cache.
// If c is nil, this method is a no-op.
func (c *Cache[K, T]) Get(key K) (T, bool) { func (c *Cache[K, T]) Get(key K) (T, bool) {
if c == nil {
var zero T
return zero, false
}
c.RLock() c.RLock()
v, found := c.m[key] v, found := c.m[key]
c.RUnlock() c.RUnlock()
@ -60,6 +65,15 @@ func (c *Cache[K, T]) Set(key K, value T) {
c.Unlock() c.Unlock()
} }
// ForEeach calls the given function for each key/value pair in the cache.
func (c *Cache[K, T]) ForEeach(f func(K, T)) {
c.RLock()
defer c.RUnlock()
for k, v := range c.m {
f(k, v)
}
}
// SliceCache is a simple thread safe cache backed by a map. // SliceCache is a simple thread safe cache backed by a map.
type SliceCache[T any] struct { type SliceCache[T any] struct {
m map[string][]T m map[string][]T

View file

@ -25,8 +25,6 @@ import (
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
) )
var defaultPathParser PathParser
// PathParser parses a path into a Path. // PathParser parses a path into a Path.
type PathParser struct { type PathParser struct {
// Maps the language code to its index in the languages/sites slice. // Maps the language code to its index in the languages/sites slice.
@ -34,11 +32,9 @@ type PathParser struct {
// Reports whether the given language is disabled. // Reports whether the given language is disabled.
IsLangDisabled func(string) bool IsLangDisabled func(string) bool
}
// Parse parses component c with path s into Path using the default path parser. // Reports whether the given ext is a content file.
func Parse(c, s string) *Path { IsContentExt func(string) bool
return defaultPathParser.Parse(c, s)
} }
// NormalizePathString returns a normalized path string using the very basic Hugo rules. // NormalizePathString returns a normalized path string using the very basic Hugo rules.
@ -108,7 +104,6 @@ func (pp *PathParser) parse(component, s string) (*Path, error) {
var err error var err error
// Preserve the original case for titles etc. // Preserve the original case for titles etc.
p.unnormalized, err = pp.doParse(component, s, pp.newPath(component)) p.unnormalized, err = pp.doParse(component, s, pp.newPath(component))
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -195,23 +190,26 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
} }
} }
isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes if len(p.identifiers) > 0 {
isContent := isContentComponent && files.IsContentExt(p.Ext()) isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes
isContent := isContentComponent && pp.IsContentExt(p.Ext())
if isContent {
id := p.identifiers[len(p.identifiers)-1] id := p.identifiers[len(p.identifiers)-1]
b := p.s[p.posContainerHigh : id.Low-1] b := p.s[p.posContainerHigh : id.Low-1]
switch b { if isContent {
case "index": switch b {
p.bundleType = PathTypeLeaf case "index":
case "_index": p.bundleType = PathTypeLeaf
p.bundleType = PathTypeBranch case "_index":
default: p.bundleType = PathTypeBranch
p.bundleType = PathTypeContentSingle default:
} p.bundleType = PathTypeContentSingle
}
if slashCount == 2 && p.IsLeafBundle() { if slashCount == 2 && p.IsLeafBundle() {
p.posSectionHigh = 0 p.posSectionHigh = 0
}
} else if b == files.NameContentData && files.IsContentDataExt(p.Ext()) {
p.bundleType = PathTypeContentData
} }
} }
@ -246,6 +244,9 @@ const (
// Branch bundles, e.g. /blog/_index.md // Branch bundles, e.g. /blog/_index.md
PathTypeBranch PathTypeBranch
// Content data file, _content.gotmpl.
PathTypeContentData
) )
type Path struct { type Path struct {
@ -521,10 +522,6 @@ func (p *Path) Identifiers() []string {
return ids return ids
} }
func (p *Path) IsHTML() bool {
return files.IsHTML(p.Ext())
}
func (p *Path) BundleType() PathType { func (p *Path) BundleType() PathType {
return p.bundleType return p.bundleType
} }
@ -541,6 +538,10 @@ func (p *Path) IsLeafBundle() bool {
return p.bundleType == PathTypeLeaf return p.bundleType == PathTypeLeaf
} }
func (p *Path) IsContentData() bool {
return p.bundleType == PathTypeContentData
}
func (p Path) ForBundleType(t PathType) *Path { func (p Path) ForBundleType(t PathType) *Path {
p.bundleType = t p.bundleType = t
return &p return &p

View file

@ -27,6 +27,9 @@ var testParser = &PathParser{
"no": 0, "no": 0,
"en": 1, "en": 1,
}, },
IsContentExt: func(ext string) bool {
return ext == "md"
},
} }
func TestParse(t *testing.T) { func TestParse(t *testing.T) {
@ -333,6 +336,22 @@ func TestParse(t *testing.T) {
c.Assert(p.Path(), qt.Equals, "/a/b/c.txt") c.Assert(p.Path(), qt.Equals, "/a/b/c.txt")
}, },
}, },
{
"Content data file gotmpl",
"/a/b/_content.gotmpl",
func(c *qt.C, p *Path) {
c.Assert(p.Path(), qt.Equals, "/a/b/_content.gotmpl")
c.Assert(p.Ext(), qt.Equals, "gotmpl")
c.Assert(p.IsContentData(), qt.IsTrue)
},
},
{
"Content data file yaml",
"/a/b/_content.yaml",
func(c *qt.C, p *Path) {
c.Assert(p.IsContentData(), qt.IsFalse)
},
},
} }
for _, test := range tests { for _, test := range tests {
c.Run(test.name, func(c *qt.C) { c.Run(test.name, func(c *qt.C) {

View file

@ -367,6 +367,7 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
DisabledLanguages: disabledLangs, DisabledLanguages: disabledLangs,
IgnoredLogs: ignoredLogIDs, IgnoredLogs: ignoredLogIDs,
KindOutputFormats: kindOutputFormats, KindOutputFormats: kindOutputFormats,
ContentTypes: media.DefaultContentTypes.FromTypes(c.MediaTypes.Config),
CreateTitle: helpers.GetTitleFunc(c.TitleCaseStyle), CreateTitle: helpers.GetTitleFunc(c.TitleCaseStyle),
IsUglyURLSection: isUglyURL, IsUglyURLSection: isUglyURL,
IgnoreFile: ignoreFile, IgnoreFile: ignoreFile,
@ -402,6 +403,7 @@ type ConfigCompiled struct {
BaseURLLiveReload urls.BaseURL BaseURLLiveReload urls.BaseURL
ServerInterface string ServerInterface string
KindOutputFormats map[string]output.Formats KindOutputFormats map[string]output.Formats
ContentTypes media.ContentTypes
DisabledKinds map[string]bool DisabledKinds map[string]bool
DisabledLanguages map[string]bool DisabledLanguages map[string]bool
IgnoredLogs map[string]bool IgnoredLogs map[string]bool
@ -759,7 +761,7 @@ func (c *Configs) Init() error {
c.Languages = languages c.Languages = languages
c.LanguagesDefaultFirst = languagesDefaultFirst c.LanguagesDefaultFirst = languagesDefaultFirst
c.ContentPathParser = &paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet(), IsLangDisabled: c.Base.IsLangDisabled} c.ContentPathParser = &paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet(), IsLangDisabled: c.Base.IsLangDisabled, IsContentExt: c.Base.C.ContentTypes.IsContentSuffix}
c.configLangs = make([]config.AllProvider, len(c.Languages)) c.configLangs = make([]config.AllProvider, len(c.Languages))
for i, l := range c.LanguagesDefaultFirst { for i, l := range c.LanguagesDefaultFirst {

View file

@ -84,3 +84,21 @@ logPathWarnings = true
b.Assert(conf.PrintI18nWarnings, qt.Equals, true) b.Assert(conf.PrintI18nWarnings, qt.Equals, true)
b.Assert(conf.PrintPathWarnings, qt.Equals, true) b.Assert(conf.PrintPathWarnings, qt.Equals, true)
} }
func TestRedefineContentTypes(t *testing.T) {
files := `
-- hugo.toml --
baseURL = "https://example.com"
[mediaTypes]
[mediaTypes."text/html"]
suffixes = ["html", "xhtml"]
`
b := hugolib.Test(t, files)
conf := b.H.Configs.Base
contentTypes := conf.C.ContentTypes
b.Assert(contentTypes.HTML.Suffixes(), qt.DeepEquals, []string{"html", "xhtml"})
b.Assert(contentTypes.Markdown.Suffixes(), qt.DeepEquals, []string{"md", "mdown", "markdown"})
}

View file

@ -144,6 +144,10 @@ func (c ConfigLanguage) NewIdentityManager(name string) identity.Manager {
return identity.NewManager(name) return identity.NewManager(name)
} }
func (c ConfigLanguage) ContentTypes() config.ContentTypesProvider {
return c.config.C.ContentTypes
}
// GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use. // GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use.
func (c ConfigLanguage) GetConfigSection(s string) any { func (c ConfigLanguage) GetConfigSection(s string) any {
switch s { switch s {

View file

@ -41,6 +41,7 @@ type AllProvider interface {
Dirs() CommonDirs Dirs() CommonDirs
Quiet() bool Quiet() bool
DirsBase() CommonDirs DirsBase() CommonDirs
ContentTypes() ContentTypesProvider
GetConfigSection(string) any GetConfigSection(string) any
GetConfig() any GetConfig() any
CanonifyURLs() bool CanonifyURLs() bool
@ -75,6 +76,15 @@ type AllProvider interface {
EnableEmoji() bool EnableEmoji() bool
} }
// We cannot import the media package as that would create a circular dependency.
// This interface defineds a sub set of what media.ContentTypes provides.
type ContentTypesProvider interface {
IsContentSuffix(suffix string) bool
IsContentFile(filename string) bool
IsIndexContentFile(filename string) bool
IsHTMLSuffix(suffix string) bool
}
// Provider provides the configuration settings for Hugo. // Provider provides the configuration settings for Hugo.
type Provider interface { type Provider interface {
GetString(key string) string GetString(key string) string

View file

@ -29,8 +29,6 @@ import (
"github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -98,7 +96,7 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error
return "", fmt.Errorf("failed to resolve %q to an archetype template", targetPath) return "", fmt.Errorf("failed to resolve %q to an archetype template", targetPath)
} }
if !files.IsContentFile(b.targetPath) { if !h.Conf.ContentTypes().IsContentFile(b.targetPath) {
return "", fmt.Errorf("target path %q is not a known content format", b.targetPath) return "", fmt.Errorf("target path %q is not a known content format", b.targetPath)
} }

View file

@ -26,6 +26,7 @@ import (
"github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/media"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -135,20 +136,16 @@ func (c *ContentSpec) SanitizeAnchorName(s string) string {
} }
func (c *ContentSpec) ResolveMarkup(in string) string { func (c *ContentSpec) ResolveMarkup(in string) string {
if c == nil {
panic("nil ContentSpec")
}
in = strings.ToLower(in) in = strings.ToLower(in)
switch in {
case "md", "markdown", "mdown": if mediaType, found := c.Cfg.ContentTypes().(media.ContentTypes).Types().GetBestMatch(markup.ResolveMarkup(in)); found {
return "markdown" return mediaType.SubType
case "html", "htm":
return "html"
default:
if conv := c.Converters.Get(in); conv != nil {
return conv.Name()
}
} }
if conv := c.Converters.Get(in); conv != nil {
return markup.ResolveMarkup(conv.Name())
}
return "" return ""
} }
@ -244,7 +241,7 @@ func (c *ContentSpec) TrimShortHTML(input []byte, markup string) []byte {
openingTag := []byte("<p>") openingTag := []byte("<p>")
closingTag := []byte("</p>") closingTag := []byte("</p>")
if markup == "asciidocext" { if markup == media.DefaultContentTypes.AsciiDoc.SubType {
openingTag = []byte("<div class=\"paragraph\">\n<p>") openingTag = []byte("<div class=\"paragraph\">\n<p>")
closingTag = []byte("</p>\n</div>") closingTag = []byte("</p>\n</div>")
} }

View file

@ -41,7 +41,7 @@ func TestTrimShortHTML(t *testing.T) {
{"markdown", []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>"), []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>")}, {"markdown", []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>"), []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>")},
// Issue 12369 // Issue 12369
{"markdown", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>")}, {"markdown", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>")},
{"asciidocext", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("foo")}, {"asciidoc", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("foo")},
} }
c := newTestContentSpec(nil) c := newTestContentSpec(nil)

View file

@ -35,9 +35,9 @@ func TestResolveMarkup(t *testing.T) {
{"md", "markdown"}, {"md", "markdown"},
{"markdown", "markdown"}, {"markdown", "markdown"},
{"mdown", "markdown"}, {"mdown", "markdown"},
{"asciidocext", "asciidocext"}, {"asciidocext", "asciidoc"},
{"adoc", "asciidocext"}, {"adoc", "asciidoc"},
{"ad", "asciidocext"}, {"ad", "asciidoc"},
{"rst", "rst"}, {"rst", "rst"},
{"pandoc", "pandoc"}, {"pandoc", "pandoc"},
{"pdc", "pandoc"}, {"pdc", "pandoc"},

View file

@ -29,57 +29,13 @@ const (
FilenameHugoStatsJSON = "hugo_stats.json" FilenameHugoStatsJSON = "hugo_stats.json"
) )
var ( func IsGoTmplExt(ext string) bool {
// This should be the only list of valid extensions for content files. return ext == "gotmpl"
contentFileExtensions = []string{
"html", "htm",
"mdown", "markdown", "md",
"asciidoc", "adoc", "ad",
"rest", "rst",
"org",
"pandoc", "pdc",
}
contentFileExtensionsSet map[string]bool
htmlFileExtensions = []string{
"html", "htm",
}
htmlFileExtensionsSet map[string]bool
)
func init() {
contentFileExtensionsSet = make(map[string]bool)
for _, ext := range contentFileExtensions {
contentFileExtensionsSet[ext] = true
}
htmlFileExtensionsSet = make(map[string]bool)
for _, ext := range htmlFileExtensions {
htmlFileExtensionsSet[ext] = true
}
} }
func IsContentFile(filename string) bool { // Supported data file extensions for _content.* files.
return contentFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")] func IsContentDataExt(ext string) bool {
} return IsGoTmplExt(ext)
func IsIndexContentFile(filename string) bool {
if !IsContentFile(filename) {
return false
}
base := filepath.Base(filename)
return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.")
}
func IsHTML(ext string) bool {
return htmlFileExtensionsSet[ext]
}
func IsContentExt(ext string) bool {
return contentFileExtensionsSet[ext]
} }
const ( const (
@ -93,6 +49,8 @@ const (
FolderResources = "resources" FolderResources = "resources"
FolderJSConfig = "_jsconfig" // Mounted below /assets with postcss.config.js etc. FolderJSConfig = "_jsconfig" // Mounted below /assets with postcss.config.js etc.
NameContentData = "_content"
) )
var ( var (

View file

@ -14,22 +14,11 @@
package files package files
import ( import (
"path/filepath"
"testing" "testing"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
) )
func TestIsContentFile(t *testing.T) {
c := qt.New(t)
c.Assert(IsContentFile(filepath.FromSlash("my/file.md")), qt.Equals, true)
c.Assert(IsContentFile(filepath.FromSlash("my/file.ad")), qt.Equals, true)
c.Assert(IsContentFile(filepath.FromSlash("textfile.txt")), qt.Equals, false)
c.Assert(IsContentExt("md"), qt.Equals, true)
c.Assert(IsContentExt("json"), qt.Equals, false)
}
func TestComponentFolders(t *testing.T) { func TestComponentFolders(t *testing.T) {
c := qt.New(t) c := qt.New(t)

View file

@ -23,6 +23,7 @@ import (
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/media"
"github.com/spf13/afero" "github.com/spf13/afero"
) )
@ -50,7 +51,8 @@ type WalkwayConfig struct {
Root string Root string
// The logger to use. // The logger to use.
Logger loggers.Logger Logger loggers.Logger
PathParser *paths.PathParser
// One or both of these may be pre-set. // One or both of these may be pre-set.
Info FileMetaInfo // The start info. Info FileMetaInfo // The start info.
@ -72,6 +74,10 @@ func NewWalkway(cfg WalkwayConfig) *Walkway {
panic("fs must be set") panic("fs must be set")
} }
if cfg.PathParser == nil {
cfg.PathParser = media.DefaultPathParser
}
logger := cfg.Logger logger := cfg.Logger
if logger == nil { if logger == nil {
logger = loggers.NewDefault() logger = loggers.NewDefault()
@ -161,7 +167,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
dirEntries = DirEntriesToFileMetaInfos(fis) dirEntries = DirEntriesToFileMetaInfos(fis)
for _, fi := range dirEntries { for _, fi := range dirEntries {
if fi.Meta().PathInfo == nil { if fi.Meta().PathInfo == nil {
fi.Meta().PathInfo = paths.Parse("", filepath.Join(pathRel, fi.Name())) fi.Meta().PathInfo = w.cfg.PathParser.Parse("", filepath.Join(pathRel, fi.Name()))
} }
} }

View file

@ -1144,7 +1144,7 @@ Home.
enConfig := b.H.Sites[0].conf enConfig := b.H.Sites[0].conf
m, _ := enConfig.MediaTypes.Config.GetByType("text/html") m, _ := enConfig.MediaTypes.Config.GetByType("text/html")
b.Assert(m.Suffixes(), qt.DeepEquals, []string{"html"}) b.Assert(m.Suffixes(), qt.DeepEquals, []string{"html", "htm"})
svConfig := b.H.Sites[1].conf svConfig := b.H.Sites[1].conf
f, _ := svConfig.OutputFormats.Config.GetByName("html") f, _ := svConfig.OutputFormats.Config.GetByName("html")

View file

@ -14,6 +14,7 @@
package hugolib package hugolib
import ( import (
"context"
"fmt" "fmt"
"path" "path"
"path/filepath" "path/filepath"
@ -23,10 +24,13 @@ import (
"github.com/bep/logg" "github.com/bep/logg"
"github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugolib/pagesfromdata"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
@ -51,9 +55,11 @@ type contentMapConfig struct {
var _ contentNodeI = (*resourceSource)(nil) var _ contentNodeI = (*resourceSource)(nil)
type resourceSource struct { type resourceSource struct {
path *paths.Path langIndex int
opener hugio.OpenReadSeekCloser path *paths.Path
fi hugofs.FileMetaInfo opener hugio.OpenReadSeekCloser
fi hugofs.FileMetaInfo
rc *pagemeta.ResourceConfig
r resource.Resource r resource.Resource
} }
@ -64,11 +70,7 @@ func (r resourceSource) clone() *resourceSource {
} }
func (r *resourceSource) LangIndex() int { func (r *resourceSource) LangIndex() int {
if r.r != nil && r.isPage() { return r.langIndex
return r.r.(*pageState).s.languagei
}
return r.fi.Meta().LangIndex
} }
func (r *resourceSource) MarkStale() { func (r *resourceSource) MarkStale() {
@ -162,12 +164,13 @@ func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
return return
} }
func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error { func (m *pageMap) AddFi(fi hugofs.FileMetaInfo, buildConfig *BuildCfg) (pageCount uint64, resourceCount uint64, addErr error) {
if fi.IsDir() { if fi.IsDir() {
return nil return
} }
insertResource := func(fim hugofs.FileMetaInfo) error { insertResource := func(fim hugofs.FileMetaInfo) error {
resourceCount++
pi := fi.Meta().PathInfo pi := fi.Meta().PathInfo
key := pi.Base() key := pi.Base()
tree := m.treeResources tree := m.treeResources
@ -199,9 +202,9 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
} }
key = pi.Base() key = pi.Base()
rs = &resourceSource{r: pageResource} rs = &resourceSource{r: pageResource, langIndex: pageResource.s.languagei}
} else { } else {
rs = &resourceSource{path: pi, opener: r, fi: fim} rs = &resourceSource{path: pi, opener: r, fi: fim, langIndex: fim.Meta().LangIndex}
} }
tree.InsertIntoValuesDimension(key, rs) tree.InsertIntoValuesDimension(key, rs)
@ -220,14 +223,27 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
}, },
)) ))
if err := insertResource(fi); err != nil { if err := insertResource(fi); err != nil {
return err addErr = err
return
} }
case paths.PathTypeContentData:
pc, rc, err := m.addPagesFromGoTmplFi(fi, buildConfig)
pageCount += pc
resourceCount += rc
if err != nil {
addErr = err
return
}
default: default:
m.s.Log.Trace(logg.StringFunc( m.s.Log.Trace(logg.StringFunc(
func() string { func() string {
return fmt.Sprintf("insert bundle: %q", fi.Meta().Filename) return fmt.Sprintf("insert bundle: %q", fi.Meta().Filename)
}, },
)) ))
pageCount++
// A content file. // A content file.
p, pi, err := m.s.h.newPage( p, pi, err := m.s.h.newPage(
&pageMeta{ &pageMeta{
@ -237,17 +253,164 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
}, },
) )
if err != nil { if err != nil {
return err addErr = err
return
} }
if p == nil { if p == nil {
// Disabled page. // Disabled page.
return nil return
} }
m.treePages.InsertWithLock(pi.Base(), p) m.treePages.InsertIntoValuesDimensionWithLock(pi.Base(), p)
} }
return nil return
}
func (m *pageMap) addPagesFromGoTmplFi(fi hugofs.FileMetaInfo, buildConfig *BuildCfg) (pageCount uint64, resourceCount uint64, addErr error) {
meta := fi.Meta()
pi := meta.PathInfo
m.s.Log.Trace(logg.StringFunc(
func() string {
return fmt.Sprintf("insert pages from data file: %q", fi.Meta().Filename)
},
))
if !files.IsGoTmplExt(pi.Ext()) {
addErr = fmt.Errorf("unsupported data file extension %q", pi.Ext())
return
}
s := m.s.h.resolveSite(fi.Meta().Lang)
f := source.NewFileInfo(fi)
h := s.h
// Make sure the layouts are initialized.
if _, err := h.init.layouts.Do(context.Background()); err != nil {
addErr = err
return
}
contentAdapter := s.pageMap.treePagesFromTemplateAdapters.Get(pi.Base())
var rebuild bool
if contentAdapter != nil {
// Rebuild
contentAdapter = contentAdapter.CloneForGoTmpl(fi)
rebuild = true
} else {
contentAdapter = pagesfromdata.NewPagesFromTemplate(
pagesfromdata.PagesFromTemplateOptions{
GoTmplFi: fi,
Site: s,
DepsFromSite: func(s page.Site) pagesfromdata.PagesFromTemplateDeps {
ss := s.(*Site)
return pagesfromdata.PagesFromTemplateDeps{
TmplFinder: ss.TextTmpl(),
TmplExec: ss.Tmpl(),
}
},
DependencyManager: s.Conf.NewIdentityManager("pagesfromdata"),
Watching: s.Conf.Watching(),
HandlePage: func(pt *pagesfromdata.PagesFromTemplate, pc *pagemeta.PageConfig) error {
s := pt.Site.(*Site)
if err := pc.Compile(pt.GoTmplFi.Meta().PathInfo.Base(), true, "", s.Log, s.conf.MediaTypes.Config); err != nil {
return err
}
ps, pi, err := h.newPage(
&pageMeta{
f: f,
s: s,
pageMetaParams: &pageMetaParams{
pageConfig: pc,
},
},
)
if err != nil {
return err
}
if ps == nil {
// Disabled page.
return nil
}
u, n, replaced := s.pageMap.treePages.InsertIntoValuesDimensionWithLock(pi.Base(), ps)
if h.isRebuild() {
if replaced {
pt.AddChange(n.GetIdentity())
} else {
pt.AddChange(u.GetIdentity())
}
}
return nil
},
HandleResource: func(pt *pagesfromdata.PagesFromTemplate, rc *pagemeta.ResourceConfig) error {
s := pt.Site.(*Site)
if err := rc.Compile(
pt.GoTmplFi.Meta().PathInfo.Base(),
s.Conf.PathParser(),
s.conf.MediaTypes.Config,
); err != nil {
return err
}
rs := &resourceSource{path: rc.PathInfo, rc: rc, opener: nil, fi: nil, langIndex: s.languagei}
_, n, replaced := s.pageMap.treeResources.InsertIntoValuesDimensionWithLock(rc.PathInfo.Base(), rs)
if h.isRebuild() && replaced {
pt.AddChange(n.GetIdentity())
}
return nil
},
},
)
s.pageMap.treePagesFromTemplateAdapters.Insert(pi.Base(), contentAdapter)
}
handleBuildInfo := func(s *Site, bi pagesfromdata.BuildInfo) {
resourceCount += bi.NumResourcesAdded
pageCount += bi.NumPagesAdded
s.handleContentAdapterChanges(bi, buildConfig)
}
bi, err := contentAdapter.Execute(context.Background())
if err != nil {
addErr = err
return
}
handleBuildInfo(s, bi)
if !rebuild && bi.EnableAllLanguages {
// Clone and insert the adapter for the other sites.
for _, ss := range s.h.Sites {
if s == ss {
continue
}
clone := contentAdapter.CloneForSite(ss)
// Make sure it gets executed for the first time.
bi, err := clone.Execute(context.Background())
if err != nil {
addErr = err
return
}
handleBuildInfo(ss, bi)
// Insert into the correct language tree so it get rebuilt on changes.
ss.pageMap.treePagesFromTemplateAdapters.Insert(pi.Base(), clone)
}
}
return
} }
// The home page is represented with the zero string. // The home page is represented with the zero string.

View file

@ -34,7 +34,9 @@ import (
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/hugolib/doctree"
"github.com/gohugoio/hugo/hugolib/pagesfromdata"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources" "github.com/gohugoio/hugo/resources"
"github.com/spf13/cast" "github.com/spf13/cast"
@ -100,6 +102,8 @@ type pageMap struct {
cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]] cacheContentPlain *dynacache.Partition[string, *resources.StaleValue[contentPlainPlainWords]]
contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]] contentTableOfContents *dynacache.Partition[string, *resources.StaleValue[contentTableOfContents]]
contentDataFileSeenItems *maps.Cache[string, map[uint64]bool]
cfg contentMapConfig cfg contentMapConfig
} }
@ -122,6 +126,10 @@ type pageTrees struct {
// This tree contains all taxonomy entries, e.g "/tags/blue/page1" // This tree contains all taxonomy entries, e.g "/tags/blue/page1"
treeTaxonomyEntries *doctree.TreeShiftTree[*weightedContentNode] treeTaxonomyEntries *doctree.TreeShiftTree[*weightedContentNode]
// Stores the state for _content.gotmpl files.
// Mostly releveant for rebuilds.
treePagesFromTemplateAdapters *doctree.TreeShiftTree[*pagesfromdata.PagesFromTemplate]
// A slice of the resource trees. // A slice of the resource trees.
resourceTrees doctree.MutableTrees resourceTrees doctree.MutableTrees
} }
@ -222,6 +230,7 @@ func (t pageTrees) Shape(d, v int) *pageTrees {
t.treePages = t.treePages.Shape(d, v) t.treePages = t.treePages.Shape(d, v)
t.treeResources = t.treeResources.Shape(d, v) t.treeResources = t.treeResources.Shape(d, v)
t.treeTaxonomyEntries = t.treeTaxonomyEntries.Shape(d, v) t.treeTaxonomyEntries = t.treeTaxonomyEntries.Shape(d, v)
t.treePagesFromTemplateAdapters = t.treePagesFromTemplateAdapters.Shape(d, v)
t.createMutableTrees() t.createMutableTrees()
return &t return &t
@ -587,9 +596,9 @@ func (m *pageMap) getOrCreateResourcesForPage(ps *pageState) resource.Resources
sort.SliceStable(res, lessFunc) sort.SliceStable(res, lessFunc)
if len(ps.m.pageConfig.Resources) > 0 { if len(ps.m.pageConfig.ResourcesMeta) > 0 {
for i, r := range res { for i, r := range res {
res[i] = resources.CloneWithMetadataIfNeeded(ps.m.pageConfig.Resources, r) res[i] = resources.CloneWithMetadataFromMapIfNeeded(ps.m.pageConfig.ResourcesMeta, r)
} }
sort.SliceStable(res, lessFunc) sort.SliceStable(res, lessFunc)
} }
@ -667,12 +676,13 @@ type contentNodeShifter struct {
numLanguages int numLanguages int
} }
func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension) (bool, bool) { func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension) (contentNodeI, bool, bool) {
lidx := dimension[0] lidx := dimension[0]
switch v := n.(type) { switch v := n.(type) {
case contentNodeIs: case contentNodeIs:
resource.MarkStale(v[lidx]) deleted := v[lidx]
wasDeleted := v[lidx] != nil resource.MarkStale(deleted)
wasDeleted := deleted != nil
v[lidx] = nil v[lidx] = nil
isEmpty := true isEmpty := true
for _, vv := range v { for _, vv := range v {
@ -681,10 +691,11 @@ func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension)
break break
} }
} }
return wasDeleted, isEmpty return deleted, wasDeleted, isEmpty
case resourceSources: case resourceSources:
resource.MarkStale(v[lidx]) deleted := v[lidx]
wasDeleted := v[lidx] != nil resource.MarkStale(deleted)
wasDeleted := deleted != nil
v[lidx] = nil v[lidx] = nil
isEmpty := true isEmpty := true
for _, vv := range v { for _, vv := range v {
@ -693,19 +704,19 @@ func (s *contentNodeShifter) Delete(n contentNodeI, dimension doctree.Dimension)
break break
} }
} }
return wasDeleted, isEmpty return deleted, wasDeleted, isEmpty
case *resourceSource: case *resourceSource:
if lidx != v.LangIndex() { if lidx != v.LangIndex() {
return false, false return nil, false, false
} }
resource.MarkStale(v) resource.MarkStale(v)
return true, true return v, true, true
case *pageState: case *pageState:
if lidx != v.s.languagei { if lidx != v.s.languagei {
return false, false return nil, false, false
} }
resource.MarkStale(v) resource.MarkStale(v)
return true, true return v, true, true
default: default:
panic(fmt.Sprintf("unknown type %T", n)) panic(fmt.Sprintf("unknown type %T", n))
} }
@ -778,7 +789,7 @@ func (s *contentNodeShifter) ForEeachInDimension(n contentNodeI, d int, f func(c
} }
} }
func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree.Dimension) contentNodeI { func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree.Dimension) (contentNodeI, contentNodeI, bool) {
langi := dimension[doctree.DimensionLanguage.Index()] langi := dimension[doctree.DimensionLanguage.Index()]
switch vv := old.(type) { switch vv := old.(type) {
case *pageState: case *pageState:
@ -787,37 +798,39 @@ func (s *contentNodeShifter) InsertInto(old, new contentNodeI, dimension doctree
panic(fmt.Sprintf("unknown type %T", new)) panic(fmt.Sprintf("unknown type %T", new))
} }
if vv.s.languagei == newp.s.languagei && newp.s.languagei == langi { if vv.s.languagei == newp.s.languagei && newp.s.languagei == langi {
return new return new, vv, true
} }
is := make(contentNodeIs, s.numLanguages) is := make(contentNodeIs, s.numLanguages)
is[vv.s.languagei] = old is[vv.s.languagei] = old
is[langi] = new is[langi] = new
return is return is, old, false
case contentNodeIs: case contentNodeIs:
oldv := vv[langi]
vv[langi] = new vv[langi] = new
return vv return vv, oldv, oldv != nil
case resourceSources: case resourceSources:
oldv := vv[langi]
vv[langi] = new.(*resourceSource) vv[langi] = new.(*resourceSource)
return vv return vv, oldv, oldv != nil
case *resourceSource: case *resourceSource:
newp, ok := new.(*resourceSource) newp, ok := new.(*resourceSource)
if !ok { if !ok {
panic(fmt.Sprintf("unknown type %T", new)) panic(fmt.Sprintf("unknown type %T", new))
} }
if vv.LangIndex() == newp.LangIndex() && newp.LangIndex() == langi { if vv.LangIndex() == newp.LangIndex() && newp.LangIndex() == langi {
return new return new, vv, true
} }
rs := make(resourceSources, s.numLanguages) rs := make(resourceSources, s.numLanguages)
rs[vv.LangIndex()] = vv rs[vv.LangIndex()] = vv
rs[langi] = newp rs[langi] = newp
return rs return rs, vv, false
default: default:
panic(fmt.Sprintf("unknown type %T", old)) panic(fmt.Sprintf("unknown type %T", old))
} }
} }
func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI { func (s *contentNodeShifter) Insert(old, new contentNodeI) (contentNodeI, contentNodeI, bool) {
switch vv := old.(type) { switch vv := old.(type) {
case *pageState: case *pageState:
newp, ok := new.(*pageState) newp, ok := new.(*pageState)
@ -828,12 +841,12 @@ func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI {
if newp != old { if newp != old {
resource.MarkStale(old) resource.MarkStale(old)
} }
return new return new, vv, true
} }
is := make(contentNodeIs, s.numLanguages) is := make(contentNodeIs, s.numLanguages)
is[newp.s.languagei] = new is[newp.s.languagei] = new
is[vv.s.languagei] = old is[vv.s.languagei] = old
return is return is, old, false
case contentNodeIs: case contentNodeIs:
newp, ok := new.(*pageState) newp, ok := new.(*pageState)
if !ok { if !ok {
@ -844,7 +857,7 @@ func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI {
resource.MarkStale(oldp) resource.MarkStale(oldp)
} }
vv[newp.s.languagei] = new vv[newp.s.languagei] = new
return vv return vv, oldp, oldp != nil
case *resourceSource: case *resourceSource:
newp, ok := new.(*resourceSource) newp, ok := new.(*resourceSource)
if !ok { if !ok {
@ -854,12 +867,12 @@ func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI {
if vv != newp { if vv != newp {
resource.MarkStale(vv) resource.MarkStale(vv)
} }
return new return new, vv, true
} }
rs := make(resourceSources, s.numLanguages) rs := make(resourceSources, s.numLanguages)
rs[newp.LangIndex()] = newp rs[newp.LangIndex()] = newp
rs[vv.LangIndex()] = vv rs[vv.LangIndex()] = vv
return rs return rs, vv, false
case resourceSources: case resourceSources:
newp, ok := new.(*resourceSource) newp, ok := new.(*resourceSource)
if !ok { if !ok {
@ -870,7 +883,7 @@ func (s *contentNodeShifter) Insert(old, new contentNodeI) contentNodeI {
resource.MarkStale(oldp) resource.MarkStale(oldp)
} }
vv[newp.LangIndex()] = newp vv[newp.LangIndex()] = newp
return vv return vv, oldp, oldp != nil
default: default:
panic(fmt.Sprintf("unknown type %T", old)) panic(fmt.Sprintf("unknown type %T", old))
} }
@ -890,6 +903,8 @@ func newPageMap(i int, s *Site, mcache *dynacache.Cache, pageTrees *pageTrees) *
cacheContentPlain: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentPlainPlainWords]](mcache, fmt.Sprintf("/cont/pla/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), cacheContentPlain: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentPlainPlainWords]](mcache, fmt.Sprintf("/cont/pla/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}),
contentTableOfContents: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentTableOfContents]](mcache, fmt.Sprintf("/cont/toc/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}), contentTableOfContents: dynacache.GetOrCreatePartition[string, *resources.StaleValue[contentTableOfContents]](mcache, fmt.Sprintf("/cont/toc/%d", i), dynacache.OptionsPartition{Weight: 70, ClearWhen: dynacache.ClearOnChange}),
contentDataFileSeenItems: maps.NewCache[string, map[uint64]bool](),
cfg: contentMapConfig{ cfg: contentMapConfig{
lang: s.Lang(), lang: s.Lang(),
taxonomyConfig: taxonomiesConfig.Values(), taxonomyConfig: taxonomiesConfig.Values(),
@ -960,8 +975,6 @@ type contentTreeReverseIndexMap struct {
type sitePagesAssembler struct { type sitePagesAssembler struct {
*Site *Site
watching bool
incomingChanges *whatChanged
assembleChanges *whatChanged assembleChanges *whatChanged
ctx context.Context ctx context.Context
} }
@ -1080,6 +1093,7 @@ func (h *HugoSites) resolveAndClearStateForIdentities(
// 1. Handle the cache busters first, as those may produce identities for the page reset step. // 1. Handle the cache busters first, as those may produce identities for the page reset step.
// 2. Then reset the page outputs, which may mark some resources as stale. // 2. Then reset the page outputs, which may mark some resources as stale.
// 3. Then GC the cache. // 3. Then GC the cache.
// TOOD1
if cachebuster != nil { if cachebuster != nil {
if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) {
ll := l.WithField("substep", "gc dynacache cachebuster") ll := l.WithField("substep", "gc dynacache cachebuster")
@ -1125,6 +1139,33 @@ func (h *HugoSites) resolveAndClearStateForIdentities(
} }
changes = changes[:n] changes = changes[:n]
if h.pageTrees.treePagesFromTemplateAdapters.LenRaw() > 0 {
if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) {
ll := l.WithField("substep", "resolve content adapter change set").WithField("changes", len(changes))
checkedCount := 0
matchCount := 0
depsFinder := identity.NewFinder(identity.FinderConfig{})
h.pageTrees.treePagesFromTemplateAdapters.WalkPrefixRaw(doctree.LockTypeRead, "",
func(s string, n *pagesfromdata.PagesFromTemplate) (bool, error) {
for _, id := range changes {
checkedCount++
if r := depsFinder.Contains(id, n.DependencyManager, 2); r > identity.FinderNotFound {
n.MarkStale()
matchCount++
break
}
}
return false, nil
})
ll = ll.WithField("checked", checkedCount).WithField("matches", matchCount)
return ll, nil
}); err != nil {
return err
}
}
if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) { if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) {
// changesLeft: The IDs that the pages is dependent on. // changesLeft: The IDs that the pages is dependent on.
// changesRight: The IDs that the pages depend on. // changesRight: The IDs that the pages depend on.
@ -1269,6 +1310,7 @@ func (sa *sitePagesAssembler) applyAggregates() error {
rw := pw.Extend() rw := pw.Extend()
rw.Tree = sa.pageMap.treeResources rw.Tree = sa.pageMap.treeResources
sa.lastmod = time.Time{} sa.lastmod = time.Time{}
rebuild := sa.s.h.isRebuild()
pw.Handle = func(keyPage string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { pw.Handle = func(keyPage string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
pageBundle := n.(*pageState) pageBundle := n.(*pageState)
@ -1289,7 +1331,7 @@ func (sa *sitePagesAssembler) applyAggregates() error {
// Home page gets it's cascade from the site config. // Home page gets it's cascade from the site config.
cascade = sa.conf.Cascade.Config cascade = sa.conf.Cascade.Config
if pageBundle.m.pageConfig.Cascade == nil { if pageBundle.m.pageConfig.CascadeCompiled == nil {
// Pass the site cascade downwards. // Pass the site cascade downwards.
pw.WalkContext.Data().Insert(keyPage, cascade) pw.WalkContext.Data().Insert(keyPage, cascade)
} }
@ -1300,18 +1342,20 @@ func (sa *sitePagesAssembler) applyAggregates() error {
} }
} }
if (pageBundle.IsHome() || pageBundle.IsSection()) && pageBundle.m.setMetaPostCount > 0 { if rebuild {
oldDates := pageBundle.m.pageConfig.Dates if (pageBundle.IsHome() || pageBundle.IsSection()) && pageBundle.m.setMetaPostCount > 0 {
oldDates := pageBundle.m.pageConfig.Dates
// We need to wait until after the walk to determine if any of the dates have changed. // We need to wait until after the walk to determine if any of the dates have changed.
pw.WalkContext.AddPostHook( pw.WalkContext.AddPostHook(
func() error { func() error {
if oldDates != pageBundle.m.pageConfig.Dates { if oldDates != pageBundle.m.pageConfig.Dates {
sa.assembleChanges.Add(pageBundle) sa.assembleChanges.Add(pageBundle)
} }
return nil return nil
}, },
) )
}
} }
// Combine the cascade map with front matter. // Combine the cascade map with front matter.
@ -1321,15 +1365,15 @@ func (sa *sitePagesAssembler) applyAggregates() error {
// We receive cascade values from above. If this leads to a change compared // We receive cascade values from above. If this leads to a change compared
// to the previous value, we need to mark the page and its dependencies as changed. // to the previous value, we need to mark the page and its dependencies as changed.
if pageBundle.m.setMetaPostCascadeChanged { if rebuild && pageBundle.m.setMetaPostCascadeChanged {
sa.assembleChanges.Add(pageBundle) sa.assembleChanges.Add(pageBundle)
} }
const eventName = "dates" const eventName = "dates"
if n.isContentNodeBranch() { if n.isContentNodeBranch() {
if pageBundle.m.pageConfig.Cascade != nil { if pageBundle.m.pageConfig.CascadeCompiled != nil {
// Pass it down. // Pass it down.
pw.WalkContext.Data().Insert(keyPage, pageBundle.m.pageConfig.Cascade) pw.WalkContext.Data().Insert(keyPage, pageBundle.m.pageConfig.CascadeCompiled)
} }
wasZeroDates := pageBundle.m.pageConfig.Dates.IsAllDatesZero() wasZeroDates := pageBundle.m.pageConfig.Dates.IsAllDatesZero()
@ -1430,9 +1474,9 @@ func (sa *sitePagesAssembler) applyAggregatesToTaxonomiesAndTerms() error {
p := n.(*pageState) p := n.(*pageState)
if p.Kind() != kinds.KindTerm { if p.Kind() != kinds.KindTerm {
// The other kinds were handled in applyAggregates. // The other kinds were handled in applyAggregates.
if p.m.pageConfig.Cascade != nil { if p.m.pageConfig.CascadeCompiled != nil {
// Pass it down. // Pass it down.
pw.WalkContext.Data().Insert(s, p.m.pageConfig.Cascade) pw.WalkContext.Data().Insert(s, p.m.pageConfig.CascadeCompiled)
} }
} }
@ -1553,7 +1597,7 @@ func (sa *sitePagesAssembler) assembleTermsAndTranslations() error {
singular: viewName.singular, singular: viewName.singular,
s: sa.Site, s: sa.Site,
pathInfo: pi, pathInfo: pi,
pageMetaParams: pageMetaParams{ pageMetaParams: &pageMetaParams{
pageConfig: &pagemeta.PageConfig{ pageConfig: &pagemeta.PageConfig{
Kind: kinds.KindTerm, Kind: kinds.KindTerm,
}, },
@ -1615,13 +1659,13 @@ func (sa *sitePagesAssembler) assembleResources() error {
targetPaths := ps.targetPaths() targetPaths := ps.targetPaths()
baseTarget := targetPaths.SubResourceBaseTarget baseTarget := targetPaths.SubResourceBaseTarget
duplicateResourceFiles := true duplicateResourceFiles := true
if ps.m.pageConfig.IsGoldmark { if ps.m.pageConfig.ContentMediaType.IsMarkdown() {
duplicateResourceFiles = ps.s.ContentSpec.Converters.GetMarkupConfig().Goldmark.DuplicateResourceFiles duplicateResourceFiles = ps.s.ContentSpec.Converters.GetMarkupConfig().Goldmark.DuplicateResourceFiles
} }
duplicateResourceFiles = duplicateResourceFiles || ps.s.Conf.IsMultihost() duplicateResourceFiles = duplicateResourceFiles || ps.s.Conf.IsMultihost()
sa.pageMap.forEachResourceInPage( err := sa.pageMap.forEachResourceInPage(
ps, lockType, ps, lockType,
!duplicateResourceFiles, !duplicateResourceFiles,
func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) { func(resourceKey string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
@ -1647,6 +1691,23 @@ func (sa *sitePagesAssembler) assembleResources() error {
} }
if rs.rc != nil && rs.rc.Content.IsResourceValue() {
if rs.rc.Name == "" {
rs.rc.Name = relPathOriginal
}
r, err := ps.m.s.ResourceSpec.NewResourceWrapperFromResourceConfig(rs.rc)
if err != nil {
return false, err
}
rs.r = r
return false, nil
}
var mt media.Type
if rs.rc != nil {
mt = rs.rc.ContentMediaType
}
rd := resources.ResourceSourceDescriptor{ rd := resources.ResourceSourceDescriptor{
OpenReadSeekCloser: rs.opener, OpenReadSeekCloser: rs.opener,
Path: rs.path, Path: rs.path,
@ -1657,8 +1718,23 @@ func (sa *sitePagesAssembler) assembleResources() error {
BasePathTargetPath: baseTarget, BasePathTargetPath: baseTarget,
NameNormalized: relPath, NameNormalized: relPath,
NameOriginal: relPathOriginal, NameOriginal: relPathOriginal,
MediaType: mt,
LazyPublish: !ps.m.pageConfig.Build.PublishResources, LazyPublish: !ps.m.pageConfig.Build.PublishResources,
} }
if rs.rc != nil {
rc := rs.rc
rd.OpenReadSeekCloser = rc.Content.ValueAsOpenReadSeekCloser()
if rc.Name != "" {
rd.NameNormalized = rc.Name
rd.NameOriginal = rc.Name
}
if rc.Title != "" {
rd.Title = rc.Title
}
rd.Params = rc.Params
}
r, err := ps.m.s.ResourceSpec.NewResource(rd) r, err := ps.m.s.ResourceSpec.NewResource(rd)
if err != nil { if err != nil {
return false, err return false, err
@ -1668,7 +1744,7 @@ func (sa *sitePagesAssembler) assembleResources() error {
}, },
) )
return false, nil return false, err
}, },
} }
@ -1775,7 +1851,7 @@ func (sa *sitePagesAssembler) addStandalonePages() error {
m := &pageMeta{ m := &pageMeta{
s: s, s: s,
pathInfo: s.Conf.PathParser().Parse(files.ComponentFolderContent, key+f.MediaType.FirstSuffix.FullSuffix), pathInfo: s.Conf.PathParser().Parse(files.ComponentFolderContent, key+f.MediaType.FirstSuffix.FullSuffix),
pageMetaParams: pageMetaParams{ pageMetaParams: &pageMetaParams{
pageConfig: &pagemeta.PageConfig{ pageConfig: &pagemeta.PageConfig{
Kind: kind, Kind: kind,
}, },
@ -1893,7 +1969,7 @@ func (sa *sitePagesAssembler) addMissingRootSections() error {
m := &pageMeta{ m := &pageMeta{
s: sa.Site, s: sa.Site,
pathInfo: p, pathInfo: p,
pageMetaParams: pageMetaParams{ pageMetaParams: &pageMetaParams{
pageConfig: &pagemeta.PageConfig{ pageConfig: &pagemeta.PageConfig{
Kind: kinds.KindHome, Kind: kinds.KindHome,
}, },
@ -1903,7 +1979,7 @@ func (sa *sitePagesAssembler) addMissingRootSections() error {
if err != nil { if err != nil {
return err return err
} }
w.Tree.InsertWithLock(p.Base(), n) w.Tree.InsertIntoValuesDimensionWithLock(p.Base(), n)
sa.home = n sa.home = n
} }
@ -1926,7 +2002,7 @@ func (sa *sitePagesAssembler) addMissingTaxonomies() error {
m := &pageMeta{ m := &pageMeta{
s: sa.Site, s: sa.Site,
pathInfo: sa.Conf.PathParser().Parse(files.ComponentFolderContent, key+"/_index.md"), pathInfo: sa.Conf.PathParser().Parse(files.ComponentFolderContent, key+"/_index.md"),
pageMetaParams: pageMetaParams{ pageMetaParams: &pageMetaParams{
pageConfig: &pagemeta.PageConfig{ pageConfig: &pagemeta.PageConfig{
Kind: kinds.KindTaxonomy, Kind: kinds.KindTaxonomy,
}, },

View file

@ -173,7 +173,7 @@ func TestTreeInsert(t *testing.T) {
c.Assert(tree.Get("/notfound"), qt.IsNil) c.Assert(tree.Get("/notfound"), qt.IsNil)
ab2 := &testValue{ID: "/a/b", Lang: 0} ab2 := &testValue{ID: "/a/b", Lang: 0}
v, ok := tree.InsertIntoValuesDimension("/a/b", ab2) v, _, ok := tree.InsertIntoValuesDimension("/a/b", ab2)
c.Assert(ok, qt.IsTrue) c.Assert(ok, qt.IsTrue)
c.Assert(v, qt.DeepEquals, ab2) c.Assert(v, qt.DeepEquals, ab2)
@ -239,16 +239,16 @@ func (s *testShifter) ForEeachInDimension(n *testValue, d int, f func(n *testVal
f(n) f(n)
} }
func (s *testShifter) Insert(old, new *testValue) *testValue { func (s *testShifter) Insert(old, new *testValue) (*testValue, *testValue, bool) {
return new return new, old, true
} }
func (s *testShifter) InsertInto(old, new *testValue, dimension doctree.Dimension) *testValue { func (s *testShifter) InsertInto(old, new *testValue, dimension doctree.Dimension) (*testValue, *testValue, bool) {
return new return new, old, true
} }
func (s *testShifter) Delete(n *testValue, dimension doctree.Dimension) (bool, bool) { func (s *testShifter) Delete(n *testValue, dimension doctree.Dimension) (*testValue, bool, bool) {
return true, true return nil, true, true
} }
func (s *testShifter) Shift(n *testValue, dimension doctree.Dimension, exact bool) (*testValue, bool, doctree.DimensionFlag) { func (s *testShifter) Shift(n *testValue, dimension doctree.Dimension, exact bool) (*testValue, bool, doctree.DimensionFlag) {

View file

@ -38,16 +38,18 @@ type (
// Insert inserts new into the tree into the dimension it provides. // Insert inserts new into the tree into the dimension it provides.
// It may replace old. // It may replace old.
// It returns a T (can be the same as old). // It returns the updated and existing T
Insert(old, new T) T // and a bool indicating if an existing record is updated.
Insert(old, new T) (T, T, bool)
// Insert inserts new into the given dimension. // Insert inserts new into the given dimension.
// It may replace old. // It may replace old.
// It returns a T (can be the same as old). // It returns the updated and existing T
InsertInto(old, new T, dimension Dimension) T // and a bool indicating if an existing record is updated.
InsertInto(old, new T, dimension Dimension) (T, T, bool)
// Delete deletes T from the given dimension and returns whether the dimension was deleted and if it's empty after the delete. // Delete deletes T from the given dimension and returns the deleted T and whether the dimension was deleted and if it's empty after the delete.
Delete(v T, dimension Dimension) (bool, bool) Delete(v T, dimension Dimension) (T, bool, bool)
// Shift shifts T into the given dimension // Shift shifts T into the given dimension
// and returns the shifted T and a bool indicating if the shift was successful and // and returns the shifted T and a bool indicating if the shift was successful and
@ -81,7 +83,11 @@ func New[T any](cfg Config[T]) *NodeShiftTree[T] {
} }
} }
func (r *NodeShiftTree[T]) Delete(key string) { func (r *NodeShiftTree[T]) Delete(key string) (T, bool) {
return r.delete(key)
}
func (r *NodeShiftTree[T]) DeleteRaw(key string) {
r.delete(key) r.delete(key)
} }
@ -103,23 +109,24 @@ func (r *NodeShiftTree[T]) DeletePrefix(prefix string) int {
return false return false
}) })
for _, key := range keys { for _, key := range keys {
if ok := r.delete(key); ok { if _, ok := r.delete(key); ok {
count++ count++
} }
} }
return count return count
} }
func (r *NodeShiftTree[T]) delete(key string) bool { func (r *NodeShiftTree[T]) delete(key string) (T, bool) {
var wasDeleted bool var wasDeleted bool
var deleted T
if v, ok := r.tree.Get(key); ok { if v, ok := r.tree.Get(key); ok {
var isEmpty bool var isEmpty bool
wasDeleted, isEmpty = r.shifter.Delete(v.(T), r.dims) deleted, wasDeleted, isEmpty = r.shifter.Delete(v.(T), r.dims)
if isEmpty { if isEmpty {
r.tree.Delete(key) r.tree.Delete(key)
} }
} }
return wasDeleted return deleted, wasDeleted
} }
func (t *NodeShiftTree[T]) DeletePrefixAll(prefix string) int { func (t *NodeShiftTree[T]) DeletePrefixAll(prefix string) int {
@ -141,22 +148,33 @@ func (t *NodeShiftTree[T]) Increment(d int) *NodeShiftTree[T] {
return t.Shape(d, t.dims[d]+1) return t.Shape(d, t.dims[d]+1)
} }
func (r *NodeShiftTree[T]) InsertIntoCurrentDimension(s string, v T) (T, bool) { func (r *NodeShiftTree[T]) InsertIntoCurrentDimension(s string, v T) (T, T, bool) {
s = mustValidateKey(cleanKey(s)) s = mustValidateKey(cleanKey(s))
var (
updated bool
existing T
)
if vv, ok := r.tree.Get(s); ok { if vv, ok := r.tree.Get(s); ok {
v = r.shifter.InsertInto(vv.(T), v, r.dims) v, existing, updated = r.shifter.InsertInto(vv.(T), v, r.dims)
} }
r.tree.Insert(s, v) r.tree.Insert(s, v)
return v, true return v, existing, updated
} }
func (r *NodeShiftTree[T]) InsertIntoValuesDimension(s string, v T) (T, bool) { // InsertIntoValuesDimension inserts v into the tree at the given key and the
// dimension defined by the value.
// It returns the updated and existing T and a bool indicating if an existing record is updated.
func (r *NodeShiftTree[T]) InsertIntoValuesDimension(s string, v T) (T, T, bool) {
s = mustValidateKey(cleanKey(s)) s = mustValidateKey(cleanKey(s))
var (
updated bool
existing T
)
if vv, ok := r.tree.Get(s); ok { if vv, ok := r.tree.Get(s); ok {
v = r.shifter.Insert(vv.(T), v) v, existing, updated = r.shifter.Insert(vv.(T), v)
} }
r.tree.Insert(s, v) r.tree.Insert(s, v)
return v, true return v, existing, updated
} }
func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) { func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) {
@ -165,7 +183,8 @@ func (r *NodeShiftTree[T]) InsertRawWithLock(s string, v any) (any, bool) {
return r.tree.Insert(s, v) return r.tree.Insert(s, v)
} }
func (r *NodeShiftTree[T]) InsertWithLock(s string, v T) (T, bool) { // It returns the updated and existing T and a bool indicating if an existing record is updated.
func (r *NodeShiftTree[T]) InsertIntoValuesDimensionWithLock(s string, v T) (T, T, bool) {
r.mu.Lock() r.mu.Lock()
defer r.mu.Unlock() defer r.mu.Unlock()
return r.InsertIntoValuesDimension(s, v) return r.InsertIntoValuesDimension(s, v)

View file

@ -28,12 +28,12 @@ type Tree[T any] interface {
} }
// NewSimpleTree creates a new SimpleTree. // NewSimpleTree creates a new SimpleTree.
func NewSimpleTree[T any]() *SimpleTree[T] { func NewSimpleTree[T comparable]() *SimpleTree[T] {
return &SimpleTree[T]{tree: radix.New()} return &SimpleTree[T]{tree: radix.New()}
} }
// SimpleTree is a thread safe radix tree that holds T. // SimpleTree is a thread safe radix tree that holds T.
type SimpleTree[T any] struct { type SimpleTree[T comparable] struct {
mu sync.RWMutex mu sync.RWMutex
tree *radix.Tree tree *radix.Tree
zero T zero T
@ -67,16 +67,23 @@ func (tree *SimpleTree[T]) Insert(s string, v T) T {
return v return v
} }
func (tree *SimpleTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error { func (tree *SimpleTree[T]) Lock(lockType LockType) func() {
switch lockType { switch lockType {
case LockTypeNone: case LockTypeNone:
return func() {}
case LockTypeRead: case LockTypeRead:
tree.mu.RLock() tree.mu.RLock()
defer tree.mu.RUnlock() return tree.mu.RUnlock
case LockTypeWrite: case LockTypeWrite:
tree.mu.Lock() tree.mu.Lock()
defer tree.mu.Unlock() return tree.mu.Unlock
} }
return func() {}
}
func (tree *SimpleTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error {
commit := tree.Lock(lockType)
defer commit()
var err error var err error
tree.tree.WalkPrefix(s, func(s string, v any) bool { tree.tree.WalkPrefix(s, func(s string, v any) bool {
var b bool var b bool

View file

@ -113,7 +113,7 @@ type LockType int
// MutableTree is a tree that can be modified. // MutableTree is a tree that can be modified.
type MutableTree interface { type MutableTree interface {
Delete(key string) DeleteRaw(key string)
DeleteAll(key string) DeleteAll(key string)
DeletePrefix(prefix string) int DeletePrefix(prefix string) int
DeletePrefixAll(prefix string) int DeletePrefixAll(prefix string) int
@ -140,9 +140,9 @@ var _ MutableTree = MutableTrees(nil)
type MutableTrees []MutableTree type MutableTrees []MutableTree
func (t MutableTrees) Delete(key string) { func (t MutableTrees) DeleteRaw(key string) {
for _, tree := range t { for _, tree := range t {
tree.Delete(key) tree.DeleteRaw(key)
} }
} }

View file

@ -15,18 +15,21 @@ package doctree
var _ Tree[string] = (*TreeShiftTree[string])(nil) var _ Tree[string] = (*TreeShiftTree[string])(nil)
type TreeShiftTree[T any] struct { type TreeShiftTree[T comparable] struct {
// This tree is shiftable in one dimension. // This tree is shiftable in one dimension.
d int d int
// The value of the current dimension. // The value of the current dimension.
v int v int
// The zero value of T.
zero T
// Will be of length equal to the length of the dimension. // Will be of length equal to the length of the dimension.
trees []*SimpleTree[T] trees []*SimpleTree[T]
} }
func NewTreeShiftTree[T any](d, length int) *TreeShiftTree[T] { func NewTreeShiftTree[T comparable](d, length int) *TreeShiftTree[T] {
if length <= 0 { if length <= 0 {
panic("length must be > 0") panic("length must be > 0")
} }
@ -52,6 +55,17 @@ func (t *TreeShiftTree[T]) Get(s string) T {
return t.trees[t.v].Get(s) return t.trees[t.v].Get(s)
} }
func (t *TreeShiftTree[T]) DeleteAllFunc(s string, f func(s string, v T) bool) {
for _, tt := range t.trees {
if v := tt.Get(s); v != t.zero {
if f(s, v) {
// Delete.
tt.tree.Delete(s)
}
}
}
}
func (t *TreeShiftTree[T]) LongestPrefix(s string) (string, T) { func (t *TreeShiftTree[T]) LongestPrefix(s string) (string, T) {
return t.trees[t.v].LongestPrefix(s) return t.trees[t.v].LongestPrefix(s)
} }
@ -60,10 +74,31 @@ func (t *TreeShiftTree[T]) Insert(s string, v T) T {
return t.trees[t.v].Insert(s, v) return t.trees[t.v].Insert(s, v)
} }
func (t *TreeShiftTree[T]) Lock(lockType LockType) func() {
return t.trees[t.v].Lock(lockType)
}
func (t *TreeShiftTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error { func (t *TreeShiftTree[T]) WalkPrefix(lockType LockType, s string, f func(s string, v T) (bool, error)) error {
return t.trees[t.v].WalkPrefix(lockType, s, f) return t.trees[t.v].WalkPrefix(lockType, s, f)
} }
func (t *TreeShiftTree[T]) WalkPrefixRaw(lockType LockType, s string, f func(s string, v T) (bool, error)) error {
for _, tt := range t.trees {
if err := tt.WalkPrefix(lockType, s, f); err != nil {
return err
}
}
return nil
}
func (t *TreeShiftTree[T]) LenRaw() int {
var count int
for _, tt := range t.trees {
count += tt.tree.Len()
}
return count
}
func (t *TreeShiftTree[T]) Delete(key string) { func (t *TreeShiftTree[T]) Delete(key string) {
for _, tt := range t.trees { for _, tt := range t.trees {
tt.tree.Delete(key) tt.tree.Delete(key)
@ -77,25 +112,3 @@ func (t *TreeShiftTree[T]) DeletePrefix(prefix string) int {
} }
return count return count
} }
func (t *TreeShiftTree[T]) Lock(writable bool) (commit func()) {
if writable {
for _, tt := range t.trees {
tt.mu.Lock()
}
return func() {
for _, tt := range t.trees {
tt.mu.Unlock()
}
}
}
for _, tt := range t.trees {
tt.mu.RLock()
}
return func() {
for _, tt := range t.trees {
tt.mu.RUnlock()
}
}
}

View file

@ -111,6 +111,24 @@ func (h *HugoSites) ShouldSkipFileChangeEvent(ev fsnotify.Event) bool {
return h.skipRebuildForFilenames[ev.Name] return h.skipRebuildForFilenames[ev.Name]
} }
func (h *HugoSites) isRebuild() bool {
return h.buildCounter.Load() > 0
}
func (h *HugoSites) resolveSite(lang string) *Site {
if lang == "" {
lang = h.Conf.DefaultContentLanguage()
}
for _, s := range h.Sites {
if s.Lang() == lang {
return s
}
}
return nil
}
// Only used in tests. // Only used in tests.
type buildCounters struct { type buildCounters struct {
contentRenderCounter atomic.Uint64 contentRenderCounter atomic.Uint64
@ -479,6 +497,7 @@ func (h *HugoSites) loadData() error {
hugofs.WalkwayConfig{ hugofs.WalkwayConfig{
Fs: h.PathSpec.BaseFs.Data.Fs, Fs: h.PathSpec.BaseFs.Data.Fs,
IgnoreFile: h.SourceSpec.IgnoreFile, IgnoreFile: h.SourceSpec.IgnoreFile,
PathParser: h.Conf.PathParser(),
WalkFn: func(path string, fi hugofs.FileMetaInfo) error { WalkFn: func(path string, fi hugofs.FileMetaInfo) error {
if fi.IsDir() { if fi.IsDir() {
return nil return nil

View file

@ -30,6 +30,8 @@ import (
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs/glob" "github.com/gohugoio/hugo/hugofs/glob"
"github.com/gohugoio/hugo/hugolib/doctree"
"github.com/gohugoio/hugo/hugolib/pagesfromdata"
"github.com/gohugoio/hugo/hugolib/segments" "github.com/gohugoio/hugo/hugolib/segments"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output"
@ -41,6 +43,7 @@ import (
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/para" "github.com/gohugoio/hugo/common/para"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/rungroup"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/siteidentities" "github.com/gohugoio/hugo/resources/page/siteidentities"
@ -96,6 +99,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
close(to) close(to)
}(errCollector, errs) }(errCollector, errs)
for _, s := range h.Sites {
s.state = siteStateInit
}
if h.Metrics != nil { if h.Metrics != nil {
h.Metrics.Reset() h.Metrics.Reset()
} }
@ -109,7 +116,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
conf := &config conf := &config
if conf.whatChanged == nil { if conf.whatChanged == nil {
// Assume everything has changed // Assume everything has changed
conf.whatChanged = &whatChanged{contentChanged: true} conf.whatChanged = &whatChanged{needsPagesAssembly: true}
} }
var prepareErr error var prepareErr error
@ -153,6 +160,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
} }
} }
for _, s := range h.Sites {
s.state = siteStateReady
}
if prepareErr == nil { if prepareErr == nil {
if err := h.render(infol, conf); err != nil { if err := h.render(infol, conf); err != nil {
h.SendError(fmt.Errorf("render: %w", err)) h.SendError(fmt.Errorf("render: %w", err))
@ -213,7 +224,7 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
}) })
for _, s := range h.Sites { for _, s := range h.Sites {
s.resetBuildState(config.whatChanged.contentChanged) s.resetBuildState(config.whatChanged.needsPagesAssembly)
} }
h.reset(config) h.reset(config)
@ -232,7 +243,7 @@ func (h *HugoSites) process(ctx context.Context, l logg.LevelLogger, config *Bui
// This is a rebuild // This is a rebuild
return h.processPartial(ctx, l, config, init, events) return h.processPartial(ctx, l, config, init, events)
} }
return h.processFull(ctx, l, *config) return h.processFull(ctx, l, config)
} }
// assemble creates missing sections, applies aggregate values (e.g. dates, cascading params), // assemble creates missing sections, applies aggregate values (e.g. dates, cascading params),
@ -241,22 +252,24 @@ func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *Buil
l = l.WithField("step", "assemble") l = l.WithField("step", "assemble")
defer loggers.TimeTrackf(l, time.Now(), nil, "") defer loggers.TimeTrackf(l, time.Now(), nil, "")
if !bcfg.whatChanged.contentChanged { if !bcfg.whatChanged.needsPagesAssembly {
changes := bcfg.whatChanged.Drain()
if len(changes) > 0 {
if err := h.resolveAndClearStateForIdentities(ctx, l, nil, changes); err != nil {
return err
}
}
return nil return nil
} }
h.translationKeyPages.Reset() h.translationKeyPages.Reset()
assemblers := make([]*sitePagesAssembler, len(h.Sites)) assemblers := make([]*sitePagesAssembler, len(h.Sites))
// Changes detected during assembly (e.g. aggregate date changes) // Changes detected during assembly (e.g. aggregate date changes)
assembleChanges := &whatChanged{
identitySet: make(map[identity.Identity]bool),
}
for i, s := range h.Sites { for i, s := range h.Sites {
assemblers[i] = &sitePagesAssembler{ assemblers[i] = &sitePagesAssembler{
Site: s, Site: s,
watching: s.watching(), assembleChanges: bcfg.whatChanged,
incomingChanges: bcfg.whatChanged,
assembleChanges: assembleChanges,
ctx: ctx, ctx: ctx,
} }
} }
@ -272,7 +285,7 @@ func (h *HugoSites) assemble(ctx context.Context, l logg.LevelLogger, bcfg *Buil
return err return err
} }
changes := assembleChanges.Changes() changes := bcfg.whatChanged.Drain()
// Changes from the assemble step (e.g. lastMod, cascade) needs a re-calculation // Changes from the assemble step (e.g. lastMod, cascade) needs a re-calculation
// of what needs to be re-built. // of what needs to be re-built.
@ -619,10 +632,10 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
logger := h.Log logger := h.Log
var ( var (
tmplAdded bool tmplAdded bool
tmplChanged bool tmplChanged bool
i18nChanged bool i18nChanged bool
contentChanged bool needsPagesAssemble bool
) )
changedPaths := struct { changedPaths := struct {
@ -696,11 +709,33 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
switch pathInfo.Component() { switch pathInfo.Component() {
case files.ComponentFolderContent: case files.ComponentFolderContent:
logger.Println("Source changed", pathInfo.Path()) logger.Println("Source changed", pathInfo.Path())
if ids := h.pageTrees.collectAndMarkStaleIdentities(pathInfo); len(ids) > 0 { isContentDataFile := pathInfo.IsContentData()
changes = append(changes, ids...) if !isContentDataFile {
if ids := h.pageTrees.collectAndMarkStaleIdentities(pathInfo); len(ids) > 0 {
changes = append(changes, ids...)
}
} else {
h.pageTrees.treePagesFromTemplateAdapters.DeleteAllFunc(pathInfo.Base(),
func(s string, n *pagesfromdata.PagesFromTemplate) bool {
changes = append(changes, n.DependencyManager)
// Try to open the file to see if has been deleted.
f, err := n.GoTmplFi.Meta().Open()
if err == nil {
f.Close()
}
if err != nil {
// Remove all pages and resources below.
prefix := pathInfo.Base() + "/"
h.pageTrees.treePages.DeletePrefixAll(prefix)
h.pageTrees.resourceTrees.DeletePrefixAll(prefix)
changes = append(changes, identity.NewGlobIdentity(prefix+"*"))
}
return err != nil
})
} }
contentChanged = true needsPagesAssemble = true
if config.RecentlyVisited != nil { if config.RecentlyVisited != nil {
// Fast render mode. Adding them to the visited queue // Fast render mode. Adding them to the visited queue
@ -714,7 +749,7 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
h.pageTrees.treeTaxonomyEntries.DeletePrefix("") h.pageTrees.treeTaxonomyEntries.DeletePrefix("")
if delete { if delete && !isContentDataFile {
_, ok := h.pageTrees.treePages.LongestPrefixAll(pathInfo.Base()) _, ok := h.pageTrees.treePages.LongestPrefixAll(pathInfo.Base())
if ok { if ok {
h.pageTrees.treePages.DeleteAll(pathInfo.Base()) h.pageTrees.treePages.DeleteAll(pathInfo.Base())
@ -853,8 +888,8 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
resourceFiles := h.fileEventsContentPaths(addedOrChangedContent) resourceFiles := h.fileEventsContentPaths(addedOrChangedContent)
changed := &whatChanged{ changed := &whatChanged{
contentChanged: contentChanged, needsPagesAssembly: needsPagesAssemble,
identitySet: make(identity.Identities), identitySet: make(identity.Identities),
} }
changed.Add(changes...) changed.Add(changes...)
@ -876,10 +911,7 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
} }
} }
// Removes duplicates. if err := h.resolveAndClearStateForIdentities(ctx, l, cacheBusterOr, changed.Drain()); err != nil {
changes = changed.identitySet.AsSlice()
if err := h.resolveAndClearStateForIdentities(ctx, l, cacheBusterOr, changes); err != nil {
return err return err
} }
@ -907,7 +939,13 @@ func (h *HugoSites) processPartial(ctx context.Context, l logg.LevelLogger, conf
} }
if resourceFiles != nil { if resourceFiles != nil {
if err := h.processFiles(ctx, l, *config, resourceFiles...); err != nil { if err := h.processFiles(ctx, l, config, resourceFiles...); err != nil {
return err
}
}
if h.isRebuild() {
if err := h.processContentAdaptersOnRebuild(ctx, config); err != nil {
return err return err
} }
} }
@ -926,7 +964,7 @@ func (h *HugoSites) LogServerAddresses() {
} }
} }
func (h *HugoSites) processFull(ctx context.Context, l logg.LevelLogger, config BuildCfg) (err error) { func (h *HugoSites) processFull(ctx context.Context, l logg.LevelLogger, config *BuildCfg) (err error) {
if err = h.processFiles(ctx, l, config); err != nil { if err = h.processFiles(ctx, l, config); err != nil {
err = fmt.Errorf("readAndProcessContent: %w", err) err = fmt.Errorf("readAndProcessContent: %w", err)
return return
@ -934,7 +972,49 @@ func (h *HugoSites) processFull(ctx context.Context, l logg.LevelLogger, config
return err return err
} }
func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildConfig BuildCfg, filenames ...pathChange) error { func (s *Site) handleContentAdapterChanges(bi pagesfromdata.BuildInfo, buildConfig *BuildCfg) {
if !s.h.isRebuild() {
return
}
if len(bi.ChangedIdentities) > 0 {
buildConfig.whatChanged.Add(bi.ChangedIdentities...)
buildConfig.whatChanged.needsPagesAssembly = true
}
for _, p := range bi.DeletedPaths {
pp := path.Join(bi.Path.Base(), p)
if v, ok := s.pageMap.treePages.Delete(pp); ok {
buildConfig.whatChanged.Add(v.GetIdentity())
}
}
}
func (h *HugoSites) processContentAdaptersOnRebuild(ctx context.Context, buildConfig *BuildCfg) error {
g := rungroup.Run[*pagesfromdata.PagesFromTemplate](ctx, rungroup.Config[*pagesfromdata.PagesFromTemplate]{
NumWorkers: h.numWorkers,
Handle: func(ctx context.Context, p *pagesfromdata.PagesFromTemplate) error {
bi, err := p.Execute(ctx)
if err != nil {
return err
}
s := p.Site.(*Site)
s.handleContentAdapterChanges(bi, buildConfig)
return nil
},
})
h.pageTrees.treePagesFromTemplateAdapters.WalkPrefixRaw(doctree.LockTypeRead, "", func(key string, p *pagesfromdata.PagesFromTemplate) (bool, error) {
if p.StaleVersion() > 0 {
g.Enqueue(p)
}
return false, nil
})
return g.Wait()
}
func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildConfig *BuildCfg, filenames ...pathChange) error {
if s.Deps == nil { if s.Deps == nil {
panic("nil deps on site") panic("nil deps on site")
} }
@ -944,7 +1024,7 @@ func (s *HugoSites) processFiles(ctx context.Context, l logg.LevelLogger, buildC
// For inserts, we can pick an arbitrary pageMap. // For inserts, we can pick an arbitrary pageMap.
pageMap := s.Sites[0].pageMap pageMap := s.Sites[0].pageMap
c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, filenames) c := newPagesCollector(ctx, s.h, sourceSpec, s.Log, l, pageMap, buildConfig, filenames)
if err := c.Collect(); err != nil { if err := c.Collect(); err != nil {
return err return err

View file

@ -41,7 +41,7 @@ Home: {{ .Title }}
IntegrationTestConfig{ IntegrationTestConfig{
T: t, T: t,
TxtarString: files, TxtarString: files,
LogLevel: logg.LevelTrace, // LogLevel: logg.LevelTrace,
}, },
).Build() ).Build()

View file

@ -510,9 +510,15 @@ func (p *pageState) renderResources() error {
continue continue
} }
if _, isWrapper := r.(resource.ResourceWrapper); isWrapper {
// Skip resources that are wrapped.
// These gets published on its own.
continue
}
src, ok := r.(resource.Source) src, ok := r.(resource.Source)
if !ok { if !ok {
initErr = fmt.Errorf("resource %T does not support resource.Source", src) initErr = fmt.Errorf("resource %T does not support resource.Source", r)
return return
} }
@ -581,7 +587,11 @@ func (p *pageState) getPageInfoForError() string {
func (p *pageState) getContentConverter() converter.Converter { func (p *pageState) getContentConverter() converter.Converter {
var err error var err error
p.contentConverterInit.Do(func() { p.contentConverterInit.Do(func() {
markup := p.m.pageConfig.Markup if p.m.pageConfig.ContentMediaType.IsZero() {
panic("ContentMediaType not set")
}
markup := p.m.pageConfig.ContentMediaType.SubType
if markup == "html" { if markup == "html" {
// Only used for shortcode inner content. // Only used for shortcode inner content.
markup = "markdown" markup = "markdown"

View file

@ -20,6 +20,7 @@ import (
"fmt" "fmt"
"html/template" "html/template"
"io" "io"
"path/filepath"
"strconv" "strconv"
"strings" "strings"
"unicode/utf8" "unicode/utf8"
@ -54,21 +55,31 @@ type pageContentReplacement struct {
source pageparser.Item source pageparser.Item
} }
func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64, sourceKey string) (*contentParseInfo, error) { func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64) (*contentParseInfo, error) {
var openSource hugio.OpenReadSeekCloser var (
if m.f != nil { sourceKey string
meta := m.f.FileInfo().Meta() openSource hugio.OpenReadSeekCloser
openSource = func() (hugio.ReadSeekCloser, error) { hasContent = m.pageConfig.IsFromContentAdapter
r, err := meta.Open() )
if err != nil {
return nil, fmt.Errorf("failed to open file %q: %w", meta.Filename, err) if m.f != nil && !hasContent {
sourceKey = filepath.ToSlash(m.f.Filename())
if !hasContent {
meta := m.f.FileInfo().Meta()
openSource = func() (hugio.ReadSeekCloser, error) {
r, err := meta.Open()
if err != nil {
return nil, fmt.Errorf("failed to open file %q: %w", meta.Filename, err)
}
return r, nil
} }
return r, nil
} }
} else if hasContent {
openSource = m.pageConfig.Content.ValueAsOpenReadSeekCloser()
} }
if sourceKey == "" { if sourceKey == "" {
sourceKey = strconv.Itoa(int(pid)) sourceKey = strconv.FormatUint(pid, 10)
} }
pi := &contentParseInfo{ pi := &contentParseInfo{
@ -93,6 +104,11 @@ func (m *pageMeta) parseFrontMatter(h *HugoSites, pid uint64, sourceKey string)
pi.itemsStep1 = items pi.itemsStep1 = items
if hasContent {
// No front matter.
return pi, nil
}
if err := pi.mapFrontMatter(source); err != nil { if err := pi.mapFrontMatter(source); err != nil {
return nil, err return nil, err
} }
@ -567,15 +583,14 @@ func (c *cachedContent) contentRendered(ctx context.Context, cp *pageContentOutp
var result contentSummary // hasVariants bool var result contentSummary // hasVariants bool
if c.pi.hasSummaryDivider { if c.pi.hasSummaryDivider {
isHTML := cp.po.p.m.pageConfig.Markup == "html" if cp.po.p.m.pageConfig.ContentMediaType.IsHTML() {
if isHTML {
// Use the summary sections as provided by the user. // Use the summary sections as provided by the user.
i := bytes.Index(b, internalSummaryDividerPre) i := bytes.Index(b, internalSummaryDividerPre)
result.summary = helpers.BytesToHTML(b[:i]) result.summary = helpers.BytesToHTML(b[:i])
b = b[i+len(internalSummaryDividerPre):] b = b[i+len(internalSummaryDividerPre):]
} else { } else {
summary, content, err := splitUserDefinedSummaryAndContent(cp.po.p.m.pageConfig.Markup, b) summary, content, err := splitUserDefinedSummaryAndContent(cp.po.p.m.pageConfig.Content.Markup, b)
if err != nil { if err != nil {
cp.po.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.po.p.pathOrTitle(), err) cp.po.p.s.Log.Errorf("Failed to set user defined summary for page %q: %s", cp.po.p.pathOrTitle(), err)
} else { } else {
@ -665,7 +680,7 @@ func (c *cachedContent) contentToC(ctx context.Context, cp *pageContentOutput) (
p.pageOutputTemplateVariationsState.Add(1) p.pageOutputTemplateVariationsState.Add(1)
} }
isHTML := cp.po.p.m.pageConfig.Markup == "html" isHTML := cp.po.p.m.pageConfig.ContentMediaType.IsHTML()
if !isHTML { if !isHTML {
createAndSetToC := func(tocProvider converter.TableOfContentsProvider) { createAndSetToC := func(tocProvider converter.TableOfContentsProvider) {
@ -788,7 +803,7 @@ func (c *cachedContent) contentPlain(ctx context.Context, cp *pageContentOutput)
if err != nil { if err != nil {
return nil, err return nil, err
} }
html := cp.po.p.s.ContentSpec.TrimShortHTML(b.Bytes(), cp.po.p.m.pageConfig.Markup) html := cp.po.p.s.ContentSpec.TrimShortHTML(b.Bytes(), cp.po.p.m.pageConfig.Content.Markup)
result.summary = helpers.BytesToHTML(html) result.summary = helpers.BytesToHTML(html)
} else { } else {
var summary string var summary string

View file

@ -54,7 +54,7 @@ type pageMeta struct {
singular string // Set for kind == KindTerm and kind == KindTaxonomy. singular string // Set for kind == KindTerm and kind == KindTaxonomy.
resource.Staler resource.Staler
pageMetaParams *pageMetaParams
pageMetaFrontMatter pageMetaFrontMatter
// Set for standalone pages, e.g. robotsTXT. // Set for standalone pages, e.g. robotsTXT.
@ -100,7 +100,7 @@ type pageMetaFrontMatter struct {
func (m *pageMetaParams) init(preserveOringal bool) { func (m *pageMetaParams) init(preserveOringal bool) {
if preserveOringal { if preserveOringal {
m.paramsOriginal = xmaps.Clone[maps.Params](m.pageConfig.Params) m.paramsOriginal = xmaps.Clone[maps.Params](m.pageConfig.Params)
m.cascadeOriginal = xmaps.Clone[map[page.PageMatcher]maps.Params](m.pageConfig.Cascade) m.cascadeOriginal = xmaps.Clone[map[page.PageMatcher]maps.Params](m.pageConfig.CascadeCompiled)
} }
} }
@ -137,19 +137,19 @@ func (p *pageMeta) BundleType() string {
} }
func (p *pageMeta) Date() time.Time { func (p *pageMeta) Date() time.Time {
return p.pageConfig.Date return p.pageConfig.Dates.Date
} }
func (p *pageMeta) PublishDate() time.Time { func (p *pageMeta) PublishDate() time.Time {
return p.pageConfig.PublishDate return p.pageConfig.Dates.PublishDate
} }
func (p *pageMeta) Lastmod() time.Time { func (p *pageMeta) Lastmod() time.Time {
return p.pageConfig.Lastmod return p.pageConfig.Dates.Lastmod
} }
func (p *pageMeta) ExpiryDate() time.Time { func (p *pageMeta) ExpiryDate() time.Time {
return p.pageConfig.ExpiryDate return p.pageConfig.Dates.ExpiryDate
} }
func (p *pageMeta) Description() string { func (p *pageMeta) Description() string {
@ -280,9 +280,6 @@ func (p *pageMeta) setMetaPre(pi *contentParseInfo, logger loggers.Logger, conf
if frontmatter != nil { if frontmatter != nil {
pcfg := p.pageConfig pcfg := p.pageConfig
if pcfg == nil {
panic("pageConfig not set")
}
// Needed for case insensitive fetching of params values // Needed for case insensitive fetching of params values
maps.PrepareParams(frontmatter) maps.PrepareParams(frontmatter)
pcfg.Params = frontmatter pcfg.Params = frontmatter
@ -293,7 +290,7 @@ func (p *pageMeta) setMetaPre(pi *contentParseInfo, logger loggers.Logger, conf
if err != nil { if err != nil {
return err return err
} }
pcfg.Cascade = cascade pcfg.CascadeCompiled = cascade
} }
// Look for path, lang and kind, all of which values we need early on. // Look for path, lang and kind, all of which values we need early on.
@ -331,18 +328,18 @@ func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error
ps.m.setMetaPostCount++ ps.m.setMetaPostCount++
var cascadeHashPre uint64 var cascadeHashPre uint64
if ps.m.setMetaPostCount > 1 { if ps.m.setMetaPostCount > 1 {
cascadeHashPre = identity.HashUint64(ps.m.pageConfig.Cascade) cascadeHashPre = identity.HashUint64(ps.m.pageConfig.CascadeCompiled)
ps.m.pageConfig.Cascade = xmaps.Clone[map[page.PageMatcher]maps.Params](ps.m.cascadeOriginal) ps.m.pageConfig.CascadeCompiled = xmaps.Clone[map[page.PageMatcher]maps.Params](ps.m.cascadeOriginal)
} }
// Apply cascades first so they can be overridden later. // Apply cascades first so they can be overridden later.
if cascade != nil { if cascade != nil {
if ps.m.pageConfig.Cascade != nil { if ps.m.pageConfig.CascadeCompiled != nil {
for k, v := range cascade { for k, v := range cascade {
vv, found := ps.m.pageConfig.Cascade[k] vv, found := ps.m.pageConfig.CascadeCompiled[k]
if !found { if !found {
ps.m.pageConfig.Cascade[k] = v ps.m.pageConfig.CascadeCompiled[k] = v
} else { } else {
// Merge // Merge
for ck, cv := range v { for ck, cv := range v {
@ -352,18 +349,18 @@ func (ps *pageState) setMetaPost(cascade map[page.PageMatcher]maps.Params) error
} }
} }
} }
cascade = ps.m.pageConfig.Cascade cascade = ps.m.pageConfig.CascadeCompiled
} else { } else {
ps.m.pageConfig.Cascade = cascade ps.m.pageConfig.CascadeCompiled = cascade
} }
} }
if cascade == nil { if cascade == nil {
cascade = ps.m.pageConfig.Cascade cascade = ps.m.pageConfig.CascadeCompiled
} }
if ps.m.setMetaPostCount > 1 { if ps.m.setMetaPostCount > 1 {
ps.m.setMetaPostCascadeChanged = cascadeHashPre != identity.HashUint64(ps.m.pageConfig.Cascade) ps.m.setMetaPostCascadeChanged = cascadeHashPre != identity.HashUint64(ps.m.pageConfig.CascadeCompiled)
if !ps.m.setMetaPostCascadeChanged { if !ps.m.setMetaPostCascadeChanged {
// No changes, restore any value that may be changed by aggregation. // No changes, restore any value that may be changed by aggregation.
@ -404,11 +401,17 @@ func (p *pageState) setMetaPostParams() error {
pm := p.m pm := p.m
var mtime time.Time var mtime time.Time
var contentBaseName string var contentBaseName string
var ext string
var isContentAdapter bool
if p.File() != nil { if p.File() != nil {
isContentAdapter = p.File().IsContentAdapter()
contentBaseName = p.File().ContentBaseName() contentBaseName = p.File().ContentBaseName()
if p.File().FileInfo() != nil { if p.File().FileInfo() != nil {
mtime = p.File().FileInfo().ModTime() mtime = p.File().FileInfo().ModTime()
} }
if !isContentAdapter {
ext = p.File().Ext()
}
} }
var gitAuthorDate time.Time var gitAuthorDate time.Time
@ -432,6 +435,11 @@ func (p *pageState) setMetaPostParams() error {
p.s.Log.Errorf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err) p.s.Log.Errorf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
} }
if isContentAdapter {
// Done.
return nil
}
var buildConfig any var buildConfig any
var isNewBuildKeyword bool var isNewBuildKeyword bool
if v, ok := pm.pageConfig.Params["_build"]; ok { if v, ok := pm.pageConfig.Params["_build"]; ok {
@ -460,8 +468,10 @@ params:
var sitemapSet bool var sitemapSet bool
pcfg := pm.pageConfig pcfg := pm.pageConfig
params := pcfg.Params params := pcfg.Params
if params == nil {
panic("params not set for " + p.Title())
}
var draft, published, isCJKLanguage *bool var draft, published, isCJKLanguage *bool
var userParams map[string]any var userParams map[string]any
@ -554,8 +564,8 @@ params:
pcfg.Layout = cast.ToString(v) pcfg.Layout = cast.ToString(v)
params[loki] = pcfg.Layout params[loki] = pcfg.Layout
case "markup": case "markup":
pcfg.Markup = cast.ToString(v) pcfg.Content.Markup = cast.ToString(v)
params[loki] = pcfg.Markup params[loki] = pcfg.Content.Markup
case "weight": case "weight":
pcfg.Weight = cast.ToInt(v) pcfg.Weight = cast.ToInt(v)
params[loki] = pcfg.Weight params[loki] = pcfg.Weight
@ -605,7 +615,7 @@ params:
} }
if handled { if handled {
pcfg.Resources = resources pcfg.ResourcesMeta = resources
break break
} }
fallthrough fallthrough
@ -652,8 +662,6 @@ params:
pcfg.Sitemap = p.s.conf.Sitemap pcfg.Sitemap = p.s.conf.Sitemap
} }
pcfg.Markup = p.s.ContentSpec.ResolveMarkup(pcfg.Markup)
if draft != nil && published != nil { if draft != nil && published != nil {
pcfg.Draft = *draft pcfg.Draft = *draft
p.m.s.Log.Warnf("page %q has both draft and published settings in its frontmatter. Using draft.", p.File().Filename()) p.m.s.Log.Warnf("page %q has both draft and published settings in its frontmatter. Using draft.", p.File().Filename())
@ -676,6 +684,14 @@ params:
params["iscjklanguage"] = pcfg.IsCJKLanguage params["iscjklanguage"] = pcfg.IsCJKLanguage
if err := pcfg.Validate(false); err != nil {
return err
}
if err := pcfg.Compile("", false, ext, p.s.Log, p.s.conf.MediaTypes.Config); err != nil {
return err
}
return nil return nil
} }
@ -731,18 +747,16 @@ func (p *pageMeta) applyDefaultValues() error {
(&p.pageConfig.Build).Disable() (&p.pageConfig.Build).Disable()
} }
if p.pageConfig.Markup == "" { if p.pageConfig.Content.Markup == "" {
if p.File() != nil { if p.File() != nil {
// Fall back to file extension // Fall back to file extension
p.pageConfig.Markup = p.s.ContentSpec.ResolveMarkup(p.File().Ext()) p.pageConfig.Content.Markup = p.s.ContentSpec.ResolveMarkup(p.File().Ext())
} }
if p.pageConfig.Markup == "" { if p.pageConfig.Content.Markup == "" {
p.pageConfig.Markup = "markdown" p.pageConfig.Content.Markup = "markdown"
} }
} }
p.pageConfig.IsGoldmark = p.s.ContentSpec.Converters.IsGoldmark(p.pageConfig.Markup)
if p.pageConfig.Title == "" && p.f == nil { if p.pageConfig.Title == "" && p.f == nil {
switch p.Kind() { switch p.Kind() {
case kinds.KindHome: case kinds.KindHome:

View file

@ -15,7 +15,6 @@ package hugolib
import ( import (
"fmt" "fmt"
"path/filepath"
"sync" "sync"
"sync/atomic" "sync/atomic"
@ -36,21 +35,17 @@ var pageIDCounter atomic.Uint64
func (h *HugoSites) newPage(m *pageMeta) (*pageState, *paths.Path, error) { func (h *HugoSites) newPage(m *pageMeta) (*pageState, *paths.Path, error) {
m.Staler = &resources.AtomicStaler{} m.Staler = &resources.AtomicStaler{}
if m.pageConfig == nil { if m.pageMetaParams == nil {
m.pageMetaParams = pageMetaParams{ m.pageMetaParams = &pageMetaParams{
pageConfig: &pagemeta.PageConfig{ pageConfig: &pagemeta.PageConfig{},
Params: maps.Params{},
},
} }
} }
if m.pageConfig.Params == nil {
var sourceKey string m.pageConfig.Params = maps.Params{}
if m.f != nil {
sourceKey = filepath.ToSlash(m.f.Filename())
} }
pid := pageIDCounter.Add(1) pid := pageIDCounter.Add(1)
pi, err := m.parseFrontMatter(h, pid, sourceKey) pi, err := m.parseFrontMatter(h, pid)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
@ -69,28 +64,40 @@ func (h *HugoSites) newPage(m *pageMeta) (*pageState, *paths.Path, error) {
s := m.pageConfig.Path s := m.pageConfig.Path
if !paths.HasExt(s) { if !paths.HasExt(s) {
var ( var (
isBranch bool isBranch bool
ext string = "md" isBranchSet bool
ext string = m.pageConfig.ContentMediaType.FirstSuffix.Suffix
) )
if pcfg.Kind != "" { if pcfg.Kind != "" {
isBranch = kinds.IsBranch(pcfg.Kind) isBranch = kinds.IsBranch(pcfg.Kind)
} else if m.pathInfo != nil { isBranchSet = true
isBranch = m.pathInfo.IsBranchBundle() }
if m.pathInfo.Ext() != "" {
ext = m.pathInfo.Ext() if !pcfg.IsFromContentAdapter {
} if m.pathInfo != nil {
} else if m.f != nil { if !isBranchSet {
pi := m.f.FileInfo().Meta().PathInfo isBranch = m.pathInfo.IsBranchBundle()
isBranch = pi.IsBranchBundle() }
if pi.Ext() != "" { if m.pathInfo.Ext() != "" {
ext = pi.Ext() ext = m.pathInfo.Ext()
}
} else if m.f != nil {
pi := m.f.FileInfo().Meta().PathInfo
if !isBranchSet {
isBranch = pi.IsBranchBundle()
}
if pi.Ext() != "" {
ext = pi.Ext()
}
} }
} }
if isBranch { if isBranch {
s += "/_index." + ext s += "/_index." + ext
} else { } else {
s += "/index." + ext s += "/index." + ext
} }
} }
m.pathInfo = h.Conf.PathParser().Parse(files.ComponentFolderContent, s) m.pathInfo = h.Conf.PathParser().Parse(files.ComponentFolderContent, s)
} else if m.pathInfo == nil { } else if m.pathInfo == nil {
@ -112,23 +119,13 @@ func (h *HugoSites) newPage(m *pageMeta) (*pageState, *paths.Path, error) {
} else if m.f != nil { } else if m.f != nil {
meta := m.f.FileInfo().Meta() meta := m.f.FileInfo().Meta()
lang = meta.Lang lang = meta.Lang
m.s = h.Sites[meta.LangIndex]
} else { } else {
lang = m.pathInfo.Lang() lang = m.pathInfo.Lang()
} }
if lang == "" {
lang = h.Conf.DefaultContentLanguage()
}
var found bool
for _, ss := range h.Sites {
if ss.Lang() == lang {
m.s = ss
found = true
break
}
}
if !found { m.s = h.resolveSite(lang)
if m.s == nil {
return nil, fmt.Errorf("no site found for language %q", lang) return nil, fmt.Errorf("no site found for language %q", lang)
} }
} }

View file

@ -25,6 +25,8 @@ import (
"github.com/gohugoio/hugo/common/text" "github.com/gohugoio/hugo/common/text"
"github.com/gohugoio/hugo/common/types/hstring" "github.com/gohugoio/hugo/common/types/hstring"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/parser/pageparser" "github.com/gohugoio/hugo/parser/pageparser"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
"github.com/spf13/cast" "github.com/spf13/cast"
@ -262,6 +264,9 @@ func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (te
if err := mapstructure.WeakDecode(m, &opts); err != nil { if err := mapstructure.WeakDecode(m, &opts); err != nil {
return "", fmt.Errorf("failed to decode options: %w", err) return "", fmt.Errorf("failed to decode options: %w", err)
} }
if opts.Markup != "" {
opts.Markup = markup.ResolveMarkup(opts.Markup)
}
} }
contentToRenderv := args[sidx] contentToRenderv := args[sidx]
@ -283,7 +288,8 @@ func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (te
} }
conv := pco.po.p.getContentConverter() conv := pco.po.p.getContentConverter()
if opts.Markup != "" && opts.Markup != pco.po.p.m.pageConfig.Markup {
if opts.Markup != "" && opts.Markup != pco.po.p.m.pageConfig.ContentMediaType.SubType {
var err error var err error
conv, err = pco.po.p.m.newContentConverter(pco.po.p, opts.Markup) conv, err = pco.po.p.m.newContentConverter(pco.po.p, opts.Markup)
if err != nil { if err != nil {
@ -376,7 +382,7 @@ func (pco *pageContentOutput) RenderString(ctx context.Context, args ...any) (te
} }
if opts.Display == "inline" { if opts.Display == "inline" {
markup := pco.po.p.m.pageConfig.Markup markup := pco.po.p.m.pageConfig.Content.Markup
if opts.Markup != "" { if opts.Markup != "" {
markup = pco.po.p.s.ContentSpec.ResolveMarkup(opts.Markup) markup = pco.po.p.s.ContentSpec.ResolveMarkup(opts.Markup)
} }
@ -657,7 +663,7 @@ func splitUserDefinedSummaryAndContent(markup string, c []byte) (summary []byte,
startTag := "p" startTag := "p"
switch markup { switch markup {
case "asciidocext": case media.DefaultContentTypes.AsciiDoc.SubType:
startTag = "div" startTag = "div"
} }

View file

@ -42,18 +42,20 @@ func newPagesCollector(
logger loggers.Logger, logger loggers.Logger,
infoLogger logg.LevelLogger, infoLogger logg.LevelLogger,
m *pageMap, m *pageMap,
buildConfig *BuildCfg,
ids []pathChange, ids []pathChange,
) *pagesCollector { ) *pagesCollector {
return &pagesCollector{ return &pagesCollector{
ctx: ctx, ctx: ctx,
h: h, h: h,
fs: sp.BaseFs.Content.Fs, fs: sp.BaseFs.Content.Fs,
m: m, m: m,
sp: sp, sp: sp,
logger: logger, logger: logger,
infoLogger: infoLogger, infoLogger: infoLogger,
ids: ids, buildConfig: buildConfig,
seenDirs: make(map[string]bool), ids: ids,
seenDirs: make(map[string]bool),
} }
} }
@ -68,6 +70,8 @@ type pagesCollector struct {
fs afero.Fs fs afero.Fs
buildConfig *BuildCfg
// List of paths that have changed. Used in partial builds. // List of paths that have changed. Used in partial builds.
ids []pathChange ids []pathChange
seenDirs map[string]bool seenDirs map[string]bool
@ -82,6 +86,8 @@ func (c *pagesCollector) Collect() (collectErr error) {
var ( var (
numWorkers = c.h.numWorkers numWorkers = c.h.numWorkers
numFilesProcessedTotal atomic.Uint64 numFilesProcessedTotal atomic.Uint64
numPagesProcessedTotal atomic.Uint64
numResourcesProcessed atomic.Uint64
numFilesProcessedLast uint64 numFilesProcessedLast uint64
fileBatchTimer = time.Now() fileBatchTimer = time.Now()
fileBatchTimerMu sync.Mutex fileBatchTimerMu sync.Mutex
@ -98,6 +104,8 @@ func (c *pagesCollector) Collect() (collectErr error) {
logg.Fields{ logg.Fields{
logg.Field{Name: "files", Value: numFilesProcessedBatch}, logg.Field{Name: "files", Value: numFilesProcessedBatch},
logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()}, logg.Field{Name: "files_total", Value: numFilesProcessedTotal.Load()},
logg.Field{Name: "pages_total", Value: numPagesProcessedTotal.Load()},
logg.Field{Name: "resources_total", Value: numResourcesProcessed.Load()},
}, },
"", "",
) )
@ -113,10 +121,13 @@ func (c *pagesCollector) Collect() (collectErr error) {
c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{ c.g = rungroup.Run[hugofs.FileMetaInfo](c.ctx, rungroup.Config[hugofs.FileMetaInfo]{
NumWorkers: numWorkers, NumWorkers: numWorkers,
Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error { Handle: func(ctx context.Context, fi hugofs.FileMetaInfo) error {
if err := c.m.AddFi(fi); err != nil { numPages, numResources, err := c.m.AddFi(fi, c.buildConfig)
if err != nil {
return hugofs.AddFileInfoToError(err, fi, c.fs) return hugofs.AddFileInfoToError(err, fi, c.fs)
} }
numFilesProcessedTotal.Add(1) numFilesProcessedTotal.Add(1)
numPagesProcessedTotal.Add(numPages)
numResourcesProcessed.Add(numResources)
if numFilesProcessedTotal.Load()%1000 == 0 { if numFilesProcessedTotal.Load()%1000 == 0 {
logFilesProcessed(false) logFilesProcessed(false)
} }
@ -243,6 +254,21 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
return nil, nil return nil, nil
} }
n := 0
for _, fi := range readdir {
if fi.Meta().PathInfo.IsContentData() {
// _content.json
// These are not part of any bundle, so just add them directly and remove them from the readdir slice.
if err := c.g.Enqueue(fi); err != nil {
return nil, err
}
} else {
readdir[n] = fi
n++
}
}
readdir = readdir[:n]
// Pick the first regular file. // Pick the first regular file.
var first hugofs.FileMetaInfo var first hugofs.FileMetaInfo
for _, fi := range readdir { for _, fi := range readdir {
@ -260,6 +286,7 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
// Any bundle file will always be first. // Any bundle file will always be first.
firstPi := first.Meta().PathInfo firstPi := first.Meta().PathInfo
if firstPi == nil { if firstPi == nil {
panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename)) panic(fmt.Sprintf("collectDirDir: no path info for %q", first.Meta().Filename))
} }
@ -320,6 +347,7 @@ func (c *pagesCollector) collectDirDir(path string, root hugofs.FileMetaInfo, in
Info: root, Info: root,
Fs: c.fs, Fs: c.fs,
IgnoreFile: c.h.SourceSpec.IgnoreFile, IgnoreFile: c.h.SourceSpec.IgnoreFile,
PathParser: c.h.Conf.PathParser(),
HookPre: preHook, HookPre: preHook,
HookPost: postHook, HookPost: postHook,
WalkFn: wfn, WalkFn: wfn,
@ -370,6 +398,7 @@ func (c *pagesCollector) handleBundleLeaf(dir, bundle hugofs.FileMetaInfo, inPat
Info: dir, Info: dir,
DirEntries: readdir, DirEntries: readdir,
IgnoreFile: c.h.SourceSpec.IgnoreFile, IgnoreFile: c.h.SourceSpec.IgnoreFile,
PathParser: c.h.Conf.PathParser(),
WalkFn: walk, WalkFn: walk,
}) })

View file

@ -0,0 +1,331 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagesfromdata
import (
"context"
"fmt"
"io"
"path/filepath"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/tpl"
"github.com/mitchellh/mapstructure"
"github.com/spf13/cast"
)
// PagesFromDataTemplateContext is the data context passed to the
// _content.gotmpl template. It is the "." seen inside the template.
type PagesFromDataTemplateContext interface {
	// AddPage adds a new page to the site.
	// The first return value will always be an empty string.
	AddPage(any) (string, error)

	// AddResource adds a new resource to the site.
	// The first return value will always be an empty string.
	AddResource(any) (string, error)

	// The site to which the pages will be added.
	Site() page.Site

	// The same template may be executed multiple times for multiple languages.
	// The Store can be used to store state between these invocations.
	Store() *maps.Scratch

	// By default, the template will be executed for the language
	// defined by the _content.gotmpl file (e.g. its mount definition).
	// This method can be used to activate the template for all languages.
	// The return value will always be an empty string.
	EnableAllLanguages() string
}

// Compile-time check that the unexported implementation satisfies the interface.
var _ PagesFromDataTemplateContext = (*pagesFromDataTemplateContext)(nil)

// pagesFromDataTemplateContext implements PagesFromDataTemplateContext by
// delegating to its owning PagesFromTemplate.
type pagesFromDataTemplateContext struct {
	p *PagesFromTemplate
}
// toPathMap converts v to a string-keyed map and extracts the required
// "path" entry. It returns the path, the full map, and an error when the
// conversion fails or the path is missing or empty.
func (p *pagesFromDataTemplateContext) toPathMap(v any) (string, map[string]any, error) {
	m, err := maps.ToStringMapE(v)
	if err != nil {
		return "", nil, err
	}
	pathValue, found := m["path"]
	if !found {
		return "", nil, fmt.Errorf("path not set")
	}
	pathString, err := cast.ToStringE(pathValue)
	if err != nil || pathString == "" {
		return "", nil, fmt.Errorf("invalid path %q", pathString)
	}
	return pathString, m, nil
}
// AddPage adds a new page to the site from the given map.
// The first return value is always the empty string so the method can be
// invoked directly from a template.
func (p *pagesFromDataTemplateContext) AddPage(v any) (string, error) {
	pagePath, pageMap, err := p.toPathMap(v)
	if err != nil {
		return "", err
	}

	// Skip pages whose source data is unchanged since the previous build.
	if changed := p.p.buildState.checkHasChangedAndSetSourceInfo(pagePath, pageMap); !changed {
		return "", nil
	}

	pageConfig := pagemeta.DefaultPageConfig
	pageConfig.IsFromContentAdapter = true

	if err := mapstructure.WeakDecode(pageMap, &pageConfig); err != nil {
		return "", fmt.Errorf("failed to decode page map: %w", err)
	}

	p.p.buildState.NumPagesAdded++

	if err := pageConfig.Validate(true); err != nil {
		return "", err
	}

	return "", p.p.HandlePage(p.p, &pageConfig)
}
// AddResource adds a new resource to the site from the given map.
// The first return value is always the empty string so the method can be
// invoked directly from a template.
func (p *pagesFromDataTemplateContext) AddResource(v any) (string, error) {
	resourcePath, resourceMap, err := p.toPathMap(v)
	if err != nil {
		return "", err
	}

	// Skip resources whose source data is unchanged since the previous build.
	if changed := p.p.buildState.checkHasChangedAndSetSourceInfo(resourcePath, resourceMap); !changed {
		return "", nil
	}

	var resourceConfig pagemeta.ResourceConfig
	if err := mapstructure.WeakDecode(resourceMap, &resourceConfig); err != nil {
		return "", err
	}

	p.p.buildState.NumResourcesAdded++

	if err := resourceConfig.Validate(); err != nil {
		return "", err
	}

	return "", p.p.HandleResource(p.p, &resourceConfig)
}
// Site returns the site to which the pages will be added.
func (p *pagesFromDataTemplateContext) Site() page.Site {
	return p.p.Site
}

// Store returns the scratch store shared between executions of this template.
func (p *pagesFromDataTemplateContext) Store() *maps.Scratch {
	return p.p.store
}

// EnableAllLanguages activates this template for all site languages.
// It returns an empty string so it can be called directly from a template.
func (p *pagesFromDataTemplateContext) EnableAllLanguages() string {
	p.p.buildState.EnableAllLanguages = true
	return ""
}
// NewPagesFromTemplate creates a new PagesFromTemplate configured by opts,
// with a fresh build state and an empty store.
func NewPagesFromTemplate(opts PagesFromTemplateOptions) *PagesFromTemplate {
	state := &BuildState{
		sourceInfosCurrent: maps.NewCache[string, *sourceInfo](),
	}
	return &PagesFromTemplate{
		PagesFromTemplateOptions: opts,
		PagesFromTemplateDeps:    opts.DepsFromSite(opts.Site),
		buildState:               state,
		store:                    maps.NewScratch(),
	}
}
// PagesFromTemplateOptions holds the configuration for a PagesFromTemplate.
type PagesFromTemplateOptions struct {
	// Site is the site the pages are added to.
	Site page.Site
	// DepsFromSite resolves the template dependencies for a given site.
	DepsFromSite func(page.Site) PagesFromTemplateDeps

	// DependencyManager tracks what this template depends on.
	DependencyManager identity.Manager

	// Watching is true when running with file watching enabled (server mode).
	Watching bool

	// HandlePage is invoked for every page added by the template.
	HandlePage func(pt *PagesFromTemplate, p *pagemeta.PageConfig) error
	// HandleResource is invoked for every resource added by the template.
	HandleResource func(pt *PagesFromTemplate, p *pagemeta.ResourceConfig) error

	// GoTmplFi is the _content.gotmpl file this template was created from.
	GoTmplFi hugofs.FileMetaInfo
}

// PagesFromTemplateDeps holds the template lookup and execution dependencies,
// which are per-site.
type PagesFromTemplateDeps struct {
	TmplFinder tpl.TemplateParseFinder
	TmplExec   tpl.TemplateExecutor
}

// Compile-time check: PagesFromTemplate participates in Hugo's stale tracking.
var _ resource.Staler = (*PagesFromTemplate)(nil)

// PagesFromTemplate creates pages and resources from a _content.gotmpl file.
type PagesFromTemplate struct {
	PagesFromTemplateOptions
	PagesFromTemplateDeps
	// buildState is reset between builds; see PrepareNextBuild.
	buildState *BuildState
	// store is shared between clones of this template (see CloneForSite).
	store *maps.Scratch
}
// AddChange registers an identity that changed during the current build.
func (b *PagesFromTemplate) AddChange(id identity.Identity) {
	b.buildState.ChangedIdentities = append(b.buildState.ChangedIdentities, id)
}

// MarkStale marks this template's output as stale. Part of resource.Staler.
func (b *PagesFromTemplate) MarkStale() {
	b.buildState.StaleVersion++
}

// StaleVersion returns the current stale version. Part of resource.Staler.
func (b *PagesFromTemplate) StaleVersion() uint32 {
	return b.buildState.StaleVersion
}
// BuildInfo summarizes a single execution of a content adapter template.
type BuildInfo struct {
	NumPagesAdded      uint64
	NumResourcesAdded  uint64
	EnableAllLanguages bool
	ChangedIdentities  []identity.Identity
	DeletedPaths       []string
	Path               *paths.Path
}

// BuildState holds per-build counters plus the bookkeeping needed to diff
// the current build against the previous one.
type BuildState struct {
	StaleVersion uint32

	EnableAllLanguages bool

	// Paths deleted in the current build.
	DeletedPaths []string

	// Changed identities in the current build.
	ChangedIdentities []identity.Identity

	NumPagesAdded     uint64
	NumResourcesAdded uint64

	// sourceInfosCurrent holds hashes for paths added in the current build.
	sourceInfosCurrent *maps.Cache[string, *sourceInfo]
	// sourceInfosPrevious holds the hashes from the previous build;
	// nil before the first build has completed.
	sourceInfosPrevious *maps.Cache[string, *sourceInfo]
}
// hash returns a stable hash of v, used to detect changes between builds.
func (b *BuildState) hash(v any) uint64 {
	return identity.HashUint64(v)
}
// checkHasChangedAndSetSourceInfo reports whether the value for changedPath
// differs from the previous build, and records its hash for the next build.
// Unchanged entries are still copied into the current cache so they are not
// later reported as deleted by resolveDeletedPaths.
// NOTE(review): on the very first build sourceInfosPrevious is nil; this
// assumes Get on a nil maps.Cache is safe — verify against common/maps.
func (b *BuildState) checkHasChangedAndSetSourceInfo(changedPath string, v any) bool {
	h := b.hash(v)
	si, found := b.sourceInfosPrevious.Get(changedPath)
	if found {
		// Carry the entry over so the path is not treated as deleted.
		b.sourceInfosCurrent.Set(changedPath, si)
		if si.hash == h {
			// Same hash as last build: nothing to do for this path.
			return false
		}
	} else {
		si = &sourceInfo{}
		b.sourceInfosCurrent.Set(changedPath, si)
	}
	si.hash = h
	return true
}
// resolveDeletedPaths records in DeletedPaths every path that was present in
// the previous build but not added in the current one. It must be called
// after the template has been executed for the current build.
func (b *BuildState) resolveDeletedPaths() {
	if b.sourceInfosPrevious == nil {
		// First build; nothing can have been deleted yet.
		b.DeletedPaths = nil
		return
	}
	// Renamed from "paths" to avoid shadowing the imported common/paths package.
	var deleted []string
	// ForEeach is the iterator's actual (misspelled) name in common/maps.
	b.sourceInfosPrevious.ForEeach(func(k string, _ *sourceInfo) {
		if _, found := b.sourceInfosCurrent.Get(k); !found {
			deleted = append(deleted, k)
		}
	})
	b.DeletedPaths = deleted
}
// PrepareNextBuild rotates the source info caches (current becomes previous)
// and resets all per-build counters and collections.
func (b *BuildState) PrepareNextBuild() {
	b.sourceInfosPrevious, b.sourceInfosCurrent = b.sourceInfosCurrent, maps.NewCache[string, *sourceInfo]()
	b.StaleVersion = 0
	b.DeletedPaths = nil
	b.ChangedIdentities = nil
	b.NumPagesAdded = 0
	b.NumResourcesAdded = 0
}
// sourceInfo holds the last known content hash for a path added via the template.
type sourceInfo struct {
	hash uint64
}
// CloneForSite returns a copy of p configured for the given site.
// Note the value receiver: p is copied, then selected fields are replaced.
// We deliberately make the clones share the same DependencyManager and Store.
func (p PagesFromTemplate) CloneForSite(s page.Site) *PagesFromTemplate {
	p.PagesFromTemplateOptions.Site = s
	p.PagesFromTemplateDeps = p.PagesFromTemplateOptions.DepsFromSite(s)
	// Each site gets its own build state.
	p.buildState = &BuildState{
		sourceInfosCurrent: maps.NewCache[string, *sourceInfo](),
	}
	return &p
}

// CloneForGoTmpl returns a copy of p bound to the given _content.gotmpl file.
func (p PagesFromTemplate) CloneForGoTmpl(fi hugofs.FileMetaInfo) *PagesFromTemplate {
	p.PagesFromTemplateOptions.GoTmplFi = fi
	return &p
}

// GetDependencyManagerForScope returns the template's dependency manager.
// The scope argument is ignored; there is only one manager per template.
func (p *PagesFromTemplate) GetDependencyManagerForScope(scope int) identity.Manager {
	return p.DependencyManager
}
// Execute parses and executes the _content.gotmpl file and returns a
// BuildInfo summarizing what was added or changed in this build.
// The build state is always rotated for the next build, even on error.
func (p *PagesFromTemplate) Execute(ctx context.Context) (BuildInfo, error) {
	defer func() {
		p.buildState.PrepareNextBuild()
	}()

	f, err := p.GoTmplFi.Meta().Open()
	if err != nil {
		return BuildInfo{}, err
	}
	defer f.Close()

	tmpl, err := p.TmplFinder.Parse(filepath.ToSlash(p.GoTmplFi.Meta().Filename), helpers.ReaderToString(f))
	if err != nil {
		return BuildInfo{}, err
	}

	data := &pagesFromDataTemplateContext{
		p: p,
	}

	// Make this template the dependency manager scope for anything it touches.
	ctx = tpl.Context.DependencyManagerScopedProvider.Set(ctx, p)

	// The rendered output is discarded; the side effects
	// (AddPage/AddResource calls) are what matter.
	if err := p.TmplExec.ExecuteWithContext(ctx, tmpl, io.Discard, data); err != nil {
		return BuildInfo{}, err
	}

	if p.Watching {
		p.buildState.resolveDeletedPaths()
	}

	bi := BuildInfo{
		NumPagesAdded:      p.buildState.NumPagesAdded,
		NumResourcesAdded:  p.buildState.NumResourcesAdded,
		EnableAllLanguages: p.buildState.EnableAllLanguages,
		ChangedIdentities:  p.buildState.ChangedIdentities,
		DeletedPaths:       p.buildState.DeletedPaths,
		Path:               p.GoTmplFi.Meta().PathInfo,
	}

	return bi, nil
}
//////////////

View file

@ -0,0 +1,479 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagesfromdata_test
import (
"fmt"
"strings"
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/markup/asciidocext"
"github.com/gohugoio/hugo/markup/pandoc"
"github.com/gohugoio/hugo/markup/rst"
)
// filesPagesFromDataTempleBasic is the shared txtar test site used by most
// tests in this file: a _content.gotmpl that creates four pages and several
// resources (from data, text and image assets), plus one regular Markdown page.
const filesPagesFromDataTempleBasic = `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
disableLiveReload = true
-- assets/a/pixel.png --
iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==
-- assets/mydata.yaml --
p1: "p1"
draft: false
-- layouts/partials/get-value.html --
{{ $val := "p1" }}
{{ return $val }}
-- layouts/_default/single.html --
Single: {{ .Title }}|{{ .Content }}|Params: {{ .Params.param1 }}|Path: {{ .Path }}|
Dates: Date: {{ .Date.Format "2006-01-02" }}|Lastmod: {{ .Lastmod.Format "2006-01-02" }}|PublishDate: {{ .PublishDate.Format "2006-01-02" }}|ExpiryDate: {{ .ExpiryDate.Format "2006-01-02" }}|
Len Resources: {{ .Resources | len }}
Resources: {{ range .Resources }}RelPermalink: {{ .RelPermalink }}|Name: {{ .Name }}|Title: {{ .Title }}|Params: {{ .Params }}|{{ end }}$
{{ with .Resources.Get "featured.png" }}
Featured Image: {{ .RelPermalink }}|{{ .Name }}|
{{ with .Resize "10x10" }}
Resized Featured Image: {{ .RelPermalink }}|{{ .Width }}|
{{ end}}
{{ end }}
-- layouts/_default/list.html --
List: {{ .Title }}|{{ .Content }}|
RegularPagesRecursive: {{ range .RegularPagesRecursive }}{{ .Title }}:{{ .Path }}|{{ end }}$
Sections: {{ range .Sections }}{{ .Title }}:{{ .Path }}|{{ end }}$
-- content/docs/pfile.md --
---
title: "pfile"
date: 2023-03-01
---
Pfile Content
-- content/docs/_content.gotmpl --
{{ $pixel := resources.Get "a/pixel.png" }}
{{ $dataResource := resources.Get "mydata.yaml" }}
{{ $data := $dataResource | transform.Unmarshal }}
{{ $pd := $data.p1 }}
{{ $pp := partial "get-value.html" }}
{{ $title := printf "%s:%s" $pd $pp }}
{{ $date := "2023-03-01" | time.AsTime }}
{{ $dates := dict "date" $date }}
{{ $contentMarkdown := dict "value" "**Hello World**" "mediaType" "text/markdown" }}
{{ $contentMarkdownDefault := dict "value" "**Hello World Default**" }}
{{ $contentHTML := dict "value" "<b>Hello World!</b> No **markdown** here." "mediaType" "text/html" }}
{{ $.AddPage (dict "kind" "page" "path" "P1" "title" $title "dates" $dates "content" $contentMarkdown "params" (dict "param1" "param1v" ) ) }}
{{ $.AddPage (dict "kind" "page" "path" "p2" "title" "p2title" "dates" $dates "content" $contentHTML ) }}
{{ $.AddPage (dict "kind" "page" "path" "p3" "title" "p3title" "dates" $dates "content" $contentMarkdownDefault "draft" false ) }}
{{ $.AddPage (dict "kind" "page" "path" "p4" "title" "p4title" "dates" $dates "content" $contentMarkdownDefault "draft" $data.draft ) }}
{{ $resourceContent := dict "value" $dataResource }}
{{ $.AddResource (dict "path" "p1/data1.yaml" "content" $resourceContent) }}
{{ $.AddResource (dict "path" "p1/mytext.txt" "content" (dict "value" "some text") "name" "textresource" "title" "My Text Resource" "params" (dict "param1" "param1v") )}}
{{ $.AddResource (dict "path" "p1/sub/mytex2.txt" "content" (dict "value" "some text") "title" "My Text Sub Resource" ) }}
{{ $.AddResource (dict "path" "P1/Sub/MyMixCaseText2.txt" "content" (dict "value" "some text") "title" "My Text Sub Mixed Case Path Resource" ) }}
{{ $.AddResource (dict "path" "p1/sub/data1.yaml" "content" $resourceContent "title" "Sub data") }}
{{ $resourceParams := dict "data2ParaM1" "data2Param1v" }}
{{ $.AddResource (dict "path" "p1/data2.yaml" "name" "data2.yaml" "title" "My data 2" "params" $resourceParams "content" $resourceContent) }}
{{ $.AddResource (dict "path" "p1/featuredimage.png" "name" "featured.png" "title" "My Featured Image" "params" $resourceParams "content" (dict "value" $pixel ))}}
`
// TestPagesFromGoTmplMisc covers the basics of pages and resources created
// from _content.gotmpl: paths, params, dates, content rendering and resources.
func TestPagesFromGoTmplMisc(t *testing.T) {
	t.Parallel()
	b := hugolib.Test(t, filesPagesFromDataTempleBasic)
	// Fixed truncated filename: "mytex2.tx" -> "mytex2.txt".
	b.AssertPublishDir(`
docs/p1/mytext.txt
docs/p1/sub/mytex2.txt
docs/p1/sub/mymixcasetext2.txt
	`)

	// Page from markdown file.
	b.AssertFileContent("public/docs/pfile/index.html", "Dates: Date: 2023-03-01|Lastmod: 2023-03-01|PublishDate: 2023-03-01|ExpiryDate: 0001-01-01|")
	// Pages from gotmpl.
	b.AssertFileContent("public/docs/p1/index.html",
		"Single: p1:p1|",
		"Path: /docs/p1|",
		"<strong>Hello World</strong>",
		"Params: param1v|",
		"Len Resources: 7",
		"RelPermalink: /mydata.yaml|Name: data1.yaml|Title: data1.yaml|Params: map[]|",
		"RelPermalink: /mydata.yaml|Name: data2.yaml|Title: My data 2|Params: map[data2param1:data2Param1v]|",
		"RelPermalink: /a/pixel.png|Name: featured.png|Title: My Featured Image|Params: map[data2param1:data2Param1v]|",
		"RelPermalink: /docs/p1/sub/mytex2.txt|Name: sub/mytex2.txt|",
		"RelPermalink: /docs/p1/sub/mymixcasetext2.txt|Name: sub/mymixcasetext2.txt|",
		"RelPermalink: /mydata.yaml|Name: sub/data1.yaml|Title: Sub data|Params: map[]|",
		"Featured Image: /a/pixel.png|featured.png|",
		"Resized Featured Image: /a/pixel_hu8aa3346827e49d756ff4e630147c42b5_70_10x10_resize_box_3.png|10|",
		// Resource from string
		"RelPermalink: /docs/p1/mytext.txt|Name: textresource|Title: My Text Resource|Params: map[param1:param1v]|",
		// Dates
		"Dates: Date: 2023-03-01|Lastmod: 2023-03-01|PublishDate: 2023-03-01|ExpiryDate: 0001-01-01|",
	)

	b.AssertFileContent("public/docs/p2/index.html", "Single: p2title|", "<b>Hello World!</b> No **markdown** here.")
	b.AssertFileContent("public/docs/p3/index.html", "<strong>Hello World Default</strong>")
}
// TestPagesFromGoTmplAsciidocAndSimilar verifies that pages added via
// _content.gotmpl are rendered with external markup handlers (AsciiDoc,
// Pandoc, reStructuredText) when available, plus the built-in org handler
// and the no-content case. Not parallel: it may spawn external processes.
func TestPagesFromGoTmplAsciidocAndSimilar(t *testing.T) {
	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
[security]
[security.exec]
allow = ['asciidoctor', 'pandoc','rst2html', 'python']
-- layouts/_default/single.html --
|Content: {{ .Content }}|Title: {{ .Title }}|Path: {{ .Path }}|
-- content/docs/_content.gotmpl --
{{ $.AddPage (dict "path" "asciidoc" "content" (dict "value" "Mark my words, #automation is essential#." "mediaType" "text/asciidoc" )) }}
{{ $.AddPage (dict "path" "pandoc" "content" (dict "value" "This ~~is deleted text.~~" "mediaType" "text/pandoc" )) }}
{{ $.AddPage (dict "path" "rst" "content" (dict "value" "This is *bold*." "mediaType" "text/rst" )) }}
{{ $.AddPage (dict "path" "org" "content" (dict "value" "the ability to use +strikethrough+ is a plus" "mediaType" "text/org" )) }}
{{ $.AddPage (dict "path" "nocontent" "title" "No Content" ) }}
`

	b := hugolib.Test(t, files)

	// The external handlers are only asserted when the tool is installed.
	if asciidocext.Supports() {
		b.AssertFileContent("public/docs/asciidoc/index.html",
			"Mark my words, <mark>automation is essential</mark>",
			"Path: /docs/asciidoc|",
		)
	}
	if pandoc.Supports() {
		b.AssertFileContent("public/docs/pandoc/index.html",
			"This <del>is deleted text.</del>",
			"Path: /docs/pandoc|",
		)
	}

	if rst.Supports() {
		b.AssertFileContent("public/docs/rst/index.html",
			"This is <em>bold</em>",
			"Path: /docs/rst|",
		)
	}

	b.AssertFileContent("public/docs/org/index.html",
		"the ability to use <del>strikethrough</del> is a plus",
		"Path: /docs/org|",
	)

	b.AssertFileContent("public/docs/nocontent/index.html", "|Content: |Title: No Content|Path: /docs/nocontent|")
}
// TestPagesFromGoTmplAddPageErrors asserts the error messages produced by
// invalid AddPage calls and by Site methods used before the site is ready.
func TestPagesFromGoTmplAddPageErrors(t *testing.T) {
	filesTemplate := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- content/docs/_content.gotmpl --
{{ $.AddPage DICT }}
`

	t.Run("AddPage, missing Path", func(t *testing.T) {
		files := strings.ReplaceAll(filesTemplate, "DICT", `(dict "kind" "page" "title" "p1")`)
		b, err := hugolib.TestE(t, files)
		b.Assert(err, qt.IsNotNil)
		// The error should carry the template file position.
		b.Assert(err.Error(), qt.Contains, "_content.gotmpl:1:4")
		b.Assert(err.Error(), qt.Contains, "error calling AddPage: path not set")
	})

	t.Run("AddPage, path starting with slash", func(t *testing.T) {
		files := strings.ReplaceAll(filesTemplate, "DICT", `(dict "kind" "page" "title" "p1" "path" "/foo")`)
		b, err := hugolib.TestE(t, files)
		b.Assert(err, qt.IsNotNil)
		b.Assert(err.Error(), qt.Contains, `path "/foo" must not start with a /`)
	})

	t.Run("AddPage, lang set", func(t *testing.T) {
		files := strings.ReplaceAll(filesTemplate, "DICT", `(dict "kind" "page" "path" "p1" "lang" "en")`)
		b, err := hugolib.TestE(t, files)
		b.Assert(err, qt.IsNotNil)
		b.Assert(err.Error(), qt.Contains, "_content.gotmpl:1:4")
		b.Assert(err.Error(), qt.Contains, "error calling AddPage: lang must not be set")
	})

	t.Run("Site methods not ready", func(t *testing.T) {
		filesTemplate := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- content/docs/_content.gotmpl --
{{ .Site.METHOD }}
`

		// All of these require a fully initialized site and must fail cleanly.
		for _, method := range []string{"RegularPages", "Pages", "AllPages", "AllRegularPages", "Home", "Sections", "GetPage", "Menus", "MainSections", "Taxonomies"} {
			t.Run(method, func(t *testing.T) {
				files := strings.ReplaceAll(filesTemplate, "METHOD", method)
				b, err := hugolib.TestE(t, files)
				b.Assert(err, qt.IsNotNil)
				b.Assert(err.Error(), qt.Contains, fmt.Sprintf("error calling %s: this method cannot be called before the site is fully initialized", method))
			})
		}
	})
}
// TestPagesFromGoTmplAddResourceErrors asserts the error message produced by
// an AddResource call without the required "path" key.
func TestPagesFromGoTmplAddResourceErrors(t *testing.T) {
	filesTemplate := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- content/docs/_content.gotmpl --
{{ $.AddResource DICT }}
`

	t.Run("missing Path", func(t *testing.T) {
		files := strings.ReplaceAll(filesTemplate, "DICT", `(dict "name" "r1")`)
		b, err := hugolib.TestE(t, files)
		b.Assert(err, qt.IsNotNil)
		b.Assert(err.Error(), qt.Contains, "error calling AddResource: path not set")
	})
}
// TestPagesFromGoTmplEditGoTmpl verifies that editing the _content.gotmpl
// file itself triggers a rebuild of the pages it creates.
func TestPagesFromGoTmplEditGoTmpl(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.EditFileReplaceAll("content/docs/_content.gotmpl", `"title" "p2title"`, `"title" "p2titleedited"`).Build()
	b.AssertFileContent("public/docs/p2/index.html", "Single: p2titleedited|")
	b.AssertFileContent("public/docs/index.html", "p2titleedited")
}

// TestPagesFromGoTmplEditDataResource verifies that editing a data resource
// consumed by the template rebuilds only the affected pages.
func TestPagesFromGoTmplEditDataResource(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.AssertRenderCountPage(7)
	b.EditFileReplaceAll("assets/mydata.yaml", "p1: \"p1\"", "p1: \"p1edited\"").Build()
	b.AssertFileContent("public/docs/p1/index.html", "Single: p1edited:p1|")
	b.AssertFileContent("public/docs/index.html", "p1edited")
	// Only the changed pages should be re-rendered.
	b.AssertRenderCountPage(3)
}

// TestPagesFromGoTmplEditPartial verifies that editing a partial used by the
// template propagates to the generated pages.
func TestPagesFromGoTmplEditPartial(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.EditFileReplaceAll("layouts/partials/get-value.html", "p1", "p1edited").Build()
	b.AssertFileContent("public/docs/p1/index.html", "Single: p1:p1edited|")
	b.AssertFileContent("public/docs/index.html", "p1edited")
}
// TestPagesFromGoTmplRemovePage verifies that removing an AddPage call from
// the template removes the corresponding page on rebuild.
func TestPagesFromGoTmplRemovePage(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.EditFileReplaceAll("content/docs/_content.gotmpl", `{{ $.AddPage (dict "kind" "page" "path" "p2" "title" "p2title" "dates" $dates "content" $contentHTML ) }}`, "").Build()
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$")
}

// TestPagesFromGoTmplDraftPage verifies that flipping a page's draft flag in
// the template removes it from the site on rebuild.
func TestPagesFromGoTmplDraftPage(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.EditFileReplaceAll("content/docs/_content.gotmpl", `"draft" false`, `"draft" true`).Build()
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p4title:/docs/p4|pfile:/docs/pfile|$")
}

// TestPagesFromGoTmplDraftFlagFromResource verifies that a draft flag read
// from a data resource is honored in both directions when the data changes.
func TestPagesFromGoTmplDraftFlagFromResource(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.EditFileReplaceAll("assets/mydata.yaml", `draft: false`, `draft: true`).Build()
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p3title:/docs/p3|pfile:/docs/pfile|$")
	b.EditFileReplaceAll("assets/mydata.yaml", `draft: true`, `draft: false`).Build()
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$")
}

// TestPagesFromGoTmplMovePage verifies that changing a page's path in the
// template moves the page (old path removed, new path added) on rebuild.
func TestPagesFromGoTmplMovePage(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$")
	b.EditFileReplaceAll("content/docs/_content.gotmpl", `"path" "p2"`, `"path" "p2moved"`).Build()
	b.AssertFileContent("public/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2moved|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$")
}
// TestPagesFromGoTmplRemoveGoTmpl verifies that deleting the _content.gotmpl
// file removes all pages it created, leaving file-based pages intact.
func TestPagesFromGoTmplRemoveGoTmpl(t *testing.T) {
	t.Parallel()
	b := hugolib.TestRunning(t, filesPagesFromDataTempleBasic)
	b.AssertFileContent("public/index.html",
		"RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$",
		"Sections: Docs:/docs|",
	)
	b.AssertFileContent("public/docs/index.html", "RegularPagesRecursive: p1:p1:/docs/p1|p2title:/docs/p2|p3title:/docs/p3|p4title:/docs/p4|pfile:/docs/pfile|$")

	b.RemoveFiles("content/docs/_content.gotmpl").Build()

	// One regular page left.
	b.AssertFileContent("public/index.html",
		"RegularPagesRecursive: pfile:/docs/pfile|$",
		"Sections: Docs:/docs|",
	)
	b.AssertFileContent("public/docs/index.html", "RegularPagesRecursive: pfile:/docs/pfile|$")
}
// TestPagesFromGoTmplLanguagePerFile verifies that a per-language
// _content.LANG.gotmpl file produces pages only for its language,
// including when that language is disabled.
func TestPagesFromGoTmplLanguagePerFile(t *testing.T) {
	filesTemplate := `
-- hugo.toml --
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true
[languages]
[languages.en]
weight = 1
title = "Title"
[languages.fr]
weight = 2
title = "Titre"
disabled = DISABLE
-- layouts/_default/single.html --
Single: {{ .Title }}|{{ .Content }}|
-- content/docs/_content.gotmpl --
{{ $.AddPage (dict "kind" "page" "path" "p1" "title" "Title" ) }}
-- content/docs/_content.fr.gotmpl --
{{ $.AddPage (dict "kind" "page" "path" "p1" "title" "Titre" ) }}
`

	// Exercise both the enabled and disabled French language.
	for _, disable := range []bool{false, true} {
		t.Run(fmt.Sprintf("disable=%t", disable), func(t *testing.T) {
			b := hugolib.Test(t, strings.ReplaceAll(filesTemplate, "DISABLE", fmt.Sprintf("%t", disable)))
			b.AssertFileContent("public/en/docs/p1/index.html", "Single: Title||")
			b.AssertFileExists("public/fr/docs/p1/index.html", !disable)
			if !disable {
				b.AssertFileContent("public/fr/docs/p1/index.html", "Single: Titre||")
			}
		})
	}
}
// TestPagesFromGoTmplEnableAllLanguages verifies EnableAllLanguages: one
// _content.gotmpl executed for every language, with the Store shared between
// the per-language invocations.
func TestPagesFromGoTmplEnableAllLanguages(t *testing.T) {
	t.Parallel()
	filesTemplate := `
-- hugo.toml --
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true
[languages]
[languages.en]
weight = 1
title = "Title"
[languages.fr]
title = "Titre"
weight = 2
disabled = DISABLE
-- i18n/en.yaml --
title: Title
-- i18n/fr.yaml --
title: Titre
-- content/docs/_content.gotmpl --
{{ .EnableAllLanguages }}
{{ $titleFromStore := .Store.Get "title" }}
{{ if not $titleFromStore }}
{{ $titleFromStore = "notfound"}}
{{ .Store.Set "title" site.Title }}
{{ end }}
{{ $title := printf "%s:%s:%s" site.Title (i18n "title") $titleFromStore }}
{{ $.AddPage (dict "kind" "page" "path" "p1" "title" $title ) }}
-- layouts/_default/single.html --
Single: {{ .Title }}|{{ .Content }}|
`

	// Exercise both the enabled and disabled French language.
	for _, disable := range []bool{false, true} {
		t.Run(fmt.Sprintf("disable=%t", disable), func(t *testing.T) {
			b := hugolib.Test(t, strings.ReplaceAll(filesTemplate, "DISABLE", fmt.Sprintf("%t", disable)))
			b.AssertFileExists("public/fr/docs/p1/index.html", !disable)
			if !disable {
				// The en run sees an empty store; fr sees what en stored.
				b.AssertFileContent("public/en/docs/p1/index.html", "Single: Title:Title:notfound||")
				b.AssertFileContent("public/fr/docs/p1/index.html", "Single: Titre:Titre:Title||")
			}
		})
	}
}
// TestPagesFromGoTmplMarkdownify documents a known limitation: markdownify
// cannot be used inside _content.gotmpl because the site is not yet
// fully initialized at that point.
func TestPagesFromGoTmplMarkdownify(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- layouts/_default/single.html --
|Content: {{ .Content }}|Title: {{ .Title }}|Path: {{ .Path }}|
-- content/docs/_content.gotmpl --
{{ $content := "**Hello World**" | markdownify }}
{{ $.AddPage (dict "path" "p1" "content" (dict "value" $content "mediaType" "text/html" )) }}
`
	b, err := hugolib.TestE(t, files)

	// This currently fails. We should fix this, but that is not a trivial task, so do it later.
	b.Assert(err, qt.IsNotNil)
	b.Assert(err.Error(), qt.Contains, "error calling markdownify: this method cannot be called before the site is fully initialized")
}
// TestPagesFromGoTmplResourceWithoutExtensionWithMediaTypeProvided verifies
// that a resource path without a file extension picks up the explicitly
// provided mediaType.
func TestPagesFromGoTmplResourceWithoutExtensionWithMediaTypeProvided(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- layouts/_default/single.html --
|Content: {{ .Content }}|Title: {{ .Title }}|Path: {{ .Path }}|
{{ range .Resources }}
|RelPermalink: {{ .RelPermalink }}|Name: {{ .Name }}|Title: {{ .Title }}|Params: {{ .Params }}|MediaType: {{ .MediaType }}|
{{ end }}
-- content/docs/_content.gotmpl --
{{ $.AddPage (dict "path" "p1" "content" (dict "value" "**Hello World**" "mediaType" "text/markdown" )) }}
{{ $.AddResource (dict "path" "p1/myresource" "content" (dict "value" "abcde" "mediaType" "text/plain" )) }}
`
	b := hugolib.Test(t, files)

	b.AssertFileContent("public/docs/p1/index.html", "RelPermalink: /docs/p1/myresource|Name: myresource|Title: myresource|Params: map[]|MediaType: text/plain|")
}
// TestPagesFromGoTmplCascade verifies that a cascade defined on a section
// page created from the template is applied to its descendant pages.
func TestPagesFromGoTmplCascade(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- layouts/_default/single.html --
|Content: {{ .Content }}|Title: {{ .Title }}|Path: {{ .Path }}|Params: {{ .Params }}|
-- content/_content.gotmpl --
{{ $cascade := dict "params" (dict "cascadeparam1" "cascadeparam1value" ) }}
{{ $.AddPage (dict "path" "docs" "kind" "section" "cascade" $cascade ) }}
{{ $.AddPage (dict "path" "docs/p1" "content" (dict "value" "**Hello World**" "mediaType" "text/markdown" )) }}
`
	b := hugolib.Test(t, files)

	b.AssertFileContent("public/docs/p1/index.html", "|Path: /docs/p1|Params: map[cascadeparam1:cascadeparam1value")
}
// TestPagesFromGoBuildOptions verifies that build options ("build" key) set
// on a page created from the template are honored (here: never render/list).
func TestPagesFromGoBuildOptions(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
baseURL = "https://example.com"
-- layouts/_default/single.html --
|Content: {{ .Content }}|Title: {{ .Title }}|Path: {{ .Path }}|Params: {{ .Params }}|
-- content/_content.gotmpl --
{{ $.AddPage (dict "path" "docs/p1" "content" (dict "value" "**Hello World**" "mediaType" "text/markdown" )) }}
{{ $never := dict "list" "never" "publishResources" false "render" "never" }}
{{ $.AddPage (dict "path" "docs/p2" "content" (dict "value" "**Hello World**" "mediaType" "text/markdown" ) "build" $never ) }}
`
	b := hugolib.Test(t, files)

	b.AssertFileExists("public/docs/p1/index.html", true)
	// p2 has render=never and must not be published.
	b.AssertFileExists("public/docs/p2/index.html", false)
}

View file

@ -0,0 +1,32 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagesfromdata
import "testing"
// BenchmarkHash measures the cost of hashing a typical content adapter map
// via BuildState.hash.
func BenchmarkHash(b *testing.B) {
	input := map[string]any{
		"foo":         "bar",
		"bar":         "foo",
		"stringSlice": []any{"a", "b", "c"},
		"intSlice":    []any{1, 2, 3},
		"largeText":   "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec a diam lectus. Sed sit amet ipsum mauris. Maecenas congue ligula ac quam viverra nec consectetur ante hendrerit.",
	}

	var state BuildState
	for i := 0; i < b.N; i++ {
		state.hash(input)
	}
}

View file

@ -321,7 +321,7 @@ func prepareShortcode(
// Allow the caller to delay the rendering of the shortcode if needed. // Allow the caller to delay the rendering of the shortcode if needed.
var fn shortcodeRenderFunc = func(ctx context.Context) ([]byte, bool, error) { var fn shortcodeRenderFunc = func(ctx context.Context) ([]byte, bool, error) {
if p.m.pageConfig.IsGoldmark && sc.doMarkup { if p.m.pageConfig.ContentMediaType.IsMarkdown() && sc.doMarkup {
// Signal downwards that the content rendered will be // Signal downwards that the content rendered will be
// parsed and rendered by Goldmark. // parsed and rendered by Goldmark.
ctx = tpl.Context.IsInGoldmark.Set(ctx, true) ctx = tpl.Context.IsInGoldmark.Set(ctx, true)
@ -449,7 +449,7 @@ func doRenderShortcode(
// unchanged. // unchanged.
// 2 If inner does not have a newline, strip the wrapping <p> block and // 2 If inner does not have a newline, strip the wrapping <p> block and
// the newline. // the newline.
switch p.m.pageConfig.Markup { switch p.m.pageConfig.Content.Markup {
case "", "markdown": case "", "markdown":
if match, _ := regexp.MatchString(innerNewlineRegexp, inner); !match { if match, _ := regexp.MatchString(innerNewlineRegexp, inner); !match {
cleaner, err := regexp.Compile(innerCleanupRegexp) cleaner, err := regexp.Compile(innerCleanupRegexp)

View file

@ -62,6 +62,7 @@ import (
) )
func (s *Site) Taxonomies() page.TaxonomyList { func (s *Site) Taxonomies() page.TaxonomyList {
s.checkReady()
s.init.taxonomies.Do(context.Background()) s.init.taxonomies.Do(context.Background())
return s.taxonomies return s.taxonomies
} }
@ -204,6 +205,7 @@ type siteRenderingContext struct {
} }
func (s *Site) Menus() navigation.Menus { func (s *Site) Menus() navigation.Menus {
s.checkReady()
s.init.menus.Do(context.Background()) s.init.menus.Do(context.Background())
return s.menus return s.menus
} }
@ -372,19 +374,33 @@ func (s *Site) watching() bool {
type whatChanged struct { type whatChanged struct {
mu sync.Mutex mu sync.Mutex
contentChanged bool needsPagesAssembly bool
identitySet identity.Identities identitySet identity.Identities
} }
func (w *whatChanged) Add(ids ...identity.Identity) { func (w *whatChanged) Add(ids ...identity.Identity) {
w.mu.Lock() w.mu.Lock()
defer w.mu.Unlock() defer w.mu.Unlock()
if w.identitySet == nil {
w.identitySet = make(identity.Identities)
}
for _, id := range ids { for _, id := range ids {
w.identitySet[id] = true w.identitySet[id] = true
} }
} }
func (w *whatChanged) Clear() {
w.mu.Lock()
defer w.mu.Unlock()
w.clear()
}
func (w *whatChanged) clear() {
w.identitySet = identity.Identities{}
}
func (w *whatChanged) Changes() []identity.Identity { func (w *whatChanged) Changes() []identity.Identity {
if w == nil || w.identitySet == nil { if w == nil || w.identitySet == nil {
return nil return nil
@ -392,6 +408,14 @@ func (w *whatChanged) Changes() []identity.Identity {
return w.identitySet.AsSlice() return w.identitySet.AsSlice()
} }
func (w *whatChanged) Drain() []identity.Identity {
w.mu.Lock()
defer w.mu.Unlock()
ids := w.identitySet.AsSlice()
w.clear()
return ids
}
// RegisterMediaTypes will register the Site's media types in the mime // RegisterMediaTypes will register the Site's media types in the mime
// package, so it will behave correctly with Hugo's built-in server. // package, so it will behave correctly with Hugo's built-in server.
func (s *Site) RegisterMediaTypes() { func (s *Site) RegisterMediaTypes() {
@ -786,6 +810,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
// as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }}, // as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
// i.e. 2 arguments, so we test for that. // i.e. 2 arguments, so we test for that.
func (s *Site) GetPage(ref ...string) (page.Page, error) { func (s *Site) GetPage(ref ...string) (page.Page, error) {
s.checkReady()
p, err := s.s.getPageForRefs(ref...) p, err := s.s.getPageForRefs(ref...)
if p == nil { if p == nil {

View file

@ -32,6 +32,7 @@ import (
"github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/config/allconfig"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/hugolib/doctree" "github.com/gohugoio/hugo/hugolib/doctree"
"github.com/gohugoio/hugo/hugolib/pagesfromdata"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/langs/i18n" "github.com/gohugoio/hugo/langs/i18n"
@ -51,7 +52,15 @@ import (
var _ page.Site = (*Site)(nil) var _ page.Site = (*Site)(nil)
type siteState int
const (
siteStateInit siteState = iota
siteStateReady
)
type Site struct { type Site struct {
state siteState
conf *allconfig.Config conf *allconfig.Config
language *langs.Language language *langs.Language
languagei int languagei int
@ -166,7 +175,8 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
treeResources: doctree.New( treeResources: doctree.New(
treeConfig, treeConfig,
), ),
treeTaxonomyEntries: doctree.NewTreeShiftTree[*weightedContentNode](doctree.DimensionLanguage.Index(), len(confm.Languages)), treeTaxonomyEntries: doctree.NewTreeShiftTree[*weightedContentNode](doctree.DimensionLanguage.Index(), len(confm.Languages)),
treePagesFromTemplateAdapters: doctree.NewTreeShiftTree[*pagesfromdata.PagesFromTemplate](doctree.DimensionLanguage.Index(), len(confm.Languages)),
} }
pageTrees.createMutableTrees() pageTrees.createMutableTrees()
@ -415,6 +425,7 @@ func (s *Site) Current() page.Site {
// MainSections returns the list of main sections. // MainSections returns the list of main sections.
func (s *Site) MainSections() []string { func (s *Site) MainSections() []string {
s.checkReady()
return s.conf.C.MainSections return s.conf.C.MainSections
} }
@ -433,6 +444,7 @@ func (s *Site) BaseURL() string {
// Deprecated: Use .Site.Lastmod instead. // Deprecated: Use .Site.Lastmod instead.
func (s *Site) LastChange() time.Time { func (s *Site) LastChange() time.Time {
s.checkReady()
hugo.Deprecate(".Site.LastChange", "Use .Site.Lastmod instead.", "v0.123.0") hugo.Deprecate(".Site.LastChange", "Use .Site.Lastmod instead.", "v0.123.0")
return s.lastmod return s.lastmod
} }
@ -521,6 +533,7 @@ func (s *Site) ForEeachIdentityByName(name string, f func(identity.Identity) boo
// Pages returns all pages. // Pages returns all pages.
// This is for the current language only. // This is for the current language only.
func (s *Site) Pages() page.Pages { func (s *Site) Pages() page.Pages {
s.checkReady()
return s.pageMap.getPagesInSection( return s.pageMap.getPagesInSection(
pageMapQueryPagesInSection{ pageMapQueryPagesInSection{
pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
@ -537,6 +550,7 @@ func (s *Site) Pages() page.Pages {
// RegularPages returns all the regular pages. // RegularPages returns all the regular pages.
// This is for the current language only. // This is for the current language only.
func (s *Site) RegularPages() page.Pages { func (s *Site) RegularPages() page.Pages {
s.checkReady()
return s.pageMap.getPagesInSection( return s.pageMap.getPagesInSection(
pageMapQueryPagesInSection{ pageMapQueryPagesInSection{
pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{ pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
@ -551,10 +565,18 @@ func (s *Site) RegularPages() page.Pages {
// AllPages returns all pages for all sites. // AllPages returns all pages for all sites.
func (s *Site) AllPages() page.Pages { func (s *Site) AllPages() page.Pages {
s.checkReady()
return s.h.Pages() return s.h.Pages()
} }
// AllRegularPages returns all regular pages for all sites. // AllRegularPages returns all regular pages for all sites.
func (s *Site) AllRegularPages() page.Pages { func (s *Site) AllRegularPages() page.Pages {
s.checkReady()
return s.h.RegularPages() return s.h.RegularPages()
} }
func (s *Site) checkReady() {
if s.state != siteStateReady {
panic("this method cannot be called before the site is fully initialized")
}
}

View file

@ -19,10 +19,12 @@ import (
// Sections returns the top level sections. // Sections returns the top level sections.
func (s *Site) Sections() page.Pages { func (s *Site) Sections() page.Pages {
s.checkReady()
return s.Home().Sections() return s.Home().Sections()
} }
// Home is a shortcut to the home page, equivalent to .Site.GetPage "home". // Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
func (s *Site) Home() page.Page { func (s *Site) Home() page.Page {
s.checkReady()
return s.s.home return s.s.home
} }

View file

@ -61,6 +61,7 @@ func (tp *TranslationProvider) NewResource(dst *deps.Deps) error {
hugofs.WalkwayConfig{ hugofs.WalkwayConfig{
Fs: dst.BaseFs.I18n.Fs, Fs: dst.BaseFs.I18n.Fs,
IgnoreFile: dst.SourceSpec.IgnoreFile, IgnoreFile: dst.SourceSpec.IgnoreFile,
PathParser: dst.SourceSpec.Cfg.PathParser(),
WalkFn: func(path string, info hugofs.FileMetaInfo) error { WalkFn: func(path string, info hugofs.FileMetaInfo) error {
if info.IsDir() { if info.IsDir() {
return nil return nil

View file

@ -19,6 +19,7 @@ import (
"strings" "strings"
"github.com/gohugoio/hugo/markup/highlight" "github.com/gohugoio/hugo/markup/highlight"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/markup/markup_config" "github.com/gohugoio/hugo/markup/markup_config"
@ -44,7 +45,7 @@ func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, erro
defaultHandler := mcfg.DefaultMarkdownHandler defaultHandler := mcfg.DefaultMarkdownHandler
var defaultFound bool var defaultFound bool
add := func(p converter.ProviderProvider, aliases ...string) error { add := func(p converter.ProviderProvider, subType string, aliases ...string) error {
c, err := p.New(cfg) c, err := p.New(cfg)
if err != nil { if err != nil {
return err return err
@ -53,6 +54,7 @@ func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, erro
name := c.Name() name := c.Name()
aliases = append(aliases, name) aliases = append(aliases, name)
aliases = append(aliases, subType)
if strings.EqualFold(name, defaultHandler) { if strings.EqualFold(name, defaultHandler) {
aliases = append(aliases, "markdown") aliases = append(aliases, "markdown")
@ -63,19 +65,21 @@ func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, erro
return nil return nil
} }
if err := add(goldmark.Provider); err != nil { contentTypes := cfg.Conf.ContentTypes().(media.ContentTypes)
if err := add(goldmark.Provider, contentTypes.Markdown.SubType, contentTypes.Markdown.Suffixes()...); err != nil {
return nil, err return nil, err
} }
if err := add(asciidocext.Provider, "ad", "adoc"); err != nil { if err := add(asciidocext.Provider, contentTypes.AsciiDoc.SubType, contentTypes.AsciiDoc.Suffixes()...); err != nil {
return nil, err return nil, err
} }
if err := add(rst.Provider); err != nil { if err := add(rst.Provider, contentTypes.ReStructuredText.SubType, contentTypes.ReStructuredText.Suffixes()...); err != nil {
return nil, err return nil, err
} }
if err := add(pandoc.Provider, "pdc"); err != nil { if err := add(pandoc.Provider, contentTypes.Pandoc.SubType, contentTypes.Pandoc.Suffixes()...); err != nil {
return nil, err return nil, err
} }
if err := add(org.Provider); err != nil { if err := add(org.Provider, contentTypes.EmacsOrgMode.SubType, contentTypes.EmacsOrgMode.Suffixes()...); err != nil {
return nil, err return nil, err
} }
@ -133,3 +137,16 @@ func addConverter(m map[string]converter.Provider, c converter.Provider, aliases
m[alias] = c m[alias] = c
} }
} }
// ResolveMarkup returns the markup type.
func ResolveMarkup(s string) string {
s = strings.ToLower(s)
switch s {
case "goldmark":
return media.DefaultContentTypes.Markdown.SubType
case "asciidocext":
return media.DefaultContentTypes.AsciiDoc.SubType
default:
return s
}
}

View file

@ -34,8 +34,12 @@ type BuiltinTypes struct {
OpenTypeFontType Type OpenTypeFontType Type
// Common document types // Common document types
PDFType Type PDFType Type
MarkdownType Type MarkdownType Type
EmacsOrgModeType Type
AsciiDocType Type
PandocType Type
ReStructuredTextType Type
// Common video types // Common video types
AVIType Type AVIType Type
@ -85,8 +89,12 @@ var Builtin = BuiltinTypes{
OpenTypeFontType: Type{Type: "font/otf"}, OpenTypeFontType: Type{Type: "font/otf"},
// Common document types // Common document types
PDFType: Type{Type: "application/pdf"}, PDFType: Type{Type: "application/pdf"},
MarkdownType: Type{Type: "text/markdown"}, MarkdownType: Type{Type: "text/markdown"},
AsciiDocType: Type{Type: "text/asciidoc"}, // https://github.com/asciidoctor/asciidoctor/issues/2502
PandocType: Type{Type: "text/pandoc"},
ReStructuredTextType: Type{Type: "text/rst"}, // https://docutils.sourceforge.io/FAQ.html#what-s-the-official-mime-type-for-restructuredtext-data
EmacsOrgModeType: Type{Type: "text/org"},
// Common video types // Common video types
AVIType: Type{Type: "video/x-msvideo"}, AVIType: Type{Type: "video/x-msvideo"},
@ -108,7 +116,7 @@ var defaultMediaTypesConfig = map[string]any{
"text/x-scss": map[string]any{"suffixes": []string{"scss"}}, "text/x-scss": map[string]any{"suffixes": []string{"scss"}},
"text/x-sass": map[string]any{"suffixes": []string{"sass"}}, "text/x-sass": map[string]any{"suffixes": []string{"sass"}},
"text/csv": map[string]any{"suffixes": []string{"csv"}}, "text/csv": map[string]any{"suffixes": []string{"csv"}},
"text/html": map[string]any{"suffixes": []string{"html"}}, "text/html": map[string]any{"suffixes": []string{"html", "htm"}},
"text/javascript": map[string]any{"suffixes": []string{"js", "jsm", "mjs"}}, "text/javascript": map[string]any{"suffixes": []string{"js", "jsm", "mjs"}},
"text/typescript": map[string]any{"suffixes": []string{"ts"}}, "text/typescript": map[string]any{"suffixes": []string{"ts"}},
"text/tsx": map[string]any{"suffixes": []string{"tsx"}}, "text/tsx": map[string]any{"suffixes": []string{"tsx"}},
@ -137,7 +145,11 @@ var defaultMediaTypesConfig = map[string]any{
// Common document types // Common document types
"application/pdf": map[string]any{"suffixes": []string{"pdf"}}, "application/pdf": map[string]any{"suffixes": []string{"pdf"}},
"text/markdown": map[string]any{"suffixes": []string{"md", "markdown"}}, "text/markdown": map[string]any{"suffixes": []string{"md", "mdown", "markdown"}},
"text/asciidoc": map[string]any{"suffixes": []string{"adoc", "asciidoc", "ad"}},
"text/pandoc": map[string]any{"suffixes": []string{"pandoc", "pdc"}},
"text/rst": map[string]any{"suffixes": []string{"rst"}},
"text/org": map[string]any{"suffixes": []string{"org"}},
// Common video types // Common video types
"video/x-msvideo": map[string]any{"suffixes": []string{"avi"}}, "video/x-msvideo": map[string]any{"suffixes": []string{"avi"}},
@ -152,10 +164,3 @@ var defaultMediaTypesConfig = map[string]any{
"application/octet-stream": map[string]any{}, "application/octet-stream": map[string]any{},
} }
func init() {
// Apply delimiter to all.
for _, m := range defaultMediaTypesConfig {
m.(map[string]any)["delimiter"] = "."
}
}

View file

@ -14,13 +14,14 @@
package media package media
import ( import (
"errors"
"fmt" "fmt"
"path/filepath"
"reflect" "reflect"
"sort" "sort"
"strings" "strings"
"github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
@ -31,6 +32,11 @@ import (
var DefaultTypes Types var DefaultTypes Types
func init() { func init() {
// Apply delimiter to all.
for _, m := range defaultMediaTypesConfig {
m.(map[string]any)["delimiter"] = "."
}
ns, err := DecodeTypes(nil) ns, err := DecodeTypes(nil)
if err != nil { if err != nil {
panic(err) panic(err)
@ -39,17 +45,122 @@ func init() {
// Initialize the Builtin types with values from DefaultTypes. // Initialize the Builtin types with values from DefaultTypes.
v := reflect.ValueOf(&Builtin).Elem() v := reflect.ValueOf(&Builtin).Elem()
for i := 0; i < v.NumField(); i++ { for i := 0; i < v.NumField(); i++ {
f := v.Field(i) f := v.Field(i)
fieldName := v.Type().Field(i).Name
builtinType := f.Interface().(Type) builtinType := f.Interface().(Type)
if builtinType.Type == "" {
panic(fmt.Errorf("builtin type %q is empty", fieldName))
}
defaultType, found := DefaultTypes.GetByType(builtinType.Type) defaultType, found := DefaultTypes.GetByType(builtinType.Type)
if !found { if !found {
panic(errors.New("missing default type for builtin type: " + builtinType.Type)) panic(fmt.Errorf("missing default type for field builtin type: %q", fieldName))
} }
f.Set(reflect.ValueOf(defaultType)) f.Set(reflect.ValueOf(defaultType))
} }
} }
func init() {
DefaultContentTypes = ContentTypes{
HTML: Builtin.HTMLType,
Markdown: Builtin.MarkdownType,
AsciiDoc: Builtin.AsciiDocType,
Pandoc: Builtin.PandocType,
ReStructuredText: Builtin.ReStructuredTextType,
EmacsOrgMode: Builtin.EmacsOrgModeType,
}
DefaultContentTypes.init()
}
var DefaultContentTypes ContentTypes
// ContentTypes holds the media types that are considered content in Hugo.
type ContentTypes struct {
HTML Type
Markdown Type
AsciiDoc Type
Pandoc Type
ReStructuredText Type
EmacsOrgMode Type
// Created in init().
types Types
extensionSet map[string]bool
}
func (t *ContentTypes) init() {
t.types = Types{t.HTML, t.Markdown, t.AsciiDoc, t.Pandoc, t.ReStructuredText, t.EmacsOrgMode}
t.extensionSet = make(map[string]bool)
for _, mt := range t.types {
for _, suffix := range mt.Suffixes() {
t.extensionSet[suffix] = true
}
}
}
func (t ContentTypes) IsContentSuffix(suffix string) bool {
return t.extensionSet[suffix]
}
// IsContentFile returns whether the given filename is a content file.
func (t ContentTypes) IsContentFile(filename string) bool {
return t.IsContentSuffix(strings.TrimPrefix(filepath.Ext(filename), "."))
}
// IsIndexContentFile returns whether the given filename is an index content file.
func (t ContentTypes) IsIndexContentFile(filename string) bool {
if !t.IsContentFile(filename) {
return false
}
base := filepath.Base(filename)
return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.")
}
// IsHTMLSuffix returns whether the given suffix is a HTML media type.
func (t ContentTypes) IsHTMLSuffix(suffix string) bool {
for _, s := range t.HTML.Suffixes() {
if s == suffix {
return true
}
}
return false
}
// Types is a slice of media types.
func (t ContentTypes) Types() Types {
return t.types
}
// FromTypes creates a new ContentTypes updated with the values from the given Types.
func (t ContentTypes) FromTypes(types Types) ContentTypes {
if tt, ok := types.GetByType(t.HTML.Type); ok {
t.HTML = tt
}
if tt, ok := types.GetByType(t.Markdown.Type); ok {
t.Markdown = tt
}
if tt, ok := types.GetByType(t.AsciiDoc.Type); ok {
t.AsciiDoc = tt
}
if tt, ok := types.GetByType(t.Pandoc.Type); ok {
t.Pandoc = tt
}
if tt, ok := types.GetByType(t.ReStructuredText.Type); ok {
t.ReStructuredText = tt
}
if tt, ok := types.GetByType(t.EmacsOrgMode.Type); ok {
t.EmacsOrgMode = tt
}
t.init()
return t
}
// Hold the configuration for a given media type. // Hold the configuration for a given media type.
type MediaTypeConfig struct { type MediaTypeConfig struct {
// The file suffixes used for this media type. // The file suffixes used for this media type.
@ -105,3 +216,10 @@ func DecodeTypes(in map[string]any) (*config.ConfigNamespace[map[string]MediaTyp
} }
return ns, nil return ns, nil
} }
// TODO(bep) get rid of this.
var DefaultPathParser = &paths.PathParser{
IsContentExt: func(ext string) bool {
return DefaultContentTypes.IsContentSuffix(ext)
},
}

View file

@ -114,7 +114,7 @@ func TestDefaultTypes(t *testing.T) {
tp Type tp Type
expectedMainType string expectedMainType string
expectedSubType string expectedSubType string
expectedSuffix string expectedSuffixes string
expectedType string expectedType string
expectedString string expectedString string
}{ }{
@ -122,29 +122,34 @@ func TestDefaultTypes(t *testing.T) {
{Builtin.CSSType, "text", "css", "css", "text/css", "text/css"}, {Builtin.CSSType, "text", "css", "css", "text/css", "text/css"},
{Builtin.SCSSType, "text", "x-scss", "scss", "text/x-scss", "text/x-scss"}, {Builtin.SCSSType, "text", "x-scss", "scss", "text/x-scss", "text/x-scss"},
{Builtin.CSVType, "text", "csv", "csv", "text/csv", "text/csv"}, {Builtin.CSVType, "text", "csv", "csv", "text/csv", "text/csv"},
{Builtin.HTMLType, "text", "html", "html", "text/html", "text/html"}, {Builtin.HTMLType, "text", "html", "html,htm", "text/html", "text/html"},
{Builtin.JavascriptType, "text", "javascript", "js", "text/javascript", "text/javascript"}, {Builtin.MarkdownType, "text", "markdown", "md,mdown,markdown", "text/markdown", "text/markdown"},
{Builtin.EmacsOrgModeType, "text", "org", "org", "text/org", "text/org"},
{Builtin.PandocType, "text", "pandoc", "pandoc,pdc", "text/pandoc", "text/pandoc"},
{Builtin.ReStructuredTextType, "text", "rst", "rst", "text/rst", "text/rst"},
{Builtin.AsciiDocType, "text", "asciidoc", "adoc,asciidoc,ad", "text/asciidoc", "text/asciidoc"},
{Builtin.JavascriptType, "text", "javascript", "js,jsm,mjs", "text/javascript", "text/javascript"},
{Builtin.TypeScriptType, "text", "typescript", "ts", "text/typescript", "text/typescript"}, {Builtin.TypeScriptType, "text", "typescript", "ts", "text/typescript", "text/typescript"},
{Builtin.TSXType, "text", "tsx", "tsx", "text/tsx", "text/tsx"}, {Builtin.TSXType, "text", "tsx", "tsx", "text/tsx", "text/tsx"},
{Builtin.JSXType, "text", "jsx", "jsx", "text/jsx", "text/jsx"}, {Builtin.JSXType, "text", "jsx", "jsx", "text/jsx", "text/jsx"},
{Builtin.JSONType, "application", "json", "json", "application/json", "application/json"}, {Builtin.JSONType, "application", "json", "json", "application/json", "application/json"},
{Builtin.RSSType, "application", "rss", "xml", "application/rss+xml", "application/rss+xml"}, {Builtin.RSSType, "application", "rss", "xml,rss", "application/rss+xml", "application/rss+xml"},
{Builtin.SVGType, "image", "svg", "svg", "image/svg+xml", "image/svg+xml"}, {Builtin.SVGType, "image", "svg", "svg", "image/svg+xml", "image/svg+xml"},
{Builtin.TextType, "text", "plain", "txt", "text/plain", "text/plain"}, {Builtin.TextType, "text", "plain", "txt", "text/plain", "text/plain"},
{Builtin.XMLType, "application", "xml", "xml", "application/xml", "application/xml"}, {Builtin.XMLType, "application", "xml", "xml", "application/xml", "application/xml"},
{Builtin.TOMLType, "application", "toml", "toml", "application/toml", "application/toml"}, {Builtin.TOMLType, "application", "toml", "toml", "application/toml", "application/toml"},
{Builtin.YAMLType, "application", "yaml", "yaml", "application/yaml", "application/yaml"}, {Builtin.YAMLType, "application", "yaml", "yaml,yml", "application/yaml", "application/yaml"},
{Builtin.PDFType, "application", "pdf", "pdf", "application/pdf", "application/pdf"}, {Builtin.PDFType, "application", "pdf", "pdf", "application/pdf", "application/pdf"},
{Builtin.TrueTypeFontType, "font", "ttf", "ttf", "font/ttf", "font/ttf"}, {Builtin.TrueTypeFontType, "font", "ttf", "ttf", "font/ttf", "font/ttf"},
{Builtin.OpenTypeFontType, "font", "otf", "otf", "font/otf", "font/otf"}, {Builtin.OpenTypeFontType, "font", "otf", "otf", "font/otf", "font/otf"},
} { } {
c.Assert(test.tp.MainType, qt.Equals, test.expectedMainType) c.Assert(test.tp.MainType, qt.Equals, test.expectedMainType)
c.Assert(test.tp.SubType, qt.Equals, test.expectedSubType) c.Assert(test.tp.SubType, qt.Equals, test.expectedSubType)
c.Assert(test.tp.SuffixesCSV, qt.Equals, test.expectedSuffixes)
c.Assert(test.tp.Type, qt.Equals, test.expectedType) c.Assert(test.tp.Type, qt.Equals, test.expectedType)
c.Assert(test.tp.String(), qt.Equals, test.expectedString) c.Assert(test.tp.String(), qt.Equals, test.expectedString)
} }
c.Assert(len(DefaultTypes), qt.Equals, 36) c.Assert(len(DefaultTypes), qt.Equals, 40)
} }

View file

@ -117,13 +117,16 @@ func FromContent(types Types, extensionHints []string, content []byte) Type {
return m return m
} }
// FromStringAndExt creates a Type from a MIME string and a given extension. // FromStringAndExt creates a Type from a MIME string and a given extensions
func FromStringAndExt(t, ext string) (Type, error) { func FromStringAndExt(t string, ext ...string) (Type, error) {
tp, err := FromString(t) tp, err := FromString(t)
if err != nil { if err != nil {
return tp, err return tp, err
} }
tp.SuffixesCSV = strings.TrimPrefix(ext, ".") for i, e := range ext {
ext[i] = strings.TrimPrefix(e, ".")
}
tp.SuffixesCSV = strings.Join(ext, ",")
tp.Delimiter = DefaultDelimiter tp.Delimiter = DefaultDelimiter
tp.init() tp.init()
return tp, nil return tp, nil
@ -187,6 +190,16 @@ func (m Type) IsText() bool {
return false return false
} }
// For internal use.
func (m Type) IsHTML() bool {
return m.SubType == Builtin.HTMLType.SubType
}
// For internal use.
func (m Type) IsMarkdown() bool {
return m.SubType == Builtin.MarkdownType.SubType
}
func InitMediaType(m *Type) { func InitMediaType(m *Type) {
m.init() m.init()
} }
@ -221,6 +234,26 @@ func (t Types) Len() int { return len(t) }
func (t Types) Swap(i, j int) { t[i], t[j] = t[j], t[i] } func (t Types) Swap(i, j int) { t[i], t[j] = t[j], t[i] }
func (t Types) Less(i, j int) bool { return t[i].Type < t[j].Type } func (t Types) Less(i, j int) bool { return t[i].Type < t[j].Type }
// GetBestMatch returns the best match for the given media type string.
func (t Types) GetBestMatch(s string) (Type, bool) {
// First try an exact match.
if mt, found := t.GetByType(s); found {
return mt, true
}
// Try main type.
if mt, found := t.GetBySubType(s); found {
return mt, true
}
// Try extension.
if mt, _, found := t.GetFirstBySuffix(s); found {
return mt, true
}
return Type{}, false
}
// GetByType returns a media type for tp. // GetByType returns a media type for tp.
func (t Types) GetByType(tp string) (Type, bool) { func (t Types) GetByType(tp string) (Type, bool) {
for _, tt := range t { for _, tt := range t {
@ -324,6 +357,22 @@ func (t Types) GetByMainSubType(mainType, subType string) (tp Type, found bool)
return return
} }
// GetBySubType gets a media type given a sub type e.g. "plain".
func (t Types) GetBySubType(subType string) (tp Type, found bool) {
for _, tt := range t {
if strings.EqualFold(subType, tt.SubType) {
if found {
// ambiguous
found = false
return
}
tp = tt
found = true
}
}
return
}
// IsZero reports whether this Type represents a zero value. // IsZero reports whether this Type represents a zero value.
// For internal use. // For internal use.
func (m Type) IsZero() bool { func (m Type) IsZero() bool {

View file

@ -115,10 +115,10 @@ func TestFromTypeString(t *testing.T) {
func TestFromStringAndExt(t *testing.T) { func TestFromStringAndExt(t *testing.T) {
c := qt.New(t) c := qt.New(t)
f, err := FromStringAndExt("text/html", "html") f, err := FromStringAndExt("text/html", "html", "htm")
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(f, qt.Equals, Builtin.HTMLType) c.Assert(f, qt.Equals, Builtin.HTMLType)
f, err = FromStringAndExt("text/html", ".html") f, err = FromStringAndExt("text/html", ".html", ".htm")
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(f, qt.Equals, Builtin.HTMLType) c.Assert(f, qt.Equals, Builtin.HTMLType)
} }
@ -214,3 +214,11 @@ func BenchmarkTypeOps(b *testing.B) {
} }
} }
func TestIsContentFile(t *testing.T) {
c := qt.New(t)
c.Assert(DefaultContentTypes.IsContentFile(filepath.FromSlash("my/file.md")), qt.Equals, true)
c.Assert(DefaultContentTypes.IsContentFile(filepath.FromSlash("my/file.ad")), qt.Equals, true)
c.Assert(DefaultContentTypes.IsContentFile(filepath.FromSlash("textfile.txt")), qt.Equals, false)
}

View file

@ -104,7 +104,6 @@ func InterfaceToFrontMatter(in any, format metadecoders.Format, w io.Writer) err
} }
err = InterfaceToConfig(in, format, w) err = InterfaceToConfig(in, format, w)
if err != nil { if err != nil {
return err return err
} }

View file

@ -57,7 +57,7 @@ var (
// PageNop implements Page, but does nothing. // PageNop implements Page, but does nothing.
type nopPage int type nopPage int
var noOpPathInfo = paths.Parse(files.ComponentFolderContent, "no-op.md") var noOpPathInfo = media.DefaultPathParser.Parse(files.ComponentFolderContent, "no-op.md")
func (p *nopPage) Err() resource.ResourceError { func (p *nopPage) Err() resource.ResourceError {
return nil return nil

View file

@ -14,14 +14,24 @@
package pagemeta package pagemeta
import ( import (
"errors"
"fmt"
"path"
"strings" "strings"
"time" "time"
"github.com/gohugoio/hugo/common/hreflect"
"github.com/gohugoio/hugo/common/htime" "github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/markup"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -29,6 +39,13 @@ import (
"github.com/spf13/cast" "github.com/spf13/cast"
) )
type DatesStrings struct {
Date string `json:"date"`
Lastmod string `json:"lastMod"`
PublishDate string `json:"publishDate"`
ExpiryDate string `json:"expiryDate"`
}
type Dates struct { type Dates struct {
Date time.Time Date time.Time
Lastmod time.Time Lastmod time.Time
@ -57,40 +74,221 @@ func (d Dates) IsAllDatesZero() bool {
// Note that all the top level fields are reserved Hugo keywords. // Note that all the top level fields are reserved Hugo keywords.
// Any custom configuration needs to be set in the Params map. // Any custom configuration needs to be set in the Params map.
type PageConfig struct { type PageConfig struct {
Dates // Dates holds the four core dates for this page. Dates Dates `json:"-"` // Dates holds the four core dates for this page.
DatesStrings
Title string // The title of the page. Title string // The title of the page.
LinkTitle string // The link title of the page. LinkTitle string // The link title of the page.
Type string // The content type of the page. Type string // The content type of the page.
Layout string // The layout to use for to render this page. Layout string // The layout to use for to render this page.
Markup string // The markup used in the content file.
Weight int // The weight of the page, used in sorting if set to a non-zero value. Weight int // The weight of the page, used in sorting if set to a non-zero value.
Kind string // The kind of page, e.g. "page", "section", "home" etc. This is usually derived from the content path. Kind string // The kind of page, e.g. "page", "section", "home" etc. This is usually derived from the content path.
Path string // The canonical path to the page, e.g. /sect/mypage. Note: Leading slash, no trailing slash, no extensions or language identifiers. Path string // The canonical path to the page, e.g. /sect/mypage. Note: Leading slash, no trailing slash, no extensions or language identifiers.
URL string // The URL to the rendered page, e.g. /sect/mypage.html.
Lang string // The language code for this page. This is usually derived from the module mount or filename. Lang string // The language code for this page. This is usually derived from the module mount or filename.
URL string // The URL to the rendered page, e.g. /sect/mypage.html.
Slug string // The slug for this page. Slug string // The slug for this page.
Description string // The description for this page. Description string // The description for this page.
Summary string // The summary for this page. Summary string // The summary for this page.
Draft bool // Whether or not the content is a draft. Draft bool // Whether or not the content is a draft.
Headless bool // Whether or not the page should be rendered. Headless bool `json:"-"` // Whether or not the page should be rendered.
IsCJKLanguage bool // Whether or not the content is in a CJK language. IsCJKLanguage bool // Whether or not the content is in a CJK language.
TranslationKey string // The translation key for this page. TranslationKey string // The translation key for this page.
Keywords []string // The keywords for this page. Keywords []string // The keywords for this page.
Aliases []string // The aliases for this page. Aliases []string // The aliases for this page.
Outputs []string // The output formats to render this page in. If not set, the site's configured output formats for this page kind will be used. Outputs []string // The output formats to render this page in. If not set, the site's configured output formats for this page kind will be used.
// These build options are set in the front matter, FrontMatterOnlyValues `mapstructure:"-" json:"-"`
// but not passed on to .Params.
Resources []map[string]any Cascade []map[string]any
Cascade map[page.PageMatcher]maps.Params // Only relevant for branch nodes. Sitemap config.SitemapConfig
Sitemap config.SitemapConfig Build BuildConfig
Build BuildConfig
// User defined params. // User defined params.
Params maps.Params Params maps.Params
// Content holds the content for this page.
Content Source
// Compiled values. // Compiled values.
IsGoldmark bool `json:"-"` CascadeCompiled map[page.PageMatcher]maps.Params
ContentMediaType media.Type `mapstructure:"-" json:"-"`
IsFromContentAdapter bool `mapstructure:"-" json:"-"`
}
var DefaultPageConfig = PageConfig{
Build: DefaultBuildConfig,
}
func (p *PageConfig) Validate(pagesFromData bool) error {
if pagesFromData {
if p.Path == "" {
return errors.New("path must be set")
}
if strings.HasPrefix(p.Path, "/") {
return fmt.Errorf("path %q must not start with a /", p.Path)
}
if p.Lang != "" {
return errors.New("lang must not be set")
}
if p.Content.Markup != "" {
return errors.New("markup must not be set, use mediaType")
}
}
if p.Cascade != nil {
if !kinds.IsBranch(p.Kind) {
return errors.New("cascade is only supported for branch nodes")
}
}
return nil
}
// Compile sets up the page configuration after all fields have been set.
func (p *PageConfig) Compile(basePath string, pagesFromData bool, ext string, logger loggers.Logger, mediaTypes media.Types) error {
// In content adapters, we always get relative paths.
if basePath != "" {
p.Path = path.Join(basePath, p.Path)
}
if pagesFromData {
// Note that NormalizePathStringBasic will make sure that we don't preserve the unnormalized path.
// We do that when we create pages from the file system; mostly for backward compatibility,
// but also because people tend to use use the filename to name their resources (with spaces and all),
// and this isn't relevant when creating resources from an API where it's easy to add textual meta data.
p.Path = paths.NormalizePathStringBasic(p.Path)
}
if p.Content.Markup == "" && p.Content.MediaType == "" {
if ext == "" {
ext = "md"
}
p.ContentMediaType = MarkupToMediaType(ext, mediaTypes)
if p.ContentMediaType.IsZero() {
return fmt.Errorf("failed to resolve media type for suffix %q", ext)
}
}
var s string
if p.ContentMediaType.IsZero() {
if p.Content.MediaType != "" {
s = p.Content.MediaType
p.ContentMediaType, _ = mediaTypes.GetByType(s)
}
if p.ContentMediaType.IsZero() && p.Content.Markup != "" {
s = p.Content.Markup
p.ContentMediaType = MarkupToMediaType(s, mediaTypes)
}
}
if p.ContentMediaType.IsZero() {
return fmt.Errorf("failed to resolve media type for %q", s)
}
if p.Content.Markup == "" {
p.Content.Markup = p.ContentMediaType.SubType
}
if p.Cascade != nil {
cascade, err := page.DecodeCascade(logger, p.Cascade)
if err != nil {
return fmt.Errorf("failed to decode cascade: %w", err)
}
p.CascadeCompiled = cascade
}
return nil
}
// MarkupToMediaType converts a markup string to a media type.
func MarkupToMediaType(s string, mediaTypes media.Types) media.Type {
s = strings.ToLower(s)
mt, _ := mediaTypes.GetBestMatch(markup.ResolveMarkup(s))
return mt
}
type ResourceConfig struct {
Path string
Name string
Title string
Params maps.Params
Content Source
// Compiled values.
PathInfo *paths.Path `mapstructure:"-" json:"-"`
ContentMediaType media.Type
}
func (rc *ResourceConfig) Validate() error {
if rc.Path == "" {
return errors.New("path must be set")
}
if rc.Content.Markup != "" {
return errors.New("markup must not be set, use mediaType")
}
return nil
}
// Compile normalizes rc and fills in its compiled fields (PathInfo,
// ContentMediaType). It returns an error if Content.MediaType is set but
// unknown to mediaTypes.
func (rc *ResourceConfig) Compile(basePath string, pathParser *paths.PathParser, mediaTypes media.Types) error {
	if rc.Params != nil {
		maps.PrepareParams(rc.Params)
	}

	// Note that NormalizePathStringBasic will make sure that we don't preserve
	// the unnormalized path. We do that when we create resources from the file
	// system; mostly for backward compatibility, but also because people tend
	// to use the filename to name their resources (with spaces and all), and
	// this isn't relevant when creating resources from an API where it's easy
	// to add textual meta data.
	joined := path.Join(basePath, rc.Path)
	rc.Path = paths.NormalizePathStringBasic(joined)
	rc.PathInfo = pathParser.Parse(files.ComponentFolderContent, rc.Path)

	if rc.Content.MediaType == "" {
		return nil
	}

	var found bool
	rc.ContentMediaType, found = mediaTypes.GetByType(rc.Content.MediaType)
	if !found {
		return fmt.Errorf("media type %q not found", rc.Content.MediaType)
	}

	return nil
}
// Source holds the content source of a page or resource created via a
// content adapter.
type Source struct {
	// MediaType is the media type of the content.
	MediaType string

	// Markup is the markup used in Value. Only used in front matter;
	// resources must use MediaType instead (see ResourceConfig.Validate).
	Markup string

	// Value is the content itself; typically a string or a resource.Resource
	// (see ValueAsString and IsResourceValue).
	Value any
}
// IsZero reports whether this source has no content value set.
func (s Source) IsZero() bool {
	hasValue := hreflect.IsTruthful(s.Value)
	return !hasValue
}
// IsResourceValue reports whether the content value is a resource.Resource.
func (s Source) IsResourceValue() bool {
	switch s.Value.(type) {
	case resource.Resource:
		return true
	default:
		return false
	}
}
// ValueAsString returns the content value as a string. It panics if the value
// cannot be converted, which indicates a programmer error upstream.
func (s Source) ValueAsString() string {
	if s.Value == nil {
		return ""
	}

	converted, err := cast.ToStringE(s.Value)
	if err != nil {
		panic(fmt.Errorf("content source: failed to convert %T to string: %s", s.Value, err))
	}

	return converted
}
// ValueAsOpenReadSeekCloser returns the content value as a reopenable
// ReadSeekCloser backed by the string form of the value.
func (s Source) ValueAsOpenReadSeekCloser() hugio.OpenReadSeekCloser {
	r := hugio.NewReadSeekerNoOpCloserFromString(s.ValueAsString())
	return hugio.NewOpenReadSeekCloser(r)
}
// FrontMatterOnlyValues holds values that can only be set via front matter.
type FrontMatterOnlyValues struct {
	// ResourcesMeta holds the raw resource metadata maps from front matter.
	ResourcesMeta []map[string]any
}
// FrontMatterHandler maps front matter into Page fields and .Params. // FrontMatterHandler maps front matter into Page fields and .Params.
@ -98,6 +296,8 @@ type PageConfig struct {
type FrontMatterHandler struct { type FrontMatterHandler struct {
fmConfig FrontmatterConfig fmConfig FrontmatterConfig
contentAdapterDatesHandler func(d *FrontMatterDescriptor) error
dateHandler frontMatterFieldHandler dateHandler frontMatterFieldHandler
lastModHandler frontMatterFieldHandler lastModHandler frontMatterFieldHandler
publishDateHandler frontMatterFieldHandler publishDateHandler frontMatterFieldHandler
@ -144,6 +344,13 @@ func (f FrontMatterHandler) HandleDates(d *FrontMatterDescriptor) error {
panic("missing pageConfig") panic("missing pageConfig")
} }
if d.PageConfig.IsFromContentAdapter {
if f.contentAdapterDatesHandler == nil {
panic("missing content adapter date handler")
}
return f.contentAdapterDatesHandler(d)
}
if f.dateHandler == nil { if f.dateHandler == nil {
panic("missing date handler") panic("missing date handler")
} }
@ -352,9 +559,13 @@ func NewFrontmatterHandler(logger loggers.Logger, frontMatterConfig FrontmatterC
func (f *FrontMatterHandler) createHandlers() error { func (f *FrontMatterHandler) createHandlers() error {
var err error var err error
if f.contentAdapterDatesHandler, err = f.createContentAdapterDatesHandler(f.fmConfig); err != nil {
return err
}
if f.dateHandler, err = f.createDateHandler(f.fmConfig.Date, if f.dateHandler, err = f.createDateHandler(f.fmConfig.Date,
func(d *FrontMatterDescriptor, t time.Time) { func(d *FrontMatterDescriptor, t time.Time) {
d.PageConfig.Date = t d.PageConfig.Dates.Date = t
setParamIfNotSet(fmDate, t, d) setParamIfNotSet(fmDate, t, d)
}); err != nil { }); err != nil {
return err return err
@ -363,7 +574,7 @@ func (f *FrontMatterHandler) createHandlers() error {
if f.lastModHandler, err = f.createDateHandler(f.fmConfig.Lastmod, if f.lastModHandler, err = f.createDateHandler(f.fmConfig.Lastmod,
func(d *FrontMatterDescriptor, t time.Time) { func(d *FrontMatterDescriptor, t time.Time) {
setParamIfNotSet(fmLastmod, t, d) setParamIfNotSet(fmLastmod, t, d)
d.PageConfig.Lastmod = t d.PageConfig.Dates.Lastmod = t
}); err != nil { }); err != nil {
return err return err
} }
@ -371,7 +582,7 @@ func (f *FrontMatterHandler) createHandlers() error {
if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.PublishDate, if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.PublishDate,
func(d *FrontMatterDescriptor, t time.Time) { func(d *FrontMatterDescriptor, t time.Time) {
setParamIfNotSet(fmPubDate, t, d) setParamIfNotSet(fmPubDate, t, d)
d.PageConfig.PublishDate = t d.PageConfig.Dates.PublishDate = t
}); err != nil { }); err != nil {
return err return err
} }
@ -379,7 +590,7 @@ func (f *FrontMatterHandler) createHandlers() error {
if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.ExpiryDate, if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.ExpiryDate,
func(d *FrontMatterDescriptor, t time.Time) { func(d *FrontMatterDescriptor, t time.Time) {
setParamIfNotSet(fmExpiryDate, t, d) setParamIfNotSet(fmExpiryDate, t, d)
d.PageConfig.ExpiryDate = t d.PageConfig.Dates.ExpiryDate = t
}); err != nil { }); err != nil {
return err return err
} }
@ -394,6 +605,86 @@ func setParamIfNotSet(key string, value any, d *FrontMatterDescriptor) {
d.PageConfig.Params[key] = value d.PageConfig.Params[key] = value
} }
// createContentAdapterDatesHandler returns the date handler used for pages
// created from content adapters. For these pages the date values are already
// parsed into PageConfig.Dates, so this handler only applies the configured
// fallback chains between the four date fields (date, lastmod, publishDate,
// expiryDate). The returned error is currently always nil.
func (f FrontMatterHandler) createContentAdapterDatesHandler(fmcfg FrontmatterConfig) (func(d *FrontMatterDescriptor) error, error) {
	// setTime writes value into the date field identified by key.
	setTime := func(key string, value time.Time, in *PageConfig) {
		switch key {
		case fmDate:
			in.Dates.Date = value
		case fmLastmod:
			in.Dates.Lastmod = value
		case fmPubDate:
			in.Dates.PublishDate = value
		case fmExpiryDate:
			in.Dates.ExpiryDate = value
		}
	}

	// getTime reads the date field identified by key; zero time for unknown keys.
	getTime := func(key string, in *PageConfig) time.Time {
		switch key {
		case fmDate:
			return in.Dates.Date
		case fmLastmod:
			return in.Dates.Lastmod
		case fmPubDate:
			return in.Dates.PublishDate
		case fmExpiryDate:
			return in.Dates.ExpiryDate
		}
		return time.Time{}
	}

	// createSetter builds a setter for the date field named by date. The setter
	// assigns the first non-zero value found among the configured identifiers,
	// in order. Identifiers prefixed with ":" (special sources) are skipped —
	// presumably they don't apply to content adapters; confirm against the
	// regular date handlers.
	createSetter := func(identifiers []string, date string) func(pcfg *PageConfig) {
		var getTimes []func(in *PageConfig) time.Time
		for _, identifier := range identifiers {
			if strings.HasPrefix(identifier, ":") {
				continue
			}
			// The explicit per-identifier cases avoid closing over the loop
			// variable (a capture pitfall before Go 1.22).
			switch identifier {
			case fmDate:
				getTimes = append(getTimes, func(in *PageConfig) time.Time {
					return getTime(fmDate, in)
				})
			case fmLastmod:
				getTimes = append(getTimes, func(in *PageConfig) time.Time {
					return getTime(fmLastmod, in)
				})
			case fmPubDate:
				getTimes = append(getTimes, func(in *PageConfig) time.Time {
					return getTime(fmPubDate, in)
				})
			case fmExpiryDate:
				getTimes = append(getTimes, func(in *PageConfig) time.Time {
					return getTime(fmExpiryDate, in)
				})
			}
		}

		return func(pcfg *PageConfig) {
			// Use the first configured source that has a non-zero value.
			for _, get := range getTimes {
				if t := get(pcfg); !t.IsZero() {
					setTime(date, t, pcfg)
					return
				}
			}
		}
	}

	setDate := createSetter(fmcfg.Date, fmDate)
	setLastmod := createSetter(fmcfg.Lastmod, fmLastmod)
	setPublishDate := createSetter(fmcfg.PublishDate, fmPubDate)
	setExpiryDate := createSetter(fmcfg.ExpiryDate, fmExpiryDate)

	fn := func(d *FrontMatterDescriptor) error {
		pcfg := d.PageConfig
		setDate(pcfg)
		setLastmod(pcfg)
		setPublishDate(pcfg)
		setExpiryDate(pcfg)
		return nil
	}

	return fn, nil
}
func (f FrontMatterHandler) createDateHandler(identifiers []string, setter func(d *FrontMatterDescriptor, t time.Time)) (frontMatterFieldHandler, error) { func (f FrontMatterHandler) createDateHandler(identifiers []string, setter func(d *FrontMatterDescriptor, t time.Time)) (frontMatterFieldHandler, error) {
var h *frontmatterFieldHandlers var h *frontmatterFieldHandlers
var handlers []frontMatterFieldHandler var handlers []frontMatterFieldHandler

View file

@ -18,8 +18,10 @@ import (
"testing" "testing"
"time" "time"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/testconfig" "github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page/pagemeta" "github.com/gohugoio/hugo/resources/page/pagemeta"
@ -148,3 +150,32 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
c.Assert(d.PageConfig.Dates.PublishDate.Day(), qt.Equals, 4) c.Assert(d.PageConfig.Dates.PublishDate.Day(), qt.Equals, 4)
c.Assert(d.PageConfig.Dates.ExpiryDate.IsZero(), qt.Equals, true) c.Assert(d.PageConfig.Dates.ExpiryDate.IsZero(), qt.Equals, true)
} }
// TestContentMediaTypeFromMarkup verifies that PageConfig.Compile resolves the
// correct content media type for each supported markup identifier.
func TestContentMediaTypeFromMarkup(t *testing.T) {
	c := qt.New(t)
	logger := loggers.NewDefault()

	tests := []struct {
		in       string
		expected string
	}{
		{"", "text/markdown"},
		{"md", "text/markdown"},
		{"markdown", "text/markdown"},
		{"mdown", "text/markdown"},
		{"goldmark", "text/markdown"},
		{"html", "text/html"},
		{"htm", "text/html"},
		{"asciidoc", "text/asciidoc"},
		{"asciidocext", "text/asciidoc"},
		{"adoc", "text/asciidoc"},
		{"pandoc", "text/pandoc"},
		{"pdc", "text/pandoc"},
		{"rst", "text/rst"},
	}

	for _, tt := range tests {
		var pc pagemeta.PageConfig
		pc.Content.Markup = tt.in
		c.Assert(pc.Compile("", true, "", logger, media.DefaultTypes), qt.IsNil)
		c.Assert(pc.ContentMediaType.Type, qt.Equals, tt.expected)
	}
}

View file

@ -24,7 +24,7 @@ const (
Link = "link" Link = "link"
) )
var defaultBuildConfig = BuildConfig{ var DefaultBuildConfig = BuildConfig{
List: Always, List: Always,
Render: Always, Render: Always,
PublishResources: true, PublishResources: true,
@ -69,7 +69,7 @@ func (b BuildConfig) IsZero() bool {
} }
func DecodeBuildConfig(m any) (BuildConfig, error) { func DecodeBuildConfig(m any) (BuildConfig, error) {
b := defaultBuildConfig b := DefaultBuildConfig
if m == nil { if m == nil {
return b, nil return b, nil
} }

View file

@ -36,6 +36,8 @@ func TestCreatePlaceholders(t *testing.T) {
"SuffixesCSV": "pre_foo.SuffixesCSV_post", "SuffixesCSV": "pre_foo.SuffixesCSV_post",
"Delimiter": "pre_foo.Delimiter_post", "Delimiter": "pre_foo.Delimiter_post",
"FirstSuffix": "pre_foo.FirstSuffix_post", "FirstSuffix": "pre_foo.FirstSuffix_post",
"IsHTML": "pre_foo.IsHTML_post",
"IsMarkdown": "pre_foo.IsMarkdown_post",
"IsText": "pre_foo.IsText_post", "IsText": "pre_foo.IsText_post",
"String": "pre_foo.String_post", "String": "pre_foo.String_post",
"Type": "pre_foo.Type_post", "Type": "pre_foo.Type_post",

View file

@ -65,6 +65,9 @@ type ResourceSourceDescriptor struct {
// The name of the resource as it was read from the source. // The name of the resource as it was read from the source.
NameOriginal string NameOriginal string
// The title of the resource.
Title string
// Any base paths prepended to the target path. This will also typically be the // Any base paths prepended to the target path. This will also typically be the
// language code, but setting it here means that it should not have any effect on // language code, but setting it here means that it should not have any effect on
// the permalink. // the permalink.
@ -79,6 +82,9 @@ type ResourceSourceDescriptor struct {
// The Data to associate with this resource. // The Data to associate with this resource.
Data map[string]any Data map[string]any
// The Params to associate with this resource.
Params maps.Params
// Delay publishing until either Permalink or RelPermalink is called. Maybe never. // Delay publishing until either Permalink or RelPermalink is called. Maybe never.
LazyPublish bool LazyPublish bool
@ -107,8 +113,12 @@ func (fd *ResourceSourceDescriptor) init(r *Spec) error {
panic(errors.New("RelPath is empty")) panic(errors.New("RelPath is empty"))
} }
if fd.Params == nil {
fd.Params = make(maps.Params)
}
if fd.Path == nil { if fd.Path == nil {
fd.Path = paths.Parse("", fd.TargetPath) fd.Path = r.Cfg.PathParser().Parse("", fd.TargetPath)
} }
if fd.TargetPath == "" { if fd.TargetPath == "" {
@ -143,6 +153,10 @@ func (fd *ResourceSourceDescriptor) init(r *Spec) error {
fd.NameOriginal = fd.NameNormalized fd.NameOriginal = fd.NameNormalized
} }
if fd.Title == "" {
fd.Title = fd.NameOriginal
}
mediaType := fd.MediaType mediaType := fd.MediaType
if mediaType.IsZero() { if mediaType.IsZero() {
ext := fd.Path.Ext() ext := fd.Path.Ext()

View file

@ -74,15 +74,23 @@ type ErrProvider interface {
// Resource represents a linkable resource, i.e. a content page, image etc. // Resource represents a linkable resource, i.e. a content page, image etc.
type Resource interface { type Resource interface {
ResourceWithoutMeta
ResourceMetaProvider
}
type ResourceWithoutMeta interface {
ResourceTypeProvider ResourceTypeProvider
MediaTypeProvider MediaTypeProvider
ResourceLinksProvider ResourceLinksProvider
ResourceNameTitleProvider
ResourceParamsProvider
ResourceDataProvider ResourceDataProvider
ErrProvider ErrProvider
} }
type ResourceWrapper interface {
UnwrappedResource() Resource
WrapResource(Resource) ResourceWrapper
}
type ResourceTypeProvider interface { type ResourceTypeProvider interface {
// ResourceType is the resource type. For most file types, this is the main // ResourceType is the resource type. For most file types, this is the main
// part of the MIME type, e.g. "image", "application", "text" etc. // part of the MIME type, e.g. "image", "application", "text" etc.

View file

@ -20,6 +20,7 @@ import (
"github.com/gohugoio/hugo/hugofs/glob" "github.com/gohugoio/hugo/hugofs/glob"
"github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/resources/resource" "github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cast" "github.com/spf13/cast"
@ -90,7 +91,33 @@ func (r *metaResource) updateParams(params map[string]any) {
r.changed = true r.changed = true
} }
func CloneWithMetadataIfNeeded(m []map[string]any, r resource.Resource) resource.Resource { // cloneWithMetadataFromResourceConfigIfNeeded clones the given resource with the given metadata if the resource supports it.
// cloneWithMetadataFromResourceConfigIfNeeded clones the given resource with
// the metadata (name, title, params) from rc if the resource supports it and
// any metadata is set. Note that it may mutate rc by defaulting Title to Name.
func cloneWithMetadataFromResourceConfigIfNeeded(rc *pagemeta.ResourceConfig, r resource.Resource) resource.Resource {
	wmp, ok := r.(resource.WithResourceMetaProvider)
	if !ok {
		// The resource cannot take new metadata; return it unchanged.
		return r
	}

	hasMeta := rc.Name != "" || rc.Title != "" || len(rc.Params) > 0
	if !hasMeta {
		return r
	}

	// Default the title to the name.
	if rc.Title == "" {
		rc.Title = rc.Name
	}

	meta := &metaResource{
		name:   rc.Name,
		title:  rc.Title,
		params: rc.Params,
	}

	return wmp.WithResourceMeta(meta)
}
// CloneWithMetadataFromMapIfNeeded clones the given resource with the given metadata if the resource supports it.
func CloneWithMetadataFromMapIfNeeded(m []map[string]any, r resource.Resource) resource.Resource {
wmp, ok := r.(resource.WithResourceMetaProvider) wmp, ok := r.(resource.WithResourceMetaProvider)
if !ok { if !ok {
return r return r

View file

@ -14,6 +14,7 @@
package resources package resources
import ( import (
"fmt"
"path" "path"
"sync" "sync"
@ -22,6 +23,7 @@ import (
"github.com/gohugoio/hugo/output" "github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/internal" "github.com/gohugoio/hugo/resources/internal"
"github.com/gohugoio/hugo/resources/jsconfig" "github.com/gohugoio/hugo/resources/jsconfig"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/hexec"
@ -143,6 +145,16 @@ type PostBuildAssets struct {
JSConfigBuilder *jsconfig.Builder JSConfigBuilder *jsconfig.Builder
} }
// NewResourceWrapperFromResourceConfig creates a new Resource from the content
// value in the given ResourceConfig. The value must already be a
// resource.Resource; it is cloned with the config's metadata applied when
// needed. A non-Resource value is an error.
func (r *Spec) NewResourceWrapperFromResourceConfig(rc *pagemeta.ResourceConfig) (resource.Resource, error) {
	content := rc.Content
	// Use a distinct name for the type switch variable; the original shadowed
	// the method receiver r, which is confusing and flagged by shadow linters.
	switch v := content.Value.(type) {
	case resource.Resource:
		return cloneWithMetadataFromResourceConfigIfNeeded(rc, v), nil
	default:
		return nil, fmt.Errorf("failed to create resource for path %q, expected a resource.Resource, got %T", rc.PathInfo.Path(), content.Value)
	}
}
// NewResource creates a new Resource from the given ResourceSourceDescriptor. // NewResource creates a new Resource from the given ResourceSourceDescriptor.
func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, error) { func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, error) {
if err := rd.init(r); err != nil { if err := rd.init(r); err != nil {
@ -169,9 +181,9 @@ func (r *Spec) NewResource(rd ResourceSourceDescriptor) (resource.Resource, erro
paths: rp, paths: rp,
spec: r, spec: r,
sd: rd, sd: rd,
params: make(map[string]any), params: rd.Params,
name: rd.NameOriginal, name: rd.NameOriginal,
title: rd.NameOriginal, title: rd.Title,
} }
if rd.MediaType.MainType == "image" { if rd.MediaType.MainType == "image" {

View file

@ -256,6 +256,10 @@ func (r *resourceAdapter) Filter(filters ...any) (images.ImageResource, error) {
return r.getImageOps().Filter(filters...) return r.getImageOps().Filter(filters...)
} }
// Resize resizes the image according to the given spec, delegating to the
// adapter's underlying image operations.
func (r *resourceAdapter) Resize(spec string) (images.ImageResource, error) {
	return r.getImageOps().Resize(spec)
}
func (r *resourceAdapter) Height() int { func (r *resourceAdapter) Height() int {
return r.getImageOps().Height() return r.getImageOps().Height()
} }
@ -314,10 +318,6 @@ func (r *resourceAdapter) RelPermalink() string {
return r.target.RelPermalink() return r.target.RelPermalink()
} }
func (r *resourceAdapter) Resize(spec string) (images.ImageResource, error) {
return r.getImageOps().Resize(spec)
}
func (r *resourceAdapter) ResourceType() string { func (r *resourceAdapter) ResourceType() string {
r.init(false, false) r.init(false, false)
return r.target.ResourceType() return r.target.ResourceType()

View file

@ -21,6 +21,7 @@ import (
"github.com/bep/gitmap" "github.com/bep/gitmap"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/hugio"
@ -37,6 +38,12 @@ type File struct {
lazyInit sync.Once lazyInit sync.Once
} }
// IsContentAdapter returns whether the file represents a content adapter.
// This means that there may be more than one Page associated with this file.
func (fi *File) IsContentAdapter() bool {
	meta := fi.fim.Meta()
	return meta.PathInfo.IsContentData()
}
// Filename returns a file's absolute path and filename on disk. // Filename returns a file's absolute path and filename on disk.
func (fi *File) Filename() string { return fi.fim.Meta().Filename } func (fi *File) Filename() string { return fi.fim.Meta().Filename }
@ -136,7 +143,7 @@ func (fi *File) p() *paths.Path {
func NewFileInfoFrom(path, filename string) *File { func NewFileInfoFrom(path, filename string) *File {
meta := &hugofs.FileMeta{ meta := &hugofs.FileMeta{
Filename: filename, Filename: filename,
PathInfo: paths.Parse("", filepath.ToSlash(path)), PathInfo: media.DefaultPathParser.Parse("", filepath.ToSlash(path)),
} }
return NewFileInfo(hugofs.NewFileMetaInfo(nil, meta)) return NewFileInfo(hugofs.NewFileMetaInfo(nil, meta))

View file

@ -65,10 +65,14 @@ type TemplateHandlers struct {
TxtTmpl TemplateParseFinder TxtTmpl TemplateParseFinder
} }
// TemplateExecutor executes the given template into wr with the given data,
// honoring the provided context.
type TemplateExecutor interface {
	ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error
}
// TemplateHandler finds and executes templates. // TemplateHandler finds and executes templates.
type TemplateHandler interface { type TemplateHandler interface {
TemplateFinder TemplateFinder
ExecuteWithContext(ctx context.Context, t Template, wr io.Writer, data any) error TemplateExecutor
LookupLayout(d layouts.LayoutDescriptor, f output.Format) (Template, bool, error) LookupLayout(d layouts.LayoutDescriptor, f output.Format) (Template, bool, error)
HasTemplate(name string) bool HasTemplate(name string) bool
GetIdentity(name string) (identity.Identity, bool) GetIdentity(name string) (identity.Identity, bool)