Create pages from _content.gotmpl
Closes #12427 Closes #12485 Closes #6310 Closes #5074
parent 55dea41c1a
commit e2d66e3218
60 changed files with 2391 additions and 438 deletions
@@ -854,7 +854,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
                h.BaseFs.SourceFilesystems,
                dynamicEvents)

            onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents)
            onePageName := pickOneWriteOrCreatePath(h.Conf.ContentTypes(), partitionedEvents.ContentEvents)

            c.printChangeDetected("")
            c.changeDetector.PrepareNew()
@@ -46,12 +46,12 @@ import (
    "github.com/fsnotify/fsnotify"
    "github.com/gohugoio/hugo/common/herrors"
    "github.com/gohugoio/hugo/common/hugo"

    "github.com/gohugoio/hugo/common/types"
    "github.com/gohugoio/hugo/common/urls"
    "github.com/gohugoio/hugo/config"
    "github.com/gohugoio/hugo/helpers"
    "github.com/gohugoio/hugo/hugofs"
    "github.com/gohugoio/hugo/hugofs/files"
    "github.com/gohugoio/hugo/hugolib"
    "github.com/gohugoio/hugo/hugolib/filesystems"
    "github.com/gohugoio/hugo/livereload"
@@ -1188,16 +1188,16 @@ func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fs
    return
}

func pickOneWriteOrCreatePath(events []fsnotify.Event) string {
func pickOneWriteOrCreatePath(contentTypes config.ContentTypesProvider, events []fsnotify.Event) string {
    name := ""

    for _, ev := range events {
        if ev.Op&fsnotify.Write == fsnotify.Write || ev.Op&fsnotify.Create == fsnotify.Create {
            if files.IsIndexContentFile(ev.Name) {
            if contentTypes.IsIndexContentFile(ev.Name) {
                return ev.Name
            }

            if files.IsContentFile(ev.Name) {
            if contentTypes.IsContentFile(ev.Name) {
                name = ev.Name
            }
@@ -27,7 +27,12 @@ func NewCache[K comparable, T any]() *Cache[K, T] {
}

// Get returns the value for the given key, and whether it was found.
// If c is nil, the zero value and false are returned.
func (c *Cache[K, T]) Get(key K) (T, bool) {
    if c == nil {
        var zero T
        return zero, false
    }
    c.RLock()
    v, found := c.m[key]
    c.RUnlock()

@@ -60,6 +65,15 @@ func (c *Cache[K, T]) Set(key K, value T) {
    c.Unlock()
}

// ForEeach calls the given function for each key/value pair in the cache.
func (c *Cache[K, T]) ForEeach(f func(K, T)) {
    c.RLock()
    defer c.RUnlock()
    for k, v := range c.m {
        f(k, v)
    }
}

// SliceCache is a simple thread safe cache backed by a map.
type SliceCache[T any] struct {
    m map[string][]T
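For context (not part of the diff): a minimal usage sketch of this generic cache, assuming it lives in the common/maps package as the hunks suggest; the keys and values are made up.

package main

import (
    "fmt"

    "github.com/gohugoio/hugo/common/maps"
)

func main() {
    c := maps.NewCache[string, int]()
    c.Set("home", 1)

    // Get now tolerates a nil *Cache; on a non-nil cache it is a plain read-locked lookup.
    if v, found := c.Get("home"); found {
        fmt.Println("found:", v)
    }

    // Note the spelling used by the commit: ForEeach.
    c.ForEeach(func(k string, v int) {
        fmt.Println(k, "=", v)
    })
}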
@@ -25,8 +25,6 @@ import (
    "github.com/gohugoio/hugo/identity"
)

var defaultPathParser PathParser

// PathParser parses a path into a Path.
type PathParser struct {
    // Maps the language code to its index in the languages/sites slice.

@@ -34,11 +32,9 @@ type PathParser struct {

    // Reports whether the given language is disabled.
    IsLangDisabled func(string) bool
}

// Parse parses component c with path s into Path using the default path parser.
func Parse(c, s string) *Path {
    return defaultPathParser.Parse(c, s)
    // Reports whether the given ext is a content file.
    IsContentExt func(string) bool
}

// NormalizePathString returns a normalized path string using the very basic Hugo rules.

@@ -108,7 +104,6 @@ func (pp *PathParser) parse(component, s string) (*Path, error) {
    var err error
    // Preserve the original case for titles etc.
    p.unnormalized, err = pp.doParse(component, s, pp.newPath(component))

    if err != nil {
        return nil, err
    }
@@ -195,23 +190,26 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
        }
    }

    isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes
    isContent := isContentComponent && files.IsContentExt(p.Ext())

    if isContent {
        if len(p.identifiers) > 0 {
    isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes
    isContent := isContentComponent && pp.IsContentExt(p.Ext())
        id := p.identifiers[len(p.identifiers)-1]
        b := p.s[p.posContainerHigh : id.Low-1]
        switch b {
        case "index":
            p.bundleType = PathTypeLeaf
        case "_index":
            p.bundleType = PathTypeBranch
        default:
            p.bundleType = PathTypeContentSingle
        }
        if isContent {
            switch b {
            case "index":
                p.bundleType = PathTypeLeaf
            case "_index":
                p.bundleType = PathTypeBranch
            default:
                p.bundleType = PathTypeContentSingle
            }

        if slashCount == 2 && p.IsLeafBundle() {
            p.posSectionHigh = 0
            if slashCount == 2 && p.IsLeafBundle() {
                p.posSectionHigh = 0
            }
        } else if b == files.NameContentData && files.IsContentDataExt(p.Ext()) {
            p.bundleType = PathTypeContentData
        }
    }

@@ -246,6 +244,9 @@ const (

    // Branch bundles, e.g. /blog/_index.md
    PathTypeBranch

    // Content data file, _content.gotmpl.
    PathTypeContentData
)

type Path struct {

@@ -521,10 +522,6 @@ func (p *Path) Identifiers() []string {
    return ids
}

func (p *Path) IsHTML() bool {
    return files.IsHTML(p.Ext())
}

func (p *Path) BundleType() PathType {
    return p.bundleType
}

@@ -541,6 +538,10 @@ func (p *Path) IsLeafBundle() bool {
    return p.bundleType == PathTypeLeaf
}

func (p *Path) IsContentData() bool {
    return p.bundleType == PathTypeContentData
}

func (p Path) ForBundleType(t PathType) *Path {
    p.bundleType = t
    return &p
@@ -27,6 +27,9 @@ var testParser = &PathParser{
        "no": 0,
        "en": 1,
    },
    IsContentExt: func(ext string) bool {
        return ext == "md"
    },
}

func TestParse(t *testing.T) {

@@ -333,6 +336,22 @@ func TestParse(t *testing.T) {
            c.Assert(p.Path(), qt.Equals, "/a/b/c.txt")
        },
    },
    {
        "Content data file gotmpl",
        "/a/b/_content.gotmpl",
        func(c *qt.C, p *Path) {
            c.Assert(p.Path(), qt.Equals, "/a/b/_content.gotmpl")
            c.Assert(p.Ext(), qt.Equals, "gotmpl")
            c.Assert(p.IsContentData(), qt.IsTrue)
        },
    },
    {
        "Content data file yaml",
        "/a/b/_content.yaml",
        func(c *qt.C, p *Path) {
            c.Assert(p.IsContentData(), qt.IsFalse)
        },
    },
}
for _, test := range tests {
    c.Run(test.name, func(c *qt.C) {
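A small sketch (not part of the diff) of the wiring these tests exercise: the content-file check is now injected into PathParser via IsContentExt, and a _content.gotmpl path parses as a content data path. The component name "content" and the map type for LanguageIndex are assumptions based on the fixture above.

package main

import (
    "fmt"

    "github.com/gohugoio/hugo/common/paths"
)

func main() {
    pp := &paths.PathParser{
        LanguageIndex: map[string]int{"no": 0, "en": 1},
        IsContentExt: func(ext string) bool {
            return ext == "md"
        },
    }

    p := pp.Parse("content", "/a/b/_content.gotmpl")
    fmt.Println(p.Ext(), p.IsContentData()) // gotmpl true
}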
@@ -367,6 +367,7 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
    DisabledLanguages: disabledLangs,
    IgnoredLogs:       ignoredLogIDs,
    KindOutputFormats: kindOutputFormats,
    ContentTypes:      media.DefaultContentTypes.FromTypes(c.MediaTypes.Config),
    CreateTitle:       helpers.GetTitleFunc(c.TitleCaseStyle),
    IsUglyURLSection:  isUglyURL,
    IgnoreFile:        ignoreFile,

@@ -402,6 +403,7 @@ type ConfigCompiled struct {
    BaseURLLiveReload urls.BaseURL
    ServerInterface   string
    KindOutputFormats map[string]output.Formats
    ContentTypes      media.ContentTypes
    DisabledKinds     map[string]bool
    DisabledLanguages map[string]bool
    IgnoredLogs       map[string]bool

@@ -759,7 +761,7 @@ func (c *Configs) Init() error {
    c.Languages = languages
    c.LanguagesDefaultFirst = languagesDefaultFirst

    c.ContentPathParser = &paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet(), IsLangDisabled: c.Base.IsLangDisabled}
    c.ContentPathParser = &paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet(), IsLangDisabled: c.Base.IsLangDisabled, IsContentExt: c.Base.C.ContentTypes.IsContentSuffix}

    c.configLangs = make([]config.AllProvider, len(c.Languages))
    for i, l := range c.LanguagesDefaultFirst {
@@ -84,3 +84,21 @@ logPathWarnings = true
    b.Assert(conf.PrintI18nWarnings, qt.Equals, true)
    b.Assert(conf.PrintPathWarnings, qt.Equals, true)
}

func TestRedefineContentTypes(t *testing.T) {
    files := `
-- hugo.toml --
baseURL = "https://example.com"
[mediaTypes]
[mediaTypes."text/html"]
suffixes = ["html", "xhtml"]
`

    b := hugolib.Test(t, files)

    conf := b.H.Configs.Base
    contentTypes := conf.C.ContentTypes

    b.Assert(contentTypes.HTML.Suffixes(), qt.DeepEquals, []string{"html", "xhtml"})
    b.Assert(contentTypes.Markdown.Suffixes(), qt.DeepEquals, []string{"md", "mdown", "markdown"})
}
@@ -144,6 +144,10 @@ func (c ConfigLanguage) NewIdentityManager(name string) identity.Manager {
    return identity.NewManager(name)
}

func (c ConfigLanguage) ContentTypes() config.ContentTypesProvider {
    return c.config.C.ContentTypes
}

// GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use.
func (c ConfigLanguage) GetConfigSection(s string) any {
    switch s {
@@ -41,6 +41,7 @@ type AllProvider interface {
    Dirs() CommonDirs
    Quiet() bool
    DirsBase() CommonDirs
    ContentTypes() ContentTypesProvider
    GetConfigSection(string) any
    GetConfig() any
    CanonifyURLs() bool

@@ -75,6 +76,15 @@ type AllProvider interface {
    EnableEmoji() bool
}

// We cannot import the media package as that would create a circular dependency.
// This interface defines a subset of what media.ContentTypes provides.
type ContentTypesProvider interface {
    IsContentSuffix(suffix string) bool
    IsContentFile(filename string) bool
    IsIndexContentFile(filename string) bool
    IsHTMLSuffix(suffix string) bool
}

// Provider provides the configuration settings for Hugo.
type Provider interface {
    GetString(key string) string
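A minimal sketch (not part of the diff) of how a consumer reaches the new interface through config.AllProvider; the helper name and its return strings are purely illustrative.

package main

import (
    "fmt"

    "github.com/gohugoio/hugo/config"
)

// classify is a hypothetical helper that only uses methods shown in the interface above.
func classify(conf config.AllProvider, filename string) string {
    ct := conf.ContentTypes()
    switch {
    case ct.IsIndexContentFile(filename):
        return "index content file"
    case ct.IsContentFile(filename):
        return "content file"
    default:
        return "not a content file"
    }
}

func report(conf config.AllProvider) {
    fmt.Println(classify(conf, "blog/_index.md")) // index content file
    fmt.Println(classify(conf, "blog/post.md"))   // content file
    fmt.Println(classify(conf, "blog/photo.jpg")) // not a content file
}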
@@ -29,8 +29,6 @@ import (
    "github.com/gohugoio/hugo/common/hstrings"
    "github.com/gohugoio/hugo/common/paths"

    "github.com/gohugoio/hugo/hugofs/files"

    "github.com/gohugoio/hugo/hugofs"

    "github.com/gohugoio/hugo/helpers"

@@ -98,7 +96,7 @@ func NewContent(h *hugolib.HugoSites, kind, targetPath string, force bool) error
    return "", fmt.Errorf("failed to resolve %q to an archetype template", targetPath)
}

    if !files.IsContentFile(b.targetPath) {
    if !h.Conf.ContentTypes().IsContentFile(b.targetPath) {
        return "", fmt.Errorf("target path %q is not a known content format", b.targetPath)
    }
@@ -26,6 +26,7 @@ import (

    "github.com/gohugoio/hugo/common/hexec"
    "github.com/gohugoio/hugo/common/loggers"
    "github.com/gohugoio/hugo/media"

    "github.com/spf13/afero"

@@ -135,20 +136,16 @@ func (c *ContentSpec) SanitizeAnchorName(s string) string {
}

func (c *ContentSpec) ResolveMarkup(in string) string {
    if c == nil {
        panic("nil ContentSpec")
    }
    in = strings.ToLower(in)
    switch in {
    case "md", "markdown", "mdown":
        return "markdown"
    case "html", "htm":
        return "html"
    default:
        if conv := c.Converters.Get(in); conv != nil {
            return conv.Name()
        }

    if mediaType, found := c.Cfg.ContentTypes().(media.ContentTypes).Types().GetBestMatch(markup.ResolveMarkup(in)); found {
        return mediaType.SubType
    }

    if conv := c.Converters.Get(in); conv != nil {
        return markup.ResolveMarkup(conv.Name())
    }

    return ""
}

@@ -244,7 +241,7 @@ func (c *ContentSpec) TrimShortHTML(input []byte, markup string) []byte {
    openingTag := []byte("<p>")
    closingTag := []byte("</p>")

    if markup == "asciidocext" {
    if markup == media.DefaultContentTypes.AsciiDoc.SubType {
        openingTag = []byte("<div class=\"paragraph\">\n<p>")
        closingTag = []byte("</p>\n</div>")
    }
@@ -41,7 +41,7 @@ func TestTrimShortHTML(t *testing.T) {
    {"markdown", []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>"), []byte("<h2 id=`a`>b</h2>\n\n<p>c</p>")},
    // Issue 12369
    {"markdown", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>")},
    {"asciidocext", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("foo")},
    {"asciidoc", []byte("<div class=\"paragraph\">\n<p>foo</p>\n</div>"), []byte("foo")},
}

c := newTestContentSpec(nil)
@@ -35,9 +35,9 @@ func TestResolveMarkup(t *testing.T) {
    {"md", "markdown"},
    {"markdown", "markdown"},
    {"mdown", "markdown"},
    {"asciidocext", "asciidocext"},
    {"adoc", "asciidocext"},
    {"ad", "asciidocext"},
    {"asciidocext", "asciidoc"},
    {"adoc", "asciidoc"},
    {"ad", "asciidoc"},
    {"rst", "rst"},
    {"pandoc", "pandoc"},
    {"pdc", "pandoc"},
@@ -29,57 +29,13 @@ const (
    FilenameHugoStatsJSON = "hugo_stats.json"
)

var (
    // This should be the only list of valid extensions for content files.
    contentFileExtensions = []string{
        "html", "htm",
        "mdown", "markdown", "md",
        "asciidoc", "adoc", "ad",
        "rest", "rst",
        "org",
        "pandoc", "pdc",
    }

    contentFileExtensionsSet map[string]bool

    htmlFileExtensions = []string{
        "html", "htm",
    }

    htmlFileExtensionsSet map[string]bool
)

func init() {
    contentFileExtensionsSet = make(map[string]bool)
    for _, ext := range contentFileExtensions {
        contentFileExtensionsSet[ext] = true
    }
    htmlFileExtensionsSet = make(map[string]bool)
    for _, ext := range htmlFileExtensions {
        htmlFileExtensionsSet[ext] = true
    }
func IsGoTmplExt(ext string) bool {
    return ext == "gotmpl"
}

func IsContentFile(filename string) bool {
    return contentFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")]
}

func IsIndexContentFile(filename string) bool {
    if !IsContentFile(filename) {
        return false
    }

    base := filepath.Base(filename)

    return strings.HasPrefix(base, "index.") || strings.HasPrefix(base, "_index.")
}

func IsHTML(ext string) bool {
    return htmlFileExtensionsSet[ext]
}

func IsContentExt(ext string) bool {
    return contentFileExtensionsSet[ext]
// Supported data file extensions for _content.* files.
func IsContentDataExt(ext string) bool {
    return IsGoTmplExt(ext)
}

const (

@@ -93,6 +49,8 @@ const (

    FolderResources = "resources"
    FolderJSConfig  = "_jsconfig" // Mounted below /assets with postcss.config.js etc.

    NameContentData = "_content"
)

var (

@@ -14,22 +14,11 @@
package files

import (
    "path/filepath"
    "testing"

    qt "github.com/frankban/quicktest"
)

func TestIsContentFile(t *testing.T) {
    c := qt.New(t)

    c.Assert(IsContentFile(filepath.FromSlash("my/file.md")), qt.Equals, true)
    c.Assert(IsContentFile(filepath.FromSlash("my/file.ad")), qt.Equals, true)
    c.Assert(IsContentFile(filepath.FromSlash("textfile.txt")), qt.Equals, false)
    c.Assert(IsContentExt("md"), qt.Equals, true)
    c.Assert(IsContentExt("json"), qt.Equals, false)
}

func TestComponentFolders(t *testing.T) {
    c := qt.New(t)

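With the hard-coded extension lists (and their direct tests) gone, content-type questions are now answered by the configured media types; what stays behind in hugofs/files is the small, non-configurable part. A sketch (not part of the diff) of the remaining helpers; the filename and the prefix check are illustrative, the real classification happens in the path parser.

package main

import (
    "fmt"
    "path/filepath"
    "strings"

    "github.com/gohugoio/hugo/hugofs/files"
)

func main() {
    filename := "/site/content/blog/_content.gotmpl"

    base := filepath.Base(filename)                        // "_content.gotmpl"
    ext := strings.TrimPrefix(filepath.Ext(filename), ".") // "gotmpl"

    isContentData := strings.HasPrefix(base, files.NameContentData+".") && files.IsContentDataExt(ext)
    fmt.Println(files.IsGoTmplExt(ext), isContentData) // true true
}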
@@ -23,6 +23,7 @@ import (
    "github.com/gohugoio/hugo/common/herrors"
    "github.com/gohugoio/hugo/common/loggers"
    "github.com/gohugoio/hugo/common/paths"
    "github.com/gohugoio/hugo/media"

    "github.com/spf13/afero"
)

@@ -50,7 +51,8 @@ type WalkwayConfig struct {
    Root string

    // The logger to use.
    Logger loggers.Logger
    Logger     loggers.Logger
    PathParser *paths.PathParser

    // One or both of these may be pre-set.
    Info FileMetaInfo // The start info.

@@ -72,6 +74,10 @@ func NewWalkway(cfg WalkwayConfig) *Walkway {
        panic("fs must be set")
    }

    if cfg.PathParser == nil {
        cfg.PathParser = media.DefaultPathParser
    }

    logger := cfg.Logger
    if logger == nil {
        logger = loggers.NewDefault()

@@ -161,7 +167,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
    dirEntries = DirEntriesToFileMetaInfos(fis)
    for _, fi := range dirEntries {
        if fi.Meta().PathInfo == nil {
            fi.Meta().PathInfo = paths.Parse("", filepath.Join(pathRel, fi.Name()))
            fi.Meta().PathInfo = w.cfg.PathParser.Parse("", filepath.Join(pathRel, fi.Name()))
        }
    }

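A hedged sketch (not part of the diff) of constructing a Walkway with an explicit parser. Only Logger and PathParser are taken from the hunks above; treat the remaining WalkwayConfig fields as supplied by the caller.

package main

import (
    "github.com/gohugoio/hugo/common/loggers"
    "github.com/gohugoio/hugo/common/paths"
    "github.com/gohugoio/hugo/hugofs"
)

// newContentWalkway is a hypothetical helper: it makes sure the walker classifies
// files with the site's own PathParser instead of the global default.
func newContentWalkway(cfg hugofs.WalkwayConfig, pp *paths.PathParser) *hugofs.Walkway {
    cfg.PathParser = pp // when nil, NewWalkway falls back to media.DefaultPathParser
    if cfg.Logger == nil {
        cfg.Logger = loggers.NewDefault()
    }
    return hugofs.NewWalkway(cfg)
}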
@@ -1144,7 +1144,7 @@ Home.

    enConfig := b.H.Sites[0].conf
    m, _ := enConfig.MediaTypes.Config.GetByType("text/html")
    b.Assert(m.Suffixes(), qt.DeepEquals, []string{"html"})
    b.Assert(m.Suffixes(), qt.DeepEquals, []string{"html", "htm"})

    svConfig := b.H.Sites[1].conf
    f, _ := svConfig.OutputFormats.Config.GetByName("html")
@@ -14,6 +14,7 @@
package hugolib

import (
    "context"
    "fmt"
    "path"
    "path/filepath"

@@ -23,10 +24,13 @@ import (
    "github.com/bep/logg"
    "github.com/gohugoio/hugo/common/hugio"
    "github.com/gohugoio/hugo/common/paths"
    "github.com/gohugoio/hugo/hugofs/files"
    "github.com/gohugoio/hugo/hugolib/pagesfromdata"
    "github.com/gohugoio/hugo/identity"
    "github.com/gohugoio/hugo/source"

    "github.com/gohugoio/hugo/resources/page"
    "github.com/gohugoio/hugo/resources/page/pagemeta"
    "github.com/gohugoio/hugo/resources/resource"

    "github.com/gohugoio/hugo/hugofs"
@@ -51,9 +55,11 @@ type contentMapConfig struct {
var _ contentNodeI = (*resourceSource)(nil)

type resourceSource struct {
    path   *paths.Path
    opener hugio.OpenReadSeekCloser
    fi     hugofs.FileMetaInfo
    langIndex int
    path      *paths.Path
    opener    hugio.OpenReadSeekCloser
    fi        hugofs.FileMetaInfo
    rc        *pagemeta.ResourceConfig

    r resource.Resource
}

@@ -64,11 +70,7 @@ func (r resourceSource) clone() *resourceSource {
}

func (r *resourceSource) LangIndex() int {
    if r.r != nil && r.isPage() {
        return r.r.(*pageState).s.languagei
    }

    return r.fi.Meta().LangIndex
    return r.langIndex
}

func (r *resourceSource) MarkStale() {
@@ -162,12 +164,13 @@ func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
    return
}

func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
func (m *pageMap) AddFi(fi hugofs.FileMetaInfo, buildConfig *BuildCfg) (pageCount uint64, resourceCount uint64, addErr error) {
    if fi.IsDir() {
        return nil
        return
    }

    insertResource := func(fim hugofs.FileMetaInfo) error {
        resourceCount++
        pi := fi.Meta().PathInfo
        key := pi.Base()
        tree := m.treeResources

@@ -199,9 +202,9 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
        }
        key = pi.Base()

        rs = &resourceSource{r: pageResource}
        rs = &resourceSource{r: pageResource, langIndex: pageResource.s.languagei}
    } else {
        rs = &resourceSource{path: pi, opener: r, fi: fim}
        rs = &resourceSource{path: pi, opener: r, fi: fim, langIndex: fim.Meta().LangIndex}
    }

    tree.InsertIntoValuesDimension(key, rs)
@@ -220,14 +223,27 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
        },
    ))
    if err := insertResource(fi); err != nil {
        return err
        addErr = err
        return
    }
    case paths.PathTypeContentData:
        pc, rc, err := m.addPagesFromGoTmplFi(fi, buildConfig)
        pageCount += pc
        resourceCount += rc
        if err != nil {
            addErr = err
            return
        }

    default:
        m.s.Log.Trace(logg.StringFunc(
            func() string {
                return fmt.Sprintf("insert bundle: %q", fi.Meta().Filename)
            },
        ))

        pageCount++

        // A content file.
        p, pi, err := m.s.h.newPage(
            &pageMeta{
@@ -237,17 +253,164 @@ func (m *pageMap) AddFi(fi hugofs.FileMetaInfo) error {
        },
    )
    if err != nil {
        return err
        addErr = err
        return
    }
    if p == nil {
        // Disabled page.
        return nil
        return
    }

    m.treePages.InsertWithLock(pi.Base(), p)
    m.treePages.InsertIntoValuesDimensionWithLock(pi.Base(), p)

    }
    return nil
    return
}

func (m *pageMap) addPagesFromGoTmplFi(fi hugofs.FileMetaInfo, buildConfig *BuildCfg) (pageCount uint64, resourceCount uint64, addErr error) {
    meta := fi.Meta()
    pi := meta.PathInfo

    m.s.Log.Trace(logg.StringFunc(
        func() string {
            return fmt.Sprintf("insert pages from data file: %q", fi.Meta().Filename)
        },
    ))

    if !files.IsGoTmplExt(pi.Ext()) {
        addErr = fmt.Errorf("unsupported data file extension %q", pi.Ext())
        return
    }

    s := m.s.h.resolveSite(fi.Meta().Lang)
    f := source.NewFileInfo(fi)
    h := s.h

    // Make sure the layouts are initialized.
    if _, err := h.init.layouts.Do(context.Background()); err != nil {
        addErr = err
        return
    }

    contentAdapter := s.pageMap.treePagesFromTemplateAdapters.Get(pi.Base())
    var rebuild bool
    if contentAdapter != nil {
        // Rebuild
        contentAdapter = contentAdapter.CloneForGoTmpl(fi)
        rebuild = true
    } else {
        contentAdapter = pagesfromdata.NewPagesFromTemplate(
            pagesfromdata.PagesFromTemplateOptions{
                GoTmplFi: fi,
                Site:     s,
                DepsFromSite: func(s page.Site) pagesfromdata.PagesFromTemplateDeps {
                    ss := s.(*Site)
                    return pagesfromdata.PagesFromTemplateDeps{
                        TmplFinder: ss.TextTmpl(),
                        TmplExec:   ss.Tmpl(),
                    }
                },
                DependencyManager: s.Conf.NewIdentityManager("pagesfromdata"),
                Watching:          s.Conf.Watching(),
                HandlePage: func(pt *pagesfromdata.PagesFromTemplate, pc *pagemeta.PageConfig) error {
                    s := pt.Site.(*Site)
                    if err := pc.Compile(pt.GoTmplFi.Meta().PathInfo.Base(), true, "", s.Log, s.conf.MediaTypes.Config); err != nil {
                        return err
                    }

                    ps, pi, err := h.newPage(
                        &pageMeta{
                            f: f,
                            s: s,
                            pageMetaParams: &pageMetaParams{
                                pageConfig: pc,
                            },
                        },
                    )
                    if err != nil {
                        return err
                    }

                    if ps == nil {
                        // Disabled page.
                        return nil
                    }

                    u, n, replaced := s.pageMap.treePages.InsertIntoValuesDimensionWithLock(pi.Base(), ps)

                    if h.isRebuild() {
                        if replaced {
                            pt.AddChange(n.GetIdentity())
                        } else {
                            pt.AddChange(u.GetIdentity())
                        }
                    }

                    return nil
                },
                HandleResource: func(pt *pagesfromdata.PagesFromTemplate, rc *pagemeta.ResourceConfig) error {
                    s := pt.Site.(*Site)
                    if err := rc.Compile(
                        pt.GoTmplFi.Meta().PathInfo.Base(),
                        s.Conf.PathParser(),
                        s.conf.MediaTypes.Config,
                    ); err != nil {
                        return err
                    }

                    rs := &resourceSource{path: rc.PathInfo, rc: rc, opener: nil, fi: nil, langIndex: s.languagei}

                    _, n, replaced := s.pageMap.treeResources.InsertIntoValuesDimensionWithLock(rc.PathInfo.Base(), rs)

                    if h.isRebuild() && replaced {
                        pt.AddChange(n.GetIdentity())
                    }
                    return nil
                },
            },
        )

        s.pageMap.treePagesFromTemplateAdapters.Insert(pi.Base(), contentAdapter)

    }

    handleBuildInfo := func(s *Site, bi pagesfromdata.BuildInfo) {
        resourceCount += bi.NumResourcesAdded
        pageCount += bi.NumPagesAdded
        s.handleContentAdapterChanges(bi, buildConfig)
    }

    bi, err := contentAdapter.Execute(context.Background())
    if err != nil {
        addErr = err
        return
    }
    handleBuildInfo(s, bi)

    if !rebuild && bi.EnableAllLanguages {
        // Clone and insert the adapter for the other sites.
        for _, ss := range s.h.Sites {
            if s == ss {
                continue
            }

            clone := contentAdapter.CloneForSite(ss)

            // Make sure it gets executed for the first time.
            bi, err := clone.Execute(context.Background())
            if err != nil {
                addErr = err
                return
            }
            handleBuildInfo(ss, bi)

            // Insert into the correct language tree so it gets rebuilt on changes.
            ss.pageMap.treePagesFromTemplateAdapters.Insert(pi.Base(), clone)

        }
    }

    return
}

// The home page is represented with the zero string.
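To make the new code path concrete, here is a rough sketch (not part of the diff) of a test in the style of the hugolib tests above, feeding a _content.gotmpl file through the content adapter. The template call ($.AddPage) and its dict keys are assumptions inferred from the HandlePage hook, not something this diff shows, so treat the fixture as illustrative only.

func TestPagesFromGoTmplSketch(t *testing.T) {
    // Assumed template API: something like AddPage ends up in the HandlePage
    // callback above as a *pagemeta.PageConfig.
    files := `
-- hugo.toml --
baseURL = "https://example.com"
disableKinds = ["taxonomy", "term", "rss", "sitemap"]
-- content/docs/_content.gotmpl --
{{ $.AddPage (dict "path" "p1" "title" "Page from data") }}
-- layouts/_default/single.html --
{{ .Title }}
`
    b := hugolib.Test(t, files)

    b.AssertFileContent("public/docs/p1/index.html", "Page from data")
}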