mirror of
https://github.com/gohugoio/hugo.git
synced 2024-11-21 20:46:30 -05:00
7285e74090
There are some breaking changes in this commit, see #11455. Closes #11455 Closes #11549 This fixes a set of bugs (see issue list) and it is also paying some technical debt accumulated over the years. We now build with Staticcheck enabled in the CI build. The performance should be about the same as before for regular sized Hugo sites, but it should perform and scale much better to larger data sets, as objects that uses lots of memory (e.g. rendered Markdown, big JSON files read into maps with transform.Unmarshal etc.) will now get automatically garbage collected if needed. Performance on partial rebuilds when running the server in fast render mode should be the same, but the change detection should be much more accurate. A list of the notable new features: * A new dependency tracker that covers (almost) all of Hugo's API and is used to do fine grained partial rebuilds when running the server. * A new and simpler tree document store which allows fast lookups and prefix-walking in all dimensions (e.g. language) concurrently. * You can now configure an upper memory limit allowing for much larger data sets and/or running on lower specced PCs. We have lifted the "no resources in sub folders" restriction for branch bundles (e.g. sections). Memory Limit * Hugos will, by default, set aside a quarter of the total system memory, but you can set this via the OS environment variable HUGO_MEMORYLIMIT (in gigabytes). This is backed by a partitioned LRU cache used throughout Hugo. A cache that gets dynamically resized in low memory situations, allowing Go's Garbage Collector to free the memory. New Dependency Tracker: Hugo has had a rule based coarse grained approach to server rebuilds that has worked mostly pretty well, but there have been some surprises (e.g. stale content). This is now revamped with a new dependency tracker that can quickly calculate the delta given a changed resource (e.g. a content file, template, JS file etc.). This handles transitive relations, e.g. 
$page -> js.Build -> JS import, or $page1.Content -> render hook -> site.GetPage -> $page2.Title, or $page1.Content -> shortcode -> partial -> site.RegularPages -> $page2.Content -> shortcode ..., and should also handle changes to aggregated values (e.g. site.Lastmod) effectively. This covers all of Hugo's API with 2 known exceptions (a list that may not be fully exhaustive): Changes to files loaded with template func os.ReadFile may not be handled correctly. We recommend loading resources with resources.Get Changes to Hugo objects (e.g. Page) passed in the template context to lang.Translate may not be detected correctly. We recommend having simple i18n templates without too much data context passed in other than simple types such as strings and numbers. Note that the cachebuster configuration (when A changes then rebuild B) works well with the above, but we recommend that you revise that configuration, as it in most situations should not be needed. One example where it is still needed is with TailwindCSS and using changes to hugo_stats.json to trigger new CSS rebuilds. Document Store: Previously, a little simplified, we split the document store (where we store pages and resources) in a tree per language. This worked pretty well, but the structure made some operations harder than they needed to be. We have now restructured it into one Radix tree for all languages. Internally the language is considered to be a dimension of that tree, and the tree can be viewed in all dimensions concurrently. This makes some operations re. language simpler (e.g. finding translations is just a slice range), but the idea is that it should also be relatively inexpensive to add more dimensions if needed (e.g. role). 
Fixes #10169 Fixes #10364 Fixes #10482 Fixes #10630 Fixes #10656 Fixes #10694 Fixes #10918 Fixes #11262 Fixes #11439 Fixes #11453 Fixes #11457 Fixes #11466 Fixes #11540 Fixes #11551 Fixes #11556 Fixes #11654 Fixes #11661 Fixes #11663 Fixes #11664 Fixes #11669 Fixes #11671 Fixes #11807 Fixes #11808 Fixes #11809 Fixes #11815 Fixes #11840 Fixes #11853 Fixes #11860 Fixes #11883 Fixes #11904 Fixes #7388 Fixes #7425 Fixes #7436 Fixes #7544 Fixes #7882 Fixes #7960 Fixes #8255 Fixes #8307 Fixes #8863 Fixes #8927 Fixes #9192 Fixes #9324
585 lines
14 KiB
Go
585 lines
14 KiB
Go
// Copyright 2024 The Hugo Authors. All rights reserved.
|
|
//
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
// you may not use this file except in compliance with the License.
|
|
// You may obtain a copy of the License at
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
//
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
// See the License for the specific language governing permissions and
|
|
// limitations under the License.
|
|
|
|
package hugolib
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"io"
|
|
"strings"
|
|
"sync"
|
|
"sync/atomic"
|
|
|
|
"github.com/bep/logg"
|
|
"github.com/gohugoio/hugo/cache/dynacache"
|
|
"github.com/gohugoio/hugo/config/allconfig"
|
|
"github.com/gohugoio/hugo/hugofs/glob"
|
|
"github.com/gohugoio/hugo/hugolib/doctree"
|
|
|
|
"github.com/fsnotify/fsnotify"
|
|
|
|
"github.com/gohugoio/hugo/output"
|
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
|
|
|
"github.com/gohugoio/hugo/common/hugo"
|
|
"github.com/gohugoio/hugo/common/maps"
|
|
"github.com/gohugoio/hugo/common/para"
|
|
"github.com/gohugoio/hugo/common/types"
|
|
"github.com/gohugoio/hugo/hugofs"
|
|
|
|
"github.com/gohugoio/hugo/source"
|
|
|
|
"github.com/gohugoio/hugo/common/herrors"
|
|
"github.com/gohugoio/hugo/deps"
|
|
"github.com/gohugoio/hugo/helpers"
|
|
"github.com/gohugoio/hugo/lazy"
|
|
|
|
"github.com/gohugoio/hugo/resources/page"
|
|
)
|
|
|
|
// HugoSites represents the sites to build. Each site represents a language.
type HugoSites struct {
	// One Site per configured language.
	Sites []*Site

	// The full set of loaded configurations.
	Configs *allconfig.Configs

	// Hugo version/environment info.
	hugoInfo hugo.HugoInfo

	// Render output formats for all sites.
	renderFormats output.Formats

	// The currently rendered Site.
	currentSite *Site

	// Shared dependencies (logger, filesystems, config, ...), embedded.
	*deps.Deps

	// Git and CODEOWNERS info; nil unless enabled, see loadGitInfo.
	gitInfo       *gitInfo
	codeownerInfo *codeownerInfo

	// As loaded from the /data dirs
	data map[string]any

	// Cache for page listings.
	cachePages *dynacache.Partition[string, page.Pages]

	// Before Hugo 0.122.0 we managed all translations in a map using a translationKey
	// that could be overridden in front matter.
	// Now the different page dimensions (e.g. language) are built-in to the page trees above.
	// But we still need to support the overridden translationKey, but that should
	// be relatively rare and low volume.
	translationKeyPages *maps.SliceCache[page.Page]

	// The document trees holding pages and resources.
	pageTrees *pageTrees

	// Guards one-time work after rendering.
	postRenderInit sync.Once

	// File change events with filename stored in this map will be skipped.
	skipRebuildForFilenamesMu sync.Mutex
	skipRebuildForFilenames   map[string]bool

	// Lazy, one-time initializers (data, layouts, Git info).
	init *hugoSitesInit

	// Worker pool and sizes used when building the sites.
	workersSite     *para.Workers
	numWorkersSites int
	numWorkers      int

	*fatalErrorHandler
	*buildCounters
}
|
|
|
|
// ShouldSkipFileChangeEvent allows skipping filesystem event early before
|
|
// the build is started.
|
|
func (h *HugoSites) ShouldSkipFileChangeEvent(ev fsnotify.Event) bool {
|
|
h.skipRebuildForFilenamesMu.Lock()
|
|
defer h.skipRebuildForFilenamesMu.Unlock()
|
|
return h.skipRebuildForFilenames[ev.Name]
|
|
}
|
|
|
|
// buildCounters holds atomic counters incremented during a build.
// Only used in tests.
type buildCounters struct {
	// Counts content renders; see loggFields for how it is reported.
	contentRenderCounter atomic.Uint64
	// Counts page renders; see loggFields for how it is reported.
	pageRenderCounter atomic.Uint64
}
|
|
|
|
func (c *buildCounters) loggFields() logg.Fields {
|
|
return logg.Fields{
|
|
{Name: "pages", Value: c.pageRenderCounter.Load()},
|
|
{Name: "content", Value: c.contentRenderCounter.Load()},
|
|
}
|
|
}
|
|
|
|
// fatalErrorHandler records the first fatal error of a build and signals
// waiters via a channel so the build can be aborted early.
type fatalErrorHandler struct {
	// Guards err and done.
	mu sync.Mutex

	h *HugoSites

	// The most recently received fatal error; FatalError overwrites it on
	// every call, so later errors replace earlier ones.
	err error

	// Set on the first fatal error; donec is closed at the same time.
	done  bool
	donec chan bool // will be closed when done
}
|
|
|
|
// FatalError error is used in some rare situations where it does not make sense to
|
|
// continue processing, to abort as soon as possible and log the error.
|
|
func (f *fatalErrorHandler) FatalError(err error) {
|
|
f.mu.Lock()
|
|
defer f.mu.Unlock()
|
|
if !f.done {
|
|
f.done = true
|
|
close(f.donec)
|
|
}
|
|
f.err = err
|
|
}
|
|
|
|
func (f *fatalErrorHandler) getErr() error {
|
|
f.mu.Lock()
|
|
defer f.mu.Unlock()
|
|
return f.err
|
|
}
|
|
|
|
func (f *fatalErrorHandler) Done() <-chan bool {
|
|
return f.donec
|
|
}
|
|
|
|
// hugoSitesInit holds the lazy, one-time initializers shared by all sites.
type hugoSitesInit struct {
	// Loads the data from all of the /data folders.
	data *lazy.Init

	// Performs late initialization (before render) of the templates.
	layouts *lazy.Init

	// Loads the Git info and CODEOWNERS for all the pages if enabled.
	gitInfo *lazy.Init
}
|
|
|
|
func (h *HugoSites) Data() map[string]any {
|
|
if _, err := h.init.data.Do(context.Background()); err != nil {
|
|
h.SendError(fmt.Errorf("failed to load data: %w", err))
|
|
return nil
|
|
}
|
|
return h.data
|
|
}
|
|
|
|
// Pages returns all pages for all sites.
|
|
func (h *HugoSites) Pages() page.Pages {
|
|
key := "pages"
|
|
v, err := h.cachePages.GetOrCreate(key, func(string) (page.Pages, error) {
|
|
var pages page.Pages
|
|
for _, s := range h.Sites {
|
|
pages = append(pages, s.Pages()...)
|
|
}
|
|
page.SortByDefault(pages)
|
|
return pages, nil
|
|
})
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
return v
|
|
}
|
|
|
|
// Pages returns all regularpages for all sites.
|
|
func (h *HugoSites) RegularPages() page.Pages {
|
|
key := "regular-pages"
|
|
v, err := h.cachePages.GetOrCreate(key, func(string) (page.Pages, error) {
|
|
var pages page.Pages
|
|
for _, s := range h.Sites {
|
|
pages = append(pages, s.RegularPages()...)
|
|
}
|
|
page.SortByDefault(pages)
|
|
|
|
return pages, nil
|
|
})
|
|
if err != nil {
|
|
panic(err)
|
|
}
|
|
return v
|
|
}
|
|
|
|
func (h *HugoSites) gitInfoForPage(p page.Page) (source.GitInfo, error) {
|
|
if _, err := h.init.gitInfo.Do(context.Background()); err != nil {
|
|
return source.GitInfo{}, err
|
|
}
|
|
|
|
if h.gitInfo == nil {
|
|
return source.GitInfo{}, nil
|
|
}
|
|
|
|
return h.gitInfo.forPage(p), nil
|
|
}
|
|
|
|
func (h *HugoSites) codeownersForPage(p page.Page) ([]string, error) {
|
|
if _, err := h.init.gitInfo.Do(context.Background()); err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
if h.codeownerInfo == nil {
|
|
return nil, nil
|
|
}
|
|
|
|
return h.codeownerInfo.forPage(p), nil
|
|
}
|
|
|
|
func (h *HugoSites) pickOneAndLogTheRest(errors []error) error {
|
|
if len(errors) == 0 {
|
|
return nil
|
|
}
|
|
|
|
var i int
|
|
|
|
for j, err := range errors {
|
|
// If this is in server mode, we want to return an error to the client
|
|
// with a file context, if possible.
|
|
if herrors.UnwrapFileError(err) != nil {
|
|
i = j
|
|
break
|
|
}
|
|
}
|
|
|
|
// Log the rest, but add a threshold to avoid flooding the log.
|
|
const errLogThreshold = 5
|
|
|
|
for j, err := range errors {
|
|
if j == i || err == nil {
|
|
continue
|
|
}
|
|
|
|
if j >= errLogThreshold {
|
|
break
|
|
}
|
|
|
|
h.Log.Errorln(err)
|
|
}
|
|
|
|
return errors[i]
|
|
}
|
|
|
|
func (h *HugoSites) isMultiLingual() bool {
|
|
return len(h.Sites) > 1
|
|
}
|
|
|
|
// TODO(bep) consolidate
|
|
func (h *HugoSites) LanguageSet() map[string]int {
|
|
set := make(map[string]int)
|
|
for i, s := range h.Sites {
|
|
set[s.language.Lang] = i
|
|
}
|
|
return set
|
|
}
|
|
|
|
func (h *HugoSites) NumLogErrors() int {
|
|
if h == nil {
|
|
return 0
|
|
}
|
|
return h.Log.LoggCount(logg.LevelError)
|
|
}
|
|
|
|
func (h *HugoSites) PrintProcessingStats(w io.Writer) {
|
|
stats := make([]*helpers.ProcessingStats, len(h.Sites))
|
|
for i := 0; i < len(h.Sites); i++ {
|
|
stats[i] = h.Sites[i].PathSpec.ProcessingStats
|
|
}
|
|
helpers.ProcessingStatsTable(w, stats...)
|
|
}
|
|
|
|
// GetContentPage finds a Page with content given the absolute filename.
|
|
// Returns nil if none found.
|
|
func (h *HugoSites) GetContentPage(filename string) page.Page {
|
|
var p page.Page
|
|
|
|
h.withPage(func(s string, p2 *pageState) bool {
|
|
if p2.File() == nil {
|
|
return false
|
|
}
|
|
|
|
if p2.File().FileInfo().Meta().Filename == filename {
|
|
p = p2
|
|
return true
|
|
}
|
|
|
|
for _, r := range p2.Resources().ByType(pageResourceType) {
|
|
p3 := r.(page.Page)
|
|
if p3.File() != nil && p3.File().FileInfo().Meta().Filename == filename {
|
|
p = p3
|
|
return true
|
|
}
|
|
}
|
|
|
|
return false
|
|
})
|
|
|
|
return p
|
|
}
|
|
|
|
func (h *HugoSites) loadGitInfo() error {
|
|
if h.Configs.Base.EnableGitInfo {
|
|
gi, err := newGitInfo(h.Conf)
|
|
if err != nil {
|
|
h.Log.Errorln("Failed to read Git log:", err)
|
|
} else {
|
|
h.gitInfo = gi
|
|
}
|
|
|
|
co, err := newCodeOwners(h.Configs.LoadingInfo.BaseConfig.WorkingDir)
|
|
if err != nil {
|
|
h.Log.Errorln("Failed to read CODEOWNERS:", err)
|
|
} else {
|
|
h.codeownerInfo = co
|
|
}
|
|
}
|
|
return nil
|
|
}
|
|
|
|
// Reset resets the sites and template caches etc., making it ready for a full rebuild.
// NOTE(review): the config parameter is unused in this body — confirm whether it
// is kept for signature symmetry with other build steps.
func (h *HugoSites) reset(config *BuildCfg) {
	// Install a fresh fatal-error handler with an open done channel.
	h.fatalErrorHandler = &fatalErrorHandler{
		h:     h,
		donec: make(chan bool),
	}
}
|
|
|
|
// resetLogs resets the log counters etc. Used to do a new build on the same sites.
|
|
func (h *HugoSites) resetLogs() {
|
|
h.Log.Reset()
|
|
for _, s := range h.Sites {
|
|
s.Deps.Log.Reset()
|
|
}
|
|
}
|
|
|
|
func (h *HugoSites) withSite(fn func(s *Site) error) error {
|
|
for _, s := range h.Sites {
|
|
if err := fn(s); err != nil {
|
|
return err
|
|
}
|
|
}
|
|
return nil
|
|
}
|
|
|
|
func (h *HugoSites) withPage(fn func(s string, p *pageState) bool) {
|
|
h.withSite(func(s *Site) error {
|
|
w := &doctree.NodeShiftTreeWalker[contentNodeI]{
|
|
Tree: s.pageMap.treePages,
|
|
LockType: doctree.LockTypeRead,
|
|
Handle: func(s string, n contentNodeI, match doctree.DimensionFlag) (bool, error) {
|
|
return fn(s, n.(*pageState)), nil
|
|
},
|
|
}
|
|
return w.Walk(context.Background())
|
|
})
|
|
}
|
|
|
|
// BuildCfg holds build options used to, as an example, skip the render step.
type BuildCfg struct {
	// Skip rendering. Useful for testing.
	SkipRender bool

	// Use this to indicate what changed (for rebuilds).
	whatChanged *whatChanged

	// This is a partial re-render of some selected pages.
	PartialReRender bool

	// Set in server mode when the last build failed for some reason.
	ErrRecovery bool

	// Recently visited URLs. This is used for partial re-rendering.
	// A non-empty queue puts shouldRender into fast render mode.
	RecentlyVisited *types.EvictingStringQueue

	// Can be set to build only with a sub set of the content source.
	ContentInclusionFilter *glob.FilenameFilter

	// Set when the buildlock is already acquired (e.g. the archetype content builder).
	NoBuildLock bool

	// Render counters used by tests; see buildCounters.
	testCounters *buildCounters
}
|
|
|
|
// shouldRender returns whether this output format should be rendered or not.
|
|
func (cfg *BuildCfg) shouldRender(p *pageState) bool {
|
|
if !p.renderOnce {
|
|
return true
|
|
}
|
|
|
|
// The render state is incremented on render and reset when a related change is detected.
|
|
// Note that this is set per output format.
|
|
shouldRender := p.renderState == 0
|
|
|
|
if !shouldRender {
|
|
return false
|
|
}
|
|
|
|
fastRenderMode := cfg.RecentlyVisited.Len() > 0
|
|
|
|
if !fastRenderMode {
|
|
// Not in fast render mode or first time render.
|
|
return shouldRender
|
|
}
|
|
|
|
if !p.render {
|
|
// Not be to rendered for this output format.
|
|
return false
|
|
}
|
|
|
|
if p.outputFormat().IsHTML {
|
|
// This is fast render mode and the output format is HTML,
|
|
// rerender if this page is one of the recently visited.
|
|
return cfg.RecentlyVisited.Contains(p.RelPermalink())
|
|
}
|
|
|
|
// In fast render mode, we want to avoid re-rendering the sitemaps etc. and
|
|
// other big listings whenever we e.g. change a content file,
|
|
// but we want partial renders of the recently visited pages to also include
|
|
// alternative formats of the same HTML page (e.g. RSS, JSON).
|
|
for _, po := range p.pageOutputs {
|
|
if po.render && po.f.IsHTML && cfg.RecentlyVisited.Contains(po.RelPermalink()) {
|
|
return true
|
|
}
|
|
}
|
|
|
|
return false
|
|
}
|
|
|
|
func (s *Site) preparePagesForRender(isRenderingSite bool, idx int) error {
|
|
var err error
|
|
|
|
initPage := func(p *pageState) error {
|
|
if err = p.shiftToOutputFormat(isRenderingSite, idx); err != nil {
|
|
return err
|
|
}
|
|
return nil
|
|
}
|
|
|
|
return s.pageMap.forEeachPageIncludingBundledPages(nil,
|
|
func(p *pageState) (bool, error) {
|
|
return false, initPage(p)
|
|
},
|
|
)
|
|
}
|
|
|
|
func (h *HugoSites) loadData() error {
|
|
h.data = make(map[string]any)
|
|
w := hugofs.NewWalkway(
|
|
hugofs.WalkwayConfig{
|
|
Fs: h.PathSpec.BaseFs.Data.Fs,
|
|
WalkFn: func(path string, fi hugofs.FileMetaInfo) error {
|
|
if fi.IsDir() {
|
|
return nil
|
|
}
|
|
pi := fi.Meta().PathInfo
|
|
if pi == nil {
|
|
panic("no path info")
|
|
}
|
|
return h.handleDataFile(source.NewFileInfo(fi))
|
|
},
|
|
})
|
|
|
|
if err := w.Walk(); err != nil {
|
|
return err
|
|
}
|
|
return nil
|
|
}
|
|
|
|
// handleDataFile reads one data file and merges its contents into h.data,
// keyed by the file's directory path and base filename. Because the walk
// visits files in lexical order ('/' before '.'), data already present in
// the tree has higher precedence and is never overwritten; maps are merged
// key-by-key, arrays and scalars are not merged.
func (h *HugoSites) handleDataFile(r *source.File) error {
	var current map[string]any

	// NOTE(review): f is opened only to surface open errors early and is not
	// otherwise read here — readData below opens the file again. Confirm
	// whether this first Open is still needed.
	f, err := r.FileInfo().Meta().Open()
	if err != nil {
		return fmt.Errorf("data: failed to open %q: %w", r.LogicalName(), err)
	}
	defer f.Close()

	// Crawl in data tree to insert data
	current = h.data
	// Dir() starts with a '/'; strip it before splitting into key parts.
	dataPath := r.FileInfo().Meta().PathInfo.Dir()[1:]
	keyParts := strings.Split(dataPath, "/")

	// Descend (creating intermediate maps) to the node for this file's dir.
	// NOTE(review): the type assertion below panics if a scalar/array already
	// occupies an intermediate key — presumably ruled out by walk order.
	for _, key := range keyParts {
		if key != "" {
			if _, ok := current[key]; !ok {
				current[key] = make(map[string]any)
			}
			current = current[key].(map[string]any)
		}
	}

	data, err := h.readData(r)
	if err != nil {
		return h.errWithFileContext(err, r)
	}

	if data == nil {
		return nil
	}

	// filepath.Walk walks the files in lexical order, '/' comes before '.'
	higherPrecedentData := current[r.BaseFileName()]

	switch data.(type) {
	case nil:
	case map[string]any:

		switch higherPrecedentData.(type) {
		case nil:
			// Nothing there yet; insert as-is.
			current[r.BaseFileName()] = data
		case map[string]any:
			// merge maps: insert entries from data for keys that
			// don't already exist in higherPrecedentData
			higherPrecedentMap := higherPrecedentData.(map[string]any)
			for key, value := range data.(map[string]any) {
				if _, exists := higherPrecedentMap[key]; exists {
					// this warning could happen if
					// 1. A theme uses the same key; the main data folder wins
					// 2. A sub folder uses the same key: the sub folder wins
					// TODO(bep) figure out a way to detect 2) above and make that a WARN
					h.Log.Infof("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
				} else {
					higherPrecedentMap[key] = value
				}
			}
		default:
			// can't merge: higherPrecedentData is not a map
			h.Log.Warnf("The %T data from '%s' overridden by "+
				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
		}

	case []any:
		if higherPrecedentData == nil {
			current[r.BaseFileName()] = data
		} else {
			// we don't merge array data
			h.Log.Warnf("The %T data from '%s' overridden by "+
				"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
		}

	default:
		h.Log.Errorf("unexpected data type %T in file %s", data, r.LogicalName())
	}

	return nil
}
|
|
|
|
func (h *HugoSites) errWithFileContext(err error, f *source.File) error {
|
|
realFilename := f.FileInfo().Meta().Filename
|
|
return herrors.NewFileErrorFromFile(err, realFilename, h.Fs.Source, nil)
|
|
}
|
|
|
|
func (h *HugoSites) readData(f *source.File) (any, error) {
|
|
file, err := f.FileInfo().Meta().Open()
|
|
if err != nil {
|
|
return nil, fmt.Errorf("readData: failed to open data file: %w", err)
|
|
}
|
|
defer file.Close()
|
|
content := helpers.ReaderToBytes(file)
|
|
|
|
format := metadecoders.FormatFromString(f.Ext())
|
|
return metadecoders.Default.Unmarshal(content, format)
|
|
}
|