Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-21 20:46:30 -05:00)
parent 8731d88222
commit 6cd0784e44
33 changed files with 1033 additions and 148 deletions
cache/dynacache/dynacache.go (vendored, 18 changes)

@@ -38,6 +38,11 @@ import (
 const minMaxSize = 10

+type KeyIdentity struct {
+    Key      any
+    Identity identity.Identity
+}
+
 // New creates a new cache.
 func New(opts Options) *Cache {
     if opts.CheckInterval == 0 {
@@ -64,14 +69,14 @@ func New(opts Options) *Cache {
     infol := opts.Log.InfoCommand("dynacache")

-    evictedIdentities := collections.NewStack[identity.Identity]()
+    evictedIdentities := collections.NewStack[KeyIdentity]()

     onEvict := func(k, v any) {
         if !opts.Watching {
             return
         }
         identity.WalkIdentitiesShallow(v, func(level int, id identity.Identity) bool {
-            evictedIdentities.Push(id)
+            evictedIdentities.Push(KeyIdentity{Key: k, Identity: id})
             return false
         })
         resource.MarkStale(v)
@@ -124,7 +129,7 @@ type Cache struct {
     partitions map[string]PartitionManager

     onEvict           func(k, v any)
-    evictedIdentities *collections.Stack[identity.Identity]
+    evictedIdentities *collections.Stack[KeyIdentity]

     opts  Options
     infol logg.LevelLogger
@@ -135,10 +140,15 @@ type Cache struct {
 }

 // DrainEvictedIdentities drains the evicted identities from the cache.
-func (c *Cache) DrainEvictedIdentities() []identity.Identity {
+func (c *Cache) DrainEvictedIdentities() []KeyIdentity {
     return c.evictedIdentities.Drain()
 }

+// DrainEvictedIdentitiesMatching drains the evicted identities from the cache that match the given predicate.
+func (c *Cache) DrainEvictedIdentitiesMatching(predicate func(KeyIdentity) bool) []KeyIdentity {
+    return c.evictedIdentities.DrainMatching(predicate)
+}
+
 // ClearMatching clears all partition for which the predicate returns true.
 func (c *Cache) ClearMatching(predicatePartition func(k string, p PartitionManager) bool, predicateValue func(k, v any) bool) {
     if predicatePartition == nil {
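For illustration only (not part of this commit), a minimal sketch of how a caller might use the new KeyIdentity type and DrainEvictedIdentitiesMatching. The "/css/" key filter and the helper name are assumptions for the example; the import paths follow the file paths shown above.

package example

import (
    "strings"

    "github.com/gohugoio/hugo/cache/dynacache"
    "github.com/gohugoio/hugo/identity"
)

// drainCSSEvictions is an illustrative helper: it drains evicted entries whose
// string key contains "/css/" and collects their identities.
func drainCSSEvictions(c *dynacache.Cache) []identity.Identity {
    kis := c.DrainEvictedIdentitiesMatching(func(ki dynacache.KeyIdentity) bool {
        k, ok := ki.Key.(string)
        return ok && strings.Contains(k, "/css/")
    })
    ids := make([]identity.Identity, 0, len(kis))
    for _, ki := range kis {
        ids = append(ids, ki.Identity)
    }
    return ids
}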
cache/httpcache/httpcache.go (vendored, 1 change)

@@ -83,7 +83,6 @@ func (c *Config) Compile() (ConfigCompiled, error) {
 }

 // PollConfig holds the configuration for polling remote resources to detect changes in watch mode.
-// TODO1 make sure this enabled only in watch mode.
 type PollConfig struct {
     // What remote resources to apply this configuration to.
     For GlobMatcher
@@ -65,3 +65,16 @@ func (s *Stack[T]) Drain() []T {
     s.items = nil
     return items
 }
+
+func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    var items []T
+    for i := len(s.items) - 1; i >= 0; i-- {
+        if predicate(s.items[i]) {
+            items = append(items, s.items[i])
+            s.items = append(s.items[:i], s.items[i+1:]...)
+        }
+    }
+    return items
+}
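A small, self-contained sketch (not part of the diff) of what DrainMatching does: matching items are removed and returned, scanning from the top of the stack, and the rest stay behind. The import path for the collections package is an assumption based on the imports shown elsewhere in this commit.

package example

import "github.com/gohugoio/hugo/common/collections"

// drainEvens is illustrative only: drain the even numbers, keep the odd ones.
func drainEvens() []int {
    s := collections.NewStack[int]()
    for i := 1; i <= 5; i++ {
        s.Push(i)
    }
    // Returns 4 and 2 (scanned from the top); 1, 3 and 5 remain on the stack.
    return s.DrainMatching(func(v int) bool { return v%2 == 0 })
}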
@@ -68,6 +68,20 @@ func (e *TimeoutError) Is(target error) bool {
     return ok
 }

+// errMessage wraps an error with a message.
+type errMessage struct {
+    msg string
+    err error
+}
+
+func (e *errMessage) Error() string {
+    return e.msg
+}
+
+func (e *errMessage) Unwrap() error {
+    return e.err
+}
+
 // IsFeatureNotAvailableError returns true if the given error is or contains a FeatureNotAvailableError.
 func IsFeatureNotAvailableError(err error) bool {
     return errors.Is(err, &FeatureNotAvailableError{})
@@ -121,19 +135,38 @@ func IsNotExist(err error) bool {

 var nilPointerErrRe = regexp.MustCompile(`at <(.*)>: error calling (.*?): runtime error: invalid memory address or nil pointer dereference`)

-func ImproveIfNilPointer(inErr error) (outErr error) {
-    outErr = inErr
+const deferredPrefix = "__hdeferred/"
+
+var deferredStringToRemove = regexp.MustCompile(`executing "__hdeferred/.*" `)
+
+// ImproveRenderErr improves the error message for rendering errors.
+func ImproveRenderErr(inErr error) (outErr error) {
+    outErr = inErr
+    msg := improveIfNilPointerMsg(inErr)
+    if msg != "" {
+        outErr = &errMessage{msg: msg, err: outErr}
+    }
+
+    if strings.Contains(inErr.Error(), deferredPrefix) {
+        msg := deferredStringToRemove.ReplaceAllString(inErr.Error(), "executing ")
+        outErr = &errMessage{msg: msg, err: outErr}
+    }
+    return
+}
+
+func improveIfNilPointerMsg(inErr error) string {
     m := nilPointerErrRe.FindStringSubmatch(inErr.Error())
     if len(m) == 0 {
-        return
+        return ""
     }
     call := m[1]
     field := m[2]
     parts := strings.Split(call, ".")
+    if len(parts) < 2 {
+        return ""
+    }
     receiverName := parts[len(parts)-2]
     receiver := strings.Join(parts[:len(parts)-1], ".")
     s := fmt.Sprintf("– %s is nil; wrap it in if or with: {{ with %s }}{{ .%s }}{{ end }}", receiverName, receiver, field)
-    outErr = errors.New(nilPointerErrRe.ReplaceAllString(inErr.Error(), s))
-    return
+    return nilPointerErrRe.ReplaceAllString(inErr.Error(), s)
 }
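To make the intent of errMessage concrete, here is a hedged sketch (not part of the commit) of how the improved error behaves: the message is rewritten, but because errMessage keeps the original error via Unwrap, sentinel checks with errors.Is still see the wrapped error. The fs.ErrNotExist check is just an example.

package example

import (
    "errors"
    "fmt"
    "io/fs"

    "github.com/gohugoio/hugo/common/herrors"
)

// describe is illustrative only: ImproveRenderErr may rewrite the message,
// but the original error is still reachable through the Unwrap chain.
func describe(err error) string {
    improved := herrors.ImproveRenderErr(err)
    if errors.Is(improved, fs.ErrNotExist) {
        return fmt.Sprintf("not found: %s", improved)
    }
    return improved.Error()
}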
@@ -17,24 +17,35 @@ import (
     "bytes"
 )

-// HasBytesWriter is a writer that will set Match to true if the given pattern
-// is found in the stream.
+// HasBytesWriter is a writer will match against a slice of patterns.
 type HasBytesWriter struct {
-    Match   bool
-    Pattern []byte
+    Patterns []*HasBytesPattern

     i    int
     done bool
     buff []byte
 }

+type HasBytesPattern struct {
+    Match   bool
+    Pattern []byte
+}
+
+func (h *HasBytesWriter) patternLen() int {
+    l := 0
+    for _, p := range h.Patterns {
+        l += len(p.Pattern)
+    }
+    return l
+}
+
 func (h *HasBytesWriter) Write(p []byte) (n int, err error) {
     if h.done {
         return len(p), nil
     }

     if len(h.buff) == 0 {
-        h.buff = make([]byte, len(h.Pattern)*2)
+        h.buff = make([]byte, h.patternLen()*2)
     }

     for i := range p {
@@ -46,11 +57,23 @@ func (h *HasBytesWriter) Write(p []byte) (n int, err error) {
             h.i = len(h.buff) / 2
         }

-        if bytes.Contains(h.buff, h.Pattern) {
-            h.Match = true
-            h.done = true
-            return len(p), nil
+        for _, pp := range h.Patterns {
+            if bytes.Contains(h.buff, pp.Pattern) {
+                pp.Match = true
+                done := true
+                for _, ppp := range h.Patterns {
+                    if !ppp.Match {
+                        done = false
+                        break
+                    }
+                }
+                if done {
+                    h.done = true
+                }
+                return len(p), nil
+            }
         }
     }

     return len(p), nil
@@ -34,8 +34,11 @@ func TestHasBytesWriter(t *testing.T) {
         var b bytes.Buffer

         h := &HasBytesWriter{
-            Pattern: []byte("__foo"),
+            Patterns: []*HasBytesPattern{
+                {Pattern: []byte("__foo")},
+            },
         }

         return h, io.MultiWriter(&b, h)
     }
@@ -46,19 +49,19 @@ func TestHasBytesWriter(t *testing.T) {
     for i := 0; i < 22; i++ {
         h, w := neww()
         fmt.Fprintf(w, rndStr()+"abc __foobar"+rndStr())
-        c.Assert(h.Match, qt.Equals, true)
+        c.Assert(h.Patterns[0].Match, qt.Equals, true)

         h, w = neww()
         fmt.Fprintf(w, rndStr()+"abc __f")
         fmt.Fprintf(w, "oo bar"+rndStr())
-        c.Assert(h.Match, qt.Equals, true)
+        c.Assert(h.Patterns[0].Match, qt.Equals, true)

         h, w = neww()
         fmt.Fprintf(w, rndStr()+"abc __moo bar")
-        c.Assert(h.Match, qt.Equals, false)
+        c.Assert(h.Patterns[0].Match, qt.Equals, false)
     }

     h, w := neww()
     fmt.Fprintf(w, "__foo")
-    c.Assert(h.Match, qt.Equals, true)
+    c.Assert(h.Patterns[0].Match, qt.Equals, true)
 }
@@ -74,6 +74,26 @@ func (c *Cache[K, T]) ForEeach(f func(K, T)) {
     }
 }

+func (c *Cache[K, T]) Drain() map[K]T {
+    c.Lock()
+    m := c.m
+    c.m = make(map[K]T)
+    c.Unlock()
+    return m
+}
+
+func (c *Cache[K, T]) Len() int {
+    c.RLock()
+    defer c.RUnlock()
+    return len(c.m)
+}
+
+func (c *Cache[K, T]) Reset() {
+    c.Lock()
+    c.m = make(map[K]T)
+    c.Unlock()
+}
+
 // SliceCache is a simple thread safe cache backed by a map.
 type SliceCache[T any] struct {
     m map[string][]T
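A brief sketch (not part of the commit) of the new Cache helpers added above; the string/int key and value types are arbitrary examples.

package example

import "github.com/gohugoio/hugo/common/maps"

// snapshot is illustrative only: Drain hands back the current map and leaves
// an empty cache behind, so the caller owns the returned map exclusively.
func snapshot(c *maps.Cache[string, int]) (map[string]int, int) {
    before := c.Len() // number of entries prior to draining
    m := c.Drain()    // take ownership of the underlying map; the cache is now empty
    return m, before
}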
@@ -237,12 +237,17 @@ func prettifyPath(in string, b filepathPathBridge) string {
     return b.Join(b.Dir(in), name, "index"+ext)
 }

-// CommonDir returns the common directory of the given paths.
-func CommonDir(path1, path2 string) string {
+// CommonDirPath returns the common directory of the given paths.
+func CommonDirPath(path1, path2 string) string {
     if path1 == "" || path2 == "" {
         return ""
     }

+    hadLeadingSlash := strings.HasPrefix(path1, "/") || strings.HasPrefix(path2, "/")
+
+    path1 = TrimLeading(path1)
+    path2 = TrimLeading(path2)
+
     p1 := strings.Split(path1, "/")
     p2 := strings.Split(path2, "/")

@@ -256,7 +261,13 @@ func CommonDir(path1, path2 string) string {
         }
     }

-    return strings.Join(common, "/")
+    s := strings.Join(common, "/")
+
+    if hadLeadingSlash && s != "" {
+        s = "/" + s
+    }
+
+    return s
 }

 // Sanitize sanitizes string to be used in Hugo's file paths and URLs, allowing only
@@ -384,12 +395,27 @@ func PathEscape(pth string) string {

 // ToSlashTrimLeading is just a filepath.ToSlash with an added / prefix trimmer.
 func ToSlashTrimLeading(s string) string {
-    return strings.TrimPrefix(filepath.ToSlash(s), "/")
+    return TrimLeading(filepath.ToSlash(s))
+}
+
+// TrimLeading trims the leading slash from the given string.
+func TrimLeading(s string) string {
+    return strings.TrimPrefix(s, "/")
 }

 // ToSlashTrimTrailing is just a filepath.ToSlash with an added / suffix trimmer.
 func ToSlashTrimTrailing(s string) string {
-    return strings.TrimSuffix(filepath.ToSlash(s), "/")
+    return TrimTrailing(filepath.ToSlash(s))
+}
+
+// TrimTrailing trims the trailing slash from the given string.
+func TrimTrailing(s string) string {
+    return strings.TrimSuffix(s, "/")
+}
+
+// ToSlashTrim trims any leading and trailing slashes from the given string and converts it to a forward slash separated path.
+func ToSlashTrim(s string) string {
+    return strings.Trim(filepath.ToSlash(s), "/")
 }

 // ToSlashPreserveLeading converts the path given to a forward slash separated path
@@ -397,3 +423,8 @@ func ToSlashTrimTrailing(s string) string {
 func ToSlashPreserveLeading(s string) string {
     return "/" + strings.Trim(filepath.ToSlash(s), "/")
 }
+
+// IsSameFilePath checks if s1 and s2 are the same file path.
+func IsSameFilePath(s1, s2 string) bool {
+    return path.Clean(ToSlashTrim(s1)) == path.Clean(ToSlashTrim(s2))
+}
@@ -262,3 +262,52 @@ func TestFieldsSlash(t *testing.T) {
     c.Assert(FieldsSlash("/"), qt.DeepEquals, []string{})
     c.Assert(FieldsSlash(""), qt.DeepEquals, []string{})
 }
+
+func TestCommonDirPath(t *testing.T) {
+    c := qt.New(t)
+
+    for _, this := range []struct {
+        a, b, expected string
+    }{
+        {"/a/b/c", "/a/b/d", "/a/b"},
+        {"/a/b/c", "a/b/d", "/a/b"},
+        {"a/b/c", "/a/b/d", "/a/b"},
+        {"a/b/c", "a/b/d", "a/b"},
+        {"/a/b/c", "/a/b/c", "/a/b/c"},
+        {"/a/b/c", "/a/b/c/d", "/a/b/c"},
+        {"/a/b/c", "/a/b", "/a/b"},
+        {"/a/b/c", "/a", "/a"},
+        {"/a/b/c", "/d/e/f", ""},
+    } {
+        c.Assert(CommonDirPath(this.a, this.b), qt.Equals, this.expected, qt.Commentf("a: %s b: %s", this.a, this.b))
+    }
+}
+
+func TestIsSameFilePath(t *testing.T) {
+    c := qt.New(t)
+
+    for _, this := range []struct {
+        a, b     string
+        expected bool
+    }{
+        {"/a/b/c", "/a/b/c", true},
+        {"/a/b/c", "/a/b/c/", true},
+        {"/a/b/c", "/a/b/d", false},
+        {"/a/b/c", "/a/b", false},
+        {"/a/b/c", "/a/b/c/d", false},
+        {"/a/b/c", "/a/b/cd", false},
+        {"/a/b/c", "/a/b/cc", false},
+        {"/a/b/c", "/a/b/c/", true},
+        {"/a/b/c", "/a/b/c//", true},
+        {"/a/b/c", "/a/b/c/.", true},
+        {"/a/b/c", "/a/b/c/./", true},
+        {"/a/b/c", "/a/b/c/./.", true},
+        {"/a/b/c", "/a/b/c/././", true},
+        {"/a/b/c", "/a/b/c/././.", true},
+        {"/a/b/c", "/a/b/c/./././", true},
+        {"/a/b/c", "/a/b/c/./././.", true},
+        {"/a/b/c", "/a/b/c/././././", true},
+    } {
+        c.Assert(IsSameFilePath(filepath.FromSlash(this.a), filepath.FromSlash(this.b)), qt.Equals, this.expected, qt.Commentf("a: %s b: %s", this.a, this.b))
+    }
+}
@@ -458,6 +458,7 @@ func (l *configLoader) loadModules(configs *Configs) (modules.ModulesConfig, *mo
     conf := configs.Base
     workingDir := bcfg.WorkingDir
     themesDir := bcfg.ThemesDir
+    publishDir := bcfg.PublishDir

     cfg := configs.LoadingInfo.Cfg

@@ -492,6 +493,7 @@ func (l *configLoader) loadModules(configs *Configs) (modules.ModulesConfig, *mo
     HookBeforeFinalize: hook,
     WorkingDir:         workingDir,
     ThemesDir:          themesDir,
+    PublishDir:         publishDir,
     Environment:        l.Environment,
     CacheDir:           conf.Caches.CacheDirModules(),
     ModuleConfig:       conf.Module,
deps/deps.go (vendored, 58 changes)

@@ -15,6 +15,7 @@ import (
     "github.com/gohugoio/hugo/cache/filecache"
     "github.com/gohugoio/hugo/common/hexec"
     "github.com/gohugoio/hugo/common/loggers"
+    "github.com/gohugoio/hugo/common/maps"
     "github.com/gohugoio/hugo/common/types"
     "github.com/gohugoio/hugo/config"
     "github.com/gohugoio/hugo/config/allconfig"
@@ -135,6 +136,15 @@ func (d *Deps) Init() error {
     if d.BuildState == nil {
         d.BuildState = &BuildState{}
     }
+    if d.BuildState.DeferredExecutions == nil {
+        if d.BuildState.DeferredExecutionsGroupedByRenderingContext == nil {
+            d.BuildState.DeferredExecutionsGroupedByRenderingContext = make(map[tpl.RenderingContext]*DeferredExecutions)
+        }
+        d.BuildState.DeferredExecutions = &DeferredExecutions{
+            Executions:              maps.NewCache[string, *tpl.DeferredExecution](),
+            FilenamesWithPostPrefix: maps.NewCache[string, bool](),
+        }
+    }

     if d.BuildStartListeners == nil {
         d.BuildStartListeners = &Listeners{}
@@ -161,20 +171,29 @@ func (d *Deps) Init() error {
     }

     if d.PathSpec == nil {
-        hashBytesReceiverFunc := func(name string, match bool) {
-            if !match {
-                return
+        hashBytesReceiverFunc := func(name string, match []byte) {
+            s := string(match)
+            switch s {
+            case postpub.PostProcessPrefix:
+                d.BuildState.AddFilenameWithPostPrefix(name)
+            case tpl.HugoDeferredTemplatePrefix:
+                d.BuildState.DeferredExecutions.FilenamesWithPostPrefix.Set(name, true)
             }
-            d.BuildState.AddFilenameWithPostPrefix(name)
         }

         // Skip binary files.
         mediaTypes := d.Conf.GetConfigSection("mediaTypes").(media.Types)
-        hashBytesSHouldCheck := func(name string) bool {
+        hashBytesShouldCheck := func(name string) bool {
             ext := strings.TrimPrefix(filepath.Ext(name), ".")
             return mediaTypes.IsTextSuffix(ext)
         }
-        d.Fs.PublishDir = hugofs.NewHasBytesReceiver(d.Fs.PublishDir, hashBytesSHouldCheck, hashBytesReceiverFunc, []byte(postpub.PostProcessPrefix))
+        d.Fs.PublishDir = hugofs.NewHasBytesReceiver(
+            d.Fs.PublishDir,
+            hashBytesShouldCheck,
+            hashBytesReceiverFunc,
+            []byte(tpl.HugoDeferredTemplatePrefix),
+            []byte(postpub.PostProcessPrefix))

         pathSpec, err := helpers.NewPathSpec(d.Fs, d.Conf, d.Log)
         if err != nil {
             return err
@@ -371,10 +390,37 @@ type BuildState struct {
     // A set of filenames in /public that
     // contains a post-processing prefix.
     filenamesWithPostPrefix map[string]bool

+    DeferredExecutions *DeferredExecutions
+
+    // Deferred executions grouped by rendering context.
+    DeferredExecutionsGroupedByRenderingContext map[tpl.RenderingContext]*DeferredExecutions
+}
+
+type DeferredExecutions struct {
+    // A set of filenames in /public that
+    // contains a post-processing prefix.
+    FilenamesWithPostPrefix *maps.Cache[string, bool]
+
+    // Maps a placeholder to a deferred execution.
+    Executions *maps.Cache[string, *tpl.DeferredExecution]
 }

 var _ identity.SignalRebuilder = (*BuildState)(nil)

+// StartStageRender will be called before a stage is rendered.
+func (b *BuildState) StartStageRender(stage tpl.RenderingContext) {
+}
+
+// StopStageRender will be called after a stage is rendered.
+func (b *BuildState) StopStageRender(stage tpl.RenderingContext) {
+    b.DeferredExecutionsGroupedByRenderingContext[stage] = b.DeferredExecutions
+    b.DeferredExecutions = &DeferredExecutions{
+        Executions:              maps.NewCache[string, *tpl.DeferredExecution](),
+        FilenamesWithPostPrefix: maps.NewCache[string, bool](),
+    }
+}
+
 func (b *BuildState) SignalRebuild(ids ...identity.Identity) {
     b.OnSignalRebuild(ids...)
 }
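A hedged sketch (not part of the commit) of the stage lifecycle introduced above, assuming an initialized BuildState as set up in Deps.Init: while a stage renders, deferred executions accumulate in the current bucket; StopStageRender then files that bucket under the stage's rendering context and installs a fresh one.

package example

import (
    "github.com/gohugoio/hugo/deps"
    "github.com/gohugoio/hugo/tpl"
)

// renderStage is illustrative only: it frames a render callback with the
// start/stop hooks and then reads back the executions grouped under rc.
func renderStage(b *deps.BuildState, rc tpl.RenderingContext, render func()) *deps.DeferredExecutions {
    b.StartStageRender(rc)
    render() // templates may register deferred executions here
    b.StopStageRender(rc)

    // The executions registered above are now keyed by this rendering context.
    return b.DeferredExecutionsGroupedByRenderingContext[rc]
}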
@@ -28,12 +28,12 @@ var (
 type hasBytesFs struct {
     afero.Fs
     shouldCheck      func(name string) bool
-    hasBytesCallback func(name string, match bool)
-    pattern          []byte
+    hasBytesCallback func(name string, match []byte)
+    patterns         [][]byte
 }

-func NewHasBytesReceiver(delegate afero.Fs, shouldCheck func(name string) bool, hasBytesCallback func(name string, match bool), pattern []byte) afero.Fs {
-    return &hasBytesFs{Fs: delegate, shouldCheck: shouldCheck, hasBytesCallback: hasBytesCallback, pattern: pattern}
+func NewHasBytesReceiver(delegate afero.Fs, shouldCheck func(name string) bool, hasBytesCallback func(name string, match []byte), patterns ...[]byte) afero.Fs {
+    return &hasBytesFs{Fs: delegate, shouldCheck: shouldCheck, hasBytesCallback: hasBytesCallback, patterns: patterns}
 }

 func (fs *hasBytesFs) UnwrapFilesystem() afero.Fs {
@@ -60,10 +60,15 @@ func (fs *hasBytesFs) wrapFile(f afero.File) afero.File {
     if !fs.shouldCheck(f.Name()) {
         return f
     }
+    patterns := make([]*hugio.HasBytesPattern, len(fs.patterns))
+    for i, p := range fs.patterns {
+        patterns[i] = &hugio.HasBytesPattern{Pattern: p}
+    }
+
     return &hasBytesFile{
         File: f,
         hbw: &hugio.HasBytesWriter{
-            Pattern: fs.pattern,
+            Patterns: patterns,
         },
         hasBytesCallback: fs.hasBytesCallback,
     }
@@ -74,7 +79,7 @@ func (fs *hasBytesFs) Name() string {
 }

 type hasBytesFile struct {
-    hasBytesCallback func(name string, match bool)
+    hasBytesCallback func(name string, match []byte)
     hbw              *hugio.HasBytesWriter
     afero.File
 }
@@ -88,6 +93,10 @@ func (h *hasBytesFile) Write(p []byte) (n int, err error) {
 }

 func (h *hasBytesFile) Close() error {
-    h.hasBytesCallback(h.Name(), h.hbw.Match)
+    for _, p := range h.hbw.Patterns {
+        if p.Match {
+            h.hasBytesCallback(h.Name(), p.Pattern)
+        }
+    }
     return h.File.Close()
 }
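For context, a hedged sketch (not part of the commit) of wiring NewHasBytesReceiver with several patterns, as the new variadic signature above allows. The in-memory filesystem and the pattern literals are assumptions for the example.

package example

import (
    "github.com/gohugoio/hugo/hugofs"
    "github.com/spf13/afero"
)

// newTrackingFs is illustrative only: it wraps a filesystem so that, when a
// checked file is closed, the callback fires once per pattern seen in the
// bytes written to that file.
func newTrackingFs(found map[string][]string) afero.Fs {
    return hugofs.NewHasBytesReceiver(
        afero.NewMemMapFs(),
        func(name string) bool { return true }, // check every file in this sketch
        func(name string, match []byte) {
            found[name] = append(found[name], string(match))
        },
        []byte("__pattern_a"),
        []byte("__pattern_b"),
    )
}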
@@ -323,6 +323,7 @@ type ComponentPath struct {
     Component string
     Path      string
     Lang      string
+    Watch     bool
 }

 func (c ComponentPath) ComponentPathJoined() string {
@@ -376,6 +377,7 @@ func (fs *RootMappingFs) ReverseLookupComponent(component, filename string) ([]C
     Component: first.FromBase,
     Path:      paths.ToSlashTrimLeading(filename),
     Lang:      first.Meta.Lang,
+    Watch:     first.Meta.Watch,
 })
 }
@@ -33,6 +33,7 @@ import (
     "github.com/gohugoio/hugo/common/rungroup"
     "github.com/gohugoio/hugo/common/types"
     "github.com/gohugoio/hugo/hugofs/files"
+    "github.com/gohugoio/hugo/hugofs/glob"
     "github.com/gohugoio/hugo/hugolib/doctree"
     "github.com/gohugoio/hugo/hugolib/pagesfromdata"
     "github.com/gohugoio/hugo/identity"
@@ -1002,7 +1003,7 @@ func (m *pageMap) debugPrint(prefix string, maxLevel int, w io.Writer) {
     }
     const indentStr = "  "
     p := n.(*pageState)
-    s := strings.TrimPrefix(keyPage, paths.CommonDir(prevKey, keyPage))
+    s := strings.TrimPrefix(keyPage, paths.CommonDirPath(prevKey, keyPage))
     lenIndent := len(keyPage) - len(s)
     fmt.Fprint(w, strings.Repeat(indentStr, lenIndent))
     info := fmt.Sprintf("%s lm: %s (%s)", s, p.Lastmod().Format("2006-01-02"), p.Kind())
@@ -1047,6 +1048,59 @@ func (m *pageMap) debugPrint(prefix string, maxLevel int, w io.Writer) {
     }
 }

+func (h *HugoSites) dynacacheGCFilenameIfNotWatchedAndDrainMatching(filename string) {
+    cpss := h.BaseFs.ResolvePaths(filename)
+    if len(cpss) == 0 {
+        return
+    }
+    // Compile cache busters.
+    var cacheBusters []func(string) bool
+    for _, cps := range cpss {
+        if cps.Watch {
+            continue
+        }
+        np := glob.NormalizePath(path.Join(cps.Component, cps.Path))
+        g, err := h.ResourceSpec.BuildConfig().MatchCacheBuster(h.Log, np)
+        if err == nil && g != nil {
+            cacheBusters = append(cacheBusters, g)
+        }
+    }
+    if len(cacheBusters) == 0 {
+        return
+    }
+    cacheBusterOr := func(s string) bool {
+        for _, cb := range cacheBusters {
+            if cb(s) {
+                return true
+            }
+        }
+        return false
+    }
+
+    h.dynacacheGCCacheBuster(cacheBusterOr)
+
+    // We want to avoid that evicted items in the above is considered in the next step server change.
+    _ = h.MemCache.DrainEvictedIdentitiesMatching(func(ki dynacache.KeyIdentity) bool {
+        return cacheBusterOr(ki.Key.(string))
+    })
+}
+
+func (h *HugoSites) dynacacheGCCacheBuster(cachebuster func(s string) bool) {
+    if cachebuster == nil {
+        return
+    }
+    shouldDelete := func(k, v any) bool {
+        var b bool
+        if s, ok := k.(string); ok {
+            b = cachebuster(s)
+        }
+
+        return b
+    }
+
+    h.MemCache.ClearMatching(nil, shouldDelete)
+}
+
 func (h *HugoSites) resolveAndClearStateForIdentities(
     ctx context.Context,
     l logg.LevelLogger,
@@ -1095,25 +1149,10 @@ func (h *HugoSites) resolveAndClearStateForIdentities(
     // 1. Handle the cache busters first, as those may produce identities for the page reset step.
     // 2. Then reset the page outputs, which may mark some resources as stale.
     // 3. Then GC the cache.
-    // TOOD1
     if cachebuster != nil {
         if err := loggers.TimeTrackfn(func() (logg.LevelLogger, error) {
             ll := l.WithField("substep", "gc dynacache cachebuster")
-
-            shouldDelete := func(k, v any) bool {
-                if cachebuster == nil {
-                    return false
-                }
-                var b bool
-                if s, ok := k.(string); ok {
-                    b = cachebuster(s)
-                }
-
-                return b
-            }
-
-            h.MemCache.ClearMatching(nil, shouldDelete)
-
+            h.dynacacheGCCacheBuster(cachebuster)
             return ll, nil
         }); err != nil {
             return err
@@ -1123,7 +1162,9 @@ func (h *HugoSites) resolveAndClearStateForIdentities(
     // Drain the cache eviction stack.
     evicted := h.Deps.MemCache.DrainEvictedIdentities()
     if len(evicted) < 200 {
-        changes = append(changes, evicted...)
+        for _, c := range evicted {
+            changes = append(changes, c.Identity)
+        }
     } else {
         // Mass eviction, we might as well invalidate everything.
         changes = []identity.Identity{identity.GenghisKhan}
@@ -720,7 +720,7 @@ func (b *sourceFilesystemsBuilder) createOverlayFs(
     ModuleOrdinal: md.ordinal,
     IsProject:     md.isMainProject,
     Meta: &hugofs.FileMeta{
-        Watch:           md.Watch(),
+        Watch:           !mount.DisableWatch && md.Watch(),
         Weight:          mountWeight,
         InclusionFilter: inclusionFilter,
     },
@@ -26,6 +26,7 @@ import (
     "time"

     "github.com/bep/logg"
+    "github.com/gohugoio/hugo/bufferpool"
     "github.com/gohugoio/hugo/deps"
     "github.com/gohugoio/hugo/hugofs"
    "github.com/gohugoio/hugo/hugofs/files"
@@ -173,6 +174,16 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
     h.SendError(fmt.Errorf("postRenderOnce: %w", err))
 }

+    // Make sure to write any build stats to disk first so it's available
+    // to the post processors.
+    if err := h.writeBuildStats(); err != nil {
+        return err
+    }
+
+    if err := h.renderDeferred(infol); err != nil {
+        h.SendError(fmt.Errorf("renderDeferred: %w", err))
+    }
+
     if err := h.postProcess(infol); err != nil {
         h.SendError(fmt.Errorf("postProcess: %w", err))
     }
@@ -352,49 +363,174 @@ func (h *HugoSites) render(l logg.LevelLogger, config *BuildCfg) error {
             continue
         }

-        siteRenderContext.outIdx = siteOutIdx
-        siteRenderContext.sitesOutIdx = i
-        i++
-
-        select {
-        case <-h.Done():
-            return nil
-        default:
-            for _, s2 := range h.Sites {
-                // We render site by site, but since the content is lazily rendered
-                // and a site can "borrow" content from other sites, every site
-                // needs this set.
-                s2.rc = &siteRenderingContext{Format: renderFormat}
-
-                if err := s2.preparePagesForRender(s == s2, siteRenderContext.sitesOutIdx); err != nil {
-                    return err
-                }
-            }
-            if !config.SkipRender {
-                ll := l.WithField("substep", "pages").
-                    WithField("site", s.language.Lang).
-                    WithField("outputFormat", renderFormat.Name)
-
-                start := time.Now()
-
-                if config.PartialReRender {
-                    if err := s.renderPages(siteRenderContext); err != nil {
-                        return err
-                    }
-                } else {
-                    if err := s.render(siteRenderContext); err != nil {
-                        return err
-                    }
-                }
-                loggers.TimeTrackf(ll, start, nil, "")
-            }
+        if err := func() error {
+            rc := tpl.RenderingContext{Site: s, SiteOutIdx: siteOutIdx}
+            h.BuildState.StartStageRender(rc)
+            defer h.BuildState.StopStageRender(rc)
+
+            siteRenderContext.outIdx = siteOutIdx
+            siteRenderContext.sitesOutIdx = i
+            i++
+
+            select {
+            case <-h.Done():
+                return nil
+            default:
+                for _, s2 := range h.Sites {
+                    if err := s2.preparePagesForRender(s == s2, siteRenderContext.sitesOutIdx); err != nil {
+                        return err
+                    }
+                }
+                if !config.SkipRender {
+                    ll := l.WithField("substep", "pages").
+                        WithField("site", s.language.Lang).
+                        WithField("outputFormat", renderFormat.Name)
+
+                    start := time.Now()
+
+                    if config.PartialReRender {
+                        if err := s.renderPages(siteRenderContext); err != nil {
+                            return err
+                        }
+                    } else {
+                        if err := s.render(siteRenderContext); err != nil {
+                            return err
+                        }
+                    }
+                    loggers.TimeTrackf(ll, start, nil, "")
+                }
+            }
+            return nil
+        }(); err != nil {
+            return err
         }
         }
     }

     return nil
 }

+func (h *HugoSites) renderDeferred(l logg.LevelLogger) error {
+    l = l.WithField("step", "render deferred")
+    start := time.Now()
+
+    var deferredCount int
+
+    for rc, de := range h.Deps.BuildState.DeferredExecutionsGroupedByRenderingContext {
+        if de.FilenamesWithPostPrefix.Len() == 0 {
+            continue
+        }
+
+        deferredCount += de.FilenamesWithPostPrefix.Len()
+
+        s := rc.Site.(*Site)
+        for _, s2 := range h.Sites {
+            if err := s2.preparePagesForRender(s == s2, rc.SiteOutIdx); err != nil {
+                return err
+            }
+        }
+        if err := s.executeDeferredTemplates(de); err != nil {
+            return herrors.ImproveRenderErr(err)
+        }
+    }
+
+    loggers.TimeTrackf(l, start, logg.Fields{
+        logg.Field{Name: "count", Value: deferredCount},
+    }, "")
+
+    return nil
+}
+
+func (s *Site) executeDeferredTemplates(de *deps.DeferredExecutions) error {
+    handleFile := func(filename string) error {
+        content, err := afero.ReadFile(s.BaseFs.PublishFs, filename)
+        if err != nil {
+            return err
+        }
+
+        k := 0
+        changed := false
+
+        for {
+            if k >= len(content) {
+                break
+            }
+            l := bytes.Index(content[k:], []byte(tpl.HugoDeferredTemplatePrefix))
+            if l == -1 {
+                break
+            }
+            m := bytes.Index(content[k+l:], []byte(tpl.HugoDeferredTemplateSuffix)) + len(tpl.HugoDeferredTemplateSuffix)
+
+            low, high := k+l, k+l+m
+
+            forward := l + m
+            id := string(content[low:high])
+
+            if err := func() error {
+                deferred, found := de.Executions.Get(id)
+                if !found {
+                    panic(fmt.Sprintf("deferred execution with id %q not found", id))
+                }
+                deferred.Mu.Lock()
+                defer deferred.Mu.Unlock()
+
+                if !deferred.Executed {
+                    tmpl := s.Deps.Tmpl()
+                    templ, found := tmpl.Lookup(deferred.TemplateName)
+                    if !found {
+                        panic(fmt.Sprintf("template %q not found", deferred.TemplateName))
+                    }
+
+                    if err := func() error {
+                        buf := bufferpool.GetBuffer()
+                        defer bufferpool.PutBuffer(buf)
+
+                        err = tmpl.ExecuteWithContext(deferred.Ctx, templ, buf, deferred.Data)
+                        if err != nil {
+                            return err
+                        }
+                        deferred.Result = buf.String()
+                        deferred.Executed = true
+
+                        return nil
+                    }(); err != nil {
+                        return err
+                    }
+                }
+
+                content = append(content[:low], append([]byte(deferred.Result), content[high:]...)...)
+                changed = true
+
+                return nil
+            }(); err != nil {
+                return err
+            }
+
+            k += forward
+        }
+
+        if changed {
+            return afero.WriteFile(s.BaseFs.PublishFs, filename, content, 0o666)
+        }
+
+        return nil
+    }
+
+    g := rungroup.Run[string](context.Background(), rungroup.Config[string]{
+        NumWorkers: s.h.numWorkers,
+        Handle: func(ctx context.Context, filename string) error {
+            return handleFile(filename)
+        },
+    })
+
+    de.FilenamesWithPostPrefix.ForEeach(func(filename string, _ bool) {
+        g.Enqueue(filename)
+    })
+
+    return g.Wait()
+}
+
 // / postRenderOnce runs some post processing that only needs to be done once, e.g. printing of unused templates.
 func (h *HugoSites) postRenderOnce() error {
     h.postRenderInit.Do(func() {
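To clarify the placeholder scanning in executeDeferredTemplates above, here is a standalone sketch of the same idea: locate a prefix-delimited token that ends with the suffix and splice in a replacement. The constants mirror tpl.HugoDeferredTemplatePrefix and tpl.HugoDeferredTemplateSuffix from this commit; the resolve callback and function name are assumptions for the example.

package example

import "bytes"

const (
    deferredPrefix = "__hdeferred/" // mirrors tpl.HugoDeferredTemplatePrefix
    deferredSuffix = "__d="         // mirrors tpl.HugoDeferredTemplateSuffix
)

// replacePlaceholders is illustrative only: it replaces every prefix...suffix
// placeholder in content with whatever resolve returns for that placeholder id.
func replacePlaceholders(content []byte, resolve func(id string) []byte) []byte {
    k := 0
    for k < len(content) {
        l := bytes.Index(content[k:], []byte(deferredPrefix))
        if l == -1 {
            break
        }
        mi := bytes.Index(content[k+l:], []byte(deferredSuffix))
        if mi == -1 {
            break
        }
        m := mi + len(deferredSuffix)
        low, high := k+l, k+l+m
        id := string(content[low:high]) // the id includes prefix and suffix, as in the commit
        repl := resolve(id)
        content = append(content[:low], append(repl, content[high:]...)...)
        k = low + len(repl) // continue scanning after the spliced-in result
    }
    return content
}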
@@ -428,12 +564,6 @@ func (h *HugoSites) postProcess(l logg.LevelLogger) error {
     l = l.WithField("step", "postProcess")
     defer loggers.TimeTrackf(l, time.Now(), nil, "")

-    // Make sure to write any build stats to disk first so it's available
-    // to the post processors.
-    if err := h.writeBuildStats(); err != nil {
-        return err
-    }
-
     // This will only be set when js.Build have been triggered with
     // imports that resolves to the project or a module.
     // Write a jsconfig.json file to the project's /asset directory
@@ -600,6 +730,10 @@ func (h *HugoSites) writeBuildStats() error {
         }
     }

+    // This step may be followed by a post process step that may
+    // rebuild e.g. CSS, so clear any cache that's defined for the hugo_stats.json.
+    h.dynacacheGCFilenameIfNotWatchedAndDrainMatching(filename)
+
     return nil
 }

@@ -628,3 +628,20 @@ title: "A page"

     b.CreateSites().BuildFail(BuildCfg{})
 }
+
+func TestErrorTemplateRuntime(t *testing.T) {
+    t.Parallel()
+
+    files := `
+-- hugo.toml --
+-- layouts/index.html --
+Home.
+{{ .ThisDoesNotExist }}
+`
+
+    b, err := TestE(t, files)
+
+    b.Assert(err, qt.Not(qt.IsNil))
+    b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`/layouts/index.html:2:3`))
+    b.Assert(err.Error(), qt.Contains, `can't evaluate field ThisDoesNotExist`)
+}
@@ -62,7 +62,7 @@ import (
 )

 func (s *Site) Taxonomies() page.TaxonomyList {
-    s.checkReady()
+    s.CheckReady()
     s.init.taxonomies.Do(context.Background())
     return s.taxonomies
 }
@@ -200,12 +200,8 @@ func (s *Site) prepareInits() {
     })
 }

-type siteRenderingContext struct {
-    output.Format
-}
-
 func (s *Site) Menus() navigation.Menus {
-    s.checkReady()
+    s.CheckReady()
     s.init.menus.Do(context.Background())
     return s.menus
 }
@@ -810,7 +806,7 @@ func (s *Site) errorCollator(results <-chan error, errs chan<- error) {
 // as possible for existing sites. Most sites will use {{ .Site.GetPage "section" "my/section" }},
 // i.e. 2 arguments, so we test for that.
 func (s *Site) GetPage(ref ...string) (page.Page, error) {
-    s.checkReady()
+    s.CheckReady()
     p, err := s.s.getPageForRefs(ref...)

     if p == nil {
@@ -88,10 +88,6 @@ type Site struct {
     publisher          publisher.Publisher
     frontmatterHandler pagemeta.FrontMatterHandler

-    // We render each site for all the relevant output formats in serial with
-    // this rendering context pointing to the current one.
-    rc *siteRenderingContext
-
     // The output formats that we need to render this site in. This slice
     // will be fixed once set.
     // This will be the union of Site.Pages' outputFormats.
@@ -439,7 +435,7 @@ func (s *Site) Current() page.Site {

 // MainSections returns the list of main sections.
 func (s *Site) MainSections() []string {
-    s.checkReady()
+    s.CheckReady()
     return s.conf.C.MainSections
 }

@@ -458,7 +454,7 @@ func (s *Site) BaseURL() string {

 // Deprecated: Use .Site.Lastmod instead.
 func (s *Site) LastChange() time.Time {
-    s.checkReady()
+    s.CheckReady()
     hugo.Deprecate(".Site.LastChange", "Use .Site.Lastmod instead.", "v0.123.0")
     return s.lastmod
 }
@@ -547,7 +543,7 @@ func (s *Site) ForEeachIdentityByName(name string, f func(identity.Identity) boo
 // Pages returns all pages.
 // This is for the current language only.
 func (s *Site) Pages() page.Pages {
-    s.checkReady()
+    s.CheckReady()
     return s.pageMap.getPagesInSection(
         pageMapQueryPagesInSection{
             pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
@@ -564,7 +560,7 @@ func (s *Site) Pages() page.Pages {
 // RegularPages returns all the regular pages.
 // This is for the current language only.
 func (s *Site) RegularPages() page.Pages {
-    s.checkReady()
+    s.CheckReady()
     return s.pageMap.getPagesInSection(
         pageMapQueryPagesInSection{
             pageMapQueryPagesBelowPath: pageMapQueryPagesBelowPath{
@@ -579,17 +575,17 @@ func (s *Site) RegularPages() page.Pages {

 // AllPages returns all pages for all sites.
 func (s *Site) AllPages() page.Pages {
-    s.checkReady()
+    s.CheckReady()
     return s.h.Pages()
 }

 // AllRegularPages returns all regular pages for all sites.
 func (s *Site) AllRegularPages() page.Pages {
-    s.checkReady()
+    s.CheckReady()
     return s.h.RegularPages()
 }

-func (s *Site) checkReady() {
+func (s *Site) CheckReady() {
     if s.state != siteStateReady {
         panic("this method cannot be called before the site is fully initialized")
     }
@@ -111,7 +111,7 @@ func (s *Site) renderPages(ctx *siteRenderContext) error {

     err := <-errs
     if err != nil {
-        return fmt.Errorf("failed to render pages: %w", herrors.ImproveIfNilPointer(err))
+        return fmt.Errorf("failed to render pages: %w", herrors.ImproveRenderErr(err))
     }
     return nil
 }
@@ -226,7 +226,7 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
     paginatePath := s.Conf.Pagination().Path

     d := p.targetPathDescriptor
-    f := p.s.rc.Format
+    f := p.outputFormat()
     d.Type = f

     if p.paginator.current == nil || p.paginator.current != p.paginator.current.First() {
@@ -19,12 +19,12 @@ import (

 // Sections returns the top level sections.
 func (s *Site) Sections() page.Pages {
-    s.checkReady()
+    s.CheckReady()
     return s.Home().Sections()
 }

 // Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
 func (s *Site) Home() page.Page {
-    s.checkReady()
+    s.CheckReady()
     return s.s.home
 }
@@ -760,6 +760,9 @@ type ClientConfig struct {
     // Absolute path to the project's themes dir.
     ThemesDir string

+    // The publish dir.
+    PublishDir string
+
     // Eg. "production"
     Environment string
@@ -51,12 +51,16 @@ github.com/gohugoio/hugoTestModules1_darwin/modh2_2@v1.4.0 github.com/gohugoio/h
     themesDir := filepath.Join(workingDir, "themes")
     err = os.Mkdir(themesDir, 0o777)
     c.Assert(err, qt.IsNil)
+    publishDir := filepath.Join(workingDir, "public")
+    err = os.Mkdir(publishDir, 0o777)
+    c.Assert(err, qt.IsNil)

     ccfg := ClientConfig{
         Fs:         hugofs.Os,
-        WorkingDir: workingDir,
         CacheDir:   filepath.Join(workingDir, "modcache"),
+        WorkingDir: workingDir,
         ThemesDir:  themesDir,
+        PublishDir: publishDir,
         Exec:       hexec.New(security.DefaultConfig),
     }
@@ -27,6 +27,7 @@ import (
     "github.com/bep/debounce"
     "github.com/gohugoio/hugo/common/herrors"
     "github.com/gohugoio/hugo/common/loggers"
+    "github.com/gohugoio/hugo/common/paths"

     "github.com/spf13/cast"

@@ -657,7 +658,13 @@ func (c *collector) normalizeMounts(owner *moduleAdapter, mounts []Mount) ([]Mou
     // Verify that Source exists
     _, err := c.fs.Stat(sourceDir)
     if err != nil {
-        if strings.HasSuffix(sourceDir, files.FilenameHugoStatsJSON) {
+        if paths.IsSameFilePath(sourceDir, c.ccfg.PublishDir) {
+            // This is a little exotic, but there are use cases for mounting the public folder.
+            // This will typically also be in .gitingore, so create it.
+            if err := c.fs.MkdirAll(sourceDir, 0o755); err != nil {
+                return nil, fmt.Errorf("%s: %q", errMsg, err)
+            }
+        } else if strings.HasSuffix(sourceDir, files.FilenameHugoStatsJSON) {
             // A common pattern for Tailwind 3 is to mount that file to get it on the server watch list.

             // A common pattern is also to add hugo_stats.json to .gitignore.
@@ -669,6 +676,7 @@ func (c *collector) normalizeMounts(owner *moduleAdapter, mounts []Mount) ([]Mou
             }
             f.Close()
         } else {
+            c.logger.Warnf("module %q: mount source %q does not exist", owner.Path(), sourceDir)
             continue
         }
     }
|
||||||
|
|
||||||
// Exclude all files matching the given Glob patterns (string or slice).
|
// Exclude all files matching the given Glob patterns (string or slice).
|
||||||
ExcludeFiles any
|
ExcludeFiles any
|
||||||
|
|
||||||
|
// Disable watching in watch mode for this mount.
|
||||||
|
DisableWatch bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// Used as key to remove duplicates.
|
// Used as key to remove duplicates.
|
||||||
|
|
|
@@ -134,6 +134,12 @@ type Site interface {

     // Deprecated: Use .Site.Home.OutputFormats.Get "rss" instead.
     RSSLink() template.URL
+
+    // For internal use only.
+    // This will panic if the site is not fully initialized.
+    // This is typically used to inform the user in the content adapter templates,
+    // as these are executed before all the page collections etc. are ready to use.
+    CheckReady()
 }

 // Sites represents an ordered list of sites (languages).
@@ -326,6 +332,11 @@ func (s *siteWrapper) ForEeachIdentityByName(name string, f func(identity.Identi
     s.s.(identity.ForEeachIdentityByNameProvider).ForEeachIdentityByName(name, f)
 }

+// For internal use only.
+func (s *siteWrapper) CheckReady() {
+    s.s.CheckReady()
+}
+
 type testSite struct {
     h hugo.HugoInfo
     l *langs.Language
@@ -480,6 +491,9 @@ func (s testSite) RSSLink() template.URL {
     return ""
 }

+func (s testSite) CheckReady() {
+}
+
 // NewDummyHugoSite creates a new minimal test site.
 func NewDummyHugoSite(conf config.AllProvider) Site {
     return testSite{
@@ -20,11 +20,13 @@ import (
	"reflect"
	"regexp"
	"strings"
+	"sync"
	"unicode"

	bp "github.com/gohugoio/hugo/bufferpool"
	"github.com/gohugoio/hugo/common/hcontext"
	"github.com/gohugoio/hugo/identity"
+	"github.com/gohugoio/hugo/langs"
	"github.com/gohugoio/hugo/output/layouts"

	"github.com/gohugoio/hugo/output"
@@ -160,6 +162,11 @@ type TemplateFuncGetter interface {
	GetFunc(name string) (reflect.Value, bool)
}

+type RenderingContext struct {
+	Site       site
+	SiteOutIdx int
+}
+
type contextKey string

// Context manages values passed in the context to templates.
@@ -191,6 +198,15 @@ type page interface {
	IsNode() bool
}

+type site interface {
+	Language() *langs.Language
+}
+
+const (
+	HugoDeferredTemplatePrefix = "__hdeferred/"
+	HugoDeferredTemplateSuffix = "__d="
+)
+
const hugoNewLinePlaceholder = "___hugonl_"

var stripHTMLReplacerPre = strings.NewReplacer("\n", " ", "</p>", hugoNewLinePlaceholder, "<br>", hugoNewLinePlaceholder, "<br />", hugoNewLinePlaceholder)
@@ -228,3 +244,13 @@ func StripHTML(s string) string {

	return s
}
+
+type DeferredExecution struct {
+	Mu           sync.Mutex
+	Ctx          context.Context
+	TemplateName string
+	Data         any
+
+	Executed bool
+	Result   string
+}
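A rough sketch of how a later build phase might drain such a record exactly once, inferred from the fields above; runDeferred and the execute callback are hypothetical stand-ins for whatever actually renders TemplateName with Data:

	// Illustrative only: execute the deferred block once, guarded by Mu,
	// and cache the rendered output in Result.
	func runDeferred(d *DeferredExecution, execute func(ctx context.Context, name string, data any) (string, error)) error {
		d.Mu.Lock()
		defer d.Mu.Unlock()
		if d.Executed {
			return nil // already rendered; Result can be reused
		}
		s, err := execute(d.Ctx, d.TemplateName, d.Data)
		if err != nil {
			return err
		}
		d.Result = s
		d.Executed = true
		return nil
	}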
202
tpl/templates/defer_integration_test.go
Normal file

@@ -0,0 +1,202 @@
+// Copyright 2024 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package templates_test
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	qt "github.com/frankban/quicktest"
+
+	"github.com/gohugoio/hugo/hugolib"
+)
+
+const deferFilesCommon = `
+-- hugo.toml --
+disableLiveReload = true
+disableKinds = ["taxonomy", "term", "rss", "sitemap", "robotsTXT", "404", "section"]
+[languages]
+[languages.en]
+weight = 1
+[languages.nn]
+weight = 2
+-- i18n/en.toml --
+[hello]
+other = "Hello"
+-- i18n/nn.toml --
+[hello]
+other = "Hei"
+-- content/_index.en.md --
+---
+title: "Home"
+outputs: ["html", "amp"]
+---
+-- content/_index.nn.md --
+---
+title: "Heim"
+outputs: ["html", "amp"]
+---
+-- assets/mytext.txt --
+Hello.
+-- layouts/baseof.html --
+HTML|{{ block "main" . }}{{ end }}$
+-- layouts/index.html --
+{{ define "main" }}
+EDIT_COUNTER_OUTSIDE_0
+{{ .Store.Set "hello" "Hello" }}
+{{ $data := dict "page" . }}
+{{ with (templates.Defer (dict "data" $data) ) }}
+{{ $mytext := resources.Get "mytext.txt" }}
+REPLACE_ME|Title: {{ .page.Title }}|{{ .page.RelPermalink }}|Hello: {{ T "hello" }}|Hello Store: {{ .page.Store.Get "hello" }}|Mytext: {{ $mytext.Content }}|
+EDIT_COUNTER_DEFER_0
+{{ end }}$
+{{ end }}
+-- layouts/index.amp.html --
+AMP.
+{{ $data := dict "page" . }}
+{{ with (templates.Defer (dict "data" $data) ) }}Title AMP: {{ .page.Title }}|{{ .page.RelPermalink }}|Hello: {{ T "hello" }}{{ end }}$
+
+`
+
+func TestDeferBasic(t *testing.T) {
+	t.Parallel()
+
+	b := hugolib.Test(t, deferFilesCommon)
+
+	b.AssertFileContent("public/index.html", "Title: Home|/|Hello: Hello|Hello Store: Hello|Mytext: Hello.|")
+	b.AssertFileContent("public/amp/index.html", "Title AMP: Home|/amp/|Hello: Hello")
+	b.AssertFileContent("public/nn/index.html", "Title: Heim|/nn/|Hello: Hei")
+	b.AssertFileContent("public/nn/amp/index.html", "Title AMP: Heim|/nn/amp/|Hello: Hei")
+}
+
+func TestDeferRepeatedBuildsEditOutside(t *testing.T) {
+	t.Parallel()
+
+	b := hugolib.TestRunning(t, deferFilesCommon)
+
+	for i := 0; i < 5; i++ {
+		old := fmt.Sprintf("EDIT_COUNTER_OUTSIDE_%d", i)
+		new := fmt.Sprintf("EDIT_COUNTER_OUTSIDE_%d", i+1)
+		b.EditFileReplaceAll("layouts/index.html", old, new).Build()
+		b.AssertFileContent("public/index.html", new)
+	}
+}
+
+func TestDeferRepeatedBuildsEditDefer(t *testing.T) {
+	t.Parallel()
+
+	b := hugolib.TestRunning(t, deferFilesCommon)
+
+	for i := 0; i < 8; i++ {
+		old := fmt.Sprintf("EDIT_COUNTER_DEFER_%d", i)
+		new := fmt.Sprintf("EDIT_COUNTER_DEFER_%d", i+1)
+		b.EditFileReplaceAll("layouts/index.html", old, new).Build()
+		b.AssertFileContent("public/index.html", new)
+	}
+}
+
+func TestDeferErrorParse(t *testing.T) {
+	t.Parallel()
+
+	b, err := hugolib.TestE(t, strings.ReplaceAll(deferFilesCommon, "Title AMP: {{ .page.Title }}", "{{ .page.Title }"))
+
+	b.Assert(err, qt.Not(qt.IsNil))
+	b.Assert(err.Error(), qt.Contains, `index.amp.html:3: unexpected "}" in operand`)
+}
+
+func TestDeferErrorRuntime(t *testing.T) {
+	t.Parallel()
+
+	b, err := hugolib.TestE(t, strings.ReplaceAll(deferFilesCommon, "Title AMP: {{ .page.Title }}", "{{ .page.Titles }}"))
+
+	b.Assert(err, qt.Not(qt.IsNil))
+	b.Assert(err.Error(), qt.Contains, filepath.FromSlash(`/layouts/index.amp.html:3:57`))
+	b.Assert(err.Error(), qt.Contains, `execute of template failed: template: index.amp.html:3:57: executing at <.page.Titles>: can't evaluate field Titles`)
+}
+
+func TestDeferEditDeferBlock(t *testing.T) {
+	t.Parallel()
+
+	b := hugolib.TestRunning(t, deferFilesCommon)
+	b.AssertRenderCountPage(4)
+	b.EditFileReplaceAll("layouts/index.html", "REPLACE_ME", "Edited.").Build()
+	b.AssertFileContent("public/index.html", "Edited.")
+	b.AssertRenderCountPage(2)
+}
+
+//
+
+func TestDeferEditResourceUsedInDeferBlock(t *testing.T) {
+	t.Parallel()
+
+	b := hugolib.TestRunning(t, deferFilesCommon)
+	b.AssertRenderCountPage(4)
+	b.EditFiles("assets/mytext.txt", "Mytext Hello Edited.").Build()
+	b.AssertFileContent("public/index.html", "Mytext Hello Edited.")
+	b.AssertRenderCountPage(2)
+}
+
+func TestDeferMountPublic(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- hugo.toml --
+[module]
+[[module.mounts]]
+source = "content"
+target = "content"
+[[module.mounts]]
+source = "layouts"
+target = "layouts"
+[[module.mounts]]
+source = 'public'
+target = 'assets/public'
+disableWatch = true
+-- layouts/index.html --
+Home.
+{{ $mydata := dict "v1" "v1value" }}
+{{ $json := resources.FromString "mydata/data.json" ($mydata | jsonify ) }}
+{{ $nop := $json.RelPermalink }}
+{{ with (templates.Defer (dict "key" "foo")) }}
+{{ $jsonFilePublic := resources.Get "public/mydata/data.json" }}
+{{ with $jsonFilePublic }}
+{{ $m := $jsonFilePublic | transform.Unmarshal }}
+v1: {{ $m.v1 }}
+{{ end }}
+{{ end }}
+`
+
+	b := hugolib.Test(t, files)
+
+	b.AssertFileContent("public/index.html", "v1: v1value")
+}
+
+func TestDeferFromContentAdapterShouldFail(t *testing.T) {
+	t.Parallel()
+
+	files := `
+-- hugo.toml --
+-- content/_content.gotmpl --
+{{ with (templates.Defer (dict "key" "foo")) }}
+Foo.
+{{ end }}
+`
+
+	b, err := hugolib.TestE(t, files)
+
+	b.Assert(err, qt.Not(qt.IsNil))
+	b.Assert(err.Error(), qt.Contains, "error calling Defer: this method cannot be called before the site is fully initialized")
+}
@@ -39,6 +39,16 @@ func init() {
		},
	)

+	ns.AddMethodMapping(ctx.Defer,
+		nil, // No aliases to keep the AST parsing simple.
+		[][2]string{},
+	)
+
+	ns.AddMethodMapping(ctx.DoDefer,
+		[]string{"doDefer"},
+		[][2]string{},
+	)
+
	return ns
}
@@ -15,14 +15,24 @@
package templates

import (
+	"context"
+	"fmt"
+	"strconv"
+	"sync/atomic"
+
	"github.com/gohugoio/hugo/deps"
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/tpl"
+	"github.com/mitchellh/mapstructure"
)

// New returns a new instance of the templates-namespaced template functions.
func New(deps *deps.Deps) *Namespace {
-	return &Namespace{
+	ns := &Namespace{
		deps: deps,
	}
+
+	return ns
}

// Namespace provides template functions for the "templates" namespace.
@@ -36,3 +46,59 @@ type Namespace struct {
func (ns *Namespace) Exists(name string) bool {
	return ns.deps.Tmpl().HasTemplate(name)
}
+
+// Defer defers the execution of a template block.
+func (ns *Namespace) Defer(args ...any) (bool, error) {
+	// Prevent defer from being used in content adapters,
+	// that just doesn't work.
+	ns.deps.Site.CheckReady()
+
+	if len(args) != 0 {
+		return false, fmt.Errorf("Defer does not take any arguments")
+	}
+	return true, nil
+}
+
+var defferedIDCounter atomic.Uint64
+
+type DeferOpts struct {
+	// Optional cache key. If set, the deferred block will be executed
+	// once per unique key.
+	Key string
+
+	// Optional data context to use when executing the deferred block.
+	Data any
+}
+
+// DoDefer defers the execution of a template block.
+// For internal use only.
+func (ns *Namespace) DoDefer(ctx context.Context, id string, optsv any) string {
+	var opts DeferOpts
+	if optsv != nil {
+		if err := mapstructure.WeakDecode(optsv, &opts); err != nil {
+			panic(err)
+		}
+	}
+
+	templateName := id
+	var key string
+	if opts.Key != "" {
+		key = helpers.MD5String(opts.Key)
+	} else {
+		key = strconv.FormatUint(defferedIDCounter.Add(1), 10)
+	}
+
+	id = fmt.Sprintf("%s_%s%s", id, key, tpl.HugoDeferredTemplateSuffix)
+
+	_ = ns.deps.BuildState.DeferredExecutions.Executions.GetOrCreate(id,
+		func() *tpl.DeferredExecution {
+			return &tpl.DeferredExecution{
+				TemplateName: templateName,
+				Ctx:          ctx,
+				Data:         opts.Data,
+				Executed:     false,
+			}
+		})
+
+	return id
+}
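Putting the tpl constants and the logic in DoDefer together, the placeholder id written into the rendered output has roughly the shape below; the hash values are made up and the snippet is a worked example, not code from this change:

	// Illustrative only: how the deferred placeholder id is composed.
	templateName := "__hdeferred/" + "2d4e6a"              // tpl.HugoDeferredTemplatePrefix + MD5 of the block's contents
	key := "c0ffee"                                        // MD5 of opts.Key, or an atomic counter when no key is set
	id := fmt.Sprintf("%s_%s%s", templateName, key, "__d=") // e.g. __hdeferred/2d4e6a_c0ffee__d=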
@@ -42,6 +42,7 @@ import (

	"github.com/gohugoio/hugo/common/herrors"
	"github.com/gohugoio/hugo/hugofs"
+	"github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate/parse"

	htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
	texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
@@ -194,11 +195,12 @@ func newTemplateNamespace(funcs map[string]any) *templateNamespace {
	}
}

-func newTemplateState(templ tpl.Template, info templateInfo, id identity.Identity) *templateState {
+func newTemplateState(owner *templateState, templ tpl.Template, info templateInfo, id identity.Identity) *templateState {
	if id == nil {
		id = info
	}
	return &templateState{
+		owner:    owner,
		info:     info,
		typ:      info.resolveType(),
		Template: templ,
@@ -260,7 +262,11 @@ func (t *templateExec) ExecuteWithContext(ctx context.Context, templ tpl.Templat

	execErr := t.executor.ExecuteWithContext(ctx, templ, wr, data)
	if execErr != nil {
-		execErr = t.addFileContext(templ, execErr)
+		owner := templ
+		if ts, ok := templ.(*templateState); ok && ts.owner != nil {
+			owner = ts.owner
+		}
+		execErr = t.addFileContext(owner, execErr)
	}
	return execErr
}
@@ -312,6 +318,9 @@ func (t *templateExec) MarkReady() error {
		// We only need the clones if base templates are in use.
		if len(t.needsBaseof) > 0 {
			err = t.main.createPrototypes()
+			if err != nil {
+				return
+			}
		}
	})
@@ -369,7 +378,7 @@ type layoutCacheEntry struct {
func (t *templateHandler) AddTemplate(name, tpl string) error {
	templ, err := t.addTemplateTo(t.newTemplateInfo(name, tpl), t.main)
	if err == nil {
-		t.applyTemplateTransformers(t.main, templ)
+		_, err = t.applyTemplateTransformers(t.main, templ)
	}
	return err
}
@@ -390,6 +399,7 @@ func (t *templateHandler) LookupLayout(d layouts.LayoutDescriptor, f output.Form
		t.layoutTemplateCacheMu.RUnlock()
		return cacheVal.templ, cacheVal.found, cacheVal.err
	}
+
	t.layoutTemplateCacheMu.RUnlock()

	t.layoutTemplateCacheMu.Lock()
@@ -497,13 +507,15 @@ func (t *templateHandler) findLayout(d layouts.LayoutDescriptor, f output.Format
		return nil, false, err
	}

-	ts := newTemplateState(templ, overlay, identity.Or(base, overlay))
+	ts := newTemplateState(nil, templ, overlay, identity.Or(base, overlay))

	if found {
		ts.baseInfo = base
	}

-	t.applyTemplateTransformers(t.main, ts)
+	if _, err := t.applyTemplateTransformers(t.main, ts); err != nil {
+		return nil, false, err
+	}

	if err := t.extractPartials(ts.Template); err != nil {
		return nil, false, err
@@ -674,7 +686,10 @@ func (t *templateHandler) addTemplateFile(name string, fim hugofs.FileMetaInfo)
	if err != nil {
		return tinfo.errWithFileContext("parse failed", err)
	}
-	t.applyTemplateTransformers(t.main, templ)
+
+	if _, err = t.applyTemplateTransformers(t.main, templ); err != nil {
+		return tinfo.errWithFileContext("transform failed", err)
+	}

	return nil
}
@@ -745,6 +760,12 @@ func (t *templateHandler) applyTemplateTransformers(ns *templateNamespace, ts *t
		t.transformNotFound[k] = ts
	}

+	for k, v := range c.deferNodes {
+		if err = t.main.addDeferredTemplate(ts, k, v); err != nil {
+			return nil, err
+		}
+	}
+
	return c, err
}
@@ -858,7 +879,7 @@ func (t *templateHandler) extractPartials(templ tpl.Template) error {
			continue
		}

-		ts := newTemplateState(templ, templateInfo{name: templ.Name()}, nil)
+		ts := newTemplateState(nil, templ, templateInfo{name: templ.Name()}, nil)
		ts.typ = templatePartial

		t.main.mu.RLock()
@@ -954,18 +975,18 @@ type templateNamespace struct {
	*templateStateMap
}

-func (t templateNamespace) Clone() *templateNamespace {
-	t.mu.Lock()
-	defer t.mu.Unlock()
-
-	t.templateStateMap = &templateStateMap{
-		templates: make(map[string]*templateState),
-	}
-
-	t.prototypeText = texttemplate.Must(t.prototypeText.Clone())
-	t.prototypeHTML = htmltemplate.Must(t.prototypeHTML.Clone())
-
-	return &t
+func (t *templateNamespace) getPrototypeText() *texttemplate.Template {
+	if t.prototypeTextClone != nil {
+		return t.prototypeTextClone
+	}
+	return t.prototypeText
+}
+
+func (t *templateNamespace) getPrototypeHTML() *htmltemplate.Template {
+	if t.prototypeHTMLClone != nil {
+		return t.prototypeHTMLClone
+	}
+	return t.prototypeHTML
}

func (t *templateNamespace) Lookup(name string) (tpl.Template, bool) {
@@ -996,12 +1017,46 @@ func (t *templateNamespace) newTemplateLookup(in *templateState) func(name strin
			return templ
		}
		if templ, found := findTemplateIn(name, in); found {
-			return newTemplateState(templ, templateInfo{name: templ.Name()}, nil)
+			return newTemplateState(nil, templ, templateInfo{name: templ.Name()}, nil)
		}
		return nil
	}
}

+func (t *templateNamespace) addDeferredTemplate(owner *templateState, name string, n *parse.ListNode) error {
+	t.mu.Lock()
+	defer t.mu.Unlock()
+
+	if _, found := t.templates[name]; found {
+		return nil
+	}
+
+	var templ tpl.Template
+
+	if owner.isText() {
+		prototype := t.getPrototypeText()
+		tt, err := prototype.New(name).Parse("")
+		if err != nil {
+			return fmt.Errorf("failed to parse empty text template %q: %w", name, err)
+		}
+		tt.Tree.Root = n
+		templ = tt
+	} else {
+		prototype := t.getPrototypeHTML()
+		tt, err := prototype.New(name).Parse("")
+		if err != nil {
+			return fmt.Errorf("failed to parse empty HTML template %q: %w", name, err)
+		}
+		tt.Tree.Root = n
+		templ = tt
+	}
+
+	dts := newTemplateState(owner, templ, templateInfo{name: name}, nil)
+	t.templates[name] = dts
+
+	return nil
+}
+
func (t *templateNamespace) parse(info templateInfo) (*templateState, error) {
	t.mu.Lock()
	defer t.mu.Unlock()
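The Parse("")-then-swap-the-root trick used by addDeferredTemplate also works with the standard library's text/template, which the vendored fork tracks. A self-contained sketch (all names here are illustrative, not from this change):

	package main

	import (
		"os"
		"text/template"
	)

	func main() {
		// Parse the "deferred" body once, somewhere else.
		src := template.Must(template.New("src").Parse(`deferred body: {{ . }}`))

		// Register an empty template under the deferred name, then graft the
		// captured node list onto it, as addDeferredTemplate does above.
		proto := template.New("proto")
		empty := template.Must(proto.New("__hdeferred/example").Parse(""))
		empty.Tree.Root = src.Tree.Root

		// Executing by name now runs the grafted body.
		_ = proto.ExecuteTemplate(os.Stdout, "__hdeferred/example", "hello")
	}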
@@ -1014,7 +1069,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) {
			return nil, err
		}

-		ts := newTemplateState(templ, info, nil)
+		ts := newTemplateState(nil, templ, info, nil)

		t.templates[info.name] = ts

@@ -1028,7 +1083,7 @@ func (t *templateNamespace) parse(info templateInfo) (*templateState, error) {
		return nil, err
	}

-	ts := newTemplateState(templ, info, nil)
+	ts := newTemplateState(nil, templ, info, nil)

	t.templates[info.name] = ts

@@ -1040,6 +1095,9 @@ var _ tpl.IsInternalTemplateProvider = (*templateState)(nil)
type templateState struct {
	tpl.Template

+	// Set for deferred templates.
+	owner *templateState
+
	typ       templateType
	parseInfo tpl.ParseInfo
	id        identity.Identity
@@ -17,6 +17,7 @@ import (
	"errors"
	"fmt"

+	"github.com/gohugoio/hugo/helpers"
	htmltemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/htmltemplate"
	texttemplate "github.com/gohugoio/hugo/tpl/internal/go_templates/texttemplate"
@@ -38,6 +39,7 @@ const (
type templateContext struct {
	visited          map[string]bool
	templateNotFound map[string]bool
+	deferNodes       map[string]*parse.ListNode
	lookupFn         func(name string) *templateState

	// The last error encountered.

@@ -77,6 +79,7 @@ func newTemplateContext(
		lookupFn:         lookupFn,
		visited:          make(map[string]bool),
		templateNotFound: make(map[string]bool),
+		deferNodes:       make(map[string]*parse.ListNode),
	}
}
@@ -116,9 +119,14 @@ const (
	// "range" over a one-element slice so we can shift dot to the
	// partial's argument, Arg, while allowing Arg to be falsy.
	partialReturnWrapperTempl = `{{ $_hugo_dot := $ }}{{ $ := .Arg }}{{ range (slice .Arg) }}{{ $_hugo_dot.Set ("PLACEHOLDER") }}{{ end }}`
+
+	doDeferTempl = `{{ doDefer ("PLACEHOLDER1") ("PLACEHOLDER2") }}`
)

-var partialReturnWrapper *parse.ListNode
+var (
+	partialReturnWrapper *parse.ListNode
+	doDefer              *parse.ListNode
+)

func init() {
	templ, err := texttemplate.New("").Parse(partialReturnWrapperTempl)
@@ -126,6 +134,12 @@ func init() {
		panic(err)
	}
	partialReturnWrapper = templ.Tree.Root
+
+	templ, err = texttemplate.New("").Funcs(texttemplate.FuncMap{"doDefer": func(string, string) string { return "" }}).Parse(doDeferTempl)
+	if err != nil {
+		panic(err)
+	}
+	doDefer = templ.Tree.Root
}

// wrapInPartialReturnWrapper copies and modifies the parsed nodes of a
|
||||||
case *parse.IfNode:
|
case *parse.IfNode:
|
||||||
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
||||||
case *parse.WithNode:
|
case *parse.WithNode:
|
||||||
|
c.handleDefer(x)
|
||||||
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
||||||
case *parse.RangeNode:
|
case *parse.RangeNode:
|
||||||
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
c.applyTransformationsToNodes(x.Pipe, x.List, x.ElseList)
|
||||||
|
@@ -191,6 +206,58 @@ func (c *templateContext) applyTransformations(n parse.Node) (bool, error) {
	return true, c.err
}

+func (c *templateContext) handleDefer(withNode *parse.WithNode) {
+	if len(withNode.Pipe.Cmds) != 1 {
+		return
+	}
+	cmd := withNode.Pipe.Cmds[0]
+	if len(cmd.Args) != 1 {
+		return
+	}
+	idArg := cmd.Args[0]
+
+	p, ok := idArg.(*parse.PipeNode)
+	if !ok {
+		return
+	}
+
+	if len(p.Cmds) != 1 {
+		return
+	}
+
+	cmd = p.Cmds[0]
+
+	if len(cmd.Args) != 2 {
+		return
+	}
+
+	idArg = cmd.Args[0]
+
+	id, ok := idArg.(*parse.ChainNode)
+	if !ok || len(id.Field) != 1 || id.Field[0] != "Defer" {
+		return
+	}
+	if id2, ok := id.Node.(*parse.IdentifierNode); !ok || id2.Ident != "templates" {
+		return
+	}
+
+	deferArg := cmd.Args[1]
+	cmd.Args = []parse.Node{idArg}
+
+	l := doDefer.CopyList()
+	n := l.Nodes[0].(*parse.ActionNode)
+
+	inner := withNode.List.CopyList()
+	innerHash := helpers.MD5String(inner.String())
+	deferredID := tpl.HugoDeferredTemplatePrefix + innerHash
+
+	c.deferNodes[deferredID] = inner
+	withNode.List = l
+
+	n.Pipe.Cmds[0].Args[1].(*parse.PipeNode).Cmds[0].Args[0].(*parse.StringNode).Text = deferredID
+	n.Pipe.Cmds[0].Args[2] = deferArg
+}
+
func (c *templateContext) applyTransformationsToNodes(nodes ...parse.Node) {
	for _, node := range nodes {
		c.applyTransformations(node)
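In effect, the rewrite above turns a defer block into a call to the internal doDefer function, with the original body stored under a generated template name. A rough before/after, written as Go string constants for illustration only; the hash in the id is made up:

	// Illustrative only: what handleDefer leaves behind in the calling template.
	const beforeTransform = `{{ with (templates.Defer (dict "key" "global")) }}EXPENSIVE WORK{{ end }}`

	// The dict argument is moved onto doDefer, the body is stored as a separate
	// template named __hdeferred/<md5-of-body>, and the with block now only emits
	// the placeholder id.
	const afterTransform = `{{ with (templates.Defer) }}{{ doDefer ("__hdeferred/5f3a") (dict "key" "global") }}{{ end }}`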
@@ -47,7 +47,7 @@ func TestTransformRecursiveTemplate(t *testing.T) {
}

func newTestTemplate(templ tpl.Template) *templateState {
-	return newTemplateState(
+	return newTemplateState(nil,
		templ,
		templateInfo{
			name: templ.Name(),