Mirror of https://github.com/gohugoio/hugo.git, synced 2024-11-21 20:46:30 -05:00

parent 621194a319
commit eceeb19751

2 changed files with 73 additions and 37 deletions
@@ -130,49 +130,48 @@ type pageTrees struct {
// so we mark all entries as stale (which will trigger cache invalidation), then
// return the first.
func (t *pageTrees) collectAndMarkStaleIdentities(p *paths.Path) []identity.Identity {
	ids := t.collectAndMarkStaleIdentitiesFor(p.Base())
	key := p.Base()
	var ids []identity.Identity
	// We need only one identity sample per dimension.
	nCount := 0
	cb := func(n contentNodeI) bool {
		if n == nil {
			return false
		}
		n.MarkStale()
		if nCount > 0 {
			return true
		}
		nCount++
		n.ForEeachIdentity(func(id identity.Identity) bool {
			ids = append(ids, id)
			return false
		})

		return false
	}
	tree := t.treePages
	nCount = 0
	tree.ForEeachInDimension(key, doctree.DimensionLanguage.Index(),
		cb,
	)

	tree = t.treeResources
	nCount = 0
	tree.ForEeachInDimension(key, doctree.DimensionLanguage.Index(),
		cb,
	)

	if p.Component() == files.ComponentFolderContent {
		// It may also be a bundled content resource.
		key := p.ForBundleType(paths.PathTypeContentResource).Base()
		tree := t.treeResources
		if n := tree.Get(key); n != nil {
			n.ForEeachIdentity(func(id identity.Identity) bool {
				ids = append(ids, id)
				return false
			})
			if n, ok := tree.GetRaw(key); ok {
				n.MarkStale()
			}
		}
	}
	return ids
}

func (t *pageTrees) collectAndMarkStaleIdentitiesFor(key string) []identity.Identity {
	var ids []identity.Identity
	tree := t.treePages
	if n := tree.Get(key); n != nil {
		n.ForEeachIdentity(func(id identity.Identity) bool {
			ids = append(ids, id)
			return false
		})
		if n, ok := tree.GetRaw(key); ok {
			n.MarkStale()
		}
	}

	tree = t.treeResources
	if n := tree.Get(key); n != nil {
		n.ForEeachIdentity(func(id identity.Identity) bool {
			ids = append(ids, id)
			return false
		})
		if n, ok := tree.GetRaw(key); ok {
			n.MarkStale()
		}
	}

	return ids
}
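A minimal, self-contained sketch of the callback pattern introduced above, with a hypothetical node type standing in for Hugo's contentNodeI and doctree types: every per-language node is marked stale, but identities are sampled from the first non-nil node only.

package main

import "fmt"

type node struct {
	stale bool
	ids   []string
}

// collectAndMarkStale walks one node per language dimension, marks every
// existing node stale, and collects identities from the first one it sees.
func collectAndMarkStale(perLanguage []*node) []string {
	var ids []string
	sampled := false
	for _, n := range perLanguage {
		if n == nil {
			continue
		}
		n.stale = true // invalidate every dimension
		if sampled {
			continue
		}
		sampled = true
		ids = append(ids, n.ids...) // but keep only one identity sample
	}
	return ids
}

func main() {
	// A bundle that exists only in the "nn" language: the "en" slot is nil,
	// so a single-dimension lookup would have missed it entirely.
	perLanguage := []*node{nil, {ids: []string{"/p1nn"}}}
	fmt.Println(collectAndMarkStale(perLanguage)) // [/p1nn]
}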
@@ -1268,6 +1268,43 @@ Single: {{ .Title }}|{{ .Content }}|Bundled File: {{ with .Resources.GetMatch "f
	b.AssertFileContent("public/nn/p1/index.html", "B nn edit.")
}

func TestRebuildEditContentNonDefaultLanguageDifferentBundles(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
baseURL = "https://example.com"
disableLiveReload = true
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true
[languages]
[languages.en]
weight = 1
contentDir = "content/en"
[languages.nn]
weight = 2
contentDir = "content/nn"
-- content/en/p1en/index.md --
---
title: "P1 en"
---
-- content/nn/p1nn/index.md --
---
title: "P1 nn"
---
P1 nn.
-- layouts/_default/single.html --
Single: {{ .Title }}|{{ .Content }}|
`

	b := TestRunning(t, files)

	b.AssertFileContent("public/nn/p1nn/index.html", "Single: P1 nn|<p>P1 nn.</p>")
	b.EditFileReplaceAll("content/nn/p1nn/index.md", "P1 nn.", "P1 nn edit.").Build()
	b.AssertFileContent("public/nn/p1nn/index.html", "Single: P1 nn|<p>P1 nn edit.</p>\n|")
	b.AssertFileContent("public/nn/p1nn/index.html", "P1 nn edit.")
}

func TestRebuildVariationsAssetsSassImport(t *testing.T) {
	if !htesting.IsCI() {
		t.Skip("skip CI only")
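The files string in the test above is a txtar-style multi-file fixture ("-- path --" separators). A standalone sketch of splitting such a fixture into individual files, assuming golang.org/x/tools/txtar as the parser (an assumption; Hugo's integration test builder has its own plumbing around it):

package main

import (
	"fmt"

	"golang.org/x/tools/txtar"
)

func main() {
	// A trimmed-down copy of the fixture used in
	// TestRebuildEditContentNonDefaultLanguageDifferentBundles.
	files := `
-- hugo.toml --
baseURL = "https://example.com"
-- content/nn/p1nn/index.md --
---
title: "P1 nn"
---
P1 nn.
-- layouts/_default/single.html --
Single: {{ .Title }}|{{ .Content }}|
`
	// Each "-- path --" header becomes one file entry in the archive.
	for _, f := range txtar.Parse([]byte(files)).Files {
		fmt.Printf("%s (%d bytes)\n", f.Name, len(f.Data))
	}
}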