Fix GetPage on section/bundle name overlaps
In the internal radix trees we stored the directory-based nodes without a trailing slash, e.g. `/blog`. The original motivation was probably to make prefix searching easy: give me all ancestors. This, however, has led to some ambiguity with overlapping directory names. That particular ambiguity was not easy to work around, so from now on we store these keys as `/blog/`. Fixes #7301
This commit is contained in:
parent 6c3c6686f5
commit a985efcecf
10 changed files with 212 additions and 163 deletions
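The ambiguity described in the commit message is easy to reproduce directly against a radix tree. The sketch below is illustrative only, not code from this commit; it assumes github.com/armon/go-radix (the package behind the content map's trees, per the radix.WalkFn reference in the diff), and the section names are made up:

package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	// Old scheme: branch keys stored without a trailing slash. A prefix
	// walk for "/blog" also matches the unrelated sibling "/blog-posts".
	oldTree := radix.New()
	oldTree.Insert("/blog", true)
	oldTree.Insert("/blog-posts", true)
	oldTree.WalkPrefix("/blog", func(k string, v interface{}) bool {
		fmt.Println("old:", k) // prints /blog and /blog-posts
		return false
	})

	// New scheme: branch keys carry a trailing slash, so the prefix
	// "/blog/" can no longer match a sibling that merely starts with "blog".
	newTree := radix.New()
	newTree.Insert("/blog/", true)
	newTree.Insert("/blog-posts/", true)
	newTree.WalkPrefix("/blog/", func(k string, v interface{}) bool {
		fmt.Println("new:", k) // prints only /blog/
		return false
	})
}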
@@ -665,3 +665,12 @@ func FileContainsAny(filename string, subslices [][]byte, fs afero.Fs) (bool, er
func Exists(path string, fs afero.Fs) (bool, error) {
return afero.Exists(fs, path)
}

// AddTrailingSlash adds a trailing Unix styled slash (/) if not already
// there.
func AddTrailingSlash(path string) string {
if !strings.HasSuffix(path, "/") {
path += "/"
}
return path
}
@@ -77,7 +77,7 @@ func (c *stringChecker) Check(got interface{}, args []interface{}, note func(key
return nil
}

return fmt.Errorf("values are not the same text: %s", htesting.DiffStrings(s1, s2))
return fmt.Errorf("values are not the same text: %s", strings.Join(htesting.DiffStrings(s1, s2), " | "))
}

func normalizeString(s string) string {
@@ -20,6 +20,8 @@ import (
"strings"
"sync"

"github.com/gohugoio/hugo/helpers"

"github.com/gohugoio/hugo/resources/page"
"github.com/pkg/errors"
@@ -31,27 +33,26 @@ import (
)

// We store the branch nodes in either the `sections` or `taxonomies` tree
// with their path as a key; Unix style slashes, a leading slash but no
// trailing slash.
// with their path as a key; Unix style slashes, a leading and trailing slash.
//
// E.g. "/blog" or "/categories/funny"
// E.g. "/blog/" or "/categories/funny/"
//
// Pages that belongs to a section are stored in the `pages` tree below
// the section name and a branch separator, e.g. "/blog__hb_". A page is
// the section name and a branch separator, e.g. "/blog/__hb_". A page is
// given a key using the path below the section and the base filename with no extension
// with a leaf separator added.
//
// For bundled pages (/mybundle/index.md), we use the folder name.
//
// An exmple of a full page key would be "/blog__hb_/page1__hl_"
// An exmple of a full page key would be "/blog/__hb_page1__hl_"
//
// Bundled resources are stored in the `resources` having their path prefixed
// with the bundle they belong to, e.g.
// "/blog__hb_/bundle__hl_data.json".
// "/blog/__hb_bundle__hl_data.json".
//
// The weighted taxonomy entries extracted from page front matter are stored in
// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
// "/categories/funny/blog__hb_/bundle__hl_".
// "/categories/funny/blog/__hb_bundle__hl_".
const (
cmBranchSeparator = "__hb_"
cmLeafSeparator = "__hl_"
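As a worked example of the key scheme described in the comment block above, here is a small illustrative sketch (not code from the commit) that composes the new trailing-slash keys using the separators defined in the diff; the section and file names are made up:

package main

import "fmt"

const (
	cmBranchSeparator = "__hb_"
	cmLeafSeparator   = "__hl_"
)

func main() {
	section := "/blog/" // branch keys now end with a slash

	// A bundled page below the section, e.g. content/blog/page1/index.md:
	pageKey := section + cmBranchSeparator + "page1" + cmLeafSeparator
	fmt.Println(pageKey) // /blog/__hb_page1__hl_

	// A resource inside that bundle, e.g. content/blog/page1/data.json:
	resourceKey := pageKey + "data.json"
	fmt.Println(resourceKey) // /blog/__hb_page1__hl_data.json
}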
@@ -105,7 +106,7 @@ func newContentMap(cfg contentMapConfig) *contentMap {
addToReverseMap(mountKey, n, m)
}
}
k := strings.TrimSuffix(path.Base(s), cmLeafSeparator)
k := strings.TrimPrefix(strings.TrimSuffix(path.Base(s), cmLeafSeparator), cmBranchSeparator)
addToReverseMap(k, n, m)
return false
})
@@ -127,18 +128,15 @@ type cmInsertKeyBuilder struct {
}

func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
// TODO2 fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
//fmt.Println("ForPage:", s, "baseKey:", b.baseKey, "key:", b.key)
baseKey := b.baseKey
b.baseKey = s

if !strings.HasPrefix(s, "/") {
s = "/" + s
}

if baseKey != "/" {
// Don't repeat the section path in the key.
s = strings.TrimPrefix(s, baseKey)
}
s = strings.TrimPrefix(s, "/")

switch b.tree {
case b.m.sections:
@@ -154,10 +152,10 @@ func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
}

func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
// TODO2 fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
//fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)

s = strings.TrimPrefix(s, "/")
s = strings.TrimPrefix(s, strings.TrimPrefix(b.baseKey, "/")+"/")
baseKey := helpers.AddTrailingSlash(b.baseKey)
s = strings.TrimPrefix(s, baseKey)

switch b.tree {
case b.m.pages:
@@ -173,14 +171,23 @@ func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {

func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
if b.err == nil {
b.tree.Insert(cleanTreeKey(b.key), n)
b.tree.Insert(b.Key(), n)
}
return b
}

func (b *cmInsertKeyBuilder) Key() string {
switch b.tree {
case b.m.sections, b.m.taxonomies:
return cleanSectionTreeKey(b.key)
default:
return cleanTreeKey(b.key)
}
}

func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
if b.err == nil {
b.tree.DeletePrefix(cleanTreeKey(b.key))
b.tree.DeletePrefix(b.Key())
}
return b
}
@@ -211,15 +218,16 @@ func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilde
}

func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.sections
b.baseKey = s
b.key = s
// TODO2 fmt.Println("WithSection:", s, "baseKey:", b.baseKey, "key:", b.key)
return b
}

func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
s = cleanSectionTreeKey(s)
b.newTopLevel()
b.tree = b.m.taxonomies
b.baseKey = s
@@ -233,20 +241,17 @@ func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
m := b.m
section, _ := m.getSection(s)

p := s
if section != "/" {
p = strings.TrimPrefix(s, section)
}
p := strings.TrimPrefix(s, section)

bundlePathParts := strings.Split(p, "/")[1:]
bundlePathParts := strings.Split(p, "/")
basePath := section + cmBranchSeparator

// Put it into an existing bundle if found.
for i := len(bundlePathParts) - 2; i >= 0; i-- {
bundlePath := path.Join(bundlePathParts[:i]...)
searchKey := basePath + "/" + bundlePath + cmLeafSeparator
searchKey := basePath + bundlePath + cmLeafSeparator
if _, found := m.pages.Get(searchKey); found {
return section + "/" + bundlePath, searchKey
return section + bundlePath, searchKey
}
}
@@ -432,7 +437,7 @@ func (m *contentMap) CreateMissingNodes() error {
sectionPath = sectionPath[:firstSlash]
}
}
sect = cleanTreeKey(sect)
sect = cleanSectionTreeKey(sect)
_, found := m.sections.Get(sect)
if !found {
m.sections.Insert(sect, &contentNode{path: sectionPath})
@@ -440,7 +445,7 @@ func (m *contentMap) CreateMissingNodes() error {
}

for _, view := range m.cfg.taxonomyConfig {
s := cleanTreeKey(view.plural)
s := cleanSectionTreeKey(view.plural)
_, found := m.taxonomies.Get(s)
if !found {
b := &contentNode{
@@ -476,15 +481,20 @@ func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
}

func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
s = helpers.AddTrailingSlash(s)
for {
k, v, found := m.sections.LongestPrefix(s)

if !found {
return "", nil
}
if strings.Count(k, "/") == 1 {

if strings.Count(k, "/") <= 2 {
return k, v.(*contentNode)
}
s = path.Dir(s)

s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))

}
}
@@ -507,10 +517,7 @@ func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *cont
}

if mustCreate {
k = s[:strings.Index(s[1:], "/")+1]
if k == "" {
k = "/"
}
k = cleanSectionTreeKey(s[:strings.Index(s[1:], "/")+1])

b = &contentNode{
path: n.rootSection(),
@@ -523,7 +530,9 @@ func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *cont
}

func (m *contentMap) getPage(section, name string) *contentNode {
key := section + cmBranchSeparator + "/" + name + cmLeafSeparator
section = helpers.AddTrailingSlash(section)
key := section + cmBranchSeparator + name + cmLeafSeparator

v, found := m.pages.Get(key)
if found {
return v.(*contentNode)
@@ -532,7 +541,9 @@ func (m *contentMap) getPage(section, name string) *contentNode {
}

func (m *contentMap) getSection(s string) (string, *contentNode) {
k, v, found := m.sections.LongestPrefix(path.Dir(s))
s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))

k, v, found := m.sections.LongestPrefix(s)

if found {
return k, v.(*contentNode)
@@ -541,21 +552,18 @@ func (m *contentMap) getSection(s string) (string, *contentNode) {
}

func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
s = path.Dir(s)
if s == "/" {
v, found := m.sections.Get(s)
if found {
return s, v.(*contentNode)
}
return "", nil
s = helpers.AddTrailingSlash(path.Dir(strings.TrimSuffix(s, "/")))
k, v, found := m.taxonomies.LongestPrefix(s)

if found {
return k, v.(*contentNode)
}

for _, tree := range []*contentTree{m.taxonomies, m.sections} {
k, v, found := tree.LongestPrefix(s)
if found {
return k, v.(*contentNode)
}
v, found = m.sections.Get("/")
if found {
return s, v.(*contentNode)
}

return "", nil
}
@@ -569,6 +577,15 @@ func cleanTreeKey(k string) string {
return k
}

func cleanSectionTreeKey(k string) string {
k = cleanTreeKey(k)
if k != "/" {
k += "/"
}

return k
}

func (m *contentMap) onSameLevel(s1, s2 string) bool {
return strings.Count(s1, "/") == strings.Count(s2, "/")
}
@@ -606,13 +623,13 @@ func (m *contentMap) deleteOrphanSections() {
return false
}

if s == "/" || strings.Count(s, "/") > 1 {
if s == "/" || strings.Count(s, "/") > 2 {
return false
}

prefixBundle := s + cmBranchSeparator

if !(m.sections.hasPrefix(s+"/") || m.pages.hasPrefix(prefixBundle) || m.resources.hasPrefix(prefixBundle)) {
if !(m.sections.hasBelow(s) || m.pages.hasBelow(prefixBundle) || m.resources.hasBelow(prefixBundle)) {
sectionsToDelete = append(sectionsToDelete, s)
}
@@ -630,13 +647,15 @@ func (m *contentMap) deletePage(s string) {
}

func (m *contentMap) deleteSectionByPath(s string) {
m.sections.Delete(s)
m.sections.DeletePrefix(s + "/")
m.pages.DeletePrefix(s + cmBranchSeparator)
m.pages.DeletePrefix(s + "/")
m.resources.DeletePrefix(s + cmBranchSeparator)
m.resources.DeletePrefix(s + cmLeafSeparator)
m.resources.DeletePrefix(s + "/")
if !strings.HasSuffix(s, "/") {
panic("section must end with a slash")
}
if !strings.HasPrefix(s, "/") {
panic("section must start with a slash")
}
m.sections.DeletePrefix(s)
m.pages.DeletePrefix(s)
m.resources.DeletePrefix(s)
}

func (m *contentMap) deletePageByPath(s string) {
@@ -648,8 +667,7 @@ func (m *contentMap) deletePageByPath(s string) {
}

func (m *contentMap) deleteTaxonomy(s string) {
m.taxonomies.Delete(s)
m.taxonomies.DeletePrefix(s + "/")
m.taxonomies.DeletePrefix(s)
}

func (m *contentMap) reduceKeyPart(dir, filename string) string {
@@ -817,7 +835,7 @@ func (c *contentTree) WalkQuery(query pageMapQuery, walkFn contentTreeNodeCallba
filter = contentTreeNoListAlwaysFilter
}
if query.Prefix != "" {
c.WalkPrefix(query.Prefix, func(s string, v interface{}) bool {
c.WalkBelow(query.Prefix, func(s string, v interface{}) bool {
n := v.(*contentNode)
if filter != nil && filter(s, n) {
return false
@@ -862,6 +880,18 @@ func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
}
}

// WalkBelow walks the tree below the given prefix, i.e. it skips the
// node with the given prefix as key.
func (c *contentTree) WalkBelow(prefix string, fn radix.WalkFn) {
c.Tree.WalkPrefix(prefix, func(s string, v interface{}) bool {
if s == prefix {
return false
}
return fn(s, v)
})

}

func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
var match string
c.Walk(func(s string, v interface{}) bool {
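A quick illustration of what the new WalkBelow helper buys over a plain prefix walk: with trailing-slash keys, a section's own key is itself a match for its prefix, and WalkBelow skips it, which is what the new hasBelow check builds on. The sketch below is illustrative only and uses github.com/armon/go-radix directly rather than Hugo's contentTree wrapper; the keys are made up:

package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	tree := radix.New()
	tree.Insert("/blog/", "the section itself")
	tree.Insert("/blog/sub/", "a nested section")

	// A plain prefix walk visits the node whose key equals the prefix.
	tree.WalkPrefix("/blog/", func(s string, v interface{}) bool {
		fmt.Println("WalkPrefix:", s) // /blog/ and /blog/sub/
		return false
	})

	// WalkBelow-style walk: skip the prefix key itself, visit only what is below it.
	tree.WalkPrefix("/blog/", func(s string, v interface{}) bool {
		if s == "/blog/" {
			return false
		}
		fmt.Println("below:", s) // only /blog/sub/
		return false
	})
}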
@@ -881,9 +911,9 @@ func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
return match
}

func (c *contentTree) hasPrefix(s string) bool {
func (c *contentTree) hasBelow(s1 string) bool {
var t bool
c.Tree.WalkPrefix(s, func(s string, v interface{}) bool {
c.WalkBelow(s1, func(s2 string, v interface{}) bool {
t = true
return true
})
@@ -953,12 +983,7 @@ func (c *contentTreeRef) getPagesRecursive() page.Pages {
Filter: c.n.p.m.getListFilter(true),
}

query.Prefix = c.key + cmBranchSeparator
c.m.collectPages(query, func(c *contentNode) {
pas = append(pas, c.p)
})

query.Prefix = c.key + "/"
query.Prefix = c.key
c.m.collectPages(query, func(c *contentNode) {
pas = append(pas, c.p)
})
@@ -70,7 +70,7 @@ func (m *pageMap) createMissingTaxonomyNodes() error {
m.taxonomyEntries.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
vi := n.viewInfo
k := cleanTreeKey(vi.name.plural + "/" + vi.termKey)
k := cleanSectionTreeKey(vi.name.plural + "/" + vi.termKey)

if _, found := m.taxonomies.Get(k); !found {
vic := &contentBundleViewInfo{
@@ -266,6 +266,7 @@ func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resour

func (m *pageMap) createSiteTaxonomies() error {
m.s.taxonomies = make(TaxonomyList)
var walkErr error
m.taxonomies.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
t := n.viewInfo
@@ -276,7 +277,11 @@ func (m *pageMap) createSiteTaxonomies() error {
m.s.taxonomies[viewName.plural] = make(Taxonomy)
} else {
taxonomy := m.s.taxonomies[viewName.plural]
m.taxonomyEntries.WalkPrefix(s+"/", func(ss string, v interface{}) bool {
if taxonomy == nil {
walkErr = errors.Errorf("missing taxonomy: %s", viewName.plural)
return true
}
m.taxonomyEntries.WalkPrefix(s, func(ss string, v interface{}) bool {
b2 := v.(*contentNode)
info := b2.viewInfo
taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p))
@@ -294,7 +299,7 @@ func (m *pageMap) createSiteTaxonomies() error {
}
}

return nil
return walkErr
}

func (m *pageMap) createListAllPages() page.Pages {
@@ -426,7 +431,6 @@ func (m *pageMap) assembleSections() error {

m.sections.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)

var shouldBuild bool

defer func() {
@@ -596,11 +600,12 @@ func (m *pageMap) attachPageToViews(s string, b *contentNode) {
},
}

if s == "/" {
// To avoid getting an empty key.
s = page.KindHome
var key string
if strings.HasSuffix(s, "/") {
key = cleanSectionTreeKey(path.Join(viewName.plural, termKey, s))
} else {
key = cleanTreeKey(path.Join(viewName.plural, termKey, s))
}
key := cleanTreeKey(path.Join(viewName.plural, termKey, s))
m.taxonomyEntries.Insert(key, bv)
}
}
@@ -638,19 +643,10 @@ func (m *pageMap) collectPagesAndSections(query pageMapQuery, fn func(c *content
}

func (m *pageMap) collectSections(query pageMapQuery, fn func(c *contentNode)) error {
var level int
isHome := query.Prefix == "/"

if !isHome {
level = strings.Count(query.Prefix, "/")
}
level := strings.Count(query.Prefix, "/")

return m.collectSectionsFn(query, func(s string, c *contentNode) bool {
if s == query.Prefix {
return false
}

if (strings.Count(s, "/") - level) != 1 {
if strings.Count(s, "/") != level+1 {
return false
}
@@ -745,10 +741,11 @@ func (m *pageMaps) AssemblePages() error {
return err
}

a := (&sectionWalker{m: pm.contentMap}).applyAggregates()
sw := &sectionWalker{m: pm.contentMap}
a := sw.applyAggregates()
_, mainSectionsSet := pm.s.s.Info.Params()["mainsections"]
if !mainSectionsSet && a.mainSection != "" {
mainSections := []string{a.mainSection}
mainSections := []string{strings.TrimRight(a.mainSection, "/")}
pm.s.s.Info.Params()["mainSections"] = mainSections
pm.s.s.Info.Params()["mainsections"] = mainSections
}
@@ -847,7 +844,7 @@ func (b *pagesMapBucket) getTaxonomies() page.Pages {
b.sectionsInit.Do(func() {
var pas page.Pages
ref := b.owner.treeRef
ref.m.collectTaxonomies(ref.key+"/", func(c *contentNode) {
ref.m.collectTaxonomies(ref.key, func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
@@ -888,8 +885,12 @@ type sectionAggregateHandler struct {
s string
}

func (h *sectionAggregateHandler) String() string {
return fmt.Sprintf("%s/%s - %d - %s", h.sectionAggregate.datesAll, h.sectionAggregate.datesSection, h.sectionPageCount, h.s)
}

func (h *sectionAggregateHandler) isRootSection() bool {
return h.s != "/" && strings.Count(h.s, "/") == 1
return h.s != "/" && strings.Count(h.s, "/") == 2
}

func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error {
@@ -963,11 +964,13 @@ func (w *sectionWalker) applyAggregates() *sectionAggregateHandler {
func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler {

level := strings.Count(prefix, "/")

visitor := createVisitor()

w.m.taxonomies.WalkPrefix(prefix, func(s string, v interface{}) bool {
w.m.taxonomies.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
if currentLevel > level {

if currentLevel > level+1 {
return false
}
@@ -977,8 +980,8 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return true
}

if currentLevel == 1 {
nested := w.walkLevel(s+"/", createVisitor)
if currentLevel == 2 {
nested := w.walkLevel(s, createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
@@ -995,9 +998,9 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return w.err != nil
})

w.m.sections.WalkPrefix(prefix, func(s string, v interface{}) bool {
w.m.sections.WalkBelow(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
if currentLevel > level {
if currentLevel > level+1 {
return false
}
@@ -1016,11 +1019,9 @@ func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWal
return true
}

if s != "/" {
nested := w.walkLevel(s+"/", createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
nested := w.walkLevel(s, createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}

w.err = visitor.handleSectionPost()
@@ -155,19 +155,19 @@ func TestContentMap(t *testing.T) {

expect := `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
/blog/__hb_a__hl_
/blog/__hb_b/c__hl_
Tree 1:
/blog
/blog/
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_logo.png
/blog__hl_sectiondata.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
/blog/__hb_a__hl_b/data.json
/blog/__hb_a__hl_logo.png
/blog/__hl_sectiondata.json
en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
en/sections/blog|f:blog/_index.md
en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -194,24 +194,24 @@ func TestContentMap(t *testing.T) {

expect = `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
/blog/__hb_a__hl_
/blog/__hb_b/c__hl_
Tree 1:
/blog
/blog/
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_b/data2.json
/blog__hb_/a__hl_logo.png
/blog__hb_/b/c__hl_d/data3.json
/blog__hl_sectiondata.json
/blog__hl_sectiondata2.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
/blog/__hb_a__hl_b/data.json
/blog/__hb_a__hl_b/data2.json
/blog/__hb_a__hl_logo.png
/blog/__hb_b/c__hl_d/data3.json
/blog/__hl_sectiondata.json
/blog/__hl_sectiondata2.json
en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
en/sections/blog|f:blog/_index.md
en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
@@ -226,26 +226,26 @@ func TestContentMap(t *testing.T) {

c.Assert(m.testDump(), hqt.IsSameString, `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
/blog__hb_/b__hl_
/blog/__hb_a__hl_
/blog/__hb_b/c__hl_
/blog/__hb_b__hl_
Tree 1:
/blog
/blog/
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_b/data2.json
/blog__hb_/a__hl_logo.png
/blog__hb_/b/c__hl_d/data3.json
/blog__hl_sectiondata.json
/blog__hl_sectiondata2.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
/blog/__hb_a__hl_b/data.json
/blog/__hb_a__hl_b/data2.json
/blog/__hb_a__hl_logo.png
/blog/__hb_b/c__hl_d/data3.json
/blog/__hl_sectiondata.json
/blog/__hl_sectiondata2.json
en/pages/blog/__hb_a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
en/pages/blog/__hb_b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
en/pages/blog__hb_/b__hl_|f:blog/b.md
en/sections/blog|f:blog/_index.md
en/pages/blog/__hb_b__hl_|f:blog/b.md
en/sections/blog/|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- P: blog/b.md
@@ -280,19 +280,19 @@ func TestContentMap(t *testing.T) {
c.Assert(got, hqt.IsSameString, `

Tree 0:
/__hb_/bundle__hl_
/blog__hb_/a__hl_
/blog__hb_/page__hl_
/__hb_bundle__hl_
/blog/__hb_a__hl_
/blog/__hb_page__hl_
Tree 1:
/
/blog
/blog/
Tree 2:
en/pages/__hb_/bundle__hl_|f:bundle/index.md
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
en/pages/blog__hb_/page__hl_|f:blog/page.md
en/pages/__hb_bundle__hl_|f:bundle/index.md
en/pages/blog/__hb_a__hl_|f:blog/a/index.md
en/pages/blog/__hb_page__hl_|f:blog/page.md
en/sections/
- P: bundle/index.md
en/sections/blog
en/sections/blog/
- P: blog/a/index.md
- P: blog/page.md
@@ -133,22 +133,21 @@ func (p *pageState) GitInfo() *gitmap.GitInfo {

// GetTerms gets the terms defined on this page in the given taxonomy.
func (p *pageState) GetTerms(taxonomy string) page.Pages {
taxonomy = strings.ToLower(taxonomy)
m := p.s.pageMap
prefix := cleanTreeKey(taxonomy)

var self string
if p.IsHome() {
// TODO(bep) make this less magical, see taxonomyEntries.Insert.
self = "/" + page.KindHome
} else if p.treeRef != nil {
self = p.treeRef.key
if p.treeRef == nil {
return nil
}

m := p.s.pageMap

taxonomy = strings.ToLower(taxonomy)
prefix := cleanSectionTreeKey(taxonomy)
self := strings.TrimPrefix(p.treeRef.key, "/")

var pas page.Pages

m.taxonomies.WalkQuery(pageMapQuery{Prefix: prefix}, func(s string, n *contentNode) bool {
if _, found := m.taxonomyEntries.Get(s + self); found {
key := s + self
if _, found := m.taxonomyEntries.Get(key); found {
pas = append(pas, n.p)
}
return false
@@ -58,7 +58,7 @@ func (pt pageTree) IsAncestor(other interface{}) (bool, error) {
return true, nil
}

if strings.HasPrefix(ref2.key, ref1.key+"/") {
if strings.HasPrefix(ref2.key, ref1.key) {
return true, nil
}
@@ -109,7 +109,7 @@ func (pt pageTree) IsDescendant(other interface{}) (bool, error) {
return true, nil
}

if strings.HasPrefix(ref1.key, ref2.key+"/") {
if strings.HasPrefix(ref1.key, ref2.key) {
return true, nil
}
@@ -123,9 +123,11 @@ func (pt pageTree) FirstSection() page.Page {
return pt.p.s.home
}
key := ref.key

if !ref.isSection() {
key = path.Dir(key)
}

_, b := ref.m.getFirstSection(key)
if b == nil {
return nil
@@ -169,18 +169,20 @@ func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page,
func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) {
var n *contentNode

s, v, found := c.pageMap.sections.LongestPrefix(ref)
pref := helpers.AddTrailingSlash(ref)
s, v, found := c.pageMap.sections.LongestPrefix(pref)

if found {
n = v.(*contentNode)
}

if found && s == ref {
if found && s == pref {
// A section
return n, ""
}

m := c.pageMap

filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/")
langSuffix := "." + m.s.Lang()
@@ -224,9 +226,11 @@ func shouldDoSimpleLookup(ref string) bool {

func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) {
ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref)))

if ref == "" {
ref = "/"
}

inRef := ref
navUp := strings.HasPrefix(ref, "..")
var doSimpleLookup bool
@@ -275,9 +279,11 @@ func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref
}

// Check if it's a taxonomy node
s, v, found := m.taxonomies.LongestPrefix(ref)
pref := helpers.AddTrailingSlash(ref)
s, v, found := m.taxonomies.LongestPrefix(pref)

if found {
if !m.onSameLevel(ref, s) {
if !m.onSameLevel(pref, s) {
return nil, nil
}
return v.(*contentNode), nil
@@ -211,6 +211,9 @@ func TestGetPage(t *testing.T) {
writeSource(t, fs, filepath.Join("content", "sect3", "b1", "index.md"), pc("b1 bundle"))
writeSource(t, fs, filepath.Join("content", "sect3", "index", "index.md"), pc("index bundle"))

writeSource(t, fs, filepath.Join("content", "section_bundle_overlap", "_index.md"), pc("index overlap section"))
writeSource(t, fs, filepath.Join("content", "section_bundle_overlap_bundle", "index.md"), pc("index overlap bundle"))

s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})

sec3, err := s.getPageNew(nil, "/sect3")
@@ -282,6 +285,9 @@ func TestGetPage(t *testing.T) {
// Bundle variants
{"Bundle regular", page.KindPage, nil, []string{"sect3/b1", "sect3/b1/index.md", "sect3/b1/index.en.md"}, "b1 bundle"},
{"Bundle index name", page.KindPage, nil, []string{"sect3/index/index.md", "sect3/index"}, "index bundle"},

// https://github.com/gohugoio/hugo/issues/7301
{"Section and bundle overlap", page.KindPage, nil, []string{"section_bundle_overlap_bundle"}, "index overlap bundle"},
}

for _, test := range tests {
@@ -637,6 +637,7 @@ Cats Paginator {{ range $cats.Paginator.Pages }}{{ .RelPermalink }}|{{ end }}:EN
b.Assert(funny, qt.Not(qt.IsNil))

b.Assert(cat.Parent().IsHome(), qt.Equals, true)
b.Assert(funny.Kind(), qt.Equals, "taxonomy")
b.Assert(funny.Parent(), qt.Equals, cat)

b.AssertFileContent("public/index.html", `