Mirror of https://github.com/gohugoio/hugo.git
parent 3737c9bcb3
commit f8bda16e15
5 changed files with 79 additions and 20 deletions
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+    "fmt"
     "html/template"
     "path"
     "path/filepath"
@@ -39,6 +40,7 @@ const (
     NodeHome
     NodeSection
     NodeTaxonomy
+    NodeTaxonomyTerms
 )
 
 func (p NodeType) IsNode() bool {
@@ -343,7 +345,9 @@ func (n *Node) addLangFilepathPrefix(outfile string) string {
 
 func sectionsFromFilename(filename string) []string {
     dir, _ := filepath.Split(filename)
-    return strings.Split(dir, helpers.FilePathSeparator)
+    dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
+    sections := strings.Split(dir, helpers.FilePathSeparator)
+    return sections
 }
 
 // TODO(bep) np node identificator
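A note on the hunk above: without the TrimSuffix, the trailing path separator made the split end with an empty string element. A minimal standalone sketch of the changed behaviour, using the standard library's filepath.Separator in place of Hugo's helpers.FilePathSeparator (that substitution is an assumption for the sake of a runnable example):

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    // sectionsFromFilename mirrors the updated function above: split the directory
    // part of a content filename into section names, without a trailing "" element.
    func sectionsFromFilename(filename string) []string {
        dir, _ := filepath.Split(filename)
        dir = strings.TrimSuffix(dir, string(filepath.Separator))
        return strings.Split(dir, string(filepath.Separator))
    }

    func main() {
        // Prints [categories hugo]; before the change the slice ended with an empty element.
        fmt.Println(sectionsFromFilename(filepath.Join("categories", "hugo", "_node.md")))
    }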
@@ -364,13 +368,15 @@ func nodeTypeFromFilename(filename string) NodeType {
 func (p *Page) setNodeTypeVars(s *Site) {
     // TODO(bep) np taxonomies etc.
     if p.NodeType == NodeUnknown {
-        // This is either a taxonomy or a section
-        if s.isTaxonomy(p.Section()) {
-            p.NodeType = NodeTaxonomy
-        } else {
-            p.NodeType = NodeSection
+        // This is either a taxonomy list, taxonomy term or a section
+        nodeType := s.nodeTypeFromSections(p.sections)
+
+        if nodeType == NodeUnknown {
+            panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
         }
+
+        p.NodeType = nodeType
 
     }
     // TODO(bep) np node URL
     // Set Node URL
@@ -381,6 +387,8 @@ func (p *Page) setNodeTypeVars(s *Site) {
         p.URLPath.URL = p.Section()
     case NodeTaxonomy:
         p.URLPath.URL = path.Join(p.sections...)
+    case NodeTaxonomyTerms:
+        p.URLPath.URL = path.Join(p.sections...)
     }
 
     p.site = s
@@ -31,8 +31,8 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
-    jww.SetStdoutThreshold(jww.LevelDebug)
-    //jww.SetStdoutThreshold(jww.LevelFatal)
+    //jww.SetStdoutThreshold(jww.LevelDebug)
+    jww.SetStdoutThreshold(jww.LevelFatal)
 
     nodePageFeatureFlag = true
     defer toggleNodePageFeatureFlag()
@@ -71,6 +71,18 @@ Section2 **Content!**
 title: Taxonomy Hugo
 ---
 Taxonomy Hugo **Content!**
+`)
+
+    writeSource(t, filepath.Join("content", "categories", "web", "_node.md"), `---
+title: Taxonomy Web
+---
+Taxonomy Web **Content!**
+`)
+
+    writeSource(t, filepath.Join("content", "categories", "_node.md"), `---
+title: Taxonomy Term Categories
+---
+Taxonomy Term Categories **Content!**
 `)
 
     writeSource(t, filepath.Join("layouts", "index.html"), `
@@ -96,6 +108,7 @@ Section Content: {{ .Content }}
 {{ end }}
 `)
 
+    // Taxonomy lists
     writeSource(t, filepath.Join("layouts", "_default", "taxonomy.html"), `
 Taxonomy Title: {{ .Title }}
 Taxonomy Content: {{ .Content }}
@@ -103,6 +116,15 @@ Taxonomy Content: {{ .Content }}
 {{ range .Paginator.Pages }}
 Pag: {{ .Title }}
 {{ end }}
+`)
+
+    // Taxonomy terms
+    writeSource(t, filepath.Join("layouts", "_default", "terms.html"), `
+Taxonomy Terms Title: {{ .Title }}
+Taxonomy Terms Content: {{ .Content }}
+{{ range $key, $value := .Data.Terms }}
+k/v: {{ $key }} / {{ printf "%=v" $value }}
+{{ end }}
 `)
 
     // Add some regular pages
@@ -113,7 +135,10 @@ Taxonomy Content: {{ .Content }}
         }
         writeSource(t, filepath.Join("content", sect, fmt.Sprintf("regular%d.md", i)), fmt.Sprintf(`---
 title: Page %02d
-categories: Hugo
+categories: [
+"Hugo",
+"Web"
+]
 ---
 Content Page %02d
 `, i, i))
@@ -169,13 +194,22 @@ Content Page %02d
     sections := h.findAllPagesByNodeType(NodeSection)
     require.Len(t, sections, 2)
 
-    // Check taxonomy list
+    // Check taxonomy lists
     assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
         "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>")
 
+    assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+        "Taxonomy Title: Taxonomy Web", "Taxonomy Web <strong>Content!</strong>")
+
     // Check taxonomy list paginator
     assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
         "Taxonomy Title: Taxonomy Hugo",
         "Pag: Page 02")
 
+    // Check taxonomy terms
+    assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+        "Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo")
+
+    // There are no pages to paginate over in the taxonomy terms.
+
 }
@@ -475,6 +475,9 @@ func (p *Page) layouts(l ...string) []string {
     case NodeTaxonomy:
         singular := p.site.taxonomiesPluralSingular[p.sections[0]]
         return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}
+    case NodeTaxonomyTerms:
+        singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+        return []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
     }
 
     // Regular Page handled below
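The layout lookup order added for NodeTaxonomyTerms can be read off the hunk above. The sketch below isolates it, with a plain map standing in for p.site.taxonomiesPluralSingular; the "categories" -> "category" mapping is an illustrative assumption, not taken from the diff:

    // layoutsForTaxonomyTerms lists the candidate templates for a terms page,
    // in the same order as the NodeTaxonomyTerms case above.
    func layoutsForTaxonomyTerms(sections []string, pluralSingular map[string]string) []string {
        singular := pluralSingular[sections[0]]
        return []string{
            "taxonomy/" + singular + ".terms.html", // e.g. taxonomy/category.terms.html
            "_default/terms.html",                  // matches the terms.html layout written in the test above
            "indexes/indexes.html",                 // older fallback location
        }
    }

For sections = []string{"categories"} and pluralSingular = map[string]string{"categories": "category"}, this yields taxonomy/category.terms.html, _default/terms.html, indexes/indexes.html.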
@@ -1167,6 +1170,8 @@ func (p *Page) TargetPath() (outfile string) {
         return filepath.Join(p.Section(), "index.html")
     case NodeTaxonomy:
         return filepath.Join(append(p.sections, "index.html")...)
+    case NodeTaxonomyTerms:
+        return filepath.Join(append(p.sections, "index.html")...)
     }
 
     // Always use URL if it's specified
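As the TargetPath hunk shows, taxonomy lists and taxonomy terms both produce their output path by joining the page's sections with index.html. A small sketch of just that case; the helper name is hypothetical:

    // targetPathForNode joins a node's sections into its output path, matching the
    // NodeTaxonomy and NodeTaxonomyTerms cases of TargetPath above.
    func targetPathForNode(sections ...string) string {
        return filepath.Join(append(sections, "index.html")...)
    }

    // targetPathForNode("categories")         -> "categories/index.html"
    // targetPathForNode("categories", "hugo") -> "categories/hugo/index.html"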
@@ -1253,7 +1258,16 @@ func (p *Page) prepareData(s *Site) error {
         p.Data["Singular"] = singular
         p.Data["Plural"] = plural
         p.Data["Pages"] = taxonomy.Pages()
+    case NodeTaxonomyTerms:
+        plural := p.sections[0]
+        singular := s.taxonomiesPluralSingular[plural]
+
+        p.Data["Singular"] = singular
+        p.Data["Plural"] = plural
+        p.Data["Terms"] = s.Taxonomies[plural]
+        // keep the following just for legacy reasons
+        p.Data["OrderedIndex"] = p.Data["Terms"]
+        p.Data["Index"] = p.Data["Terms"]
     }
 
     return nil
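The new NodeTaxonomyTerms branch in prepareData hands the whole taxonomy to the template as .Data.Terms, plus the legacy aliases. A self-contained sketch with plain maps standing in for the Site and Page fields (the stand-in parameters are assumptions; the keys are the ones set in the hunk above):

    // prepareTermsData fills the data map for a taxonomy terms page the same way
    // the NodeTaxonomyTerms case above does.
    func prepareTermsData(sections []string, pluralSingular map[string]string,
        taxonomies map[string]interface{}, data map[string]interface{}) {

        plural := sections[0]              // e.g. "categories"
        singular := pluralSingular[plural] // e.g. "category"

        data["Singular"] = singular
        data["Plural"] = plural
        data["Terms"] = taxonomies[plural] // the term -> pages structure for this taxonomy
        // Legacy aliases so older templates reading .Data.OrderedIndex / .Data.Index keep working.
        data["OrderedIndex"] = data["Terms"]
        data["Index"] = data["Terms"]
    }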
@@ -1578,11 +1578,7 @@ func (s *Site) assembleSections() {
     s.Info.Sections = s.Sections
     regularPages := s.findPagesByNodeType(NodePage)
     for i, p := range regularPages {
-        section := p.Section()
-        if s.isTaxonomy(section) {
-            continue
-        }
-        s.Sections.add(section, WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
+        s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
     }
 
     for k := range s.Sections {
@@ -1599,11 +1595,14 @@ func (s *Site) assembleSections() {
         }
     }
 
-func (s *Site) isTaxonomy(section string) bool {
-    if _, isTaxonomy := s.Taxonomies[section]; isTaxonomy {
-        return true
+func (s *Site) nodeTypeFromSections(sections []string) NodeType {
+    if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
+        if len(sections) == 1 {
+            return NodeTaxonomyTerms
+        }
+        return NodeTaxonomy
     }
-    return false
+    return NodeSection
 }
 
 func (s *Site) findPagesByNodeType(n NodeType) Pages {
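isTaxonomy is replaced by nodeTypeFromSections: the first section decides whether the page belongs to a taxonomy, and a single section means it is the terms page for that taxonomy. A self-contained sketch, with a set of taxonomy names standing in for s.Taxonomies and the NodeType constants redeclared only so the snippet compiles (their order here is illustrative):

    type NodeType int

    const (
        NodeUnknown NodeType = iota
        NodeSection
        NodeTaxonomy
        NodeTaxonomyTerms
    )

    // nodeTypeFromSections mirrors the new Site method above.
    func nodeTypeFromSections(sections []string, taxonomies map[string]bool) NodeType {
        if taxonomies[sections[0]] {
            if len(sections) == 1 {
                // e.g. content/categories/_node.md -> the terms page for "categories"
                return NodeTaxonomyTerms
            }
            // e.g. content/categories/hugo/_node.md -> the list page for the "hugo" term
            return NodeTaxonomy
        }
        // any other first section -> an ordinary section node
        return NodeSection
    }

As written, the function never returns NodeUnknown, so the panic added in setNodeTypeVars above is purely defensive.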
@@ -1891,6 +1890,9 @@ func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results
 
 // renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
 func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
+    if nodePageFeatureFlag {
+        return nil
+    }
     taxonomies := s.Language.GetStringMapString("Taxonomies")
     for singular, plural := range taxonomies {
         n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
@@ -70,7 +70,8 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
             results <- err
         }
 
-        if p.NodeType.IsNode() {
+        // Taxonomy terms have no page set to paginate, so skip that for now.
+        if p.NodeType.IsNode() && p.NodeType != NodeTaxonomyTerms {
             if err := s.renderPaginator(p); err != nil {
                 results <- err
             }