https://github.com/gohugoio/hugo.git
commit 3ff25b37a3 (parent 75c38071d8)
5 changed files with 84 additions and 10 deletions

@@ -17,6 +17,7 @@ import (
 	"errors"
 	"fmt"
 	"os"
+	"path"
 	"strings"
 	"sync"
 	"time"
@@ -390,6 +391,11 @@ func (h *HugoSites) createMissingNodes() error {
 	// TODO(bep) np check node title etc.
 	s := h.Sites[0]
 
+	// TODO(bep) np
+	for _, p := range s.Pages {
+		p.setNodeTypeVars(s)
+	}
+
 	home := s.findPagesByNodeType(NodeHome)
 
 	// home page
@@ -460,7 +466,7 @@ func (s *Site) newNodePage(typ NodeType) *Page {
 		language: s.Language,
 	}
 
-	return &Page{Node: n}
+	return &Page{Node: n, site: s}
 }
 
 func (s *Site) newHomePage() *Page {
@@ -489,6 +495,7 @@ func (s *Site) newTaxonomyPage(plural, key string) *Page {
 	}
 
 	// TODO(bep) np check set url
+	p.URLPath.URL = path.Join(plural, key)
 
 	return p
 }
@@ -509,7 +516,7 @@ func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
 	} else {
 		p.Title = sectionName
 	}
-
+	p.URLPath.URL = name
 	return p
 }
 
@@ -613,8 +620,6 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
 				continue
 			}
 
-			p.setNodeTypeVars(s)
-
 			// If we got this far it means that this is either a new Page pointer
 			// or a template or similar has changed so wee need to do a rerendering
 			// of the shortcodes etc.

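Note: the URLPath assignments added above determine where each node is published. A minimal, self-contained sketch of how path.Join composes those relative URLs (the literal values are illustrative, not taken from the commit):

package main

import (
	"fmt"
	"path"
)

func main() {
	// Taxonomy node: plural joined with the term key, as in
	// p.URLPath.URL = path.Join(plural, key) above.
	fmt.Println(path.Join("categories", "hugo")) // categories/hugo

	// Section node: the URL is just the section name, as in
	// p.URLPath.URL = name above.
	fmt.Println(path.Join("sect1")) // sect1
}
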
@@ -31,8 +31,8 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
-	//jww.SetStdoutThreshold(jww.LevelDebug)
-	jww.SetStdoutThreshold(jww.LevelFatal)
+	jww.SetStdoutThreshold(jww.LevelDebug)
+	//jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -105,6 +105,8 @@ Content Page %02d
 	}
 
 	viper.Set("paginate", 1)
+	viper.Set("title", "Hugo Rocks")
+	viper.Set("rssURI", "customrss.xml")
 
 	s := newSiteDefaultLang()
 
@@ -172,11 +174,18 @@ Content Page %02d
 
 	// There are no pages to paginate over in the taxonomy terms.
 
+	// RSS
+	assertFileContent(t, filepath.Join("public", "customrss.xml"), false, "Recent content in Home Sweet Home! on Hugo Rocks", "<rss")
+	assertFileContent(t, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Section1 on Hugo Rocks", "<rss")
+	assertFileContent(t, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Section2 on Hugo Rocks", "<rss")
+	assertFileContent(t, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Taxonomy Hugo on Hugo Rocks", "<rss")
+	assertFileContent(t, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Taxonomy Web on Hugo Rocks", "<rss")
+
 }
 
 func TestNodesWithNoContentFile(t *testing.T) {
-	//jww.SetStdoutThreshold(jww.LevelDebug)
-	jww.SetStdoutThreshold(jww.LevelFatal)
+	jww.SetStdoutThreshold(jww.LevelDebug)
+	//jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -203,6 +212,7 @@ Content Page %02d
 
 	viper.Set("paginate", 1)
 	viper.Set("title", "Hugo Rocks!")
+	viper.Set("rssURI", "customrss.xml")
 
 	s := newSiteDefaultLang()
 
@@ -234,6 +244,13 @@ Content Page %02d
 	assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
 		"Section Title: Sect2s")
 
+	// RSS
+	assertFileContent(t, filepath.Join("public", "customrss.xml"), false, "Recent content in Hugo Rocks! on Hugo Rocks!", "<rss")
+	assertFileContent(t, filepath.Join("public", "sect1", "customrss.xml"), false, "Recent content in Sect1s on Hugo Rocks!", "<rss")
+	assertFileContent(t, filepath.Join("public", "sect2", "customrss.xml"), false, "Recent content in Sect2s on Hugo Rocks!", "<rss")
+	assertFileContent(t, filepath.Join("public", "categories", "hugo", "customrss.xml"), false, "Recent content in Hugo on Hugo Rocks!", "<rss")
+	assertFileContent(t, filepath.Join("public", "categories", "web", "customrss.xml"), false, "Recent content in Web on Hugo Rocks!", "<rss")
+
 }
 
 func writeLayoutsForNodeAsPageTests(t *testing.T) {

@@ -95,6 +95,8 @@ type Page struct {
 	PageMeta
 	Source
 	Position `json:"-"`
+
+	// TODO(bep) np pointer, or remove
 	Node
 
 	GitInfo *gitmap.GitInfo
@@ -496,6 +498,29 @@ func (p *Page) layouts(l ...string) []string {
 	return layouts(p.Type(), layout)
 }
 
+// TODO(bep) np consolidate and test these NodeType switches
+// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
+// if no RSS should be rendered.
+func (p *Page) rssLayouts() []string {
+	switch p.NodeType {
+	case NodeHome:
+		return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
+	case NodeSection:
+		section := p.sections[0]
+		return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
+	case NodeTaxonomy:
+		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+		return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
+	case NodeTaxonomyTerms:
+		// No RSS for taxonomy terms
+	case NodePage:
+		// No RSS for regular pages
+	}
+
+	return nil
+
+}
+
 func layouts(types string, layout string) (layouts []string) {
 	t := strings.Split(types, "/")
 
@@ -1246,7 +1271,7 @@ func (p *Page) prepareData(s *Site) error {
 		if !ok {
 			return fmt.Errorf("Data for section %s not found", p.Section())
 		}
-		p.Data["Pages"] = sectionData
+		p.Data["Pages"] = sectionData.Pages()
 	case NodeTaxonomy:
 		plural := p.sections[0]
 		term := p.sections[1]
@@ -1278,7 +1303,7 @@ func (p *Page) prepareData(s *Site) error {
 // the paginators etc., we do it manually here.
 // TODO(bep) np do better
 func (p *Page) copy() *Page {
-	c := &Page{Node: Node{NodeType: p.NodeType}}
+	c := &Page{Node: Node{NodeType: p.NodeType, Site: p.Site}}
 	c.Title = p.Title
 	c.Data = p.Data
 	c.Date = p.Date

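Note: the candidate lists returned by rssLayouts above are ordered from most to least specific, and rendering is expected to fall back to the first layout that actually exists (the next hunk's context shows the Site.layoutExists helper used for such checks). A minimal sketch of that first-match fallback, assuming a hypothetical set of available templates (pickLayout and the exists map are illustrative, not part of the commit):

package main

import "fmt"

// pickLayout returns the first candidate that exists, mirroring the
// "most specific first" ordering produced by rssLayouts.
func pickLayout(candidates []string, exists map[string]bool) (string, bool) {
	for _, c := range candidates {
		if exists[c] {
			return c, true
		}
	}
	return "", false
}

func main() {
	// Candidates as returned for a NodeSection page in section "sect1".
	candidates := []string{
		"section/sect1.rss.xml",
		"_default/rss.xml",
		"rss.xml",
		"_internal/_default/rss.xml",
	}

	// Suppose only the embedded default RSS template is available.
	exists := map[string]bool{"_internal/_default/rss.xml": true}

	if layout, ok := pickLayout(candidates, exists); ok {
		fmt.Println("rendering RSS with", layout)
	}
}
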
@@ -2322,6 +2322,7 @@ func (s *Site) layoutExists(layouts ...string) bool {
 }
 
 func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error {
+	jww.DEBUG.Printf("Render XML for %q to %q", name, dest)
 	renderBuffer := bp.GetBuffer()
 	defer bp.PutBuffer(renderBuffer)
 	renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")

@@ -76,6 +76,10 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
 				results <- err
 			}
 		}
+
+		if err := s.renderRSS(p); err != nil {
+			results <- err
+		}
 	}
 }
 
@@ -121,3 +125,25 @@ func (s *Site) renderPaginator(p *Page) error {
 	}
 	return nil
 }
+
+func (s *Site) renderRSS(p *Page) error {
+	layouts := p.rssLayouts()
+
+	if layouts == nil {
+		// No RSS for this NodeType
+		return nil
+	}
+
+	// TODO(bep) np check RSS titles
+	rssNode := p.copy()
+
+	// TODO(bep) np todelido URL
+	rssURI := s.Language.GetString("rssURI")
+	rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI)
+
+	if err := s.renderAndWriteXML(rssNode.Title, rssNode.URLPath.URL, rssNode, s.appendThemeTemplates(layouts)...); err != nil {
+		return err
+	}
+
+	return nil
+}

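Note: renderRSS above joins the configured rssURI onto the node's own URL, which is why the tests expect the home feed at public/customrss.xml and the section feed at public/sect1/customrss.xml. A minimal sketch of that destination composition, assuming an empty URL path for the home node (the sample values mirror the test setup, not internal Hugo state):

package main

import (
	"fmt"
	"path"
)

func main() {
	rssURI := "customrss.xml" // viper.Set("rssURI", "customrss.xml") in the tests

	for _, nodeURL := range []string{"", "sect1", "categories/hugo"} {
		// Mirrors rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI).
		// path.Join drops empty elements, so the home feed lands at the site root.
		dest := path.Join(nodeURL, rssURI)
		fmt.Println(path.Join("public", dest))
	}
	// Prints:
	// public/customrss.xml
	// public/sect1/customrss.xml
	// public/categories/hugo/customrss.xml
}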