mirror of https://github.com/gohugoio/hugo.git
parent c175407fa4
commit 698b994f71
8 changed files with 206 additions and 106 deletions
@@ -52,6 +52,7 @@ func (h *HugoSites) assembleGitInfo() {
 	s := h.Sites[0]
 
 	for _, p := range s.AllPages {
+		// TODO(bep) np consider other nodes
 		// Git normalizes file paths on this form:
 		filename := path.Join(contentRoot, contentDir, filepath.ToSlash(p.Path()))
 		g, ok := gitMap[filename]
@@ -238,10 +238,16 @@ func (h *HugoSites) Build(config BuildCfg) error {
 		}
 	}
 
+	// TODO(bep) np createMissingNodes needs taxonomies and sections
 	if err := h.createMissingNodes(); err != nil {
 		return err
 	}
 
+	for _, s := range h.Sites {
+		// Needed by all who use .Pages, .AllPages, .indexPages
+		s.refreshPageCaches()
+	}
+
 	if err := h.preRender(config, whatChanged{source: true, other: true}); err != nil {
 		return err
 	}
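
The hunk above wires the new steps into Build in a fixed order: missing nodes (home, taxonomy, section) are created first, each site then re-derives its cached page lists, and only afterwards does preRender run. A condensed sketch of that ordering (not code from this commit; it reuses hugolib's unexported types, so it is illustrative only):

// buildSketch condenses the ordering established by the hunk above.
func buildSketch(h *HugoSites, config BuildCfg) error {
	// 1. Create the nodes that have no content file backing them.
	if err := h.createMissingNodes(); err != nil {
		return err
	}
	// 2. Re-derive Pages, AllPages and indexNodes from Nodes/AllNodes.
	for _, s := range h.Sites {
		s.refreshPageCaches()
	}
	// 3. Only then run the late tasks (shortcode handling etc.).
	return h.preRender(config, whatChanged{source: true, other: true})
}
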
@@ -314,10 +320,10 @@ func (h *HugoSites) Rebuild(config BuildCfg, events ...fsnotify.Event) error {
 				return err
 			}
 		}
 	}
 
-		if err := h.createMissingNodes(); err != nil {
-			return err
-		}
+	if err := h.createMissingNodes(); err != nil {
+		return err
+	}
 
 	if err := h.preRender(config, changed); err != nil {
@@ -391,16 +397,11 @@ func (h *HugoSites) createMissingNodes() error {
 	// TODO(bep) np check node title etc.
 	s := h.Sites[0]
 
-	// TODO(bep) np
-	for _, p := range s.Pages {
-		p.setNodeTypeVars(s)
-	}
-
 	home := s.findPagesByNodeType(NodeHome)
 
 	// home page
 	if len(home) == 0 {
-		s.Pages = append(s.Pages, s.newHomePage())
+		s.Nodes = append(s.Nodes, s.newHomePage())
 	}
 
 	// taxonomy list and terms pages
@@ -426,11 +427,11 @@ func (h *HugoSites) createMissingNodes() error {
 				}
 			}
 			if !foundTaxonomyPage {
-				s.Pages = append(s.Pages, s.newTaxonomyPage(plural, key))
+				s.Nodes = append(s.Nodes, s.newTaxonomyPage(plural, key))
 			}
 
 			if !foundTaxonomyTermsPage {
-				s.Pages = append(s.Pages, s.newTaxonomyTermsPage(plural))
+				s.Nodes = append(s.Nodes, s.newTaxonomyTermsPage(plural))
 			}
 		}
 
@@ -449,7 +450,7 @@ func (h *HugoSites) createMissingNodes() error {
 			}
 		}
 		if !foundSection {
-			s.Pages = append(s.Pages, s.newSectionPage(name, section))
+			s.Nodes = append(s.Nodes, s.newSectionPage(name, section))
 		}
 	}
 }
@@ -542,7 +543,7 @@ func (h *HugoSites) setupTranslations() {
 			if strings.HasPrefix(site.Language.Lang, p.Lang()) {
 				site.updateBuildStats(p)
 				if shouldBuild {
-					site.Pages = append(site.Pages, p)
+					site.Nodes = append(site.Nodes, p)
 					p.Site = &site.Info
 				}
 			}
@@ -552,7 +553,7 @@ func (h *HugoSites) setupTranslations() {
 		}
 
 		if i == 0 {
-			site.AllPages = append(site.AllPages, p)
+			site.AllNodes = append(site.AllNodes, p)
 		}
 	}
 
@@ -560,12 +561,12 @@ func (h *HugoSites) setupTranslations() {
 
 	// Pull over the collections from the master site
 	for i := 1; i < len(h.Sites); i++ {
-		h.Sites[i].AllPages = h.Sites[0].AllPages
+		h.Sites[i].AllNodes = h.Sites[0].AllNodes
 		h.Sites[i].Data = h.Sites[0].Data
 	}
 
 	if len(h.Sites) > 1 {
-		pages := h.Sites[0].AllPages
+		pages := h.Sites[0].AllNodes
 		allTranslations := pagesToTranslationsMap(h.multilingual, pages)
 		assignTranslationsToPages(allTranslations, pages)
 	}
@@ -574,12 +575,14 @@ func (h *HugoSites) setupTranslations() {
 // preRender performs build tasks that need to be done as late as possible.
 // Shortcode handling is the main task in here.
 // TODO(bep) We need to look at the whole handler-chain construct with he below in mind.
+// TODO(bep) np clean
 func (h *HugoSites) preRender(cfg BuildCfg, changed whatChanged) error {
 
 	for _, s := range h.Sites {
 		if err := s.setCurrentLanguageConfig(); err != nil {
 			return err
 		}
 
+		// Run "render prepare"
 		if err := s.renderHomePage(true); err != nil {
 			return err
@@ -680,7 +683,7 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
 		}(pageChan, wg)
 	}
 
-	for _, p := range s.Pages {
+	for _, p := range s.Nodes {
 		pageChan <- p
 	}
 
@@ -728,9 +731,10 @@ func (s *Site) updateBuildStats(page *Page) {
 	}
 }
 
+// TODO(bep) np remove
 func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages {
 	var pages Pages
-	for _, p := range h.Sites[0].AllPages {
+	for _, p := range h.Sites[0].AllNodes {
 		if p.NodeType == n {
 			pages = append(pages, p)
 		}
@@ -750,6 +754,9 @@ func buildAndRenderSite(s *Site, additionalTemplates ...string) error {
 
 // Convenience func used in tests to build a single site/language.
 func doBuildSite(s *Site, render bool, additionalTemplates ...string) error {
+	if s.PageCollections == nil {
+		s.PageCollections = newPageCollections()
+	}
 	sites, err := newHugoSites(s)
 	if err != nil {
 		return err
@@ -395,7 +395,6 @@ func (p *Page) setNodeTypeVars(s *Site) {
 	}
 
 	p.NodeType = nodeType
-
 }
-
+// TODO(bep) np node URL
 // Set Node URL
@@ -31,8 +31,8 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
-	jww.SetStdoutThreshold(jww.LevelDebug)
-	//jww.SetStdoutThreshold(jww.LevelFatal)
+	//jww.SetStdoutThreshold(jww.LevelDebug)
+	jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -184,8 +184,8 @@ Content Page %02d
 }
 
 func TestNodesWithNoContentFile(t *testing.T) {
-	jww.SetStdoutThreshold(jww.LevelDebug)
-	//jww.SetStdoutThreshold(jww.LevelFatal)
+	//jww.SetStdoutThreshold(jww.LevelDebug)
+	jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -221,7 +221,7 @@ Content Page %02d
 	}
 
 	// Home page
-	homePages := s.findPagesByNodeType(NodeHome)
+	homePages := s.findIndexNodesByNodeType(NodeHome)
 	require.Len(t, homePages, 1)
 
 	homePage := homePages[0]
@@ -1260,12 +1260,12 @@ func (p *Page) prepareLayouts() error {
 
 // TODO(bep) np naming, move some
 func (p *Page) prepareData(s *Site) error {
 
 	p.Data = make(map[string]interface{})
 	switch p.NodeType {
+	case NodePage:
 	case NodeHome:
-		// TODO(bep) np cache the below
-		p.Data["Pages"] = s.owner.findAllPagesByNodeType(NodePage)
+		p.Data["Pages"] = s.Pages
 	case NodeSection:
 		sectionData, ok := s.Sections[p.sections[0]]
 		if !ok {
hugolib/page_collections.go (new file, 122 lines)
@@ -0,0 +1,122 @@
+// Copyright 2016 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+)
+
+// TODO(bep) np pages names
+// TODO(bep) np this is a somewhat breaking change and should be doc. + release notes: See AllPages vs. "this language only". Looks like it is like this alread, check.
+type PageCollections struct {
+	// Includes only pages of NodePage type, and only pages in the current language.
+	Pages Pages
+
+	// Includes all pages in all languages, including the current one.
+	// Only pages of NodePage type.
+	AllPages Pages
+
+	// Includes pages of all types, but only pages in the current language.
+	Nodes Pages
+
+	// Includes all pages in all languages, including the current one.
+	// Includes pages of all types.
+	AllNodes Pages
+
+	// A convenience cache for the traditional node types, taxonomies, home page etc.
+	// This is for the current language only.
+	indexNodes Pages
+
+	// Includes absolute all pages (of all types), including drafts etc.
+	rawAllPages Pages
+}
+
+func (c *PageCollections) refreshPageCaches() {
+	// All pages are stored in AllNodes and Nodes. Filter from those.
+	c.Pages = c.findPagesByNodeTypeIn(NodePage, c.Nodes)
+	c.indexNodes = c.findPagesByNodeTypeNotIn(NodePage, c.Nodes)
+	c.AllPages = c.findPagesByNodeTypeIn(NodePage, c.AllNodes)
+
+	for _, n := range c.Nodes {
+		if n.NodeType == NodeUnknown {
+			panic(fmt.Sprintf("Got unknown type %s", n.Title))
+		}
+	}
+}
+
+func newPageCollections() *PageCollections {
+	return &PageCollections{}
+}
+
+func newPageCollectionsFromPages(pages Pages) *PageCollections {
+	return &PageCollections{rawAllPages: pages}
+}
+
+func (c *PageCollections) findPagesByNodeType(n NodeType) Pages {
+	return c.findPagesByNodeTypeIn(n, c.Nodes)
+}
+
+func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages {
+	return c.findPagesByNodeTypeIn(n, c.indexNodes)
+}
+
+func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
+	var pages Pages
+	for _, p := range inPages {
+		if p.NodeType == n {
+			pages = append(pages, p)
+		}
+	}
+	return pages
+}
+
+func (*PageCollections) findPagesByNodeTypeNotIn(n NodeType, inPages Pages) Pages {
+	var pages Pages
+	for _, p := range inPages {
+		if p.NodeType != n {
+			pages = append(pages, p)
+		}
+	}
+	return pages
+}
+
+func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages {
+	return c.findPagesByNodeTypeIn(n, c.rawAllPages)
+}
+
+func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages {
+	return c.findPagesByNodeTypeIn(n, c.rawAllPages)
+}
+
+func (c *PageCollections) addPage(page *Page) {
+	c.rawAllPages = append(c.rawAllPages, page)
+}
+
+func (c *PageCollections) removePageByPath(path string) {
+	if i := c.rawAllPages.FindPagePosByFilePath(path); i >= 0 {
+		c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
+	}
+}
+
+func (c *PageCollections) removePage(page *Page) {
+	if i := c.rawAllPages.FindPagePos(page); i >= 0 {
+		c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
+	}
+}
+
+func (c *PageCollections) replacePage(page *Page) {
+	// will find existing page that matches filepath and remove it
+	c.removePage(page)
+	c.addPage(page)
+}
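
The collections above are the heart of the change: Nodes and AllNodes hold everything, and refreshPageCaches derives the filtered views from them. A small sketch of the resulting semantics (hypothetical *Page values, not part of the commit):

// Assuming regular, home and translated are *Page values whose NodeType
// is NodePage, NodeHome and NodePage respectively, and translated lives
// in another language.
c := newPageCollections()
c.Nodes = Pages{regular, home}                // current language, all types
c.AllNodes = Pages{regular, home, translated} // all languages, all types

c.refreshPageCaches()

// c.Pages      -> Pages{regular}              (NodePage only, current language)
// c.indexNodes -> Pages{home}                 (everything except NodePage)
// c.AllPages   -> Pages{regular, translated}  (NodePage only, all languages)
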
hugolib/site.go (128 changed lines)
@@ -91,11 +91,10 @@ type Site struct {
 	nodeCache     *nodeCache
 	nodeCacheInit sync.Once
 
-	Pages       Pages
-	AllPages    Pages
-	rawAllPages Pages
-	Files       []*source.File
-	Taxonomies  TaxonomyList
+	*PageCollections
+
+	Files      []*source.File
+	Taxonomies TaxonomyList
 
 	// Plural is what we get in the folder, so keep track of this mapping
 	// to get the singular form from that value.
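
Because *PageCollections is embedded, its fields are promoted onto Site, so existing call sites such as s.Pages or s.AllPages keep compiling unchanged. A self-contained illustration of that Go mechanism (hypothetical names, not Hugo code):

package main

import "fmt"

type pageCollections struct{ Pages []string }

type site struct {
	*pageCollections // embedded pointer: its fields are promoted onto site
}

func main() {
	s := site{pageCollections: &pageCollections{Pages: []string{"a.md"}}}
	fmt.Println(s.Pages) // promoted field access: prints [a.md]
}
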
@@ -122,7 +121,8 @@ func (s *Site) reset() *Site {
 
 // newSite creates a new site in the given language.
 func newSite(lang *helpers.Language) *Site {
-	return &Site{Language: lang, Info: newSiteInfo(siteBuilderCfg{language: lang})}
+	c := newPageCollections()
+	return &Site{Language: lang, PageCollections: c, Info: newSiteInfo(siteBuilderCfg{pageCollections: c, language: lang})}
 
 }
@@ -172,14 +172,12 @@ type SiteInfo struct {
 	_                   [4]byte
 	paginationPageCount uint64
 
-	BaseURL     template.URL
-	Taxonomies  TaxonomyList
-	Authors     AuthorList
-	Social      SiteSocial
-	Sections    Taxonomy
-	Pages       *Pages // Includes only pages in this language
-	AllPages    *Pages // Includes other translated pages, excluding those in this language.
-	rawAllPages *Pages // Includes absolute all pages, including drafts etc.
+	BaseURL    template.URL
+	Taxonomies TaxonomyList
+	Authors    AuthorList
+	Social     SiteSocial
+	Sections   Taxonomy
+	*PageCollections
 	Files      *[]*source.File
 	Menus      *Menus
 	Hugo       *HugoInfo
@@ -211,18 +209,19 @@ type SiteInfo struct {
 // Used in tests.
 
 type siteBuilderCfg struct {
-	language *helpers.Language
-	baseURL  string
+	language        *helpers.Language
+	pageCollections *PageCollections
+	baseURL         string
 
-	pages *Pages
+	pages Pages
 }
 
 func newSiteInfo(cfg siteBuilderCfg) SiteInfo {
 	return SiteInfo{
-		BaseURL:      template.URL(cfg.baseURL),
-		rawAllPages:  cfg.pages,
-		pathSpec:     helpers.NewPathSpecFromConfig(cfg.language),
-		multilingual: newMultiLingualForLanguage(cfg.language),
+		BaseURL:         template.URL(cfg.baseURL),
+		pathSpec:        helpers.NewPathSpecFromConfig(cfg.language),
+		multilingual:    newMultiLingualForLanguage(cfg.language),
+		PageCollections: cfg.pageCollections,
 	}
 }
 
@@ -297,7 +296,8 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error) {
 	var link string
 
 	if refURL.Path != "" {
-		for _, page := range []*Page(*s.AllPages) {
+		// TODO(bep) np relRef
+		for _, page := range s.AllPages {
 			refPath := filepath.FromSlash(refURL.Path)
 			if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
 				target = page
@@ -372,7 +372,8 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, error) {
 		}
 	}
 
-	for _, page := range []*Page(*s.AllPages) {
+	// TODO(bep) np sourceRelativeLink
+	for _, page := range s.AllPages {
 		if page.Source.Path() == refPath {
 			target = page
 			break
@@ -381,14 +382,14 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, error) {
 	// need to exhaust the test, then try with the others :/
 	// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
 	mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
-	for _, page := range []*Page(*s.AllPages) {
+	for _, page := range s.AllPages {
 		if page.Source.Path() == mdPath {
 			target = page
 			break
 		}
 	}
 	indexPath := filepath.Join(refPath, "index.md")
-	for _, page := range []*Page(*s.AllPages) {
+	for _, page := range s.AllPages {
 		if page.Source.Path() == indexPath {
 			target = page
 			break
@@ -806,6 +807,7 @@ func (s *Site) postProcess() (err error) {
 	if err = s.buildSiteMeta(); err != nil {
 		return
 	}
+
 	s.timerStep("build taxonomies")
 	return
 }
@@ -975,9 +977,7 @@ func (s *Site) initializeSiteInfo() {
 		BuildDrafts:           viper.GetBool("buildDrafts"),
 		canonifyURLs:          viper.GetBool("canonifyURLs"),
 		preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"),
-		AllPages:              &s.AllPages,
-		Pages:                 &s.Pages,
-		rawAllPages:           &s.rawAllPages,
+		PageCollections:       s.PageCollections,
 		Files:                 &s.Files,
 		Menus:                 &s.Menus,
 		Params:                params,
@@ -1293,28 +1293,6 @@ func converterCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
 	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
 }
 
-func (s *Site) addPage(page *Page) {
-	s.rawAllPages = append(s.rawAllPages, page)
-}
-
-func (s *Site) removePageByPath(path string) {
-	if i := s.rawAllPages.FindPagePosByFilePath(path); i >= 0 {
-		s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
-	}
-}
-
-func (s *Site) removePage(page *Page) {
-	if i := s.rawAllPages.FindPagePos(page); i >= 0 {
-		s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
-	}
-}
-
-func (s *Site) replacePage(page *Page) {
-	// will find existing page that matches filepath and remove it
-	s.removePage(page)
-	s.addPage(page)
-}
-
 func (s *Site) replaceFile(sf *source.File) {
 	for i, f := range s.Files {
 		if f.Path() == sf.Path() {
@@ -1379,15 +1357,29 @@ func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
 }
 
 func (s *Site) buildSiteMeta() (err error) {
+
 	s.assembleMenus()
 
-	if len(s.Pages) == 0 {
+	if len(s.Nodes) == 0 {
 		return
 	}
 
+	// TODO(bep) np order
+	// assembleTaxonomies: Needs pages (temp lookup) (maybe later nodes)
 	s.assembleTaxonomies()
+
+	// TODO(bep) np
+	for _, p := range s.AllNodes {
+		// setNodeTypeVars needs taxonomies
+		p.setNodeTypeVars(s)
+	}
+
+	// assembleSections: Needs pages (temp lookup)
 	s.assembleSections()
-	s.Info.LastChange = s.Pages[0].Lastmod
+
+	// TODO(bep) np
+	pages := s.findPagesByNodeType(NodePage)
+	s.Info.LastChange = pages[0].Lastmod
 
 	return
 }
@@ -1527,7 +1519,8 @@ func (s *Site) assembleTaxonomies() {
 	for singular, plural := range taxonomies {
 		s.Taxonomies[plural] = make(Taxonomy)
 		s.taxonomiesPluralSingular[plural] = singular
-		for _, p := range s.Pages {
+		// TODO(np) tax other nodes
+		for _, p := range s.findPagesByNodeType(NodePage) {
 			vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
 			weight := p.GetParam(plural + "_weight")
 			if weight == nil {
@@ -1560,8 +1553,7 @@ func (s *Site) resetBuildState() {
 
 	s.nodeCache.reset()
 
-	s.Pages = make(Pages, 0)
-	s.AllPages = make(Pages, 0)
+	s.PageCollections = newPageCollections()
 
 	s.Info.paginationPageCount = 0
 	s.draftCount = 0
@@ -1578,7 +1570,7 @@ func (s *Site) assembleSections() {
 	s.Info.Sections = s.Sections
 	regularPages := s.findPagesByNodeType(NodePage)
 	for i, p := range regularPages {
-		s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
+		s.Sections.add(p.Section(), WeightedPage{regularPages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
 	}
 
 	for k := range s.Sections {
@@ -1605,28 +1597,6 @@ func (s *Site) nodeTypeFromSections(sections []string) NodeType {
 	return NodeSection
 }
 
-func (s *Site) findPagesByNodeType(n NodeType) Pages {
-	return s.findPagesByNodeTypeIn(n, s.Pages)
-}
-
-func (s *Site) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
-	var pages Pages
-	for _, p := range inPages {
-		if p.NodeType == n {
-			pages = append(pages, p)
-		}
-	}
-	return pages
-}
-
-func (s *Site) findAllPagesByNodeType(n NodeType) Pages {
-	return s.findPagesByNodeTypeIn(n, s.rawAllPages)
-}
-
-func (s *Site) findRawAllPagesByNodeType(n NodeType) Pages {
-	return s.findPagesByNodeTypeIn(n, s.rawAllPages)
-}
-
 // renderAliases renders shell pages that simply have a redirect in the header.
 func (s *Site) renderAliases() error {
 	for _, p := range s.Pages {
@@ -1668,7 +1638,7 @@ func (s *Site) renderAliases() error {
 func (s *Site) preparePages() error {
 	var errors []error
 
-	for _, p := range s.Pages {
+	for _, p := range s.Nodes {
 		if err := p.prepareLayouts(); err != nil {
 			errors = append(errors, err)
 		}
@@ -43,7 +43,7 @@ func (s *Site) renderPages() error {
 		go pageRenderer(s, pages, results, wg)
 	}
 
-	for _, page := range s.Pages {
+	for _, page := range s.Nodes {
 		pages <- page
 	}
 
@@ -135,6 +135,7 @@ func (s *Site) renderRSS(p *Page) error {
 	}
 
 	// TODO(bep) np check RSS titles
+	// TODO(bep) np check RSS page limit, 50?
 	rssNode := p.copy()
 
 	// TODO(bep) np todelido URL