Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-21 20:46:30 -05:00)

commit a843d5d3bb (parent 99d11386a7)

5 changed files with 99 additions and 116 deletions
hugolib/hugo_sites.go

@@ -15,9 +15,7 @@ package hugolib

 import (
 	"fmt"
 	"html/template"
 	"os"
 	"path"
 	"strings"
 	"sync"

@@ -25,7 +23,6 @@ import (

 	"github.com/spf13/viper"

-	"github.com/bep/inflect"
 	"github.com/spf13/hugo/source"
 	"github.com/spf13/hugo/tpl"
 	jww "github.com/spf13/jwalterweatherman"
@@ -300,84 +297,6 @@ func (h *HugoSites) createMissingPages() error {

 	return nil
 }

-// TODO(bep) np move
-// Move the new* methods after cleanup in site.go
-func (s *Site) newNodePage(typ string) *Page {
-	return &Page{
-		pageInit: &pageInit{},
-		Kind:     typ,
-		Data:     make(map[string]interface{}),
-		Site:     &s.Info,
-		language: s.Language,
-		site:     s}
-}
-
-func (s *Site) newHomePage() *Page {
-	p := s.newNodePage(KindHome)
-	p.Title = s.Info.Title
-	pages := Pages{}
-	p.Data["Pages"] = pages
-	p.Pages = pages
-	s.setPageURLs(p, "/")
-	return p
-}
-
-func (s *Site) setPageURLs(p *Page, in string) {
-	p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
-	p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
-	p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
-}
-
-func (s *Site) newTaxonomyPage(plural, key string) *Page {
-
-	p := s.newNodePage(KindTaxonomy)
-
-	p.sections = []string{plural, key}
-
-	if s.Info.preserveTaxonomyNames {
-		key = s.Info.pathSpec.MakePathSanitized(key)
-	}
-
-	if s.Info.preserveTaxonomyNames {
-		// keep as is in the title
-		p.Title = key
-	} else {
-		p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
-	}
-
-	s.setPageURLs(p, path.Join(plural, key))
-
-	return p
-}
-
-func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
-
-	p := s.newNodePage(KindSection)
-	p.sections = []string{name}
-
-	sectionName := name
-	if !s.Info.preserveTaxonomyNames && len(section) > 0 {
-		sectionName = section[0].Page.Section()
-	}
-
-	sectionName = helpers.FirstUpper(sectionName)
-	if viper.GetBool("pluralizeListTitles") {
-		p.Title = inflect.Pluralize(sectionName)
-	} else {
-		p.Title = sectionName
-	}
-	s.setPageURLs(p, name)
-	return p
-}
-
-func (s *Site) newTaxonomyTermsPage(plural string) *Page {
-	p := s.newNodePage(KindTaxonomyTerm)
-	p.sections = []string{plural}
-	p.Title = strings.Title(plural)
-	s.setPageURLs(p, plural)
-	return p
-}
-
 func (h *HugoSites) setupTranslations() {

 	master := h.Sites[0]
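The six functions deleted above reappear verbatim in site.go (see the final hunk of this diff). For orientation, here is a self-contained sketch of the constructor pattern being moved, using drastically reduced stand-in types rather than hugolib's real Page and Site:

package main

import "fmt"

// Stand-ins for hugolib's page-kind constants.
const (
	KindHome     = "home"
	KindSection  = "section"
	KindTaxonomy = "taxonomy"
)

// Page is a reduced stand-in for hugolib.Page.
type Page struct {
	Kind  string
	Title string
	Data  map[string]interface{}
}

// Site is a reduced stand-in for hugolib.Site.
type Site struct {
	Title string
}

// newNodePage mirrors the moved base constructor: every non-regular
// page (home, section, taxonomy, ...) starts from the same shape.
func (s *Site) newNodePage(typ string) *Page {
	return &Page{
		Kind: typ,
		Data: make(map[string]interface{}),
	}
}

// newHomePage mirrors the moved helper: the home page is just a node
// page of kind "home" that inherits the site title.
func (s *Site) newHomePage() *Page {
	p := s.newNodePage(KindHome)
	p.Title = s.Title
	return p
}

func main() {
	s := &Site{Title: "My Site"}
	home := s.newHomePage()
	fmt.Println(home.Kind, home.Title) // home My Site
}

The shared base constructor is what lets the "node as page" refactor treat home, section, and taxonomy pages uniformly: they differ only in Kind, title, and URL.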
hugolib/page.go

@@ -175,7 +175,7 @@ type Page struct {

 	site *Site

-	// Pulled over from Node. TODO(bep) np reorg and group (embed)
+	// Pulled over from old Node. TODO(bep) reorg and group (embed)

 	Site *SiteInfo `json:"-"`
@@ -1458,7 +1458,6 @@ func (p *Page) prepareLayouts() error {

 	return nil
 }

-// TODO(bep) np naming, move some
 func (p *Page) prepareData(s *Site) error {

 	var pages Pages

@@ -1507,7 +1506,7 @@ func (p *Page) prepareData(s *Site) error {

 }

 func (p *Page) updatePageDates() {
-	// TODO(bep) np there is a potential issue with page sorting for home pages
+	// TODO(bep) there is a potential issue with page sorting for home pages
 	// etc. without front matter dates set, but let us wrap the head around
 	// that in another time.
 	if !p.IsNode() {

@@ -1553,8 +1552,6 @@ func (p *Page) copy() *Page {

 	return &c
 }

-// TODO(bep) np these are pulled over from Node. Needs regrouping / embed
-
 func (p *Page) Now() time.Time {
 	return time.Now()
 }
hugolib/page_collections.go

@@ -13,10 +13,6 @@

 package hugolib

-import (
-	"fmt"
-)
-
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
 	// Includes only pages of all types, and only pages in the current language.

@@ -34,6 +30,9 @@ type PageCollections struct {
 	// This is for the current language only.
 	RegularPages Pages

+	// A convenience cache for the all the regular pages.
+	AllRegularPages Pages
+
 	// Includes absolute all pages (of all types), including drafts etc.
 	rawAllPages Pages
 }

@@ -41,13 +40,7 @@ type PageCollections struct {

 func (c *PageCollections) refreshPageCaches() {
 	c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
 	c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
-
-	// TODO(bep) np remove eventually
-	for _, n := range c.Pages {
-		if n.Kind == kindUnknown {
-			panic(fmt.Sprintf("Got unknown type %s", n.Title))
-		}
-	}
+	c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
 }

 func newPageCollections() *PageCollections {
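refreshPageCaches leans on the two kind-filtering helpers named in the hunk above. A plausible self-contained sketch of that filtering, with reduced stand-in types (the real Pages type carries far more than Kind and Title):

package main

import "fmt"

// Minimal stand-ins for hugolib's types.
type Page struct {
	Kind  string
	Title string
}

type Pages []*Page

// findPagesByKindIn returns the pages whose Kind matches, in order.
func findPagesByKindIn(kind string, inPages Pages) Pages {
	var out Pages
	for _, p := range inPages {
		if p.Kind == kind {
			out = append(out, p)
		}
	}
	return out
}

// findPagesByKindNotIn is the complement: everything but the given kind.
func findPagesByKindNotIn(kind string, inPages Pages) Pages {
	var out Pages
	for _, p := range inPages {
		if p.Kind != kind {
			out = append(out, p)
		}
	}
	return out
}

func main() {
	all := Pages{{Kind: "page", Title: "post"}, {Kind: "home", Title: "home"}}
	fmt.Println(len(findPagesByKindIn("page", all)))    // 1 (RegularPages)
	fmt.Println(len(findPagesByKindNotIn("page", all))) // 1 (indexPages)
}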
hugolib/site.go

@@ -27,6 +27,8 @@ import (

 	"sync"
 	"time"

+	"github.com/bep/inflect"
+
 	"sync/atomic"

 	"github.com/fsnotify/fsnotify"
@@ -285,8 +287,7 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error

 	var link string

 	if refURL.Path != "" {
-		// TODO(bep) np relRef
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			refPath := filepath.FromSlash(refURL.Path)
 			if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
 				target = page

@@ -357,8 +358,7 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er

 		}
 	}

-	// TODO(bep) np sourceRelativeLink
-	for _, page := range s.AllPages {
+	for _, page := range s.AllRegularPages {
 		if page.Source.Path() == refPath {
 			target = page
 			break
@@ -367,14 +367,14 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er

 		// need to exhaust the test, then try with the others :/
 		// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
 		mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			if page.Source.Path() == mdPath {
 				target = page
 				break
 			}
 		}
 		indexPath := filepath.Join(refPath, "index.md")
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			if page.Source.Path() == indexPath {
 				target = page
 				break
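The three loops in SourceRelativeLink implement one fallback chain: exact source path first, then the path with ".md" appended, then "index.md" inside the path. A compact stand-alone restatement of that order (illustrative only; the real code iterates s.AllRegularPages and compares against each page's source path):

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// resolveRef mirrors the lookup order above: exact path, then
// "<ref>.md", then "<ref>/index.md". First match wins.
func resolveRef(refPath string, sourcePaths []string) (string, bool) {
	candidates := []string{
		refPath,
		strings.TrimSuffix(refPath, string(filepath.Separator)) + ".md",
		filepath.Join(refPath, "index.md"),
	}
	for _, want := range candidates {
		for _, p := range sourcePaths {
			if p == want {
				return p, true
			}
		}
	}
	return "", false
}

func main() {
	paths := []string{"posts/hello.md", "about/index.md"}
	fmt.Println(resolveRef("posts/hello", paths)) // posts/hello.md true
	fmt.Println(resolveRef("about", paths))       // about/index.md true
}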
@@ -1525,7 +1525,7 @@ func (s *Site) resetBuildState() {

 func (s *Site) assembleSections() {
 	s.Sections = make(Taxonomy)
 	s.Info.Sections = s.Sections
 	// TODO(bep) np check these vs the caches

 	regularPages := s.findPagesByKind(KindPage)
 	sectionPages := s.findPagesByKind(KindSection)
@@ -1716,7 +1716,6 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou

 	var pageTarget target.Output

-	// TODO(bep) np ugly urls vs frontmatter
 	if p, ok := d.(*Page); ok && p.IsPage() && path.Ext(p.URLPath.URL) != "" {
 		// user has explicitly set a URL with extension for this page
 		// make sure it sticks even if "ugly URLs" are turned off.
@@ -1989,3 +1988,79 @@ func getGoMaxProcs() int {

 	}
 	return 1
 }
+
+func (s *Site) newNodePage(typ string) *Page {
+	return &Page{
+		pageInit: &pageInit{},
+		Kind:     typ,
+		Data:     make(map[string]interface{}),
+		Site:     &s.Info,
+		language: s.Language,
+		site:     s}
+}
+
+func (s *Site) newHomePage() *Page {
+	p := s.newNodePage(KindHome)
+	p.Title = s.Info.Title
+	pages := Pages{}
+	p.Data["Pages"] = pages
+	p.Pages = pages
+	s.setPageURLs(p, "/")
+	return p
+}
+
+func (s *Site) setPageURLs(p *Page, in string) {
+	p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
+	p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
+	p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
+}
+
+func (s *Site) newTaxonomyPage(plural, key string) *Page {
+
+	p := s.newNodePage(KindTaxonomy)
+
+	p.sections = []string{plural, key}
+
+	if s.Info.preserveTaxonomyNames {
+		key = s.Info.pathSpec.MakePathSanitized(key)
+	}
+
+	if s.Info.preserveTaxonomyNames {
+		// keep as is in the title
+		p.Title = key
+	} else {
+		p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
+	}
+
+	s.setPageURLs(p, path.Join(plural, key))
+
+	return p
+}
+
+func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
+
+	p := s.newNodePage(KindSection)
+	p.sections = []string{name}
+
+	sectionName := name
+	if !s.Info.preserveTaxonomyNames && len(section) > 0 {
+		sectionName = section[0].Page.Section()
+	}
+
+	sectionName = helpers.FirstUpper(sectionName)
+	if viper.GetBool("pluralizeListTitles") {
+		p.Title = inflect.Pluralize(sectionName)
+	} else {
+		p.Title = sectionName
+	}
+	s.setPageURLs(p, name)
+	return p
+}
+
+func (s *Site) newTaxonomyTermsPage(plural string) *Page {
+	p := s.newNodePage(KindTaxonomyTerm)
+	p.sections = []string{plural}
+	p.Title = strings.Title(plural)
+	s.setPageURLs(p, plural)
+	return p
+}
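Among the relocated functions, newTaxonomyPage's title branch is the one non-obvious piece of logic: a hyphenated key such as "open-source" becomes "Open Source" unless preserveTaxonomyNames is set. A minimal stand-alone illustration of just that branch:

package main

import (
	"fmt"
	"strings"
)

// taxonomyTitle mirrors the title branch of newTaxonomyPage above:
// hyphens become spaces and each word is title-cased, unless the site
// is configured to preserve taxonomy names as-is.
func taxonomyTitle(key string, preserveTaxonomyNames bool) string {
	if preserveTaxonomyNames {
		// keep as is in the title
		return key
	}
	return strings.Replace(strings.Title(key), "-", " ", -1)
}

func main() {
	fmt.Println(taxonomyTitle("open-source", false)) // Open Source
	fmt.Println(taxonomyTitle("open-source", true))  // open-source
}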
hugolib/site_render.go

@@ -87,7 +87,6 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa

 }

 // renderPaginator must be run after the owning Page has been rendered.
-// TODO(bep) np
 func (s *Site) renderPaginator(p *Page) error {
 	if p.paginator != nil {
 		jww.DEBUG.Printf("Render paginator for page %q", p.Path())

@@ -95,10 +94,8 @@ func (s *Site) renderPaginator(p *Page) error {

 		// write alias for page 1
 		// TODO(bep) ml all of these n.addLang ... fix.
-		// TODO(bep) np URL

 		aliasPath := p.addLangPathPrefix(helpers.PaginateAliasPath(path.Join(p.sections...), 1))
-		//TODO(bep) np node.permalink
 		link := p.Permalink()
 		s.writeDestAlias(aliasPath, link, nil)
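The alias write above exists because page 1 of a paginated list is served at the list's own URL, so its /page/1/ address is emitted only as a redirect alias back to the list root. A rough sketch of the alias path shape; the real helpers.PaginateAliasPath may differ in details such as ugly-URL handling:

package main

import (
	"fmt"
	"path"
	"strconv"
)

// paginateAliasPath mimics the shape of helpers.PaginateAliasPath for
// illustration only: it builds "<section>/page/<n>", which for n == 1
// is written as an alias pointing at the section's root URL.
func paginateAliasPath(base string, pageNumber int) string {
	return path.Join("/", base, "page", strconv.Itoa(pageNumber))
}

func main() {
	fmt.Println(paginateAliasPath("posts", 1)) // /posts/page/1
}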
@@ -141,16 +138,18 @@ func (s *Site) renderRSS(p *Page) error {

 		return nil
 	}

-	// TODO(bep) np check RSS titles
-	// TODO(bep) np check RSS page limit, 50?
-	rssNode := p.copy()
-	rssNode.Kind = kindRSS
-
-	// TODO(bep) np todelido URL
+	rssPage := p.copy()
+	rssPage.Kind = kindRSS
+	high := 50
+	if len(rssPage.Pages) > high {
+		rssPage.Pages = rssPage.Pages[:high]
+		rssPage.Data["Pages"] = rssPage.Pages
+	}
 	rssURI := s.Language.GetString("rssURI")
-	rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI)
+	rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+	s.setPageURLs(rssPage, rssPath)

-	if err := s.renderAndWriteXML(rssNode.Title, rssNode.addLangFilepathPrefix(rssNode.URLPath.URL), rssNode, s.appendThemeTemplates(layouts)...); err != nil {
+	if err := s.renderAndWriteXML(rssPage.Title, rssPage.addLangFilepathPrefix(rssPath), rssPage, s.appendThemeTemplates(layouts)...); err != nil {
 		return err
 	}
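One detail in the rewritten renderRSS: the page list lives both in the Pages field and in the Data map, and truncating a Go slice produces a new slice header without touching other references to the old one, so both spots must be reassigned. A minimal demonstration with stand-in types:

package main

import "fmt"

// Page is a reduced stand-in; in hugolib, Pages also backs .Data.Pages
// as seen by templates.
type Page struct {
	Title string
	Pages []*Page
	Data  map[string]interface{}
}

func main() {
	p := &Page{Data: map[string]interface{}{}}
	for i := 0; i < 3; i++ {
		p.Pages = append(p.Pages, &Page{Title: fmt.Sprintf("p%d", i)})
	}
	p.Data["Pages"] = p.Pages

	// Truncate only the field: the slice stored in Data still has len 3.
	high := 2
	p.Pages = p.Pages[:high]
	fmt.Println(len(p.Pages), len(p.Data["Pages"].([]*Page))) // 2 3

	// Hence the diff reassigns both, keeping the template-facing
	// .Data.Pages consistent with the truncated .Pages.
	p.Data["Pages"] = p.Pages
	fmt.Println(len(p.Data["Pages"].([]*Page))) // 2
}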