// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"path"
	"sync"
	"time"

	"github.com/spf13/hugo/output"

	bp "github.com/spf13/hugo/bufferpool"
)

// renderPages renders pages each corresponding to a markdown file.
// TODO(bep) np doc
func (s *Site) renderPages() error {

	results := make(chan error)
	pages := make(chan *Page)
	errs := make(chan error)

	go errorCollator(results, errs)

	numWorkers := getGoMaxProcs() * 4

	wg := &sync.WaitGroup{}

	for i := 0; i < numWorkers; i++ {
		wg.Add(1)
		go pageRenderer(s, pages, results, wg)
	}

	for _, page := range s.Pages {
		pages <- page
	}

	close(pages)

	wg.Wait()

	close(results)

	err := <-errs
	if err != nil {
		return fmt.Errorf("Error(s) rendering pages: %s", err)
	}
	return nil
}
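
// pageRenderer is a render worker: it consumes pages from the pages channel,
// creates a PageOutput for each of the page's output formats, resolves the
// layouts to use and renders the result, sending any errors to results.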
func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
	defer wg.Done()

	for page := range pages {

		for i, outFormat := range page.outputFormats {

			var (
				pageOutput *PageOutput
				err        error
			)

			if i == 0 {
				page.pageOutputInit.Do(func() {
					var po *PageOutput
					po, err = newPageOutput(page, false, outFormat)
					page.mainPageOutput = po
				})
				pageOutput = page.mainPageOutput
			} else {
				pageOutput, err = newPageOutput(page, true, outFormat)
			}

			if err != nil {
				s.Log.ERROR.Printf("Failed to create output page for type %q for page %q: %s", outFormat.Name, page, err)
				continue
			}

			var layouts []string

			if page.selfLayout != "" {
				layouts = []string{page.selfLayout}
			} else {
				layouts, err = s.layouts(pageOutput)
				if err != nil {
					s.Log.ERROR.Printf("Failed to resolve layout output %q for page %q: %s", outFormat.Name, page, err)
					continue
				}
			}

			switch pageOutput.outputFormat.Name {

			case "RSS":
				if err := s.renderRSS(pageOutput); err != nil {
					results <- err
				}
			default:
				targetPath, err := pageOutput.targetPath()
				if err != nil {
					s.Log.ERROR.Printf("Failed to create target path for output %q for page %q: %s", outFormat.Name, page, err)
					continue
				}

				s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts)

				if err := s.renderAndWritePage("page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
					results <- err
				}

				if pageOutput.IsNode() {
					if err := s.renderPaginator(pageOutput); err != nil {
						results <- err
					}
				}
			}
		}
	}
}

// renderPaginator must be run after the owning Page has been rendered.
func (s *Site) renderPaginator(p *PageOutput) error {
	if p.paginator != nil {
		s.Log.DEBUG.Printf("Render paginator for page %q", p.Path())
		paginatePath := s.Cfg.GetString("paginatePath")

		// write alias for page 1
		addend := fmt.Sprintf("/%s/%d", paginatePath, 1)
		target, err := p.createTargetPath(p.outputFormat, addend)
		if err != nil {
			return err
		}

		// TODO(bep) output do better
		link := newOutputFormat(p.Page, p.outputFormat).Permalink()
		s.writeDestAlias(target, link, nil)

		pagers := p.paginator.Pagers()

		for i, pager := range pagers {
			if i == 0 {
				// already created
				continue
			}

			pagerNode := p.copy()

			pagerNode.paginator = pager
			if pager.TotalPages() > 0 {
				first, _ := pager.page(0)
				pagerNode.Date = first.Date
				pagerNode.Lastmod = first.Lastmod
			}

			pageNumber := i + 1
			addend := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
			targetPath, _ := p.targetPath(addend)
			layouts, err := p.layouts()

			if err != nil {
				return err
			}

			if err := s.renderAndWritePage(
				pagerNode.Title,
				targetPath, pagerNode, layouts...); err != nil {
				return err
			}
		}
	}
	return nil
}
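
// renderRSS renders and writes the RSS feed for the given PageOutput,
// honoring the rssLimit setting. It is a no-op when RSS output is disabled.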
func (s *Site) renderRSS(p *PageOutput) error {

	if !s.isEnabled(kindRSS) {
		return nil
	}

	if s.Cfg.GetBool("disableRSS") {
		return nil
	}

	p.Kind = kindRSS

	// TODO(bep) we zero the date here to get the number of diffs down in
	// testing. But this should be set back later; the RSS feed should
	// inherit the publish date from the node it represents.
	if p.Kind == KindTaxonomy {
		var zeroDate time.Time
		p.Date = zeroDate
	}

	limit := s.Cfg.GetInt("rssLimit")
	if limit >= 0 && len(p.Pages) > limit {
		p.Pages = p.Pages[:limit]
		p.Data["Pages"] = p.Pages
	}

	layouts, err := s.layoutHandler.For(
		p.layoutDescriptor,
		"",
		p.outputFormat)
	if err != nil {
		return err
	}

	// TODO(bep) output deprecate/handle rssURI
	targetPath, err := p.targetPath()
	if err != nil {
		return err
	}

	return s.renderAndWriteXML(p.Title,
		targetPath, p, layouts...)
}
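
// render404 renders and writes the 404.html page unless 404 rendering
// is disabled.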
func (s *Site) render404() error {
	if !s.isEnabled(kind404) {
		return nil
	}

	if s.Cfg.GetBool("disable404") {
		return nil
	}

	p := s.newNodePage(kind404)

	p.Title = "404 Page not found"
	p.Data["Pages"] = s.Pages
	p.Pages = s.Pages
	p.URLPath.URL = "404.html"

	if err := p.initTargetPathDescriptor(); err != nil {
		return err
	}

	nfLayouts := []string{"404.html"}

	pageOutput, err := newPageOutput(p, false, output.HTMLFormat)
	if err != nil {
		return err
	}

	return s.renderAndWritePage("404 page", "404.html", pageOutput, s.appendThemeTemplates(nfLayouts)...)
}
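
// renderSitemap renders and writes sitemap.xml, applying the site's default
// sitemap values to pages that do not set their own. It is a no-op when
// sitemap rendering is disabled.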
func (s *Site) renderSitemap() error {
	if !s.isEnabled(kindSitemap) {
		return nil
	}

	if s.Cfg.GetBool("disableSitemap") {
		return nil
	}

	sitemapDefault := parseSitemap(s.Cfg.GetStringMap("sitemap"))

	n := s.newNodePage(kindSitemap)

	// Include all pages (regular, home page, taxonomies etc.)
	pages := s.Pages

	page := s.newNodePage(kindSitemap)
	page.URLPath.URL = ""
	if err := page.initTargetPathDescriptor(); err != nil {
		return err
	}
	page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
	page.Sitemap.Priority = sitemapDefault.Priority
	page.Sitemap.Filename = sitemapDefault.Filename

	n.Data["Pages"] = pages
	n.Pages = pages

	// TODO(bep) output
	if err := page.initTargetPathDescriptor(); err != nil {
		return err
	}

	// TODO(bep) this should be done somewhere else
	for _, page := range pages {
		if page.Sitemap.ChangeFreq == "" {
			page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
		}

		if page.Sitemap.Priority == -1 {
			page.Sitemap.Priority = sitemapDefault.Priority
		}

		if page.Sitemap.Filename == "" {
			page.Sitemap.Filename = sitemapDefault.Filename
		}
	}

	smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
	addLanguagePrefix := n.Site.IsMultiLingual()

	return s.renderAndWriteXML("sitemap",
		n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, s.appendThemeTemplates(smLayouts)...)
}
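
// renderRobotsTXT renders and writes robots.txt, but only when
// enableRobotsTXT is set.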
func (s *Site) renderRobotsTXT() error {
	if !s.isEnabled(kindRobotsTXT) {
		return nil
	}

	if !s.Cfg.GetBool("enableRobotsTXT") {
		return nil
	}

	n := s.newNodePage(kindRobotsTXT)
	if err := n.initTargetPathDescriptor(); err != nil {
		return err
	}
	n.Data["Pages"] = s.Pages
	n.Pages = s.Pages

	rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
	outBuffer := bp.GetBuffer()
	defer bp.PutBuffer(outBuffer)
	err := s.renderForLayouts("robots", n, outBuffer, s.appendThemeTemplates(rLayouts)...)

	if err == nil {
		err = s.publish("robots.txt", outBuffer)
	}

	return err
}

// renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error {
	for _, p := range s.Pages {
		if len(p.Aliases) == 0 {
			continue
		}

		for _, f := range p.outputFormats {
			if !f.IsHTML {
				continue
			}

			o := newOutputFormat(p, f)
			plink := o.Permalink()

			for _, a := range p.Aliases {
				if f.Path != "" {
					// Make sure AMP and similar doesn't clash with regular aliases.
					a = path.Join(a, f.Path)
				}

				if err := s.writeDestAlias(a, plink, p); err != nil {
					return err
				}
			}
		}
	}

	if s.owner.multilingual.enabled() {
		mainLang := s.owner.multilingual.DefaultLang
		if s.Info.defaultContentLanguageInSubdir {
			mainLangURL := s.PathSpec.AbsURL(mainLang.Lang, false)
			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
			if err := s.publishDestAlias(true, "/", mainLangURL, nil); err != nil {
				return err
			}
		} else {
			mainLangURL := s.PathSpec.AbsURL("", false)
			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
			if err := s.publishDestAlias(true, mainLang.Lang, mainLangURL, nil); err != nil {
				return err
			}
		}
	}

	return nil
}