// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"path"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/config"

	"github.com/gohugoio/hugo/output"
	"github.com/pkg/errors"

	"github.com/gohugoio/hugo/resources/page"
	"github.com/gohugoio/hugo/resources/page/pagemeta"
)

type siteRenderContext struct {
	cfg *BuildCfg

	// Zero based index for all output formats combined.
	sitesOutIdx int

	// Zero based index of the output formats configured within a Site.
	outIdx int

	multihost bool
}

// Whether to render 404.html, robots.txt and similar singleton pages, which
// usually are rendered only once, in the site root.
func (s siteRenderContext) renderSingletonPages() bool {
	if s.multihost {
		// 1 per site
		return s.outIdx == 0
	}

	// 1 for all sites
	return s.sitesOutIdx == 0
}

// renderPages renders pages each corresponding to a markdown file.
// TODO(bep np doc
func (s *Site) renderPages(ctx *siteRenderContext) error {

	numWorkers := config.GetNumWorkerMultiplier()

	results := make(chan error)
	pages := make(chan *pageState, numWorkers) // buffered for performance
	errs := make(chan error)

	go s.errorCollator(results, errs)

	wg := &sync.WaitGroup{}

	for i := 0; i < numWorkers; i++ {
		wg.Add(1)
		go pageRenderer(ctx, s, pages, results, wg)
	}

	cfg := ctx.cfg

	if !cfg.PartialReRender && ctx.outIdx == 0 && len(s.headlessPages) > 0 {
		wg.Add(1)
		go headlessPagesPublisher(s, wg)
	}
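
	// Queue up the pages to render. In server "fast render" mode,
	// cfg.shouldRender limits this to the pages currently being worked on
	// (typically the home page and the most recently visited pages), so a
	// full site re-render is avoided; see --disableFastRender.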
L:
	for _, page := range s.workAllPages {
		if cfg.shouldRender(page) {
			select {
			case <-s.h.Done():
				break L
			default:
				pages <- page
			}
		}
	}

	close(pages)

	wg.Wait()

	close(results)

	err := <-errs
	if err != nil {
		return errors.Wrap(err, "failed to render pages")
	}
	return nil
}
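
// headlessPagesPublisher renders the resources attached to the site's
// headless page bundles; the headless pages themselves are never written out.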
func headlessPagesPublisher(s *Site, wg *sync.WaitGroup) {
	defer wg.Done()
	for _, p := range s.headlessPages {
		if err := p.renderResources(); err != nil {
			s.SendError(p.errorf(err, "failed to render page resources"))
		}
	}
}
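
// pageRenderer is a render worker: it consumes pages from the pages channel,
// renders the current output format for each page and reports any failures
// on the results channel.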
func pageRenderer(
	ctx *siteRenderContext,
	s *Site,
	pages <-chan *pageState,
	results chan<- error,
	wg *sync.WaitGroup) {

	defer wg.Done()

	for p := range pages {
		f := p.outputFormat()

		// TODO(bep) get rid of this odd construct. RSS is an output format.
		if f.Name == "RSS" && !s.isEnabled(kindRSS) {
			continue
		}
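
		// Note: page resources are only rendered once, for the first
		// output format.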
		if ctx.outIdx == 0 {
			if err := p.renderResources(); err != nil {
				s.SendError(p.errorf(err, "failed to render page resources"))
				continue
			}
		}

		layouts, err := p.getLayouts()
		if err != nil {
			s.Log.ERROR.Printf("Failed to resolve layout for output %q for page %q: %s", f.Name, p, err)
			continue
		}

		targetPath := p.targetPaths().TargetFilename

		if targetPath == "" {
			s.Log.ERROR.Printf("Failed to create target path for output %q for page %q", f.Name, p)
			continue
		}

		if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+p.Title(), targetPath, p, layouts...); err != nil {
			results <- err
		}

		if p.paginator != nil && p.paginator.current != nil {
			if err := s.renderPaginator(p, layouts); err != nil {
				results <- err
			}
		}
	}
}

// renderPaginator must be run after the owning Page has been rendered.
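// It writes an alias for page 1 (/<paginatePath>/1/ redirecting to the page
// itself) and then renders the remaining paginator pages.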
func (s *Site) renderPaginator(p *pageState, layouts []string) error {

	paginatePath := s.Cfg.GetString("paginatePath")

	d := p.targetPathDescriptor
	f := p.s.rc.Format
	d.Type = f

	if p.paginator.current == nil || p.paginator.current != p.paginator.current.First() {
		panic(fmt.Sprintf("invalid paginator state for %q", p.pathOrTitle()))
	}

	// Write alias for page 1
	d.Addends = fmt.Sprintf("/%s/%d", paginatePath, 1)
	targetPaths := page.CreateTargetPaths(d)

	if err := s.writeDestAlias(targetPaths.TargetFilename, p.Permalink(), f, nil); err != nil {
		return err
	}

	// Render pages for the rest
	for current := p.paginator.current.Next(); current != nil; current = current.Next() {

		p.paginator.current = current
		d.Addends = fmt.Sprintf("/%s/%d", paginatePath, current.PageNumber())
		targetPaths := page.CreateTargetPaths(d)

		if err := s.renderAndWritePage(
			&s.PathSpec.ProcessingStats.PaginatorPages,
			p.Title(),
			targetPaths.TargetFilename, p, layouts...); err != nil {
			return err
		}

	}

	return nil
}
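
// render404 renders the 404.html page using the "404.html" layout, if the
// kind404 page kind is enabled for this site.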
func (s *Site) render404() error {
	if !s.isEnabled(kind404) {
		return nil
	}

	p, err := newPageStandalone(&pageMeta{
		s:    s,
		kind: kind404,
		urlPaths: pagemeta.URLPath{
			URL: "404.html",
		},
	},
		output.HTMLFormat,
	)

	if err != nil {
		return err
	}

	nfLayouts := []string{"404.html"}

	targetPath := p.targetPaths().TargetFilename

	if targetPath == "" {
		return errors.New("failed to create targetPath for 404 page")
	}

	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, p, nfLayouts...)
}
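
// renderSitemap renders the sitemap to the filename configured in the site's
// sitemap settings (sitemap.xml by default).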
func (s *Site) renderSitemap() error {
	if !s.isEnabled(kindSitemap) {
		return nil
	}

	p, err := newPageStandalone(&pageMeta{
		s:    s,
		kind: kindSitemap,
		urlPaths: pagemeta.URLPath{
			URL: s.siteCfg.sitemap.Filename,
		}},
		output.HTMLFormat,
	)

	if err != nil {
		return err
	}

	targetPath := p.targetPaths().TargetFilename

	if targetPath == "" {
		return errors.New("failed to create targetPath for sitemap")
	}

	smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}

	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap", targetPath, p, smLayouts...)
}
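
// renderRobotsTXT renders robots.txt, but only when enableRobotsTXT is set
// in the site configuration.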
func (s *Site) renderRobotsTXT() error {
	if !s.isEnabled(kindRobotsTXT) {
		return nil
	}

	if !s.Cfg.GetBool("enableRobotsTXT") {
		return nil
	}

	p, err := newPageStandalone(&pageMeta{
		s:    s,
		kind: kindRobotsTXT,
		urlPaths: pagemeta.URLPath{
			URL: "robots.txt",
		},
	},
		output.RobotsTxtFormat)

	if err != nil {
		return err
	}

	rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}

	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, rLayouts...)
}

// renderAliases renders shell pages that simply have a redirect in the header.
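// Each alias is published via writeDestAlias as a small HTML stub at the
// alias path that redirects to the page's permalink.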
func (s *Site) renderAliases() error {
	for _, p := range s.workAllPages {

		if len(p.Aliases()) == 0 {
			continue
		}

		for _, of := range p.OutputFormats() {
			if !of.Format.IsHTML {
				continue
			}

			plink := of.Permalink()
			f := of.Format

			for _, a := range p.Aliases() {
				isRelative := !strings.HasPrefix(a, "/")

				if isRelative {
					// Make alias relative, where "." will be on the
					// same directory level as the current page.
					// TODO(bep) ugly URLs don't seem to be supported in
					// aliases, I'm not sure why not.
					basePath := of.RelPermalink()
					if strings.HasSuffix(basePath, "/") {
						basePath = path.Join(basePath, "..")
					}
					a = path.Join(basePath, a)

				} else if f.Path != "" {
					// Make sure AMP and similar formats don't clash with regular aliases.
					a = path.Join(f.Path, a)
				}

				lang := p.Language().Lang

				if s.h.multihost && !strings.HasPrefix(a, "/"+lang) {
					// These need to be in their language root.
					a = path.Join(lang, a)
				}

				if err := s.writeDestAlias(a, plink, f, p); err != nil {
					return err
				}
			}
		}
	}

	return nil
}

// renderMainLanguageRedirect creates a redirect to the main language home,
// depending on whether it lives in a sub folder (e.g. /en) or not.
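// For example, with defaultContentLanguageInSubdir enabled and "en" as the
// default language, "/" redirects to "/en/"; otherwise "/en/" redirects to "/".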
func (s *Site) renderMainLanguageRedirect() error {

	if !s.h.multilingual.enabled() || s.h.IsMultihost() {
		// No need for a redirect
		return nil
	}

	html, found := s.outputFormatsConfig.GetByName("HTML")
	if found {
		mainLang := s.h.multilingual.DefaultLang
		if s.Info.defaultContentLanguageInSubdir {
			mainLangURL := s.PathSpec.AbsURL(mainLang.Lang, false)
			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
			if err := s.publishDestAlias(true, "/", mainLangURL, html, nil); err != nil {
				return err
			}
		} else {
			mainLangURL := s.PathSpec.AbsURL("", false)
			s.Log.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
			if err := s.publishDestAlias(true, mainLang.Lang, mainLangURL, html, nil); err != nil {
				return err
			}
		}
	}

	return nil
}