// Copyright 2016-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"errors"
	"path/filepath"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/i18n"
	"github.com/gohugoio/hugo/tpl"
	"github.com/gohugoio/hugo/tpl/tplimpl"
)

// HugoSites represents the sites to build. Each site represents a language.
type HugoSites struct {
	Sites []*Site

	runMode runmode

	multilingual *Multilingual

	// Multihost is set if multilingual and baseURL is set on the language level.
	multihost bool

	*deps.Deps
}
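
// A minimal inspection sketch, assuming a multilingual build where each entry
// in Sites corresponds to one language (the first site is used as the
// reference for shared collections, see setupTranslations and Pages below):
//
//	for _, s := range h.Sites {
//		fmt.Println(s.Language.Lang, len(s.Pages))
//	}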

func (h *HugoSites) IsMultihost() bool {
	return h != nil && h.multihost
}

// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) *Page {
	s := h.Sites[0]
	contentDir := filepath.Join(s.PathSpec.AbsPathify(s.Cfg.GetString("contentDir")))
	if !strings.HasPrefix(filename, contentDir) {
		return nil
	}

	rel := strings.TrimPrefix(filename, contentDir)
	rel = strings.TrimPrefix(rel, helpers.FilePathSeparator)

	pos := s.rawAllPages.findPagePosByFilePath(rel)

	if pos == -1 {
		return nil
	}
	return s.rawAllPages[pos]
}
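
// Usage sketch, assuming a project whose contentDir resolves to
// /my/project/content:
//
//	p := h.GetContentPage("/my/project/content/post/first-post.md")
//	if p == nil {
//		// filename is outside contentDir or not a known content page.
//	}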

// newHugoSites creates a new collection of sites given the input sites, building
// a language configuration based on those.
func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {

	if cfg.Language != nil {
		return nil, errors.New("Cannot provide Language in Cfg when sites are provided")
	}

	langConfig, err := newMultiLingualFromSites(cfg.Cfg, sites...)

	if err != nil {
		return nil, err
	}

	h := &HugoSites{
		multilingual: langConfig,
		Sites:        sites}

	for _, s := range sites {
		s.owner = h
	}

	// TODO(bep)
	cfg.Cfg.Set("multilingual", sites[0].multilingualEnabled())

	if err := applyDepsIfNeeded(cfg, sites...); err != nil {
		return nil, err
	}

	h.Deps = sites[0].Deps

	// The baseURL may be provided at the language level. If that is true,
	// then every language must have a baseURL. In this case we always render
	// to a language subfolder, which is then stripped from all the Permalink URLs etc.
	var baseURLFromLang bool

	for _, s := range sites {
		burl := s.Language.GetLocal("baseURL")
		if baseURLFromLang && burl == nil {
			return h, errors.New("baseURL must be set on all or none of the languages")
		}

		if burl != nil {
			baseURLFromLang = true
		}
	}

	if baseURLFromLang {
		for _, s := range sites {
			// TODO(bep) multihost check
			s.Info.defaultContentLanguageInSubdir = true
			s.Cfg.Set("defaultContentLanguageInSubdir", true)
		}
		h.multihost = true
	}

	return h, nil
}

func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
	if cfg.TemplateProvider == nil {
		cfg.TemplateProvider = tplimpl.DefaultTemplateProvider
	}

	if cfg.TranslationProvider == nil {
		cfg.TranslationProvider = i18n.NewTranslationProvider()
	}

	var (
		d   *deps.Deps
		err error
	)

	for _, s := range sites {
		if s.Deps != nil {
			continue
		}

		if d == nil {
			cfg.Language = s.Language
			cfg.WithTemplate = s.withSiteTemplates(cfg.WithTemplate)

			var err error
			d, err = deps.New(cfg)
			if err != nil {
				return err
			}

			d.OutputFormatsConfig = s.outputFormatsConfig
			s.Deps = d

			if err = d.LoadResources(); err != nil {
				return err
			}

		} else {
			d, err = d.ForLanguage(s.Language)
			if err != nil {
				return err
			}
			d.OutputFormatsConfig = s.outputFormatsConfig
			s.Deps = d
		}
	}

	return nil
}

// NewHugoSites creates HugoSites from the given config.
func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
	sites, err := createSitesFromConfig(cfg)
	if err != nil {
		return nil, err
	}
	return newHugoSites(cfg, sites...)
}
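
// A minimal construction sketch, assuming fs and cfg have already been
// prepared by the caller (e.g. by the config loading in this package):
//
//	h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
//	if err != nil {
//		log.Fatal(err)
//	}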

func (s *Site) withSiteTemplates(withTemplates ...func(templ tpl.TemplateHandler) error) func(templ tpl.TemplateHandler) error {
	return func(templ tpl.TemplateHandler) error {
		templ.LoadTemplates(s.PathSpec.GetLayoutDirPath(), "")
		if s.PathSpec.ThemeSet() {
			templ.LoadTemplates(s.PathSpec.GetThemeDir()+"/layouts", "theme")
		}

		for _, wt := range withTemplates {
			if wt == nil {
				continue
			}
			if err := wt(templ); err != nil {
				return err
			}
		}

		return nil
	}
}

func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {

	var sites []*Site

	languages := getLanguages(cfg.Cfg)

	for _, lang := range languages {
		var s *Site
		var err error
		cfg.Language = lang
		s, err = newSite(cfg)

		if err != nil {
			return nil, err
		}

		sites = append(sites, s)
	}

	return sites, nil
}

// reset resets the sites and template caches, making them ready for a full rebuild.
func (h *HugoSites) reset() {
	for i, s := range h.Sites {
		h.Sites[i] = s.reset()
	}
}

func (h *HugoSites) createSitesFromConfig() error {

	if err := loadLanguageSettings(h.Cfg); err != nil {
		return err
	}

	depsCfg := deps.DepsCfg{Fs: h.Fs, Cfg: h.Cfg}

	sites, err := createSitesFromConfig(depsCfg)

	if err != nil {
		return err
	}

	langConfig, err := newMultiLingualFromSites(depsCfg.Cfg, sites...)

	if err != nil {
		return err
	}

	h.Sites = sites

	for _, s := range sites {
		s.owner = h
	}

	if err := applyDepsIfNeeded(depsCfg, sites...); err != nil {
		return err
	}

	h.Deps = sites[0].Deps

	h.multilingual = langConfig

	return nil
}

func (h *HugoSites) toSiteInfos() []*SiteInfo {
	infos := make([]*SiteInfo, len(h.Sites))
	for i, s := range h.Sites {
		infos[i] = &s.Info
	}
	return infos
}

// BuildCfg holds build options used to, for example, skip the render step.
type BuildCfg struct {
	// Whether we are in watch (server) mode.
	Watching bool
	// Print build stats at the end of a build.
	PrintStats bool
	// Reset site state before build. Use to force full rebuilds.
	ResetState bool
	// Re-creates the sites from configuration before a build.
	// This is needed if new languages are added.
	CreateSitesFromConfig bool
	// Skip rendering. Useful for testing.
	SkipRender bool
	// Use this to indicate what changed (for rebuilds).
	whatChanged *whatChanged
	// Recently visited URLs. This is used for partial re-rendering.
	RecentlyVisited map[string]bool
}
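
// A typical configuration sketch, assuming the Build method defined elsewhere
// in this package; a watching server build that forces a full rebuild:
//
//	err := h.Build(BuildCfg{Watching: true, ResetState: true, PrintStats: true})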

func (h *HugoSites) renderCrossSitesArtifacts() error {

	if !h.multilingual.enabled() || h.IsMultihost() {
		return nil
	}

	if h.Cfg.GetBool("disableSitemap") {
		return nil
	}

	sitemapEnabled := false
	for _, s := range h.Sites {
		if s.isEnabled(kindSitemap) {
			sitemapEnabled = true
			break
		}
	}

	if !sitemapEnabled {
		return nil
	}

	// TODO(bep) DRY
	sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))

	s := h.Sites[0]

	smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}

	return s.renderAndWriteXML("sitemapindex",
		sitemapDefault.Filename, h.toSiteInfos(), s.appendThemeTemplates(smLayouts)...)
}

func (h *HugoSites) assignMissingTranslations() error {
	// This looks heavy, but it should be a small number of nodes by now.
	allPages := h.findAllPagesByKindNotIn(KindPage)
	for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
		nodes := h.findPagesByKindIn(nodeType, allPages)

		// Assign translations.
		for _, t1 := range nodes {
			for _, t2 := range nodes {
				if t1.isNewTranslation(t2) {
					t1.translations = append(t1.translations, t2)
				}
			}
		}
	}

	// Now we can sort the translations.
	for _, p := range allPages {
		if len(p.translations) > 0 {
			pageBy(languagePageSort).Sort(p.translations)
		}
	}
	return nil
}

// createMissingPages creates the home page, taxonomy pages etc. that aren't
// created as an effect of having a content file.
func (h *HugoSites) createMissingPages() error {
	var newPages Pages

	for _, s := range h.Sites {
		if s.isEnabled(KindHome) {
			// Home pages.
			home := s.findPagesByKind(KindHome)
			if len(home) > 1 {
				panic("Too many homes")
			}
			if len(home) == 0 {
				n := s.newHomePage()
				s.Pages = append(s.Pages, n)
				newPages = append(newPages, n)
			}
		}

		// Will create content-less root sections.
		newSections := s.assembleSections()
		s.Pages = append(s.Pages, newSections...)
		newPages = append(newPages, newSections...)

		// Taxonomy list and terms pages.
		taxonomies := s.Language.GetStringMapString("taxonomies")
		if len(taxonomies) > 0 {
			taxonomyPages := s.findPagesByKind(KindTaxonomy)
			taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm)
			for _, plural := range taxonomies {
				if s.isEnabled(KindTaxonomyTerm) {
					foundTaxonomyTermsPage := false
					for _, p := range taxonomyTermsPages {
						if p.sections[0] == plural {
							foundTaxonomyTermsPage = true
							break
						}
					}

					if !foundTaxonomyTermsPage {
						n := s.newTaxonomyTermsPage(plural)
						s.Pages = append(s.Pages, n)
						newPages = append(newPages, n)
					}
				}

				if s.isEnabled(KindTaxonomy) {
					for key := range s.Taxonomies[plural] {
						foundTaxonomyPage := false
						origKey := key

						if s.Info.preserveTaxonomyNames {
							key = s.PathSpec.MakePathSanitized(key)
						}
						for _, p := range taxonomyPages {
							if p.sections[0] == plural && p.sections[1] == key {
								foundTaxonomyPage = true
								break
							}
						}

						if !foundTaxonomyPage {
							n := s.newTaxonomyPage(plural, origKey)
							s.Pages = append(s.Pages, n)
							newPages = append(newPages, n)
						}
					}
				}
			}
		}
	}

	if len(newPages) > 0 {
		// This resorting is unfortunate, but it also needs to be sorted
		// when sections are created.
		first := h.Sites[0]

		first.AllPages = append(first.AllPages, newPages...)

		first.AllPages.Sort()

		for _, s := range h.Sites {
			s.Pages.Sort()
		}

		for i := 1; i < len(h.Sites); i++ {
			h.Sites[i].AllPages = first.AllPages
		}
	}

	return nil
}

func (s *Site) assignSiteByLanguage(p *Page) {

	pageLang := p.Lang()

	if pageLang == "" {
		panic("Page language missing: " + p.Title)
	}

	for _, site := range s.owner.Sites {
		if strings.HasPrefix(site.Language.Lang, pageLang) {
			p.s = site
			p.Site = &site.Info
			return
		}
	}
}

func (h *HugoSites) setupTranslations() {

	master := h.Sites[0]

	for _, p := range master.rawAllPages {
		if p.Lang() == "" {
			panic("Page language missing: " + p.Title)
		}

		if p.Kind == kindUnknown {
			p.Kind = p.s.kindFromSections(p.sections)
		}

		if !p.s.isEnabled(p.Kind) {
			continue
		}

		shouldBuild := p.shouldBuild()

		for i, site := range h.Sites {
			// The site is assigned by language when read.
			if site == p.s {
				site.updateBuildStats(p)
				if shouldBuild {
					site.Pages = append(site.Pages, p)
				}
			}

			if !shouldBuild {
				continue
			}

			if i == 0 {
				site.AllPages = append(site.AllPages, p)
			}
		}
	}

	// Pull over the collections from the master site.
	for i := 1; i < len(h.Sites); i++ {
		h.Sites[i].AllPages = h.Sites[0].AllPages
		h.Sites[i].Data = h.Sites[0].Data
	}

	if len(h.Sites) > 1 {
		pages := h.Sites[0].AllPages
		allTranslations := pagesToTranslationsMap(pages)
		assignTranslationsToPages(allTranslations, pages)
	}
}

func (s *Site) preparePagesForRender(cfg *BuildCfg) {

	pageChan := make(chan *Page)
	wg := &sync.WaitGroup{}
	numWorkers := getGoMaxProcs() * 4

	for i := 0; i < numWorkers; i++ {
		wg.Add(1)
		go func(pages <-chan *Page, wg *sync.WaitGroup) {
			defer wg.Done()
			for p := range pages {
				if !p.shouldRenderTo(s.rc.Format) {
					// No need to prepare.
					continue
				}
				var shortcodeUpdate bool
				if p.shortcodeState != nil {
					shortcodeUpdate = p.shortcodeState.updateDelta()
				}

				if !shortcodeUpdate && !cfg.whatChanged.other && p.rendered {
					// No need to process it again.
					continue
				}

				// If we got this far it means that this is either a new Page pointer
				// or a template or similar has changed, so we need to do a re-rendering
				// of the shortcodes etc.

				// Mark it as rendered.
				p.rendered = true

				// If in watch mode or if we have multiple output formats,
				// we need to keep the original so we can
				// potentially repeat this process on rebuild.
				needsACopy := cfg.Watching || len(p.outputFormats) > 1
				var workContentCopy []byte
				if needsACopy {
					workContentCopy = make([]byte, len(p.workContent))
					copy(workContentCopy, p.workContent)
				} else {
					// Just reuse the same slice.
					workContentCopy = p.workContent
				}

				if p.Markup == "markdown" {
					tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
					p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
					workContentCopy = tmpContent
				}

				var err error
				if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil {
					s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
				}

				if p.Markup != "html" {

					// Now we know enough to create a summary of the page and count some words.
					summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)

					if err != nil {
						s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
					} else if summaryContent != nil {
						workContentCopy = summaryContent.content
					}

					p.Content = helpers.BytesToHTML(workContentCopy)

					if summaryContent == nil {
						if err := p.setAutoSummary(); err != nil {
							s.Log.ERROR.Printf("Failed to set auto summary for page %q: %s", p.pathOrTitle(), err)
						}
					}

				} else {
					p.Content = helpers.BytesToHTML(workContentCopy)
				}

				// Analyze for raw stats.
				p.analyzePage()
			}
		}(pageChan, wg)
	}

	for _, p := range s.Pages {
		pageChan <- p
	}

	close(pageChan)

	wg.Wait()
}

// Pages returns all pages for all sites.
func (h *HugoSites) Pages() Pages {
	return h.Sites[0].AllPages
}

func handleShortcodes(p *Page, rawContentCopy []byte) ([]byte, error) {
	if p.shortcodeState != nil && len(p.shortcodeState.contentShortcodes) > 0 {
		p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", len(p.shortcodeState.contentShortcodes), p.BaseFileName())
		err := p.shortcodeState.executeShortcodesForDelta(p)

		if err != nil {
			return rawContentCopy, err
		}

		rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes)

		if err != nil {
			p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error())
		}
	}

	return rawContentCopy, nil
}

func (s *Site) updateBuildStats(page *Page) {
	if page.IsDraft() {
		s.draftCount++
	}

	if page.IsFuture() {
		s.futureCount++
	}

	if page.IsExpired() {
		s.expiredCount++
	}
}

func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages {
	return h.Sites[0].findPagesByKindNotIn(kind, inPages)
}

func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages {
	return h.Sites[0].findPagesByKindIn(kind, inPages)
}

func (h *HugoSites) findAllPagesByKind(kind string) Pages {
	return h.findPagesByKindIn(kind, h.Sites[0].AllPages)
}

func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
	return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
}