2016-07-27 04:03:45 -04:00
|
|
|
// Copyright 2016-present The Hugo Authors. All rights reserved.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
package hugolib
|
|
|
|
|
2016-07-27 04:49:42 -04:00
|
|
|
import (
|
2016-08-05 07:10:58 -04:00
|
|
|
"fmt"
|
2016-11-07 14:24:37 -05:00
|
|
|
"html/template"
|
2016-08-08 03:28:02 -04:00
|
|
|
"os"
|
2016-11-02 16:34:19 -04:00
|
|
|
"path"
|
2016-07-28 03:30:58 -04:00
|
|
|
"strings"
|
2016-08-01 17:04:44 -04:00
|
|
|
"sync"
|
2016-07-27 04:49:42 -04:00
|
|
|
|
2016-08-01 17:04:44 -04:00
|
|
|
"github.com/spf13/hugo/helpers"
|
|
|
|
|
2016-07-28 03:30:58 -04:00
|
|
|
"github.com/spf13/viper"
|
2016-07-27 04:49:42 -04:00
|
|
|
|
2016-11-01 17:39:24 -04:00
|
|
|
"github.com/bep/inflect"
|
2016-07-28 03:30:58 -04:00
|
|
|
"github.com/spf13/hugo/source"
|
|
|
|
"github.com/spf13/hugo/tpl"
|
2016-07-27 04:49:42 -04:00
|
|
|
jww "github.com/spf13/jwalterweatherman"
|
|
|
|
)
|
|
|
|
|
2016-07-27 04:03:45 -04:00
|
|
|
// HugoSites represents the sites to build. Each site represents a language.
type HugoSites struct {
	// Sites holds one Site per configured language.
	Sites []*Site

	// tmpl is the template collection shared by all sites.
	tmpl tpl.Template
	// runMode carries run mode flags (e.g. watch/server mode).
	runMode runmode

	// multilingual holds the language configuration derived from the sites.
	multilingual *Multilingual
}
|
|
|
|
|
2016-08-05 10:11:03 -04:00
|
|
|
// NewHugoSites creates a new collection of sites given the input sites, building
|
|
|
|
// a language configuration based on those.
|
2016-08-07 08:03:03 -04:00
|
|
|
func newHugoSites(sites ...*Site) (*HugoSites, error) {
|
2016-08-06 08:51:50 -04:00
|
|
|
langConfig, err := newMultiLingualFromSites(sites...)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
2016-07-28 03:30:58 -04:00
|
|
|
}
|
|
|
|
|
2016-11-08 17:49:42 -05:00
|
|
|
h := &HugoSites{multilingual: langConfig, Sites: sites}
|
2016-08-08 04:12:39 -04:00
|
|
|
|
|
|
|
for _, s := range sites {
|
|
|
|
s.owner = h
|
|
|
|
}
|
|
|
|
return h, nil
|
2016-07-28 03:30:58 -04:00
|
|
|
}
|
2016-07-27 04:03:45 -04:00
|
|
|
|
2016-08-05 07:10:58 -04:00
|
|
|
// NewHugoSitesFromConfiguration creates HugoSites from the global Viper config.
|
|
|
|
func NewHugoSitesFromConfiguration() (*HugoSites, error) {
|
2016-08-06 08:51:50 -04:00
|
|
|
sites, err := createSitesFromConfig()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2016-08-07 08:03:03 -04:00
|
|
|
return newHugoSites(sites...)
|
2016-08-06 08:51:50 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
func createSitesFromConfig() ([]*Site, error) {
|
2016-08-05 10:11:03 -04:00
|
|
|
var sites []*Site
|
2016-10-24 14:56:00 -04:00
|
|
|
multilingual := viper.GetStringMap("languages")
|
2016-08-05 07:10:58 -04:00
|
|
|
if len(multilingual) == 0 {
|
2016-08-07 18:12:06 -04:00
|
|
|
sites = append(sites, newSite(helpers.NewDefaultLanguage()))
|
2016-08-05 07:10:58 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
if len(multilingual) > 0 {
|
|
|
|
var err error
|
|
|
|
|
|
|
|
languages, err := toSortedLanguages(multilingual)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("Failed to parse multilingual config: %s", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, lang := range languages {
|
2016-08-05 10:11:03 -04:00
|
|
|
sites = append(sites, newSite(lang))
|
2016-08-05 07:10:58 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2016-08-06 08:51:50 -04:00
|
|
|
return sites, nil
|
2016-08-05 07:10:58 -04:00
|
|
|
}
|
|
|
|
|
2016-11-16 07:00:45 -05:00
|
|
|
// Reset resets the sites and template caches, making it ready for a full rebuild.
|
2016-08-06 08:51:50 -04:00
|
|
|
func (h *HugoSites) reset() {
|
2016-07-28 03:30:58 -04:00
|
|
|
for i, s := range h.Sites {
|
2016-08-07 18:12:06 -04:00
|
|
|
h.Sites[i] = s.reset()
|
2016-07-27 04:03:45 -04:00
|
|
|
}
|
2016-11-16 07:00:45 -05:00
|
|
|
|
|
|
|
tpl.ResetCaches()
|
2016-07-27 04:03:45 -04:00
|
|
|
}
|
2016-07-27 04:49:42 -04:00
|
|
|
|
2016-11-10 14:55:52 -05:00
|
|
|
func (h *HugoSites) createSitesFromConfig() error {
|
2016-08-09 14:06:15 -04:00
|
|
|
|
2016-08-06 08:51:50 -04:00
|
|
|
sites, err := createSitesFromConfig()
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
langConfig, err := newMultiLingualFromSites(sites...)
|
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
h.Sites = sites
|
|
|
|
|
2016-08-08 04:12:39 -04:00
|
|
|
for _, s := range sites {
|
|
|
|
s.owner = h
|
2016-08-06 08:51:50 -04:00
|
|
|
}
|
|
|
|
|
2016-08-08 04:12:39 -04:00
|
|
|
h.multilingual = langConfig
|
|
|
|
|
2016-08-06 08:51:50 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func (h *HugoSites) toSiteInfos() []*SiteInfo {
|
2016-08-04 16:12:19 -04:00
|
|
|
infos := make([]*SiteInfo, len(h.Sites))
|
|
|
|
for i, s := range h.Sites {
|
|
|
|
infos[i] = &s.Info
|
|
|
|
}
|
|
|
|
return infos
|
|
|
|
}
|
|
|
|
|
2016-08-05 10:11:03 -04:00
|
|
|
// BuildCfg holds build options used to, as an example, skip the render step.
type BuildCfg struct {
	// Whether we are in watch (server) mode.
	Watching bool
	// Print build stats at the end of a build.
	PrintStats bool
	// Reset site state before build. Use to force full rebuilds.
	ResetState bool
	// Re-creates the sites from configuration before a build.
	// This is needed if new languages are added.
	CreateSitesFromConfig bool
	// Skip rendering. Useful for testing.
	SkipRender bool
	// Use this to add templates to use for rendering.
	// Useful for testing.
	withTemplate func(templ tpl.Template) error
	// Use this to indicate what changed (for rebuilds).
	whatChanged *whatChanged
}
|
2016-07-28 03:30:58 -04:00
|
|
|
|
2016-08-08 03:28:02 -04:00
|
|
|
// Analyze prints a build report to Stdout.
|
|
|
|
// Useful for debugging.
|
|
|
|
func (h *HugoSites) Analyze() error {
|
|
|
|
if err := h.Build(BuildCfg{SkipRender: true}); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
s := h.Sites[0]
|
|
|
|
return s.ShowPlan(os.Stdout)
|
|
|
|
}
|
|
|
|
|
2016-11-10 14:55:52 -05:00
|
|
|
func (h *HugoSites) renderCrossSitesArtifacts() error {
|
2016-08-04 16:12:19 -04:00
|
|
|
|
2016-08-08 04:12:39 -04:00
|
|
|
if !h.multilingual.enabled() {
|
2016-08-04 16:12:19 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-10-24 14:56:00 -04:00
|
|
|
if viper.GetBool("disableSitemap") {
|
2016-09-23 04:30:55 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-08-04 16:12:19 -04:00
|
|
|
// TODO(bep) DRY
|
2016-10-24 14:56:00 -04:00
|
|
|
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap"))
|
2016-08-04 16:12:19 -04:00
|
|
|
|
|
|
|
s := h.Sites[0]
|
|
|
|
|
|
|
|
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
|
|
|
|
|
|
|
|
if err := s.renderAndWriteXML("sitemapindex", sitemapDefault.Filename,
|
2016-08-05 07:10:58 -04:00
|
|
|
h.toSiteInfos(), s.appendThemeTemplates(smLayouts)...); err != nil {
|
2016-08-04 16:12:19 -04:00
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-11-07 14:24:37 -05:00
|
|
|
func (h *HugoSites) assignMissingTranslations() error {
|
|
|
|
// This looks heavy, but it should be a small number of nodes by now.
|
2016-11-13 05:43:23 -05:00
|
|
|
allPages := h.findAllPagesByNodeTypeNotIn(KindPage)
|
|
|
|
for _, nodeType := range []Kind{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
|
2016-11-11 03:01:47 -05:00
|
|
|
nodes := h.findPagesByNodeTypeIn(nodeType, allPages)
|
2016-11-07 14:24:37 -05:00
|
|
|
|
|
|
|
// Assign translations
|
|
|
|
for _, t1 := range nodes {
|
|
|
|
for _, t2 := range nodes {
|
|
|
|
if t2.isTranslation(t1) {
|
|
|
|
t1.translations = append(t1.translations, t2)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
|
|
|
|
}
|
|
|
|
|
2016-11-11 03:01:47 -05:00
|
|
|
// createMissingPages creates home page, taxonomies etc. that isnt't created as an
|
2016-11-01 17:39:24 -04:00
|
|
|
// effect of having a content file.
|
2016-11-11 03:01:47 -05:00
|
|
|
func (h *HugoSites) createMissingPages() error {
|
2016-11-01 17:39:24 -04:00
|
|
|
// TODO(bep) np check node title etc.
|
|
|
|
|
2016-11-11 03:01:47 -05:00
|
|
|
var newPages Pages
|
2016-11-07 14:24:37 -05:00
|
|
|
|
|
|
|
for _, s := range h.Sites {
|
|
|
|
|
|
|
|
// home pages
|
2016-11-13 05:43:23 -05:00
|
|
|
home := s.findPagesByNodeType(KindHome)
|
2016-11-07 14:24:37 -05:00
|
|
|
if len(home) > 1 {
|
|
|
|
panic("Too many homes")
|
|
|
|
}
|
|
|
|
if len(home) == 0 {
|
|
|
|
n := s.newHomePage()
|
2016-11-11 03:01:47 -05:00
|
|
|
s.Pages = append(s.Pages, n)
|
|
|
|
newPages = append(newPages, n)
|
2016-11-07 14:24:37 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
// taxonomy list and terms pages
|
|
|
|
taxonomies := s.Language.GetStringMapString("taxonomies")
|
|
|
|
if len(taxonomies) > 0 {
|
2016-11-13 05:43:23 -05:00
|
|
|
taxonomyPages := s.findPagesByNodeType(KindTaxonomy)
|
|
|
|
taxonomyTermsPages := s.findPagesByNodeType(KindTaxonomyTerm)
|
2016-11-07 14:24:37 -05:00
|
|
|
for _, plural := range taxonomies {
|
|
|
|
tax := s.Taxonomies[plural]
|
|
|
|
foundTaxonomyPage := false
|
|
|
|
foundTaxonomyTermsPage := false
|
|
|
|
for key, _ := range tax {
|
|
|
|
for _, p := range taxonomyPages {
|
|
|
|
if p.sections[0] == plural && p.sections[1] == key {
|
|
|
|
foundTaxonomyPage = true
|
|
|
|
break
|
|
|
|
}
|
2016-11-01 17:39:24 -04:00
|
|
|
}
|
2016-11-07 14:24:37 -05:00
|
|
|
for _, p := range taxonomyTermsPages {
|
|
|
|
if p.sections[0] == plural {
|
|
|
|
foundTaxonomyTermsPage = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !foundTaxonomyPage {
|
|
|
|
n := s.newTaxonomyPage(plural, key)
|
2016-11-11 03:01:47 -05:00
|
|
|
s.Pages = append(s.Pages, n)
|
|
|
|
newPages = append(newPages, n)
|
2016-11-01 17:39:24 -04:00
|
|
|
}
|
|
|
|
|
2016-11-07 14:24:37 -05:00
|
|
|
if !foundTaxonomyTermsPage {
|
|
|
|
foundTaxonomyTermsPage = true
|
|
|
|
n := s.newTaxonomyTermsPage(plural)
|
2016-11-11 03:01:47 -05:00
|
|
|
s.Pages = append(s.Pages, n)
|
|
|
|
newPages = append(newPages, n)
|
2016-11-07 14:24:37 -05:00
|
|
|
}
|
2016-11-01 17:39:24 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
sectionPages := s.findPagesByNodeType(KindSection)
|
2016-11-07 14:24:37 -05:00
|
|
|
if len(sectionPages) < len(s.Sections) {
|
|
|
|
for name, section := range s.Sections {
|
2016-11-09 05:09:16 -05:00
|
|
|
// A section may be created for the root content folder if a
|
|
|
|
// content file is placed there.
|
|
|
|
// We cannot create a section node for that, because
|
|
|
|
// that would overwrite the home page.
|
|
|
|
if name == "" {
|
|
|
|
continue
|
|
|
|
}
|
2016-11-07 14:24:37 -05:00
|
|
|
foundSection := false
|
|
|
|
for _, sectionPage := range sectionPages {
|
|
|
|
if sectionPage.sections[0] == name {
|
|
|
|
foundSection = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if !foundSection {
|
|
|
|
n := s.newSectionPage(name, section)
|
2016-11-11 03:01:47 -05:00
|
|
|
s.Pages = append(s.Pages, n)
|
|
|
|
newPages = append(newPages, n)
|
2016-11-01 17:39:24 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-11 03:01:47 -05:00
|
|
|
if len(newPages) > 0 {
|
2016-11-07 14:24:37 -05:00
|
|
|
first := h.Sites[0]
|
2016-11-11 03:01:47 -05:00
|
|
|
first.AllPages = append(first.AllPages, newPages...)
|
2016-11-07 14:24:37 -05:00
|
|
|
for i := 1; i < len(h.Sites); i++ {
|
2016-11-11 03:01:47 -05:00
|
|
|
h.Sites[i].AllPages = first.AllPages
|
2016-11-07 14:24:37 -05:00
|
|
|
}
|
|
|
|
}
|
2016-11-01 17:39:24 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-11-10 14:55:52 -05:00
|
|
|
// TODO(bep) np move
|
2016-11-01 17:39:24 -04:00
|
|
|
// Move the new* methods after cleanup in site.go
|
2016-11-13 05:43:23 -05:00
|
|
|
func (s *Site) newNodePage(typ Kind) *Page {
|
2016-11-08 17:34:52 -05:00
|
|
|
return &Page{
|
2016-11-13 05:43:23 -05:00
|
|
|
Kind: typ,
|
2016-11-08 17:34:52 -05:00
|
|
|
Node: Node{
|
|
|
|
Data: make(map[string]interface{}),
|
|
|
|
Site: &s.Info,
|
|
|
|
language: s.Language,
|
|
|
|
}, site: s}
|
2016-11-01 17:39:24 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
func (s *Site) newHomePage() *Page {
|
2016-11-13 05:43:23 -05:00
|
|
|
p := s.newNodePage(KindHome)
|
2016-11-01 17:39:24 -04:00
|
|
|
p.Title = s.Info.Title
|
2016-11-11 03:19:16 -05:00
|
|
|
pages := Pages{}
|
|
|
|
p.Data["Pages"] = pages
|
|
|
|
p.Pages = pages
|
2016-11-07 14:24:37 -05:00
|
|
|
s.setPageURLs(p, "/")
|
2016-11-01 17:39:24 -04:00
|
|
|
// TODO(bep) np check Data pages
|
|
|
|
// TODO(bep) np check setURLs
|
|
|
|
return p
|
|
|
|
}
|
|
|
|
|
2016-11-07 14:24:37 -05:00
|
|
|
func (s *Site) setPageURLs(p *Page, in string) {
|
|
|
|
p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
|
|
|
|
p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
|
|
|
|
p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
|
|
|
|
}
|
|
|
|
|
2016-11-01 17:39:24 -04:00
|
|
|
func (s *Site) newTaxonomyPage(plural, key string) *Page {
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
p := s.newNodePage(KindTaxonomy)
|
2016-11-01 17:39:24 -04:00
|
|
|
|
|
|
|
p.sections = []string{plural, key}
|
|
|
|
|
|
|
|
if s.Info.preserveTaxonomyNames {
|
|
|
|
key = s.Info.pathSpec.MakePathSanitized(key)
|
|
|
|
}
|
|
|
|
|
|
|
|
if s.Info.preserveTaxonomyNames {
|
|
|
|
// keep as is in the title
|
|
|
|
p.Title = key
|
|
|
|
} else {
|
|
|
|
p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
|
|
|
|
}
|
|
|
|
|
2016-11-07 14:24:37 -05:00
|
|
|
s.setPageURLs(p, path.Join(plural, key))
|
2016-11-01 17:39:24 -04:00
|
|
|
|
|
|
|
return p
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
p := s.newNodePage(KindSection)
|
2016-11-01 17:39:24 -04:00
|
|
|
p.sections = []string{name}
|
|
|
|
|
|
|
|
sectionName := name
|
|
|
|
if !s.Info.preserveTaxonomyNames && len(section) > 0 {
|
|
|
|
sectionName = section[0].Page.Section()
|
|
|
|
}
|
|
|
|
|
|
|
|
sectionName = helpers.FirstUpper(sectionName)
|
|
|
|
if viper.GetBool("pluralizeListTitles") {
|
|
|
|
p.Title = inflect.Pluralize(sectionName)
|
|
|
|
} else {
|
|
|
|
p.Title = sectionName
|
|
|
|
}
|
2016-11-07 14:24:37 -05:00
|
|
|
s.setPageURLs(p, name)
|
2016-11-01 17:39:24 -04:00
|
|
|
return p
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *Site) newTaxonomyTermsPage(plural string) *Page {
|
2016-11-13 05:43:23 -05:00
|
|
|
p := s.newNodePage(KindTaxonomyTerm)
|
2016-11-01 17:39:24 -04:00
|
|
|
p.sections = []string{plural}
|
|
|
|
p.Title = strings.Title(plural)
|
2016-11-09 13:59:28 -05:00
|
|
|
s.setPageURLs(p, plural)
|
2016-11-01 17:39:24 -04:00
|
|
|
return p
|
|
|
|
}
|
|
|
|
|
2016-11-09 03:58:18 -05:00
|
|
|
// setupTranslations distributes the raw pages of the master (first) site to
// the per-language sites, builds the shared AllPages collection, and assigns
// translations between pages when more than one language is configured.
func (h *HugoSites) setupTranslations() {

	// The first site holds the raw page sources for all languages.
	master := h.Sites[0]

	for _, p := range master.rawAllPages {
		// Every page must have been assigned a language by now.
		if p.Lang() == "" {
			panic("Page language missing: " + p.Title)
		}

		// shouldBuild reflects draft/future/expired filtering.
		shouldBuild := p.shouldBuild()

		for i, site := range h.Sites {
			// Prefix match so e.g. "en" matches "en-us".
			if strings.HasPrefix(site.Language.Lang, p.Lang()) {
				site.updateBuildStats(p)
				if shouldBuild {
					site.Pages = append(site.Pages, p)
					p.Site = &site.Info
				}
			}

			if !shouldBuild {
				continue
			}

			// Only the first site accumulates AllPages; it is shared below.
			if i == 0 {
				site.AllPages = append(site.AllPages, p)
			}
		}

	}

	// Pull over the collections from the master site
	for i := 1; i < len(h.Sites); i++ {
		h.Sites[i].AllPages = h.Sites[0].AllPages
		h.Sites[i].Data = h.Sites[0].Data
	}

	// With multiple languages, link each page to its translations.
	if len(h.Sites) > 1 {
		pages := h.Sites[0].AllPages
		allTranslations := pagesToTranslationsMap(h.multilingual, pages)
		assignTranslationsToPages(allTranslations, pages)
	}
}
|
|
|
|
|
2016-08-10 16:46:02 -04:00
|
|
|
// preRender performs build tasks that need to be done as late as possible.
|
2016-08-01 17:04:44 -04:00
|
|
|
// Shortcode handling is the main task in here.
|
2016-08-10 16:46:02 -04:00
|
|
|
// TODO(bep) We need to look at the whole handler-chain construct with he below in mind.
|
2016-11-03 19:34:25 -04:00
|
|
|
// TODO(bep) np clean
|
2016-08-12 18:33:17 -04:00
|
|
|
func (h *HugoSites) preRender(cfg BuildCfg, changed whatChanged) error {
|
2016-08-09 14:06:15 -04:00
|
|
|
|
2016-08-12 12:17:00 -04:00
|
|
|
for _, s := range h.Sites {
|
|
|
|
if err := s.setCurrentLanguageConfig(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2016-08-12 18:33:17 -04:00
|
|
|
s.preparePagesForRender(cfg, changed)
|
2016-08-12 12:17:00 -04:00
|
|
|
}
|
2016-08-01 17:04:44 -04:00
|
|
|
|
2016-08-12 12:17:00 -04:00
|
|
|
return nil
|
|
|
|
}
|
2016-08-01 17:04:44 -04:00
|
|
|
|
2016-08-12 18:33:17 -04:00
|
|
|
// preparePagesForRender processes each page's raw content (TOC extraction,
// shortcode replacement, summary handling) in a pool of worker goroutines
// before the render step.
func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
	pageChan := make(chan *Page)
	wg := &sync.WaitGroup{}

	// Fan out to a fixed pool of workers; pages are fed over pageChan below.
	for i := 0; i < getGoMaxProcs()*4; i++ {
		wg.Add(1)
		go func(pages <-chan *Page, wg *sync.WaitGroup) {
			defer wg.Done()
			for p := range pages {

				if !changed.other && p.rendered {
					// No need to process it again.
					continue
				}

				// If we got this far it means that this is either a new Page pointer
				// or a template or similar has changed so we need to do a rerendering
				// of the shortcodes etc.

				// Mark it as rendered
				p.rendered = true

				// If in watch mode, we need to keep the original so we can
				// repeat this process on rebuild.
				var rawContentCopy []byte
				if cfg.Watching {
					rawContentCopy = make([]byte, len(p.rawContent))
					copy(rawContentCopy, p.rawContent)
				} else {
					// Just reuse the same slice.
					rawContentCopy = p.rawContent
				}

				// Pull the table of contents out of markdown content.
				if p.Markup == "markdown" {
					tmpContent, tmpTableOfContents := helpers.ExtractTOC(rawContentCopy)
					p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
					rawContentCopy = tmpContent
				}

				// Shortcode failures are logged but do not abort the build.
				var err error
				if rawContentCopy, err = handleShortcodes(p, s.owner.tmpl, rawContentCopy); err != nil {
					jww.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
				}

				if p.Markup != "html" {

					// Now we know enough to create a summary of the page and count some words
					summaryContent, err := p.setUserDefinedSummaryIfProvided(rawContentCopy)

					if err != nil {
						jww.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
					} else if summaryContent != nil {
						rawContentCopy = summaryContent.content
					}

					p.Content = helpers.BytesToHTML(rawContentCopy)

					// No user-defined summary: derive one automatically.
					if summaryContent == nil {
						p.setAutoSummary()
					}

				} else {
					p.Content = helpers.BytesToHTML(rawContentCopy)
				}

				// no need for this anymore
				rawContentCopy = nil

				//analyze for raw stats
				p.analyzePage()

			}
		}(pageChan, wg)
	}

	// Feed all pages of this site to the workers, then wait for completion.
	for _, p := range s.Pages {
		pageChan <- p
	}

	close(pageChan)

	wg.Wait()
}
|
|
|
|
|
2016-08-05 10:11:03 -04:00
|
|
|
// Pages returns all pages for all sites.
|
2016-08-09 14:06:15 -04:00
|
|
|
func (h *HugoSites) Pages() Pages {
|
2016-08-05 10:11:03 -04:00
|
|
|
return h.Sites[0].AllPages
|
|
|
|
}
|
|
|
|
|
2016-10-25 14:40:32 -04:00
|
|
|
func handleShortcodes(p *Page, t tpl.Template, rawContentCopy []byte) ([]byte, error) {
|
2016-08-01 17:04:44 -04:00
|
|
|
if len(p.contentShortCodes) > 0 {
|
|
|
|
jww.DEBUG.Printf("Replace %d shortcodes in %q", len(p.contentShortCodes), p.BaseFileName())
|
|
|
|
shortcodes, err := executeShortcodeFuncMap(p.contentShortCodes)
|
|
|
|
|
|
|
|
if err != nil {
|
2016-10-25 14:40:32 -04:00
|
|
|
return rawContentCopy, err
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
|
2016-10-25 14:40:32 -04:00
|
|
|
rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, shortcodes)
|
2016-08-01 17:04:44 -04:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
jww.FATAL.Printf("Failed to replace short code tokens in %s:\n%s", p.BaseFileName(), err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-10-25 14:40:32 -04:00
|
|
|
return rawContentCopy, nil
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
|
2016-07-28 03:30:58 -04:00
|
|
|
func (s *Site) updateBuildStats(page *Page) {
|
|
|
|
if page.IsDraft() {
|
|
|
|
s.draftCount++
|
|
|
|
}
|
|
|
|
|
|
|
|
if page.IsFuture() {
|
|
|
|
s.futureCount++
|
|
|
|
}
|
|
|
|
|
|
|
|
if page.IsExpired() {
|
|
|
|
s.expiredCount++
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-03 19:34:25 -04:00
|
|
|
// TODO(bep) np remove
|
2016-11-13 05:43:23 -05:00
|
|
|
func (h *HugoSites) findAllPagesByNodeType(n Kind) Pages {
|
2016-11-07 14:24:37 -05:00
|
|
|
return h.Sites[0].findAllPagesByNodeType(n)
|
|
|
|
}
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
func (h *HugoSites) findPagesByNodeTypeNotIn(n Kind, inPages Pages) Pages {
|
2016-11-07 14:24:37 -05:00
|
|
|
return h.Sites[0].findPagesByNodeTypeNotIn(n, inPages)
|
|
|
|
}
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
func (h *HugoSites) findPagesByNodeTypeIn(n Kind, inPages Pages) Pages {
|
2016-11-07 14:24:37 -05:00
|
|
|
return h.Sites[0].findPagesByNodeTypeIn(n, inPages)
|
|
|
|
}
|
|
|
|
|
2016-11-13 05:43:23 -05:00
|
|
|
func (h *HugoSites) findAllPagesByNodeTypeNotIn(n Kind) Pages {
|
2016-11-11 03:01:47 -05:00
|
|
|
return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllPages)
|
2016-11-07 14:24:37 -05:00
|
|
|
}
|
|
|
|
|
2016-07-28 03:30:58 -04:00
|
|
|
// Convenience func used in tests to build a single site/language excluding render phase.
|
|
|
|
func buildSiteSkipRender(s *Site, additionalTemplates ...string) error {
|
|
|
|
return doBuildSite(s, false, additionalTemplates...)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Convenience func used in tests to build a single site/language including render phase.
|
|
|
|
func buildAndRenderSite(s *Site, additionalTemplates ...string) error {
|
|
|
|
return doBuildSite(s, true, additionalTemplates...)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Convenience func used in tests to build a single site/language.
|
|
|
|
func doBuildSite(s *Site, render bool, additionalTemplates ...string) error {
|
2016-11-03 19:34:25 -04:00
|
|
|
if s.PageCollections == nil {
|
|
|
|
s.PageCollections = newPageCollections()
|
|
|
|
}
|
2016-08-07 08:03:03 -04:00
|
|
|
sites, err := newHugoSites(s)
|
2016-07-28 03:30:58 -04:00
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
|
|
|
|
addTemplates := func(templ tpl.Template) error {
|
|
|
|
for i := 0; i < len(additionalTemplates); i += 2 {
|
|
|
|
err := templ.AddTemplate(additionalTemplates[i], additionalTemplates[i+1])
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2016-08-05 10:11:03 -04:00
|
|
|
config := BuildCfg{SkipRender: !render, withTemplate: addTemplates}
|
2016-07-28 03:30:58 -04:00
|
|
|
return sites.Build(config)
|
|
|
|
}
|
|
|
|
|
|
|
|
// Convenience func used in tests.
|
2016-08-07 16:01:55 -04:00
|
|
|
func newHugoSitesFromSourceAndLanguages(input []source.ByteSource, languages helpers.Languages) (*HugoSites, error) {
|
2016-07-28 03:30:58 -04:00
|
|
|
if len(languages) == 0 {
|
|
|
|
panic("Must provide at least one language")
|
|
|
|
}
|
|
|
|
first := &Site{
|
|
|
|
Source: &source.InMemorySource{ByteSource: input},
|
|
|
|
Language: languages[0],
|
|
|
|
}
|
|
|
|
if len(languages) == 1 {
|
2016-08-07 08:03:03 -04:00
|
|
|
return newHugoSites(first)
|
2016-07-28 03:30:58 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
sites := make([]*Site, len(languages))
|
|
|
|
sites[0] = first
|
|
|
|
for i := 1; i < len(languages); i++ {
|
|
|
|
sites[i] = &Site{Language: languages[i]}
|
|
|
|
}
|
|
|
|
|
2016-08-07 08:03:03 -04:00
|
|
|
return newHugoSites(sites...)
|
2016-07-28 03:30:58 -04:00
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// Convenience func used in tests.
|
2016-08-07 08:03:03 -04:00
|
|
|
func newHugoSitesDefaultLanguage() (*HugoSites, error) {
|
2016-08-07 16:01:55 -04:00
|
|
|
return newHugoSitesFromSourceAndLanguages(nil, helpers.Languages{helpers.NewDefaultLanguage()})
|
2016-08-07 08:03:03 -04:00
|
|
|
}
|