Introduce a tree map for all content

This commit introduces a new data structure to store pages and their resources.

This data structure is backed by radix trees.

This simplifies tree operations, makes all pages a bundle, and paves the way for #6310.

It also solves a set of annoying issues (see list below).

Not a motivation behind this, but this commit also makes Hugo in general a little bit faster and more memory efficient (see benchmarks). This is especially true for partial rebuilds on content edits, but also when taxonomies are in use.

```
name                                   old time/op    new time/op    delta
SiteNew/Bundle_with_image/Edit-16        1.32ms ± 8%    1.00ms ± 9%  -24.42%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16    1.28ms ± 0%    0.94ms ± 0%  -26.26%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16      33.9ms ± 2%    21.8ms ± 1%  -35.67%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16            40.6ms ± 1%    37.7ms ± 3%   -7.20%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16        56.7ms ± 0%    51.7ms ± 1%   -8.82%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16      19.9ms ± 2%    18.3ms ± 3%   -7.64%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16         37.9ms ± 4%    34.0ms ± 2%  -10.28%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16             10.7ms ± 0%    10.6ms ± 0%   -1.15%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16         10.8ms ± 0%    10.7ms ± 0%   -1.05%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16           43.2ms ± 1%    39.6ms ± 1%   -8.35%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                 47.6ms ± 1%    47.3ms ± 0%     ~     (p=0.057 n=4+4)
SiteNew/Deep_content_tree-16             73.0ms ± 1%    74.2ms ± 1%     ~     (p=0.114 n=4+4)
SiteNew/Many_HTML_templates-16           37.9ms ± 0%    38.1ms ± 1%     ~     (p=0.114 n=4+4)
SiteNew/Page_collections-16              53.6ms ± 1%    54.7ms ± 1%   +2.09%  (p=0.029 n=4+4)

name                                   old alloc/op   new alloc/op   delta
SiteNew/Bundle_with_image/Edit-16         486kB ± 0%     430kB ± 0%  -11.47%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16     265kB ± 0%     209kB ± 0%  -21.06%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16      13.6MB ± 0%     8.8MB ± 0%  -34.93%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16            66.5MB ± 0%    63.9MB ± 0%   -3.95%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16        28.8MB ± 0%    25.8MB ± 0%  -10.55%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16      6.16MB ± 0%    5.56MB ± 0%   -9.86%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16         16.9MB ± 0%    16.0MB ± 0%   -5.19%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16             2.28MB ± 0%    2.29MB ± 0%   +0.35%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16         2.07MB ± 0%    2.07MB ± 0%     ~     (p=0.114 n=4+4)
SiteNew/Tags_and_categories-16           14.3MB ± 0%    13.2MB ± 0%   -7.30%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                 69.1MB ± 0%    69.0MB ± 0%     ~     (p=0.343 n=4+4)
SiteNew/Deep_content_tree-16             31.3MB ± 0%    31.8MB ± 0%   +1.49%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16           10.8MB ± 0%    10.9MB ± 0%   +1.11%  (p=0.029 n=4+4)
SiteNew/Page_collections-16              21.4MB ± 0%    21.6MB ± 0%   +1.15%  (p=0.029 n=4+4)

name                                   old allocs/op  new allocs/op  delta
SiteNew/Bundle_with_image/Edit-16         4.74k ± 0%     3.86k ± 0%  -18.57%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file/Edit-16     4.73k ± 0%     3.85k ± 0%  -18.58%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories/Edit-16        301k ± 0%      198k ± 0%  -34.14%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs/Edit-16              389k ± 0%      373k ± 0%   -4.07%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree/Edit-16          338k ± 0%      262k ± 0%  -22.63%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates/Edit-16        102k ± 0%       88k ± 0%  -13.81%  (p=0.029 n=4+4)
SiteNew/Page_collections/Edit-16           176k ± 0%      152k ± 0%  -13.32%  (p=0.029 n=4+4)
SiteNew/Bundle_with_image-16              26.8k ± 0%     26.8k ± 0%   +0.05%  (p=0.029 n=4+4)
SiteNew/Bundle_with_JSON_file-16          26.8k ± 0%     26.8k ± 0%   +0.05%  (p=0.029 n=4+4)
SiteNew/Tags_and_categories-16             273k ± 0%      245k ± 0%  -10.36%  (p=0.029 n=4+4)
SiteNew/Canonify_URLs-16                   396k ± 0%      398k ± 0%   +0.39%  (p=0.029 n=4+4)
SiteNew/Deep_content_tree-16               317k ± 0%      325k ± 0%   +2.53%  (p=0.029 n=4+4)
SiteNew/Many_HTML_templates-16             146k ± 0%      147k ± 0%   +0.98%  (p=0.029 n=4+4)
SiteNew/Page_collections-16                210k ± 0%      215k ± 0%   +2.44%  (p=0.029 n=4+4)
```

Fixes #6312
Fixes #6087
Fixes #6738
Fixes #6412
Fixes #6743
Fixes #6875
Fixes #6034
Fixes #6902
Fixes #6173
Fixes #6590
This commit is contained in:
Bjørn Erik Pedersen 2019-09-10 11:26:34 +02:00
parent e5329f13c0
commit eada236f87
No known key found for this signature in database
GPG key ID: 330E6E2BD4859D8F
71 changed files with 4859 additions and 2531 deletions

View file

@ -16,10 +16,11 @@ package commands
import (
"bytes"
"fmt"
"io"
"strings"
"time"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/hugofs"
@ -28,7 +29,6 @@ import (
"github.com/gohugoio/hugo/parser"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/pkg/errors"
@ -157,7 +157,7 @@ func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, target
return nil
}
pf, err := parseContentFile(file)
pf, err := pageparser.ParseFrontMatterAndContent(file)
if err != nil {
site.Log.ERROR.Println(errMsg)
file.Close()
@ -167,23 +167,23 @@ func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, target
file.Close()
// better handling of dates in formats that don't have support for them
if pf.frontMatterFormat == metadecoders.JSON || pf.frontMatterFormat == metadecoders.YAML || pf.frontMatterFormat == metadecoders.TOML {
for k, v := range pf.frontMatter {
if pf.FrontMatterFormat == metadecoders.JSON || pf.FrontMatterFormat == metadecoders.YAML || pf.FrontMatterFormat == metadecoders.TOML {
for k, v := range pf.FrontMatter {
switch vv := v.(type) {
case time.Time:
pf.frontMatter[k] = vv.Format(time.RFC3339)
pf.FrontMatter[k] = vv.Format(time.RFC3339)
}
}
}
var newContent bytes.Buffer
err = parser.InterfaceToFrontMatter(pf.frontMatter, targetFormat, &newContent)
err = parser.InterfaceToFrontMatter(pf.FrontMatter, targetFormat, &newContent)
if err != nil {
site.Log.ERROR.Println(errMsg)
return err
}
newContent.Write(pf.content)
newContent.Write(pf.Content)
newFilename := p.File().Filename()
@ -210,39 +210,3 @@ type parsedFile struct {
// Everything after Front Matter
content []byte
}
func parseContentFile(r io.Reader) (parsedFile, error) {
var pf parsedFile
psr, err := pageparser.Parse(r, pageparser.Config{})
if err != nil {
return pf, err
}
iter := psr.Iterator()
walkFn := func(item pageparser.Item) bool {
if pf.frontMatterSource != nil {
// The rest is content.
pf.content = psr.Input()[item.Pos:]
// Done
return false
} else if item.IsFrontMatter() {
pf.frontMatterFormat = metadecoders.FormatFromFrontMatterType(item.Type)
pf.frontMatterSource = item.Val
}
return true
}
iter.PeekWalk(walkFn)
metadata, err := metadecoders.Default.UnmarshalToMap(pf.frontMatterSource, pf.frontMatterFormat)
if err != nil {
return pf, err
}
pf.frontMatter = metadata
return pf, nil
}

View file

@ -26,6 +26,8 @@ import (
"time"
"unicode"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/parser/metadecoders"
@ -397,19 +399,19 @@ func convertJekyllPost(path, relPath, targetDir string, draft bool) error {
return err
}
pf, err := parseContentFile(bytes.NewReader(contentBytes))
pf, err := pageparser.ParseFrontMatterAndContent(bytes.NewReader(contentBytes))
if err != nil {
jww.ERROR.Println("Parse file error:", path)
return err
}
newmetadata, err := convertJekyllMetaData(pf.frontMatter, postName, postDate, draft)
newmetadata, err := convertJekyllMetaData(pf.FrontMatter, postName, postDate, draft)
if err != nil {
jww.ERROR.Println("Convert metadata error:", path)
return err
}
content, err := convertJekyllContent(newmetadata, string(pf.content))
content, err := convertJekyllContent(newmetadata, string(pf.Content))
if err != nil {
jww.ERROR.Println("Converting Jekyll error:", path)
return err

View file

@ -57,6 +57,11 @@ func PrintStackTrace(w io.Writer) {
fmt.Fprintf(w, "%s", buf)
}
// ErrorSender is an error handler, typically non-blocking.
type ErrorSender interface {
	// SendError hands the given error off to the handler.
	SendError(err error)
}
// Recover is a helper function that can be used to capture panics.
// Put this at the top of a method/function that crashes in a template:
// defer herrors.Recover()

View file

@ -16,6 +16,7 @@ package para
import (
"context"
"runtime"
"sort"
"sync"
"sync/atomic"

28
common/types/convert.go Normal file
View file

@ -0,0 +1,28 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import "github.com/spf13/cast"
// ToStringSlicePreserveString converts v to a string slice.
// If v is a string, it will be wrapped in a string slice.
func ToStringSlicePreserveString(v interface{}) []string {
	switch vv := v.(type) {
	case nil:
		// Preserve nil rather than producing an empty slice.
		return nil
	case string:
		// A single string becomes a one-element slice, left untouched.
		return []string{vv}
	default:
		// Everything else goes through cast's generic conversion.
		return cast.ToStringSlice(vv)
	}
}

View file

@ -0,0 +1,29 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package types
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestToStringSlicePreserveString verifies that nil maps to nil, a plain
// string is wrapped, and slices are converted element-wise.
func TestToStringSlicePreserveString(t *testing.T) {
	c := qt.New(t)

	c.Assert(ToStringSlicePreserveString(nil), qt.IsNil)
	c.Assert(ToStringSlicePreserveString("Hugo"), qt.DeepEquals, []string{"Hugo"})
	c.Assert(ToStringSlicePreserveString([]interface{}{"A", "B"}), qt.DeepEquals, []string{"A", "B"})
}

View file

@ -14,7 +14,7 @@
package config
import (
"github.com/spf13/cast"
"github.com/gohugoio/hugo/common/types"
)
// Provider provides the configuration settings for Hugo.
@ -35,14 +35,7 @@ type Provider interface {
// we do not attempt to split it into fields.
func GetStringSlicePreserveString(cfg Provider, key string) []string {
sd := cfg.Get(key)
return toStringSlicePreserveString(sd)
}
func toStringSlicePreserveString(v interface{}) []string {
if sds, ok := v.(string); ok {
return []string{sds}
}
return cast.ToStringSlice(v)
return types.ToStringSlicePreserveString(sd)
}
// SetBaseTestDefaults provides some common config defaults used in tests.

View file

@ -110,7 +110,7 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archety
Date: time.Now().Format(time.RFC3339),
Name: name,
File: f,
Site: &s.Info,
Site: s.Info,
}
if archetypeFilename == "" {

1
go.sum
View file

@ -73,6 +73,7 @@ github.com/bep/gitmap v1.1.1 h1:Nf8ySnC3I7/xPjuWeCwzukUFv185iTUQ6nOvLy9gCJA=
github.com/bep/gitmap v1.1.1/go.mod h1:g9VRETxFUXNWzMiuxOwcudo6DfZkW9jOsOW0Ft4kYaY=
github.com/bep/golibsass v0.4.0 h1:B2jsNZuRgpsyzv0I5iubJYApDhib87RzjTcRhVOjg78=
github.com/bep/golibsass v0.4.0/go.mod h1:DL87K8Un/+pWUS75ggYv41bliGiolxzDKWJAq3eJ1MA=
github.com/bep/golibsass v0.5.0 h1:b+Uxsk826Q35OmbenSmU65P+FJJQoVs2gI2mk1ba28s=
github.com/bep/golibsass v0.5.0/go.mod h1:DL87K8Un/+pWUS75ggYv41bliGiolxzDKWJAq3eJ1MA=
github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI=
github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0=

View file

@ -437,36 +437,6 @@ func NormalizeHugoFlags(f *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(name)
}
// DiffStringSlices returns the difference between two string slices.
// Useful in tests.
// See:
// http://stackoverflow.com/questions/19374219/how-to-find-the-difference-between-two-slices-of-strings-in-golang
func DiffStringSlices(slice1 []string, slice2 []string) []string {
diffStr := []string{}
m := map[string]int{}
for _, s1Val := range slice1 {
m[s1Val] = 1
}
for _, s2Val := range slice2 {
m[s2Val] = m[s2Val] + 1
}
for mKey, mVal := range m {
if mVal == 1 {
diffStr = append(diffStr, mKey)
}
}
return diffStr
}
// DiffStrings splits the strings into fields and runs it into DiffStringSlices.
// Useful for tests.
func DiffStrings(s1, s2 string) []string {
return DiffStringSlices(strings.Fields(s1), strings.Fields(s2))
}
// PrintFs prints the given filesystem to the given writer starting from the given path.
// This is useful for debugging.
func PrintFs(fs afero.Fs, path string, w io.Writer) {

View file

@ -18,6 +18,7 @@ import (
"fmt"
"io"
"os"
"path"
"path/filepath"
"regexp"
"sort"
@ -243,13 +244,19 @@ func FileAndExtNoDelimiter(in string) (string, string) {
return file, strings.TrimPrefix(ext, ".")
}
// Filename takes a path, strips out the extension,
// Filename takes a file path, strips out the extension,
// and returns the name of the file.
func Filename(in string) (name string) {
name, _ = fileAndExt(in, fpb)
return
}
// PathNoExt returns the given path with its file extension, if any,
// stripped off; e.g. "/posts/hello.md" becomes "/posts/hello".
// Note: unlike Filename, it keeps the directory part of the path.
func PathNoExt(in string) string {
	return strings.TrimSuffix(in, path.Ext(in))
}
// FileAndExt returns the filename and any extension of a file path as
// two separate strings.
//

View file

@ -15,12 +15,24 @@ package hqt
import (
"errors"
"fmt"
"reflect"
"strings"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/htesting"
"github.com/google/go-cmp/cmp"
"github.com/spf13/cast"
)
// IsSameString asserts that two strings are equal. The two strings
// are normalized (whitespace removed) before doing a ==.
// Also note that two strings can be the same even if they're of different
// types.
var IsSameString qt.Checker = &stringChecker{
argNames: []string{"got", "want"},
}
// IsSameType asserts that got is the same type as want.
var IsSameType qt.Checker = &typeChecker{
argNames: []string{"got", "want"},
@ -47,6 +59,36 @@ func (c *typeChecker) Check(got interface{}, args []interface{}, note func(key s
return nil
}
// stringChecker checks that two values represent the same text after
// whitespace normalization. See IsSameString.
type stringChecker struct {
	argNames
}
// Check implements Checker.Check by checking that got and args[0] represent
// the same normalized text (whitespace etc. trimmed).
func (c *stringChecker) Check(got interface{}, args []interface{}, note func(key string, value interface{})) (err error) {
	s1 := cast.ToString(got)
	s2 := cast.ToString(args[0])

	// Fast path: exact match, no normalization needed.
	if s1 == s2 {
		return nil
	}

	// Retry after trimming surrounding/per-line whitespace.
	s1 = normalizeString(s1)
	s2 = normalizeString(s2)
	if s1 == s2 {
		return nil
	}

	return fmt.Errorf("values are not the same text: %s", htesting.DiffStrings(s1, s2))
}
// normalizeString trims surrounding whitespace from s and from each of its
// lines, so that differently indented texts compare equal.
func normalizeString(s string) string {
	parts := strings.Split(strings.TrimSpace(s), "\n")
	out := make([]string, len(parts))
	for i, ln := range parts {
		out[i] = strings.TrimSpace(ln)
	}
	return strings.Join(out, "\n")
}
// DeepAllowUnexported creates an option to allow compare of unexported types
// in the given list of types.
// see https://github.com/google/go-cmp/issues/40#issuecomment-328615283

View file

@ -56,3 +56,33 @@ var rnd = rand.New(rand.NewSource(time.Now().UnixNano()))
func RandIntn(n int) int {
return rnd.Intn(n)
}
// DiffStringSlices returns the difference between two string slices.
// Useful in tests.
// See:
// http://stackoverflow.com/questions/19374219/how-to-find-the-difference-between-two-slices-of-strings-in-golang
func DiffStringSlices(slice1 []string, slice2 []string) []string {
	counts := map[string]int{}

	// Everything in slice1 gets a count of exactly 1.
	for _, v := range slice1 {
		counts[v] = 1
	}
	// Each occurrence in slice2 bumps the count.
	for _, v := range slice2 {
		counts[v]++
	}

	// A final count of 1 means the value was in only one of the slices.
	diffStr := []string{}
	for k, cnt := range counts {
		if cnt == 1 {
			diffStr = append(diffStr, k)
		}
	}

	return diffStr
}
// DiffStrings splits the strings into fields and runs it into DiffStringSlices.
// Useful for tests.
func DiffStrings(s1, s2 string) []string {
	f1, f2 := strings.Fields(s1), strings.Fields(s2)
	return DiffStringSlices(f1, f2)
}

View file

@ -80,7 +80,8 @@ func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs {
// NewBaseFileDecorator decorates the given Fs to provide the real filename
// and an Opener func.
func NewBaseFileDecorator(fs afero.Fs) afero.Fs {
func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {
@ -120,7 +121,14 @@ func NewBaseFileDecorator(fs afero.Fs) afero.Fs {
return ffs.open(filename)
}
return decorateFileInfo(fi, ffs, opener, filename, "", meta), nil
fim := decorateFileInfo(fi, ffs, opener, filename, "", meta)
for _, cb := range callbacks {
cb(fim)
}
return fim, nil
}
ffs.decorate = decorator

View file

@ -39,6 +39,7 @@ const (
metaKeyBaseDir = "baseDir" // Abs base directory of source file.
metaKeyMountRoot = "mountRoot"
metaKeyModule = "module"
metaKeyOriginalFilename = "originalFilename"
metaKeyName = "name"
metaKeyPath = "path"
@ -100,10 +101,10 @@ func (f FileMeta) Name() string {
return f.stringV(metaKeyName)
}
func (f FileMeta) Classifier() string {
c := f.stringV(metaKeyClassifier)
if c != "" {
return c
func (f FileMeta) Classifier() files.ContentClass {
c, found := f[metaKeyClassifier]
if found {
return c.(files.ContentClass)
}
return files.ContentClassFile // For sorting
@ -131,6 +132,10 @@ func (f FileMeta) MountRoot() string {
return f.stringV(metaKeyMountRoot)
}
func (f FileMeta) Module() string {
return f.stringV(metaKeyModule)
}
func (f FileMeta) Weight() int {
return f.GetInt(metaKeyWeight)
}

View file

@ -49,14 +49,20 @@ func IsContentExt(ext string) bool {
return contentFileExtensionsSet[ext]
}
type ContentClass string
const (
ContentClassLeaf = "leaf"
ContentClassBranch = "branch"
ContentClassFile = "zfile" // Sort below
ContentClassContent = "zcontent"
ContentClassLeaf ContentClass = "leaf"
ContentClassBranch ContentClass = "branch"
ContentClassFile ContentClass = "zfile" // Sort below
ContentClassContent ContentClass = "zcontent"
)
func ClassifyContentFile(filename string) string {
// IsBundle reports whether c classifies a page bundle (leaf or branch).
func (c ContentClass) IsBundle() bool {
	switch c {
	case ContentClassLeaf, ContentClassBranch:
		return true
	default:
		return false
	}
}
func ClassifyContentFile(filename string) ContentClass {
if !IsContentFile(filename) {
return ContentClassFile
}

View file

@ -185,7 +185,7 @@ func (fs *FilterFs) Open(name string) (afero.File, error) {
}
func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
panic("not implemented")
return fs.fs.Open(name)
}
func (fs *FilterFs) ReadDir(name string) ([]os.FileInfo, error) {

View file

@ -65,6 +65,7 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
rm.Meta[metaKeyBaseDir] = rm.ToBasedir
rm.Meta[metaKeyMountRoot] = rm.path
rm.Meta[metaKeyModule] = rm.Module
meta := copyFileMeta(rm.Meta)
@ -121,6 +122,7 @@ type RootMapping struct {
From string // The virtual mount.
To string // The source directory or file.
ToBasedir string // The base of To. May be empty if an absolute path was provided.
Module string // The module path/ID.
Meta FileMeta // File metadata (lang etc.)
fi FileMetaInfo

View file

@ -17,7 +17,6 @@ import (
"bytes"
"errors"
"fmt"
"html/template"
"io"
"path"
"path/filepath"
@ -32,8 +31,6 @@ import (
"github.com/gohugoio/hugo/tpl"
)
var defaultAliasTemplates *template.Template
type aliasHandler struct {
t tpl.TemplateHandler
log *loggers.Logger

View file

@ -17,6 +17,7 @@ import (
"bytes"
"fmt"
"path"
"strings"
"testing"
qt "github.com/frankban/quicktest"
@ -60,29 +61,33 @@ func TestCascade(t *testing.T) {
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", `
12|taxonomy|categories/cool/_index.md|Cascade Category|cat.png|categories|HTML-|
12|taxonomy|categories/catsect1|catsect1|cat.png|categories|HTML-|
12|taxonomy|categories/funny|funny|cat.png|categories|HTML-|
12|taxonomyTerm|categories/_index.md|My Categories|cat.png|categories|HTML-|
32|taxonomy|categories/sad/_index.md|Cascade Category|sad.png|categories|HTML-|
42|taxonomy|tags/blue|blue|home.png|tags|HTML-|
42|section|sect3|Cascade Home|home.png|sect3|HTML-|
42|taxonomyTerm|tags|Cascade Home|home.png|tags|HTML-|
42|page|bundle1/index.md|Cascade Home|home.png|page|HTML-|
42|page|p2.md|Cascade Home|home.png|page|HTML-|
42|page|sect2/p2.md|Cascade Home|home.png|sect2|HTML-|
42|page|sect3/p1.md|Cascade Home|home.png|sect3|HTML-|
42|taxonomy|tags/green|green|home.png|tags|HTML-|
42|home|_index.md|Home|home.png|page|HTML-|
42|page|p1.md|p1|home.png|page|HTML-|
42|section|sect1/_index.md|Sect1|sect1.png|stype|HTML-|
42|section|sect1/s1_2/_index.md|Sect1_2|sect1.png|stype|HTML-|
42|page|sect1/s1_2/p1.md|Sect1_2_p1|sect1.png|stype|HTML-|
42|page|sect1/s1_2/p2.md|Sect1_2_p2|sect1.png|stype|HTML-|
42|section|sect2/_index.md|Sect2|home.png|sect2|HTML-|
42|page|sect2/p1.md|Sect2_p1|home.png|sect2|HTML-|
52|page|sect4/p1.md|Cascade Home|home.png|sect4|RSS-|
52|section|sect4/_index.md|Sect4|home.png|sect4|RSS-|
12|taxonomy|categories/cool/_index.md|Cascade Category|cat.png|categories|HTML-|
12|taxonomy|categories/catsect1|catsect1|cat.png|categories|HTML-|
12|taxonomy|categories/funny|funny|cat.png|categories|HTML-|
12|taxonomyTerm|categories/_index.md|My Categories|cat.png|categories|HTML-|
32|taxonomy|categories/sad/_index.md|Cascade Category|sad.png|categories|HTML-|
42|taxonomy|tags/blue|blue|home.png|tags|HTML-|
42|taxonomyTerm|tags|Cascade Home|home.png|tags|HTML-|
42|section|sectnocontent|Cascade Home|home.png|sectnocontent|HTML-|
42|section|sect3|Cascade Home|home.png|sect3|HTML-|
42|page|bundle1/index.md|Cascade Home|home.png|page|HTML-|
42|page|p2.md|Cascade Home|home.png|page|HTML-|
42|page|sect2/p2.md|Cascade Home|home.png|sect2|HTML-|
42|page|sect3/nofrontmatter.md|Cascade Home|home.png|sect3|HTML-|
42|page|sect3/p1.md|Cascade Home|home.png|sect3|HTML-|
42|page|sectnocontent/p1.md|Cascade Home|home.png|sectnocontent|HTML-|
42|section|sectnofrontmatter/_index.md|Cascade Home|home.png|sectnofrontmatter|HTML-|
42|taxonomy|tags/green|green|home.png|tags|HTML-|
42|home|_index.md|Home|home.png|page|HTML-|
42|page|p1.md|p1|home.png|page|HTML-|
42|section|sect1/_index.md|Sect1|sect1.png|stype|HTML-|
42|section|sect1/s1_2/_index.md|Sect1_2|sect1.png|stype|HTML-|
42|page|sect1/s1_2/p1.md|Sect1_2_p1|sect1.png|stype|HTML-|
42|page|sect1/s1_2/p2.md|Sect1_2_p2|sect1.png|stype|HTML-|
42|section|sect2/_index.md|Sect2|home.png|sect2|HTML-|
42|page|sect2/p1.md|Sect2_p1|home.png|sect2|HTML-|
52|page|sect4/p1.md|Cascade Home|home.png|sect4|RSS-|
52|section|sect4/_index.md|Sect4|home.png|sect4|RSS-|
`)
// Check that type set in cascade gets the correct layout.
@ -106,43 +111,131 @@ func TestCascadeEdit(t *testing.T) {
title: P1
---
`
b := newTestSitesBuilder(t).Running()
b.WithTemplatesAdded("_default/single.html", `Banner: {{ .Params.banner }}|Layout: {{ .Layout }}|Type: {{ .Type }}|Content: {{ .Content }}`)
b.WithContent("post/_index.md", `
indexContentNoCascade := `
---
title: Post
title: Home
---
`
indexContentCascade := `
---
title: Section
cascade:
banner: post.jpg
layout: postlayout
type: posttype
---
`)
`
b.WithContent("post/dir/_index.md", `
---
title: Dir
---
`, "post/dir/p1.md", p1Content)
b.Build(BuildCfg{})
layout := `Banner: {{ .Params.banner }}|Layout: {{ .Layout }}|Type: {{ .Type }}|Content: {{ .Content }}`
assert := func() {
b.Helper()
b.AssertFileContent("public/post/dir/p1/index.html",
`Banner: post.jpg|`,
`Layout: postlayout`,
`Type: posttype`,
)
newSite := func(t *testing.T, cascade bool) *sitesBuilder {
b := newTestSitesBuilder(t).Running()
b.WithTemplates("_default/single.html", layout)
b.WithTemplates("_default/list.html", layout)
if cascade {
b.WithContent("post/_index.md", indexContentCascade)
} else {
b.WithContent("post/_index.md", indexContentNoCascade)
}
b.WithContent("post/dir/p1.md", p1Content)
return b
}
assert()
t.Run("Edit descendant", func(t *testing.T) {
t.Parallel()
b.EditFiles("content/post/dir/p1.md", p1Content+"\ncontent edit")
b.Build(BuildCfg{})
b := newSite(t, true)
b.Build(BuildCfg{})
assert()
b.AssertFileContent("public/post/dir/p1/index.html",
`content edit`,
)
assert := func() {
b.Helper()
b.AssertFileContent("public/post/dir/p1/index.html",
`Banner: post.jpg|`,
`Layout: postlayout`,
`Type: posttype`,
)
}
assert()
b.EditFiles("content/post/dir/p1.md", p1Content+"\ncontent edit")
b.Build(BuildCfg{})
assert()
b.AssertFileContent("public/post/dir/p1/index.html",
`content edit
Banner: post.jpg`,
)
})
t.Run("Edit ancestor", func(t *testing.T) {
t.Parallel()
b := newSite(t, true)
b.Build(BuildCfg{})
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|Content:`)
b.EditFiles("content/post/_index.md", strings.Replace(indexContentCascade, "post.jpg", "edit.jpg", 1))
b.Build(BuildCfg{})
b.AssertFileContent("public/post/index.html", `Banner: edit.jpg|Layout: postlayout|Type: posttype|`)
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: edit.jpg|Layout: postlayout|Type: posttype|`)
})
t.Run("Edit ancestor, add cascade", func(t *testing.T) {
t.Parallel()
b := newSite(t, true)
b.Build(BuildCfg{})
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg`)
b.EditFiles("content/post/_index.md", indexContentCascade)
b.Build(BuildCfg{})
b.AssertFileContent("public/post/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|`)
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|`)
})
t.Run("Edit ancestor, remove cascade", func(t *testing.T) {
t.Parallel()
b := newSite(t, false)
b.Build(BuildCfg{})
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: |Layout: |`)
b.EditFiles("content/post/_index.md", indexContentNoCascade)
b.Build(BuildCfg{})
b.AssertFileContent("public/post/index.html", `Banner: |Layout: |Type: post|`)
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: |Layout: |`)
})
t.Run("Edit ancestor, content only", func(t *testing.T) {
t.Parallel()
b := newSite(t, true)
b.Build(BuildCfg{})
b.EditFiles("content/post/_index.md", indexContentCascade+"\ncontent edit")
counters := &testCounters{}
b.Build(BuildCfg{testCounters: counters})
// As we only changed the content, not the cascade front matter, make
// sure that only the home page is re-rendered.
b.Assert(int(counters.contentRenderCounter), qt.Equals, 1)
b.AssertFileContent("public/post/index.html", `Banner: post.jpg|Layout: postlayout|Type: posttype|Content: <p>content edit</p>`)
b.AssertFileContent("public/post/dir/p1/index.html", `Banner: post.jpg|Layout: postlayout|`)
})
}
func newCascadeTestBuilder(t testing.TB, langs []string) *sitesBuilder {
@ -247,6 +340,12 @@ defaultContentLanguageInSubDir = false
}),
"sect2/p2.md", p(map[string]interface{}{}),
"sect3/p1.md", p(map[string]interface{}{}),
// No front matter, see #6855
"sect3/nofrontmatter.md", `**Hello**`,
"sectnocontent/p1.md", `**Hello**`,
"sectnofrontmatter/_index.md", `**Hello**`,
"sect4/_index.md", p(map[string]interface{}{
"title": "Sect4",
"cascade": map[string]interface{}{

971
hugolib/content_map.go Normal file
View file

@ -0,0 +1,971 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path"
"path/filepath"
"strings"
"sync"
"github.com/gohugoio/hugo/resources/page"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs"
radix "github.com/armon/go-radix"
)
// We store the branch nodes in either the `sections` or `taxonomies` tree
// with their path as a key; Unix style slashes, a leading slash but no
// trailing slash.
//
// E.g. "/blog" or "/categories/funny"
//
// Pages that belongs to a section are stored in the `pages` tree below
// the section name and a branch separator, e.g. "/blog__hb_". A page is
// given a key using the path below the section and the base filename with no extension
// with a leaf separator added.
//
// For bundled pages (/mybundle/index.md), we use the folder name.
//
// An example of a full page key would be "/blog__hb_/page1__hl_"
//
// Bundled resources are stored in the `resources` having their path prefixed
// with the bundle they belong to, e.g.
// "/blog__hb_/bundle__hl_data.json".
//
// The weighted taxonomy entries extracted from page front matter are stored in
// the `taxonomyEntries` tree below /plural/term/page-key, e.g.
// "/categories/funny/blog__hb_/bundle__hl_".
// Tree key separators: the branch (section) part of a key ends in
// cmBranchSeparator and the leaf (page) part ends in cmLeafSeparator,
// e.g. "/blog__hb_/page1__hl_".
const (
	cmBranchSeparator = "__hb_"
	cmLeafSeparator   = "__hl_"
)
// ambigousContentNode is a sentinel used to mark ambiguous keys in reverse
// index lookups, i.e. when the same lowercased key maps to more than one node.
var ambigousContentNode = &contentNode{}
// newContentMap creates a contentMap with one radix tree per content kind
// (pages, sections, taxonomies, taxonomy entries and bundled resources),
// the tree groupings used for iteration, and the reverse index used to
// look pages up by key.
func newContentMap(cfg contentMapConfig) *contentMap {
	m := &contentMap{
		cfg:             &cfg,
		pages:           &contentTree{Name: "pages", Tree: radix.New()},
		sections:        &contentTree{Name: "sections", Tree: radix.New()},
		taxonomies:      &contentTree{Name: "taxonomies", Tree: radix.New()},
		taxonomyEntries: &contentTree{Name: "taxonomyEntries", Tree: radix.New()},
		resources:       &contentTree{Name: "resources", Tree: radix.New()},
	}

	// Trees whose nodes are rendered as pages.
	m.pageTrees = []*contentTree{
		m.pages, m.sections, m.taxonomies,
	}

	// Trees that may hold page bundles and their resources.
	m.bundleTrees = []*contentTree{
		m.pages, m.sections, m.taxonomies, m.resources,
	}

	// Trees holding branch (section-like) nodes only.
	m.branchTrees = []*contentTree{
		m.sections, m.taxonomies,
	}

	// addToReverseMap inserts n under the lowercased key k. If a different
	// node already owns the key, the entry is replaced with the
	// ambigousContentNode sentinel so lookups can detect the collision.
	addToReverseMap := func(k string, n *contentNode, m map[interface{}]*contentNode) {
		k = strings.ToLower(k)
		existing, found := m[k]
		if found && existing != ambigousContentNode {
			m[k] = ambigousContentNode
		} else if !found {
			m[k] = n
		}
	}

	m.pageReverseIndex = &contentTreeReverseIndex{
		t: []*contentTree{m.pages, m.sections, m.taxonomies},
		initFn: func(t *contentTree, m map[interface{}]*contentNode) {
			t.Walk(func(s string, v interface{}) bool {
				n := v.(*contentNode)
				if n.p != nil && !n.p.File().IsZero() {
					meta := n.p.File().FileInfo().Meta()
					if meta.Path() != meta.PathFile() {
						// Keep track of the original mount source.
						mountKey := filepath.ToSlash(filepath.Join(meta.Module(), meta.PathFile()))
						addToReverseMap(mountKey, n, m)
					}
				}
				// Also index the node by its base name with the leaf
				// separator stripped.
				k := strings.TrimSuffix(path.Base(s), cmLeafSeparator)
				addToReverseMap(k, n, m)
				return false
			})
		},
	}

	return m
}
// cmInsertKeyBuilder is a stateful helper for composing the tree keys used
// when inserting nodes and their resources into the content map.
type cmInsertKeyBuilder struct {
	m *contentMap

	// First error encountered while building; Insert/DeleteAll become
	// no-ops once set.
	err error

	// Builder state
	tree    *contentTree // Tree targeted by the next Insert/DeleteAll.
	baseKey string       // Section or page key
	key     string       // The fully composed key for the current node.
}
// ForPage positions the builder at the page key for s below the current
// branch (section or taxonomy). The receiver is a copy, so the caller's
// builder is left untouched.
func (b cmInsertKeyBuilder) ForPage(s string) *cmInsertKeyBuilder {
	prevBase := b.baseKey
	b.baseKey = s

	if !strings.HasPrefix(s, "/") {
		s = "/" + s
	}
	if prevBase != "/" {
		// Don't repeat the section path in the key.
		s = strings.TrimPrefix(s, prevBase)
	}

	switch b.tree {
	case b.m.sections:
		// Pages below a section live in the pages tree, keyed with both
		// the branch and leaf separators.
		b.tree = b.m.pages
		b.key = prevBase + cmBranchSeparator + s + cmLeafSeparator
	case b.m.taxonomies:
		b.key = path.Join(prevBase, s)
	default:
		panic("invalid state")
	}

	return &b
}
// ForResource moves the builder to target the resource with path s, owned by
// the bundle identified by the current key. Leaf bundle keys (pages tree)
// already end in the leaf separator, so s is appended directly; branch keys
// (sections/taxonomies) get the leaf separator inserted first.
// Value receiver: returns a copy, preserving the caller's builder.
func (b cmInsertKeyBuilder) ForResource(s string) *cmInsertKeyBuilder {
	// TODO2 fmt.Println("ForResource:", s, "baseKey:", b.baseKey, "key:", b.key)
	s = strings.TrimPrefix(s, "/")
	// Strip the owning bundle's path prefix so the key holds only the
	// resource's path relative to its bundle.
	s = strings.TrimPrefix(s, strings.TrimPrefix(b.baseKey, "/")+"/")

	switch b.tree {
	case b.m.pages:
		b.key = b.key + s
	case b.m.sections, b.m.taxonomies:
		b.key = b.key + cmLeafSeparator + s
	default:
		panic(fmt.Sprintf("invalid state: %#v", b.tree))
	}
	b.tree = b.m.resources
	return &b
}
// Insert stores n in the current tree under the builder's cleaned key.
// It does nothing when the builder is in an error state.
func (b *cmInsertKeyBuilder) Insert(n *contentNode) *cmInsertKeyBuilder {
	if b.err != nil {
		return b
	}
	b.tree.Insert(cleanTreeKey(b.key), n)
	return b
}
// DeleteAll removes every entry below the builder's cleaned key prefix.
// It does nothing when the builder is in an error state.
func (b *cmInsertKeyBuilder) DeleteAll() *cmInsertKeyBuilder {
	if b.err != nil {
		return b
	}
	b.tree.DeletePrefix(cleanTreeKey(b.key))
	return b
}
// WithFile positions the builder for inserting the non-bundle content file fi
// as a resource of its owning bundle. The owner is located via getBundle; if
// no bundle key is found the builder enters an error state and later
// Insert/DeleteAll calls become no-ops.
func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilder {
	b.newTopLevel()
	m := b.m
	meta := fi.Meta()
	p := cleanTreeKey(meta.Path())
	bundlePath := m.getBundleDir(meta)
	isBundle := meta.Classifier().IsBundle()
	if isBundle {
		// Bundle headers are added via AddFilesBundle, never through here.
		panic("not implemented")
	}

	p, k := b.getBundle(p)
	if k == "" {
		b.err = errors.Errorf("no bundle header found for %q", bundlePath)
		return b
	}

	// Key = owning bundle's key + file path relative to the bundle dir.
	id := k + m.reduceKeyPart(p, fi.Meta().Path())
	b.tree = b.m.resources
	b.key = id
	b.baseKey = p

	return b
}
// WithSection resets the builder to target the sections tree, using s as
// both the base key and the current key.
func (b *cmInsertKeyBuilder) WithSection(s string) *cmInsertKeyBuilder {
	b.newTopLevel()
	b.tree = b.m.sections
	b.baseKey, b.key = s, s
	return b
}
// WithTaxonomy resets the builder to target the taxonomies tree, using s as
// both the base key and the current key.
func (b *cmInsertKeyBuilder) WithTaxonomy(s string) *cmInsertKeyBuilder {
	b.newTopLevel()
	b.tree = b.m.taxonomies
	b.baseKey, b.key = s, s
	return b
}
// getBundle gets both the key to the section and the prefix to where to store
// this page bundle and its resources. It walks the path elements between the
// section and s from the innermost outwards, returning the first existing
// page bundle found; if none exists, the section itself becomes the owner.
func (b *cmInsertKeyBuilder) getBundle(s string) (string, string) {
	m := b.m
	section, _ := m.getSection(s)

	p := s
	if section != "/" {
		p = strings.TrimPrefix(s, section)
	}

	bundlePathParts := strings.Split(p, "/")[1:]
	basePath := section + cmBranchSeparator

	// Put it into an existing bundle if found.
	for i := len(bundlePathParts) - 2; i >= 0; i-- {
		bundlePath := path.Join(bundlePathParts[:i]...)
		searchKey := basePath + "/" + bundlePath + cmLeafSeparator
		if _, found := m.pages.Get(searchKey); found {
			return section + "/" + bundlePath, searchKey
		}
	}

	// Put it into the section bundle.
	return section, section + cmLeafSeparator
}
// newTopLevel clears the current key so a fresh top-level key can be built.
func (b *cmInsertKeyBuilder) newTopLevel() {
	b.key = ""
}
// contentBundleViewInfo describes a node's place in a taxonomy view: which
// taxonomy it belongs to and, for term nodes, the term itself.
type contentBundleViewInfo struct {
	name       viewName     // The taxonomy this node belongs to.
	termKey    string       // Normalized term key; empty for the taxonomy node itself.
	termOrigin string       // The term as it appeared in the source.
	weight     int          // Taxonomy weight used for ordering.
	ref        *contentNode // The node carrying the page for this view entry.
}
// kind returns the page kind for this view node: a taxonomy (term) page when
// a term key is set, otherwise the taxonomy-term (terms list) page.
func (c *contentBundleViewInfo) kind() string {
	if c.termKey == "" {
		return page.KindTaxonomyTerm
	}
	return page.KindTaxonomy
}
// sections returns the section path for this view node: the plural taxonomy
// name, plus the term key for term pages.
func (c *contentBundleViewInfo) sections() []string {
	out := []string{c.name.plural}
	if c.kind() != page.KindTaxonomyTerm {
		out = append(out, c.termKey)
	}
	return out
}
// term returns the original spelling of the term when recorded, falling back
// to the normalized term key.
func (c *contentBundleViewInfo) term() string {
	if c.termOrigin == "" {
		return c.termKey
	}
	return c.termOrigin
}
// contentMap stores all content — pages, sections, taxonomies and their
// resources — in a set of radix trees keyed by normalized paths.
type contentMap struct {
	cfg *contentMapConfig

	// View of regular pages, sections, and taxonomies.
	pageTrees contentTrees

	// View of pages, sections, taxonomies, and resources.
	bundleTrees contentTrees

	// View of sections and taxonomies.
	branchTrees contentTrees

	// Stores page bundles keyed by its path's directory or the base filename,
	// e.g. "blog/post.md" => "/blog/post", "blog/post/index.md" => "/blog/post"
	// These are the "regular pages" and all of them are bundles.
	pages *contentTree

	// A reverse index used as a fallback in GetPage.
	// There are currently two cases where this is used:
	// 1. Short name lookups in ref/relRef, e.g. using only "mypage.md" without a path.
	// 2. Links resolved from a remounted content directory. These are restricted to the same module.
	// Both of the above cases can result in ambigous lookup errors.
	pageReverseIndex *contentTreeReverseIndex

	// Section nodes.
	sections *contentTree

	// Taxonomy nodes.
	taxonomies *contentTree

	// Pages in a taxonomy.
	taxonomyEntries *contentTree

	// Resources stored per bundle below a common prefix, e.g. "/blog/post__hb_".
	resources *contentTree
}
// AddFiles adds the given content files to the map, stopping at the first
// error encountered.
func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error {
	for i := range fis {
		if err := m.addFile(fis[i]); err != nil {
			return err
		}
	}
	return nil
}
// AddFilesBundle adds a bundle — a branch or leaf bundle header plus its
// resource files — to the map. Branch bundles become section or taxonomy
// nodes; leaf bundles become regular pages attached to their section.
func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error {
	var (
		meta       = header.Meta()
		classifier = meta.Classifier()
		isBranch   = classifier == files.ContentClassBranch
		bundlePath = m.getBundleDir(meta)

		n = m.newContentNodeFromFi(header)
		b = m.newKeyBuilder()

		section string
	)

	if isBranch {
		// Either a section or a taxonomy node.
		section = bundlePath
		if tc := m.cfg.getTaxonomyConfig(section); !tc.IsZero() {
			// The bundle path is "/<plural>/<term>"; extract the term.
			term := strings.TrimPrefix(strings.TrimPrefix(section, "/"+tc.plural), "/")

			n.viewInfo = &contentBundleViewInfo{
				name:       tc,
				termKey:    term,
				termOrigin: term,
			}

			n.viewInfo.ref = n
			b.WithTaxonomy(section).Insert(n)
		} else {
			b.WithSection(section).Insert(n)
		}
	} else {
		// A regular page. Attach it to its section.
		section, _ = m.getOrCreateSection(n, bundlePath)
		b = b.WithSection(section).ForPage(bundlePath).Insert(n)
	}

	if m.cfg.isRebuild {
		// The resource owner will be either deleted or overwritten on rebuilds,
		// but make sure we handle deletion of resources (images etc.) as well.
		b.ForResource("").DeleteAll()
	}

	for _, r := range resources {
		rb := b.ForResource(cleanTreeKey(r.Meta().Path()))
		rb.Insert(&contentNode{fi: r})
	}

	return nil
}
// CreateMissingNodes creates home, root section and taxonomy nodes that are
// referenced by content but not backed by any content file of their own.
func (m *contentMap) CreateMissingNodes() error {
	// Create missing home and root sections
	rootSections := make(map[string]interface{})
	trackRootSection := func(s string, b *contentNode) {
		parts := strings.Split(s, "/")
		if len(parts) > 2 {
			root := strings.TrimSuffix(parts[1], cmBranchSeparator)
			if root != "" {
				if _, found := rootSections[root]; !found {
					rootSections[root] = b
				}
			}
		}
	}

	// Collect root sections referenced by existing sections and pages.
	m.sections.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)

		if s == "/" {
			return false
		}

		trackRootSection(s, n)
		return false
	})

	m.pages.Walk(func(s string, v interface{}) bool {
		trackRootSection(s, v.(*contentNode))
		return false
	})

	// Always make sure home exists.
	if _, found := rootSections["/"]; !found {
		rootSections["/"] = true
	}

	for sect, v := range rootSections {
		var sectionPath string
		if n, ok := v.(*contentNode); ok && n.path != "" {
			// Derive the section's source path from the first path element
			// of the node that referenced it.
			sectionPath = n.path
			firstSlash := strings.Index(sectionPath, "/")
			if firstSlash != -1 {
				sectionPath = sectionPath[:firstSlash]
			}
		}
		sect = cleanTreeKey(sect)
		_, found := m.sections.Get(sect)
		if !found {
			m.sections.Insert(sect, &contentNode{path: sectionPath})
		}
	}

	// Create nodes for all configured taxonomies (e.g. /tags, /categories).
	for _, view := range m.cfg.taxonomyConfig {
		s := cleanTreeKey(view.plural)
		_, found := m.taxonomies.Get(s)
		if !found {
			b := &contentNode{
				viewInfo: &contentBundleViewInfo{
					name: view,
				},
			}
			b.viewInfo.ref = b
			m.taxonomies.Insert(s, b)
		}
	}

	return nil
}
// getBundleDir returns the bundle key for the given file: content files use
// their translation base name joined onto the directory, everything else
// maps to the containing directory itself.
func (m *contentMap) getBundleDir(meta hugofs.FileMeta) string {
	dir := cleanTreeKey(filepath.Dir(meta.Path()))

	if meta.Classifier() == files.ContentClassContent {
		return path.Join(dir, meta.TranslationBaseName())
	}
	return dir
}
// newContentNodeFromFi creates a contentNode backed by fi, with the source
// path normalized to forward slashes and no leading slash.
func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
	p := filepath.ToSlash(fi.Meta().Path())
	return &contentNode{
		fi:   fi,
		path: strings.TrimPrefix(p, "/"),
	}
}
// getFirstSection returns the root section for s, i.e. the matching section
// key with exactly one path element (e.g. "/blog"). It repeatedly takes the
// longest matching section prefix, stepping up one directory per iteration
// until a depth-1 key is found; returns "" if no section matches.
func (m *contentMap) getFirstSection(s string) (string, *contentNode) {
	for {
		k, v, found := m.sections.LongestPrefix(s)
		if !found {
			return "", nil
		}

		if strings.Count(k, "/") == 1 {
			// A root section ("/xxx").
			return k, v.(*contentNode)
		}

		// The match was deeper than the root; retry from the parent dir.
		s = path.Dir(s)
	}
}
// newKeyBuilder returns a fresh insert key builder for this map.
func (m *contentMap) newKeyBuilder() *cmInsertKeyBuilder {
	return &cmInsertKeyBuilder{m: m}
}
// getOrCreateSection returns the section owning the path s, creating the
// containing root section if no suitable section exists yet.
func (m *contentMap) getOrCreateSection(n *contentNode, s string) (string, *contentNode) {
	level := strings.Count(s, "/")
	k, b := m.getSection(s)

	mustCreate := false

	if k == "" {
		mustCreate = true
	} else if level > 1 && k == "/" {
		// We found the home section, but this page needs to be placed in
		// the root, e.g. "/blog", section.
		mustCreate = true
	}

	if mustCreate {
		// Use the first path element as the new root section key.
		k = s[:strings.Index(s[1:], "/")+1]
		if k == "" {
			k = "/"
		}

		b = &contentNode{
			path: n.rootSection(),
		}

		m.sections.Insert(k, b)
	}

	return k, b
}
// getPage returns the page node stored for the given section and leaf name,
// or nil when no such page exists.
func (m *contentMap) getPage(section, name string) *contentNode {
	key := section + cmBranchSeparator + "/" + name + cmLeafSeparator

	if v, found := m.pages.Get(key); found {
		return v.(*contentNode)
	}
	return nil
}
// getSection returns the closest enclosing section for s, found via the
// longest section-key prefix of s's parent directory.
func (m *contentMap) getSection(s string) (string, *contentNode) {
	k, v, found := m.sections.LongestPrefix(path.Dir(s))
	if !found {
		return "", nil
	}
	return k, v.(*contentNode)
}
// getTaxonomyParent returns the closest parent node for the taxonomy key s:
// the home section for top-level taxonomies, otherwise the longest matching
// key in the taxonomies tree, falling back to the sections tree.
func (m *contentMap) getTaxonomyParent(s string) (string, *contentNode) {
	s = path.Dir(s)

	if s == "/" {
		// A top-level taxonomy; its parent is home.
		v, found := m.sections.Get(s)
		if found {
			return s, v.(*contentNode)
		}
		return "", nil
	}

	// Prefer a taxonomy parent (e.g. the terms page) over a section.
	for _, tree := range []*contentTree{m.taxonomies, m.sections} {
		k, v, found := tree.LongestPrefix(s)
		if found {
			return k, v.(*contentNode)
		}
	}

	return "", nil
}
// addFile inserts a single content file into the map as a resource of its
// owning bundle.
func (m *contentMap) addFile(fi hugofs.FileMetaInfo) error {
	n := m.newContentNodeFromFi(fi)
	return m.newKeyBuilder().WithFile(fi).Insert(n).err
}
// cleanTreeKey normalizes a path for use as a radix tree key: slashes are
// normalized, the path is cleaned and lowercased, leading/trailing "." and
// "/" characters are stripped, and a single leading slash is prepended.
func cleanTreeKey(k string) string {
	cleaned := path.Clean(filepath.ToSlash(k))
	trimmed := strings.Trim(cleaned, "./")
	return "/" + strings.ToLower(trimmed)
}
// onSameLevel reports whether the two keys sit at the same depth (same
// number of slashes).
func (m *contentMap) onSameLevel(s1, s2 string) bool {
	c1 := strings.Count(s1, "/")
	c2 := strings.Count(s2, "/")
	return c1 == c2
}
// deleteBundleMatching deletes the first bundle whose node satisfies matches,
// checking the sections tree first, then pages, then resources.
func (m *contentMap) deleteBundleMatching(matches func(b *contentNode) bool) {
	// Check sections first
	if s := m.sections.getMatch(matches); s != "" {
		m.deleteSectionByPath(s)
		return
	}

	if s := m.pages.getMatch(matches); s != "" {
		m.deletePage(s)
		return
	}

	if s := m.resources.getMatch(matches); s != "" {
		m.resources.Delete(s)
	}
}
// Deletes any empty root section that's not backed by a content file.
func (m *contentMap) deleteOrphanSections() {
m.sections.Walk(func(s string, v interface{}) bool {
n := v.(*contentNode)
if n.fi != nil {
// Section may be empty, but is backed by a content file.
return false
}
if s == "/" || strings.Count(s, "/") > 1 {
return false
}
prefixBundle := s + cmBranchSeparator
if !(m.sections.hasPrefix(s+"/") || m.pages.hasPrefix(prefixBundle) || m.resources.hasPrefix(prefixBundle)) {
m.sections.Delete(s)
}
return false
})
}
// deletePage removes the page at s together with all of its resources.
func (m *contentMap) deletePage(s string) {
	for _, tree := range []*contentTree{m.pages, m.resources} {
		tree.DeletePrefix(s)
	}
}
// deleteSectionByPath removes the section at s, all descendant sections, and
// every page and resource stored below it (under both the branch and leaf
// separator prefixes).
func (m *contentMap) deleteSectionByPath(s string) {
	m.sections.Delete(s)
	m.sections.DeletePrefix(s + "/")

	for _, prefix := range []string{s + cmBranchSeparator, s + "/"} {
		m.pages.DeletePrefix(prefix)
	}
	for _, prefix := range []string{s + cmBranchSeparator, s + cmLeafSeparator, s + "/"} {
		m.resources.DeletePrefix(prefix)
	}
}
// deletePageByPath is unfinished: it currently only walks the pages tree and
// prints every key, and never deletes anything. The parameter s is unused.
// TODO(review): implement the deletion or remove this method — the
// fmt.Println looks like leftover debugging output.
func (m *contentMap) deletePageByPath(s string) {
	m.pages.Walk(func(s string, v interface{}) bool {
		fmt.Println("S", s)

		return false
	})
}
// deleteTaxonomy removes the taxonomy node at s along with every descendant
// taxonomy node.
func (m *contentMap) deleteTaxonomy(s string) {
	m.taxonomies.Delete(s)
	m.taxonomies.DeletePrefix(s + "/")
}
// reduceKeyPart returns filename relative to dir, slash-normalized and with
// no leading slash.
func (m *contentMap) reduceKeyPart(dir, filename string) string {
	d := strings.TrimPrefix(filepath.ToSlash(dir), "/")
	f := strings.TrimPrefix(filepath.ToSlash(filename), "/")
	return strings.TrimPrefix(strings.TrimPrefix(f, d), "/")
}
// splitKey splits a tree key into its path elements; the root key ("/" or
// empty) yields nil.
func (m *contentMap) splitKey(k string) []string {
	switch k {
	case "", "/":
		return nil
	}
	return strings.Split(k, "/")[1:]
}
// testDump renders a human-readable dump of the pages, sections and resources
// trees, including each bundle's pages and resources. Debug/test helper only.
func (m *contentMap) testDump() string {
	var sb strings.Builder

	// First: raw key listing per tree.
	for i, r := range []*contentTree{m.pages, m.sections, m.resources} {
		sb.WriteString(fmt.Sprintf("Tree %d:\n", i))
		r.Walk(func(s string, v interface{}) bool {
			sb.WriteString("\t" + s + "\n")
			return false
		})
	}

	// Then: per-node details with attached pages (for sections) and resources.
	for i, r := range []*contentTree{m.pages, m.sections} {
		r.Walk(func(s string, v interface{}) bool {
			c := v.(*contentNode)
			cpToString := func(c *contentNode) string {
				var sb strings.Builder
				if c.p != nil {
					sb.WriteString("|p:" + c.p.Title())
				}
				if c.fi != nil {
					sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path()))
				}
				return sb.String()
			}
			sb.WriteString(path.Join(m.cfg.lang, r.Name) + s + cpToString(c) + "\n")

			resourcesPrefix := s

			if i == 1 {
				// Sections: list their pages and use the leaf separator when
				// looking up the section bundle's own resources.
				resourcesPrefix += cmLeafSeparator

				m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v interface{}) bool {
					sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
					return false
				})
			}

			m.resources.WalkPrefix(resourcesPrefix, func(s string, v interface{}) bool {
				sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
				return false
			})

			return false
		})
	}

	return sb.String()
}
// contentMapConfig carries the per-site settings the content map needs when
// assembling pages.
type contentMapConfig struct {
	lang                 string     // Site language code.
	taxonomyConfig       []viewName // Configured taxonomies.
	taxonomyDisabled     bool       // Taxonomy (term) pages disabled.
	taxonomyTermDisabled bool       // Taxonomy terms list pages disabled.
	pageDisabled         bool       // Regular pages disabled.
	isRebuild            bool       // True when this is a partial rebuild.
}
// getTaxonomyConfig returns the taxonomy configuration whose plural name is
// the first path element of s (e.g. "/tags/hugo" matches "tags"). A zero
// viewName is returned when s does not belong to a configured taxonomy.
func (cfg contentMapConfig) getTaxonomyConfig(s string) (v viewName) {
	s = strings.TrimPrefix(s, "/")
	if s == "" {
		return
	}
	for _, n := range cfg.taxonomyConfig {
		// Match whole path elements only, so e.g. a section named
		// "tagsandmore" does not match the "tags" taxonomy.
		if s == n.plural || strings.HasPrefix(s, n.plural+"/") {
			return n
		}
	}

	return
}
// contentNode is a node in one of the content trees; it may carry a built
// page, taxonomy view info, and/or a backing source file.
type contentNode struct {
	p *pageState // The page built for this node; nil until assembled.

	// Set for taxonomy nodes.
	viewInfo *contentBundleViewInfo

	// Set if source is a file.
	// We will soon get other sources.
	fi hugofs.FileMetaInfo

	// The source path. Unix slashes. No leading slash.
	path string
}
// rootSection returns the first element of the node's source path, or the
// whole path when it has a single element.
func (b *contentNode) rootSection() string {
	if b.path == "" {
		return ""
	}
	if i := strings.Index(b.path, "/"); i != -1 {
		return b.path[:i]
	}
	return b.path
}
// contentTree is a named radix tree holding *contentNode values.
type contentTree struct {
	Name string
	*radix.Tree
}
type contentTrees []*contentTree
// DeletePrefix deletes every entry with the given key prefix from all trees,
// returning the total number of entries removed.
func (t contentTrees) DeletePrefix(prefix string) int {
	var count int
	for _, tree := range t {
		count += tree.DeletePrefix(prefix)
	}
	return count
}
type contentTreeNodeCallback func(s string, n *contentNode) bool
var (
	// contentTreeNoListFilter rejects nodes that should not show up in page
	// listings, and nodes that have no page assigned yet.
	contentTreeNoListFilter = func(s string, n *contentNode) bool {
		if n.p == nil {
			return true
		}
		return n.p.m.noList()
	}

	// contentTreeNoRenderFilter rejects nodes that should not be rendered,
	// and nodes that have no page assigned yet.
	contentTreeNoRenderFilter = func(s string, n *contentNode) bool {
		if n.p == nil {
			return true
		}
		return n.p.m.noRender()
	}
)
// WalkPrefixListable walks every listable node under prefix.
func (c *contentTree) WalkPrefixListable(prefix string, fn contentTreeNodeCallback) {
	c.WalkPrefixFilter(prefix, contentTreeNoListFilter, fn)
}
// WalkPrefixFilter walks nodes under prefix, skipping those rejected by
// filter; the walk stops when walkFn returns true.
func (c *contentTree) WalkPrefixFilter(prefix string, filter, walkFn contentTreeNodeCallback) {
	c.WalkPrefix(prefix, func(s string, v interface{}) bool {
		node := v.(*contentNode)
		if filter(s, node) {
			return false
		}
		return walkFn(s, node)
	})
}
// WalkListable walks every listable node in the tree.
func (c *contentTree) WalkListable(fn contentTreeNodeCallback) {
	c.WalkFilter(contentTreeNoListFilter, fn)
}
// WalkFilter walks all nodes, skipping those rejected by filter; the walk
// stops when walkFn returns true.
func (c *contentTree) WalkFilter(filter, walkFn contentTreeNodeCallback) {
	c.Walk(func(s string, v interface{}) bool {
		node := v.(*contentNode)
		if filter(s, node) {
			return false
		}
		return walkFn(s, node)
	})
}
// WalkListable walks every listable node in every tree of the collection.
func (c contentTrees) WalkListable(fn contentTreeNodeCallback) {
	for i := range c {
		c[i].WalkListable(fn)
	}
}
// WalkRenderable walks every renderable node in every tree of the collection.
func (c contentTrees) WalkRenderable(fn contentTreeNodeCallback) {
	for i := range c {
		c[i].WalkFilter(contentTreeNoRenderFilter, fn)
	}
}
// Walk visits every node in every tree; within a tree the walk stops when fn
// returns true.
func (c contentTrees) Walk(fn contentTreeNodeCallback) {
	for _, tree := range c {
		tree.Walk(func(s string, v interface{}) bool {
			return fn(s, v.(*contentNode))
		})
	}
}
// WalkPrefix visits every node under prefix in every tree; within a tree the
// walk stops when fn returns true.
func (c contentTrees) WalkPrefix(prefix string, fn contentTreeNodeCallback) {
	for _, tree := range c {
		tree.WalkPrefix(prefix, func(s string, v interface{}) bool {
			return fn(s, v.(*contentNode))
		})
	}
}
// getMatch returns the key of the first node satisfying matches, or "" when
// no node matches.
func (c *contentTree) getMatch(matches func(b *contentNode) bool) string {
	var match string

	c.Walk(func(s string, v interface{}) bool {
		node, ok := v.(*contentNode)
		if !ok || !matches(node) {
			return false
		}
		match = s
		return true
	})

	return match
}
// hasPrefix reports whether any key in the tree starts with s.
func (c *contentTree) hasPrefix(s string) bool {
	found := false
	c.Tree.WalkPrefix(s, func(string, interface{}) bool {
		found = true
		return true // first hit is enough
	})
	return found
}
// printKeys dumps every key in the tree to stdout (debug helper).
func (c *contentTree) printKeys() {
	c.Walk(func(s string, _ interface{}) bool {
		fmt.Println(s)
		return false
	})
}
// printKeysPrefix dumps every key under prefix to stdout (debug helper).
func (c *contentTree) printKeysPrefix(prefix string) {
	c.WalkPrefix(prefix, func(s string, _ interface{}) bool {
		fmt.Println(s)
		return false
	})
}
// contentTreeRef points to a node in the given tree.
type contentTreeRef struct {
	m   *pageMap     // Owning page map.
	t   *contentTree // The tree the node lives in.
	n   *contentNode // The node itself.
	key string       // The node's key in t.
}
// getCurrentSection returns this node's section: the node itself when it is
// a section, otherwise its enclosing section.
func (c *contentTreeRef) getCurrentSection() (string, *contentNode) {
	if !c.isSection() {
		return c.getSection()
	}
	return c.key, c.n
}
// isSection reports whether this ref points into the sections tree.
func (c *contentTreeRef) isSection() bool {
	return c.t == c.m.sections
}
// getSection returns the enclosing section for this ref's key.
func (c *contentTreeRef) getSection() (string, *contentNode) {
	return c.m.getSection(c.key)
}
// collectPages returns the regular pages directly below this branch,
// default-sorted.
func (c *contentTreeRef) collectPages() page.Pages {
	var pas page.Pages
	c.m.collectPages(c.key+cmBranchSeparator, func(n *contentNode) {
		pas = append(pas, n.p)
	})

	page.SortByDefault(pas)
	return pas
}
// collectPagesAndSections returns this branch's pages and child sections,
// default-sorted.
func (c *contentTreeRef) collectPagesAndSections() page.Pages {
	var pas page.Pages
	c.m.collectPagesAndSections(c.key, func(n *contentNode) {
		pas = append(pas, n.p)
	})

	page.SortByDefault(pas)
	return pas
}
// collectSections returns this branch's immediate child sections,
// default-sorted.
func (c *contentTreeRef) collectSections() page.Pages {
	var pas page.Pages
	c.m.collectSections(c.key, func(n *contentNode) {
		pas = append(pas, n.p)
	})

	page.SortByDefault(pas)
	return pas
}
// contentTreeReverseIndex is a lazily built reverse lookup over a set of
// trees; initFn decides how keys map to nodes (see pageReverseIndex).
type contentTreeReverseIndex struct {
	t      []*contentTree                                   // Trees to index.
	m      map[interface{}]*contentNode                     // The built index; nil until first Get.
	init   sync.Once                                        // Guards the one-time build.
	initFn func(*contentTree, map[interface{}]*contentNode) // Populates m from one tree.
}
// Get returns the node indexed under key, building the reverse index on
// first use.
func (c *contentTreeReverseIndex) Get(key interface{}) *contentNode {
	c.init.Do(func() {
		c.m = make(map[interface{}]*contentNode)
		for i := range c.t {
			c.initFn(c.t[i], c.m)
		}
	})
	return c.m[key]
}

998
hugolib/content_map_page.go Normal file
View file

@ -0,0 +1,998 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"context"
"fmt"
"path"
"path/filepath"
"strings"
"sync"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cast"
"github.com/gohugoio/hugo/common/para"
"github.com/pkg/errors"
)
// newPageMaps creates a pageMaps spanning every site's pageMap, sharing a
// worker pool for parallel operations.
func newPageMaps(h *HugoSites) *pageMaps {
	mps := make([]*pageMap, 0, len(h.Sites))
	for _, s := range h.Sites {
		mps = append(mps, s.pageMap)
	}
	return &pageMaps{
		workers: para.New(h.numWorkers),
		pmaps:   mps,
	}
}
// pageMap is a single site's view of the content map, adding page creation
// and assembly on top of the raw trees.
type pageMap struct {
	s *Site
	*contentMap
}
// Len returns the total number of entries across the page trees.
func (m *pageMap) Len() int {
	var total int
	for _, t := range m.contentMap.pageTrees {
		total += t.Len()
	}
	return total
}
// createMissingTaxonomyNodes creates a taxonomy term node for every term
// referenced by a page (via taxonomyEntries) that does not already exist in
// the taxonomies tree.
func (m *pageMap) createMissingTaxonomyNodes() error {
	if m.cfg.taxonomyDisabled {
		return nil
	}
	m.taxonomyEntries.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)
		vi := n.viewInfo
		// The term node's key, e.g. "/tags/hugo".
		k := cleanTreeKey(vi.name.plural + "/" + vi.termKey)
		if _, found := m.taxonomies.Get(k); !found {
			vic := &contentBundleViewInfo{
				name:       vi.name,
				termKey:    vi.termKey,
				termOrigin: vi.termOrigin,
			}
			m.taxonomies.Insert(k, &contentNode{viewInfo: vic})
		}
		return false
	})

	return nil
}
// newPageFromContentNode builds a pageState from a file-backed content node.
// owner is the bundle owner for pages bundled inside a leaf bundle (nil for
// top-level pages); parentBucket is the enclosing branch's bucket. The page's
// source is parsed eagerly; output formats and paths are set up lazily via
// ps.init.
func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapBucket, owner *pageState) (*pageState, error) {
	if n.fi == nil {
		panic("FileInfo must (currently) be set")
	}

	f, err := newFileInfo(m.s.SourceSpec, n.fi)
	if err != nil {
		return nil, err
	}

	meta := n.fi.Meta()
	content := func() (hugio.ReadSeekCloser, error) {
		return meta.Open()
	}

	bundled := owner != nil
	s := m.s

	sections := s.sectionsFromFile(f)
	kind := s.kindFromFileInfoOrSections(f, sections)
	if kind == page.KindTaxonomy {
		s.PathSpec.MakePathsSanitized(sections)
	}

	metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f}

	ps, err := newPageBase(metaProvider)
	if err != nil {
		return nil, err
	}

	if n.fi.Meta().GetBool(walkIsRootFileMetaKey) {
		// Make sure that the bundle/section we start walking from is always
		// rendered.
		// This is only relevant in server fast render mode.
		ps.forceRender = true
	}

	n.p = ps
	if ps.IsNode() {
		ps.bucket = newPageBucket(ps)
	}

	gi, err := s.h.gitInfoForPage(ps)
	if err != nil {
		return nil, errors.Wrap(err, "failed to load Git data")
	}
	ps.gitInfo = gi

	// Parse the page source up front.
	r, err := content()
	if err != nil {
		return nil, err
	}
	defer r.Close()

	parseResult, err := pageparser.Parse(
		r,
		pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji},
	)
	if err != nil {
		return nil, err
	}

	ps.pageContent = pageContent{
		source: rawPageContent{
			parsed:         parseResult,
			posMainContent: -1,
			posSummaryEnd:  -1,
			posBodyStart:   -1,
		},
	}

	ps.shortcodeState = newShortcodeHandler(ps, ps.s, nil)

	if err := ps.mapContent(parentBucket, metaProvider); err != nil {
		return nil, ps.wrapError(err)
	}

	if err := metaProvider.applyDefaultValues(n); err != nil {
		return nil, err
	}

	// Deferred: output formats, paths and content providers are created on
	// first use.
	ps.init.Add(func() (interface{}, error) {
		pp, err := newPagePaths(s, ps, metaProvider)
		if err != nil {
			return nil, err
		}

		outputFormatsForPage := ps.m.outputFormats()

		if !ps.m.noRender() {
			// Prepare output formats for all sites.
			ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
			created := make(map[string]*pageOutput)

			for i, f := range ps.s.h.renderFormats {
				if po, found := created[f.Name]; found {
					ps.pageOutputs[i] = po
					continue
				}

				_, render := outputFormatsForPage.GetByName(f.Name)
				po := newPageOutput(ps, pp, f, render)

				// Create a content provider for the first,
				// we may be able to reuse it.
				if i == 0 {
					contentProvider, err := newPageContentOutput(ps, po)
					if err != nil {
						return nil, err
					}
					po.initContentProvider(contentProvider)
				}

				ps.pageOutputs[i] = po
				created[f.Name] = po

			}
		} else if ps.m.buildConfig.PublishResources {
			// We need one output format for potential resources to publish.
			po := newPageOutput(ps, pp, outputFormatsForPage[0], false)
			contentProvider, err := newPageContentOutput(ps, po)
			if err != nil {
				return nil, err
			}
			po.initContentProvider(contentProvider)
			ps.pageOutputs = []*pageOutput{po}
		}

		if err := ps.initCommonProviders(pp); err != nil {
			return nil, err
		}

		return nil, nil
	})

	ps.parent = owner

	return ps, nil
}
// newResource creates a (non-page) bundled resource for the given file,
// published relative to its owning page for every distinct output format
// sub path.
func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {
	if owner == nil {
		panic("owner is nil")
	}
	// TODO(bep) consolidate with multihost logic + clean up
	outputFormats := owner.m.outputFormats()
	seen := make(map[string]bool)
	var targetBasePaths []string

	// Make sure bundled resources are published to all of the ouptput formats'
	// sub paths.
	for _, f := range outputFormats {
		p := f.Path
		if seen[p] {
			continue
		}
		seen[p] = true
		targetBasePaths = append(targetBasePaths, p)

	}

	meta := fim.Meta()
	r := func() (hugio.ReadSeekCloser, error) {
		return meta.Open()
	}

	// Target path relative to the owning page's directory.
	target := strings.TrimPrefix(meta.Path(), owner.File().Dir())

	return owner.s.ResourceSpec.New(
		resources.ResourceSourceDescriptor{
			TargetPaths:        owner.getTargetPaths,
			OpenReadSeekCloser: r,
			FileInfo:           fim,
			RelTargetFilename:  target,
			TargetBasePaths:    targetBasePaths,
		})
}
// createSiteTaxonomies builds the site's TaxonomyList from the taxonomies
// tree: taxonomy nodes become empty Taxonomy maps, and each term node pulls
// its weighted pages from the taxonomyEntries tree.
func (m *pageMap) createSiteTaxonomies() error {
	m.s.taxonomies = make(TaxonomyList)
	m.taxonomies.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)
		t := n.viewInfo

		viewName := t.name

		if t.termKey == "" {
			// The taxonomy node itself (e.g. /tags).
			m.s.taxonomies[viewName.plural] = make(Taxonomy)
		} else {
			// A term node: collect its entries, keyed below "<term key>/".
			taxonomy := m.s.taxonomies[viewName.plural]
			m.taxonomyEntries.WalkPrefix(s+"/", func(ss string, v interface{}) bool {
				b2 := v.(*contentNode)
				info := b2.viewInfo
				taxonomy.add(info.termKey, page.NewWeightedPage(info.weight, info.ref.p, n.p))

				return false
			})
		}

		return false
	})

	for _, taxonomy := range m.s.taxonomies {
		for _, v := range taxonomy {
			v.Sort()
		}
	}

	return nil
}
// createListAllPages returns every listable page in the page trees,
// default-sorted. Panics if any node has no page assigned.
func (m *pageMap) createListAllPages() page.Pages {
	pages := make(page.Pages, 0)

	m.contentMap.pageTrees.Walk(func(s string, n *contentNode) bool {
		if n.p == nil {
			panic(fmt.Sprintf("BUG: page not set for %q", s))
		}
		if !contentTreeNoListFilter(s, n) {
			pages = append(pages, n.p)
		}
		return false
	})

	page.SortByDefault(pages)
	return pages
}
// assemblePages creates pages for content nodes in the pages tree that do
// not yet have one, attaches them to taxonomy views, and assembles their
// resources. Sections are assembled first, since every page needs its parent
// section's bucket.
func (m *pageMap) assemblePages() error {
	m.taxonomyEntries.DeletePrefix("/")

	if err := m.assembleSections(); err != nil {
		return err
	}

	// Captured by the walk closure below; the first error aborts the walk.
	var err error

	m.pages.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)

		var shouldBuild bool

		defer func() {
			// Make sure we always rebuild the view cache.
			if shouldBuild && err == nil && n.p != nil {
				m.attachPageToViews(s, n)
			}
		}()

		if n.p != nil {
			// A rebuild
			shouldBuild = true
			return false
		}

		var parent *contentNode
		var parentBucket *pagesMapBucket

		_, parent = m.getSection(s)
		if parent == nil {
			panic(fmt.Sprintf("BUG: parent not set for %q", s))
		}
		parentBucket = parent.p.bucket

		n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
		if err != nil {
			return true
		}

		shouldBuild = !(n.p.Kind() == page.KindPage && m.cfg.pageDisabled) && m.s.shouldBuild(n.p)
		if !shouldBuild {
			m.deletePage(s)
			return false
		}

		n.p.treeRef = &contentTreeRef{
			m:   m,
			t:   m.pages,
			n:   n,
			key: s,
		}

		if err = m.assembleResources(s, n.p, parentBucket); err != nil {
			return true
		}

		return false
	})

	m.deleteOrphanSections()

	return err
}
// assembleResources builds the resources stored under the key prefix s and
// attaches them to the owning page p. Content files become bundled pages;
// other files become plain resources.
func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesMapBucket) error {
	var err error

	m.resources.WalkPrefix(s, func(s string, v interface{}) bool {
		n := v.(*contentNode)
		meta := n.fi.Meta()
		classifier := meta.Classifier()
		var r resource.Resource
		switch classifier {
		case files.ContentClassContent:
			// A content file bundled inside the page: build it as a page
			// owned by p.
			var rp *pageState
			rp, err = m.newPageFromContentNode(n, parentBucket, p)
			if err != nil {
				return true
			}
			rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.Path(), p.File().Dir()))
			r = rp

		case files.ContentClassFile:
			r, err = m.newResource(n.fi, p)
			if err != nil {
				return true
			}
		default:
			panic(fmt.Sprintf("invalid classifier: %q", classifier))
		}

		p.resources = append(p.resources, r)
		return false
	})

	return err
}
// assembleSections creates pages for section nodes (including home) that do
// not yet have one, wires up their tree refs and resources, and records the
// home page. Sections whose page should not be built are deleted afterwards.
func (m *pageMap) assembleSections() error {
	var sectionsToDelete []string
	var err error

	m.sections.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)
		var shouldBuild bool

		defer func() {
			// Make sure we always rebuild the view cache.
			if shouldBuild && err == nil && n.p != nil {
				m.attachPageToViews(s, n)
				if n.p.IsHome() {
					m.s.home = n.p
				}
			}
		}()

		sections := m.splitKey(s)

		if n.p != nil {
			// Already built (rebuild case).
			if n.p.IsHome() {
				m.s.home = n.p
			}
			shouldBuild = true
			return false
		}

		var parent *contentNode
		var parentBucket *pagesMapBucket

		if s != "/" {
			_, parent = m.getSection(s)
			if parent == nil || parent.p == nil {
				panic(fmt.Sprintf("BUG: parent not set for %q", s))
			}
		}

		if parent != nil {
			parentBucket = parent.p.bucket
		}

		kind := page.KindSection
		if s == "/" {
			kind = page.KindHome
		}

		if n.fi != nil {
			// Section backed by a content file (_index.md).
			n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
			if err != nil {
				return true
			}
		} else {
			// Synthetic section page.
			n.p = m.s.newPage(n, parentBucket, kind, "", sections...)
		}

		shouldBuild = m.s.shouldBuild(n.p)
		if !shouldBuild {
			sectionsToDelete = append(sectionsToDelete, s)
			return false
		}

		n.p.treeRef = &contentTreeRef{
			m:   m,
			t:   m.sections,
			n:   n,
			key: s,
		}

		if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil {
			return true
		}

		return false
	})

	for _, s := range sectionsToDelete {
		m.deleteSectionByPath(s)
	}

	return err
}
// assembleTaxonomies creates pages for taxonomy and term nodes that do not
// yet have one, wires up their tree refs, and assembles their resources.
// Nodes whose page should not be built are deleted afterwards.
func (m *pageMap) assembleTaxonomies() error {
	var taxonomiesToDelete []string
	var err error

	m.taxonomies.Walk(func(s string, v interface{}) bool {
		n := v.(*contentNode)

		if n.p != nil {
			// Already built (rebuild case).
			return false
		}

		kind := n.viewInfo.kind()
		sections := n.viewInfo.sections()

		_, parent := m.getTaxonomyParent(s)
		if parent == nil || parent.p == nil {
			panic(fmt.Sprintf("BUG: parent not set for %q", s))
		}
		parentBucket := parent.p.bucket

		if n.fi != nil {
			// Backed by a content file (_index.md).
			n.p, err = m.newPageFromContentNode(n, parentBucket, nil)
			if err != nil {
				return true
			}
		} else {
			// Synthetic taxonomy/term page; term pages get the term as title.
			title := ""
			if kind == page.KindTaxonomy {
				title = n.viewInfo.term()
			}
			n.p = m.s.newPage(n, parentBucket, kind, title, sections...)
		}

		if !m.s.shouldBuild(n.p) {
			taxonomiesToDelete = append(taxonomiesToDelete, s)
			return false
		}

		n.p.treeRef = &contentTreeRef{
			m:   m,
			t:   m.taxonomies,
			n:   n,
			key: s,
		}

		if err = m.assembleResources(s+cmLeafSeparator, n.p, parentBucket); err != nil {
			return true
		}

		return false
	})

	for _, s := range taxonomiesToDelete {
		m.deleteTaxonomy(s)
	}

	return err
}
// attachPageToViews registers the page at key s in every taxonomy it declares
// via front matter params, inserting one taxonomyEntries node per term.
func (m *pageMap) attachPageToViews(s string, b *contentNode) {
	if m.cfg.taxonomyDisabled {
		return
	}

	for _, viewName := range m.cfg.taxonomyConfig {
		vals := types.ToStringSlicePreserveString(getParam(b.p, viewName.plural, false))
		if vals == nil {
			continue
		}
		// Optional per-taxonomy weight param, e.g. "tags_weight".
		w := getParamToLower(b.p, viewName.plural+"_weight")
		weight, err := cast.ToIntE(w)
		if err != nil {
			m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, b.p.Path())
			// weight will equal zero, so let the flow continue
		}

		for _, v := range vals {
			termKey := m.s.getTaxonomyKey(v)

			bv := &contentNode{
				viewInfo: &contentBundleViewInfo{
					name:       viewName,
					termKey:    termKey,
					termOrigin: v,
					weight:     weight,
					ref:        b,
				},
			}

			if s == "/" {
				// To avoid getting an empty key.
				s = "home"
			}

			// Entry key: "/<plural>/<term>/<page key>".
			key := cleanTreeKey(path.Join(viewName.plural, termKey, s))
			m.taxonomyEntries.Insert(key, bv)
		}
	}
}
// collectPages invokes fn for every listable page under prefix.
func (m *pageMap) collectPages(prefix string, fn func(c *contentNode)) error {
	m.pages.WalkPrefixListable(prefix, func(_ string, n *contentNode) bool {
		fn(n)
		return false
	})

	return nil
}
// collectPagesAndSections invokes fn for every listable child section and
// page under prefix.
func (m *pageMap) collectPagesAndSections(prefix string, fn func(c *contentNode)) error {
	if err := m.collectSections(prefix, fn); err != nil {
		return err
	}
	return m.collectPages(prefix+cmBranchSeparator, fn)
}
// collectSections invokes fn for every listable section exactly one level
// below prefix (direct children only; the prefix's own node is skipped).
func (m *pageMap) collectSections(prefix string, fn func(c *contentNode)) error {
	var level int
	isHome := prefix == "/"

	if !isHome {
		level = strings.Count(prefix, "/")
	}

	return m.collectSectionsFn(prefix, func(s string, c *contentNode) bool {
		if s == prefix {
			// Skip the branch's own node.
			return false
		}

		// Only direct children: depth must be exactly one below prefix.
		if (strings.Count(s, "/") - level) != 1 {
			return false
		}

		fn(c)

		return false
	})
}
// collectSectionsFn walks listable sections under prefix (normalized to end
// with a slash), passing each key and node to fn.
func (m *pageMap) collectSectionsFn(prefix string, fn func(s string, c *contentNode) bool) error {
	if !strings.HasSuffix(prefix, "/") {
		prefix += "/"
	}

	m.sections.WalkPrefixListable(prefix, fn)

	return nil
}
// collectSectionsRecursiveIncludingSelf invokes fn for the section at prefix
// and every listable section below it.
func (m *pageMap) collectSectionsRecursiveIncludingSelf(prefix string, fn func(c *contentNode)) error {
	return m.collectSectionsFn(prefix, func(_ string, c *contentNode) bool {
		fn(c)
		return false
	})
}
// collectTaxonomies invokes fn for every listable taxonomy node under prefix.
func (m *pageMap) collectTaxonomies(prefix string, fn func(c *contentNode)) error {
	m.taxonomies.WalkPrefixListable(prefix, func(_ string, n *contentNode) bool {
		fn(n)
		return false
	})

	return nil
}
// withEveryBundlePage applies fn to every Page, including those bundled
// inside leaf bundles; the walk stops when fn returns true.
func (m *pageMap) withEveryBundlePage(fn func(p *pageState) bool) {
	m.bundleTrees.Walk(func(_ string, n *contentNode) bool {
		if n.p == nil {
			return false
		}
		return fn(n.p)
	})
}
// pageMaps holds every site's pageMap plus a worker pool used to operate on
// them in parallel.
type pageMaps struct {
	workers *para.Workers
	pmaps   []*pageMap
}
// deleteSection deletes the entire section from s in every site's map.
func (m *pageMaps) deleteSection(s string) {
	_ = m.withMaps(func(pm *pageMap) error {
		pm.deleteSectionByPath(s)
		return nil
	})
}
// AssemblePages runs the full page assembly pipeline for every site's map in
// parallel: missing nodes, pages, taxonomy nodes, sections, taxonomies, and
// finally site-level aggregates (main sections, lastmod, home dates).
func (m *pageMaps) AssemblePages() error {
	return m.withMaps(func(pm *pageMap) error {
		if err := pm.CreateMissingNodes(); err != nil {
			return err
		}

		if err := pm.assemblePages(); err != nil {
			return err
		}

		if err := pm.createMissingTaxonomyNodes(); err != nil {
			return err
		}

		// Handle any new sections created in the step above.
		if err := pm.assembleSections(); err != nil {
			return err
		}

		if err := pm.assembleTaxonomies(); err != nil {
			return err
		}

		if err := pm.createSiteTaxonomies(); err != nil {
			return err
		}

		// Aggregate dates and page counts over the section tree.
		a := (&sectionWalker{m: pm.contentMap}).applyAggregates()
		_, mainSectionsSet := pm.s.s.Info.Params()["mainsections"]
		if !mainSectionsSet && a.mainSection != "" {
			// Default mainSections to the biggest section if not configured.
			mainSections := []string{a.mainSection}
			pm.s.s.Info.Params()["mainSections"] = mainSections
			pm.s.s.Info.Params()["mainsections"] = mainSections
		}

		pm.s.lastmod = a.datesAll.Lastmod()
		if resource.IsZeroDates(pm.s.home) {
			pm.s.home.m.Dates = a.datesAll
		}

		return nil
	})
}
// walkBundles visits every node in the bundle trees of all page maps.
// Within a single map, the walk stops once fn returns true.
func (m *pageMaps) walkBundles(fn func(n *contentNode) bool) {
	_ = m.withMaps(func(pm *pageMap) error {
		visit := func(key string, node *contentNode) bool {
			return fn(node)
		}
		pm.bundleTrees.Walk(visit)
		return nil
	})
}
// walkBranchesPrefix visits every branch node whose key starts with prefix,
// across all page maps. Within a single map, the walk stops once fn
// returns true.
func (m *pageMaps) walkBranchesPrefix(prefix string, fn func(s string, n *contentNode) bool) {
	_ = m.withMaps(func(pm *pageMap) error {
		visit := func(key string, node *contentNode) bool {
			return fn(key, node)
		}
		pm.branchTrees.WalkPrefix(prefix, visit)
		return nil
	})
}
// withMaps runs fn once per page map on the shared worker pool and waits
// for all invocations to finish, returning the first error encountered.
func (m *pageMaps) withMaps(fn func(pm *pageMap) error) error {
	g, _ := m.workers.Start(context.Background())
	for i := range m.pmaps {
		// Bind the current map to a fresh variable so each closure
		// sees its own value (pre-Go 1.22 loop semantics).
		pm := m.pmaps[i]
		g.Run(func() error {
			return fn(pm)
		})
	}
	return g.Wait()
}
// pagesMapBucket holds lazily-built page collections for a branch node
// (home, section or taxonomy) in the page map. Each collection is computed
// at most once, guarded by its sync.Once.
type pagesMapBucket struct {
// Cascading front matter.
cascade maps.Params
owner *pageState // The branch node
// Regular pages in this branch (see getPages).
pagesInit sync.Once
pages page.Pages
// Pages plus child sections (see getPagesAndSections).
pagesAndSectionsInit sync.Once
pagesAndSections page.Pages
// Child sections; also reused as the cache for getTaxonomies.
sectionsInit sync.Once
sections page.Pages
}
// getPages returns the regular pages in this bucket's branch, sorted in the
// default page order. The list is built once and cached.
func (b *pagesMapBucket) getPages() page.Pages {
	b.pagesInit.Do(func() {
		pages := b.owner.treeRef.collectPages()
		page.SortByDefault(pages)
		b.pages = pages
	})
	return b.pages
}
// getPagesAndSections returns the pages and child sections of this bucket's
// branch, built once and cached. Note that, unlike getPages, the result is
// not re-sorted here.
func (b *pagesMapBucket) getPagesAndSections() page.Pages {
	b.pagesAndSectionsInit.Do(func() {
		ref := b.owner.treeRef
		b.pagesAndSections = ref.collectPagesAndSections()
	})
	return b.pagesAndSections
}
// getSections returns the child sections of this bucket's branch, built
// once and cached.
func (b *pagesMapBucket) getSections() page.Pages {
	b.sectionsInit.Do(func() {
		ref := b.owner.treeRef
		b.sections = ref.collectSections()
	})
	return b.sections
}
// getTaxonomies returns the taxonomy pages below this bucket, sorted in the
// default page order.
//
// NOTE(review): this reuses sectionsInit/sections as its cache, the same
// fields getSections uses. That only works if getSections and getTaxonomies
// are never both called on the same bucket (taxonomy branch vs. regular
// section branch) — confirm with the callers.
func (b *pagesMapBucket) getTaxonomies() page.Pages {
b.sectionsInit.Do(func() {
var pas page.Pages
ref := b.owner.treeRef
// Collect every taxonomy node directly below this branch's key.
ref.m.collectTaxonomies(ref.key+"/", func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
b.sections = pas
})
return b.sections
}
// sectionAggregate holds the running totals collected while walking the
// section tree (see sectionAggregateHandler).
type sectionAggregate struct {
// datesAll aggregates dates over the whole tree walked so far;
// datesSection is reset after every section (see handleSectionPost).
datesAll resource.Dates
datesSection resource.Dates
pageCount int
// mainSection is the root section with the most pages seen so far,
// used as the default value for "mainSections".
mainSection string
mainSectionPageCount int
}
// sectionAggregateHandler implements sectionWalkHandler, aggregating page
// counts and dates while the section tree is walked.
type sectionAggregateHandler struct {
sectionAggregate
// sectionPageCount counts the pages of the section currently being walked.
sectionPageCount int
// Current section: its content node (b) and tree key (s),
// set by handleSectionPre.
b *contentNode
s string
}
// isRootSection reports whether the current section key is a top level
// section (e.g. "/blog"), excluding the home section "/".
func (h *sectionAggregateHandler) isRootSection() bool {
	if h.s == "/" {
		return false
	}
	return strings.Count(h.s, "/") == 1
}
// handleNested folds the totals of a finished child-level handler into this
// one after its subtree has been walked.
func (h *sectionAggregateHandler) handleNested(v sectionWalkHandler) error {
nested := v.(*sectionAggregateHandler)
// Count the nested subtree's pages towards the current section...
h.sectionPageCount += nested.pageCount
// ...and add the section's running count to the overall total.
h.pageCount += h.sectionPageCount
h.datesAll.UpdateDateAndLastmodIfAfter(nested.datesAll)
h.datesSection.UpdateDateAndLastmodIfAfter(nested.datesAll)
return nil
}
// handlePage counts a page towards the current section and folds its dates
// into the running aggregates.
func (h *sectionAggregateHandler) handlePage(s string, n *contentNode) error {
	h.sectionPageCount++

	var dated resource.Dated
	switch {
	case n.p != nil:
		dated = n.p
	case n.viewInfo != nil && n.viewInfo.ref != nil:
		dated = n.viewInfo.ref.p
	default:
		// No page attached to this node; nothing to aggregate.
		return nil
	}

	h.datesAll.UpdateDateAndLastmodIfAfter(dated)
	h.datesSection.UpdateDateAndLastmodIfAfter(dated)
	return nil
}
// handleSectionPost runs after a section and its pages have been visited.
// It tracks the root section with the most pages (the default value for
// "mainSections") and backfills the section page's dates from the
// aggregated section dates if none were set.
func (h *sectionAggregateHandler) handleSectionPost() error {
if h.sectionPageCount > h.mainSectionPageCount && h.isRootSection() {
h.mainSectionPageCount = h.sectionPageCount
h.mainSection = strings.TrimPrefix(h.s, "/")
}
if resource.IsZeroDates(h.b.p) {
h.b.p.m.Dates = h.datesSection
}
// Reset the per-section aggregate for the next sibling section.
h.datesSection = resource.Dates{}
return nil
}
// handleSectionPre records the section about to be walked and resets its
// per-section page counter before any of its pages are visited.
func (h *sectionAggregateHandler) handleSectionPre(s string, b *contentNode) error {
	h.s, h.b = s, b
	h.sectionPageCount = 0
	// Fold the section page's own dates into the global aggregate.
	h.datesAll.UpdateDateAndLastmodIfAfter(b.p)
	return nil
}
// sectionWalkHandler receives callbacks while the section tree is walked;
// see sectionWalker.walkLevel for the calling order.
type sectionWalkHandler interface {
// handleNested folds a finished child-level handler into this one.
handleNested(v sectionWalkHandler) error
// handlePage is called for every page of the current section.
handlePage(s string, b *contentNode) error
// handleSectionPost is called after a section and its pages are visited.
handleSectionPost() error
// handleSectionPre is called before a section's pages are visited.
handleSectionPre(s string, b *contentNode) error
}
// sectionWalker walks a contentMap's section and taxonomy trees, recording
// the first error returned by its visitor (walks stop on error).
type sectionWalker struct {
err error
m *contentMap
}
// applyAggregates walks the whole content tree from the root, aggregating
// page counts and dates, and returns the root-level handler with the totals.
func (w *sectionWalker) applyAggregates() *sectionAggregateHandler {
	newHandler := func() sectionWalkHandler {
		return &sectionAggregateHandler{}
	}
	root := w.walkLevel("/", newHandler)
	return root.(*sectionAggregateHandler)
}
// walkLevel visits the taxonomy and section nodes directly below prefix,
// calling the visitor's pre/page/post hooks for each, and recurses one level
// at a time. Each nested level gets its own visitor (from createVisitor),
// which is handed back to the parent via handleNested. The first error from
// any hook is stored in w.err and aborts the walk.
func (w *sectionWalker) walkLevel(prefix string, createVisitor func() sectionWalkHandler) sectionWalkHandler {
level := strings.Count(prefix, "/")
visitor := createVisitor()
w.m.taxonomies.WalkPrefix(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
if currentLevel > level {
// Deeper keys are handled by the recursive calls below; keep walking.
return false
}
n := v.(*contentNode)
if w.err = visitor.handleSectionPre(s, n); w.err != nil {
return true
}
if currentLevel == 1 {
// Top-level taxonomy node (e.g. "/categories"): recurse into its terms.
nested := w.walkLevel(s+"/", createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
} else {
// Taxonomy term: aggregate over the pages assigned to this term.
w.m.taxonomyEntries.WalkPrefix(s, func(ss string, v interface{}) bool {
n := v.(*contentNode)
w.err = visitor.handlePage(ss, n)
return w.err != nil
})
}
w.err = visitor.handleSectionPost()
return w.err != nil
})
w.m.sections.WalkPrefix(prefix, func(s string, v interface{}) bool {
currentLevel := strings.Count(s, "/")
if currentLevel > level {
return false
}
n := v.(*contentNode)
if w.err = visitor.handleSectionPre(s, n); w.err != nil {
return true
}
// Visit the regular pages attached directly to this section. The branch
// separator scopes the prefix to this section's own pages.
w.m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v interface{}) bool {
w.err = visitor.handlePage(s, v.(*contentNode))
return w.err != nil
})
if w.err != nil {
return true
}
// Recurse into subsections; the home key "/" is the level we are
// already walking, so it is not recursed into.
if s != "/" {
nested := w.walkLevel(s+"/", createVisitor)
if w.err = visitor.handleNested(nested); w.err != nil {
return true
}
}
w.err = visitor.handleSectionPost()
return w.err != nil
})
return visitor
}
// viewName identifies a taxonomy view by its singular and plural names.
type viewName struct {
singular string // e.g. "category"
plural string // e.g. "categories"
}
// IsZero reports whether v is the zero value, i.e. no singular name is set.
func (v viewName) IsZero() bool {
	return len(v.singular) == 0
}

455
hugolib/content_map_test.go Normal file
View file

@ -0,0 +1,455 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path/filepath"
"strings"
"testing"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/htesting/hqt"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/afero"
qt "github.com/frankban/quicktest"
)
// BenchmarkContentMap measures content map operations on an in-memory
// filesystem populated with bundle files.
func BenchmarkContentMap(b *testing.B) {
// writeFile writes content to filename (creating parents) and returns
// its FileMetaInfo as seen through the decorated filesystem.
writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo {
c.Helper()
filename = filepath.FromSlash(filename)
c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil)
fi, err := fs.Stat(filename)
c.Assert(err, qt.IsNil)
mfi := fi.(hugofs.FileMetaInfo)
return mfi
}
createFs := func(fs afero.Fs, lang string) afero.Fs {
return hugofs.NewBaseFileDecorator(fs,
func(fi hugofs.FileMetaInfo) {
meta := fi.Meta()
// We have a more elaborate filesystem setup in the
// real flow, so simulate this here.
meta["lang"] = lang
meta["path"] = meta.Filename()
meta["classifier"] = files.ClassifyContentFile(fi.Name())
})
}
b.Run("CreateMissingNodes", func(b *testing.B) {
c := qt.New(b)
// Build all content maps outside the timed region.
b.StopTimer()
mps := make([]*contentMap, b.N)
for i := 0; i < b.N; i++ {
m := newContentMap(contentMapConfig{lang: "en"})
mps[i] = m
memfs := afero.NewMemMapFs()
fs := createFs(memfs, "en")
for i := 1; i <= 20; i++ {
c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect%d/a/index.md", i), "page")), qt.IsNil)
c.Assert(m.AddFilesBundle(writeFile(c, fs, fmt.Sprintf("sect2%d/%sindex.md", i, strings.Repeat("b/", i)), "page")), qt.IsNil)
}
}
b.StartTimer()
for i := 0; i < b.N; i++ {
m := mps[i]
c.Assert(m.CreateMissingNodes(), qt.IsNil)
// Exclude the per-iteration cleanup from the timing.
b.StopTimer()
m.pages.DeletePrefix("/")
m.sections.DeletePrefix("/")
b.StartTimer()
}
})
}
// TestContentMap verifies that files added to a contentMap end up in the
// expected trees (bundles, sections, resources), comparing testDump output
// against golden strings.
func TestContentMap(t *testing.T) {
c := qt.New(t)
// writeFile writes content to filename (creating parents) and returns
// its FileMetaInfo as seen through the decorated filesystem.
writeFile := func(c *qt.C, fs afero.Fs, filename, content string) hugofs.FileMetaInfo {
c.Helper()
filename = filepath.FromSlash(filename)
c.Assert(fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
c.Assert(afero.WriteFile(fs, filename, []byte(content), 0777), qt.IsNil)
fi, err := fs.Stat(filename)
c.Assert(err, qt.IsNil)
mfi := fi.(hugofs.FileMetaInfo)
return mfi
}
createFs := func(fs afero.Fs, lang string) afero.Fs {
return hugofs.NewBaseFileDecorator(fs,
func(fi hugofs.FileMetaInfo) {
meta := fi.Meta()
// We have a more elaborate filesystem setup in the
// real flow, so simulate this here.
meta["lang"] = lang
meta["path"] = meta.Filename()
meta["classifier"] = files.ClassifyContentFile(fi.Name())
meta["translationBaseName"] = helpers.Filename(fi.Name())
})
}
// Adding leaf bundles, a section bundle and resources, then more files
// to existing bundles.
c.Run("AddFiles", func(c *qt.C) {
memfs := afero.NewMemMapFs()
fsl := func(lang string) afero.Fs {
return createFs(memfs, lang)
}
fs := fsl("en")
header := writeFile(c, fs, "blog/a/index.md", "page")
c.Assert(header.Meta().Lang(), qt.Equals, "en")
resources := []hugofs.FileMetaInfo{
writeFile(c, fs, "blog/a/b/data.json", "data"),
writeFile(c, fs, "blog/a/logo.png", "image"),
}
m := newContentMap(contentMapConfig{lang: "en"})
c.Assert(m.AddFilesBundle(header, resources...), qt.IsNil)
c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b/c/index.md", "page")), qt.IsNil)
c.Assert(m.AddFilesBundle(
writeFile(c, fs, "blog/_index.md", "section page"),
writeFile(c, fs, "blog/sectiondata.json", "section resource"),
), qt.IsNil)
got := m.testDump()
expect := `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
Tree 1:
/blog
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_logo.png
/blog__hl_sectiondata.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
en/sections/blog|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
`
c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got))
// Add a data file to the section bundle
c.Assert(m.AddFiles(
writeFile(c, fs, "blog/sectiondata2.json", "section resource"),
), qt.IsNil)
// And then one to the leaf bundles
c.Assert(m.AddFiles(
writeFile(c, fs, "blog/a/b/data2.json", "data2"),
), qt.IsNil)
c.Assert(m.AddFiles(
writeFile(c, fs, "blog/b/c/d/data3.json", "data3"),
), qt.IsNil)
got = m.testDump()
expect = `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
Tree 1:
/blog
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_b/data2.json
/blog__hb_/a__hl_logo.png
/blog__hb_/b/c__hl_d/data3.json
/blog__hl_sectiondata.json
/blog__hl_sectiondata2.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
en/sections/blog|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- R: blog/sectiondata.json
- R: blog/sectiondata2.json
`
c.Assert(got, hqt.IsSameString, expect, qt.Commentf(got))
// Add a regular page (i.e. not a bundle)
c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/b.md", "page")), qt.IsNil)
c.Assert(m.testDump(), hqt.IsSameString, `
Tree 0:
/blog__hb_/a__hl_
/blog__hb_/b/c__hl_
/blog__hb_/b__hl_
Tree 1:
/blog
Tree 2:
/blog__hb_/a__hl_b/data.json
/blog__hb_/a__hl_b/data2.json
/blog__hb_/a__hl_logo.png
/blog__hb_/b/c__hl_d/data3.json
/blog__hl_sectiondata.json
/blog__hl_sectiondata2.json
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
- R: blog/a/b/data.json
- R: blog/a/b/data2.json
- R: blog/a/logo.png
en/pages/blog__hb_/b/c__hl_|f:blog/b/c/index.md
- R: blog/b/c/d/data3.json
en/pages/blog__hb_/b__hl_|f:blog/b.md
en/sections/blog|f:blog/_index.md
- P: blog/a/index.md
- P: blog/b/c/index.md
- P: blog/b.md
- R: blog/sectiondata.json
- R: blog/sectiondata2.json
`, qt.Commentf(m.testDump()))
})
// Missing section nodes (e.g. "/" and "/blog") should be created on demand.
c.Run("CreateMissingNodes", func(c *qt.C) {
memfs := afero.NewMemMapFs()
fsl := func(lang string) afero.Fs {
return createFs(memfs, lang)
}
fs := fsl("en")
m := newContentMap(contentMapConfig{lang: "en"})
c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/page.md", "page")), qt.IsNil)
c.Assert(m.AddFilesBundle(writeFile(c, fs, "blog/a/index.md", "page")), qt.IsNil)
c.Assert(m.AddFilesBundle(writeFile(c, fs, "bundle/index.md", "page")), qt.IsNil)
c.Assert(m.CreateMissingNodes(), qt.IsNil)
got := m.testDump()
c.Assert(got, hqt.IsSameString, `
Tree 0:
/__hb_/bundle__hl_
/blog__hb_/a__hl_
/blog__hb_/page__hl_
Tree 1:
/
/blog
Tree 2:
en/pages/__hb_/bundle__hl_|f:bundle/index.md
en/pages/blog__hb_/a__hl_|f:blog/a/index.md
en/pages/blog__hb_/page__hl_|f:blog/page.md
en/sections/
- P: bundle/index.md
en/sections/blog
- P: blog/a/index.md
- P: blog/page.md
`, qt.Commentf(got))
})
// Tree keys are normalized: trailing slashes and duplicate slashes removed.
c.Run("cleanKey", func(c *qt.C) {
for _, test := range []struct {
in string
expected string
}{
{"/a/b/", "/a/b"},
{filepath.FromSlash("/a/b/"), "/a/b"},
{"/a//b/", "/a/b"},
} {
c.Assert(cleanTreeKey(test.in), qt.Equals, test.expected)
}
})
}
// TestContentMapSite builds a full site on top of the content map and
// asserts page collections, taxonomies, section navigation and draft/build
// option handling through the rendered home page.
func TestContentMapSite(t *testing.T) {
b := newTestSitesBuilder(t)
pageTempl := `
---
title: "Page %d"
date: "2019-06-0%d"
lastMod: "2019-06-0%d"
categories: ["funny"]
---
Page content.
`
// createPage fills the template with title/date indices (lastMod = i+1).
createPage := func(i int) string {
return fmt.Sprintf(pageTempl, i, i, i+1)
}
draftTemplate := `---
title: "Draft"
draft: true
---
`
b.WithContent("_index.md", `
---
title: "Hugo Home"
cascade:
description: "Common Description"
---
Home Content.
`)
// Regular pages, a leaf bundle, a subsection and draft content.
b.WithContent("blog/page1.md", createPage(1))
b.WithContent("blog/page2.md", createPage(2))
b.WithContent("blog/page3.md", createPage(3))
b.WithContent("blog/bundle/index.md", createPage(12))
b.WithContent("blog/bundle/data.json", "data")
b.WithContent("blog/bundle/page.md", createPage(99))
b.WithContent("blog/subsection/_index.md", createPage(3))
b.WithContent("blog/subsection/subdata.json", "data")
b.WithContent("blog/subsection/page4.md", createPage(8))
b.WithContent("blog/subsection/page5.md", createPage(10))
b.WithContent("blog/subsection/draft/index.md", draftTemplate)
b.WithContent("blog/subsection/draft/data.json", "data")
b.WithContent("blog/draftsection/_index.md", draftTemplate)
b.WithContent("blog/draftsection/page/index.md", createPage(12))
b.WithContent("blog/draftsection/page/folder/data.json", "data")
b.WithContent("blog/draftsection/sub/_index.md", createPage(12))
b.WithContent("blog/draftsection/sub/page.md", createPage(13))
b.WithContent("docs/page6.md", createPage(11))
b.WithContent("tags/_index.md", createPage(32))
b.WithTemplatesAdded("layouts/index.html", `
Num Regular: {{ len .Site.RegularPages }}
Main Sections: {{ .Site.Params.mainSections }}
Pag Num Pages: {{ len .Paginator.Pages }}
{{ $home := .Site.Home }}
{{ $blog := .Site.GetPage "blog" }}
{{ $categories := .Site.GetPage "categories" }}
{{ $funny := .Site.GetPage "categories/funny" }}
{{ $blogSub := .Site.GetPage "blog/subsection" }}
{{ $page := .Site.GetPage "blog/page1" }}
{{ $page2 := .Site.GetPage "blog/page2" }}
{{ $page4 := .Site.GetPage "blog/subsection/page4" }}
{{ $bundle := .Site.GetPage "blog/bundle" }}
Home: {{ template "print-page" $home }}
Blog Section: {{ template "print-page" $blog }}
Blog Sub Section: {{ template "print-page" $blogSub }}
Page: {{ template "print-page" $page }}
Bundle: {{ template "print-page" $bundle }}
IsDescendant: true: {{ $page.IsDescendant $blog }} true: {{ $blogSub.IsDescendant $blog }} true: {{ $blog.IsDescendant $home }} false: {{ $home.IsDescendant $blog }}
IsAncestor: true: {{ $blog.IsAncestor $page }} true: {{ $home.IsAncestor $blog }} true: {{ $blog.IsAncestor $blogSub }} true: {{ $home.IsAncestor $page }} false: {{ $page.IsAncestor $blog }} false: {{ $blog.IsAncestor $home }} false: {{ $blogSub.IsAncestor $blog }}
FirstSection: {{ $blogSub.FirstSection.RelPermalink }} {{ $blog.FirstSection.RelPermalink }} {{ $home.FirstSection.RelPermalink }} {{ $page.FirstSection.RelPermalink }}
InSection: true: {{ $page.InSection $blog }} false: {{ $page.InSection $blogSub }}
Next: {{ $page2.Next.RelPermalink }}
NextInSection: {{ $page2.NextInSection.RelPermalink }}
Pages: {{ range $blog.Pages }}{{ .RelPermalink }}|{{ end }}
Sections: {{ range $home.Sections }}{{ .RelPermalink }}|{{ end }}
Categories: {{ range .Site.Taxonomies.categories }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}
Category Terms: {{ $categories.Kind}}: {{ range $categories.Data.Terms.Alphabetical }}{{ .Page.RelPermalink }}; {{ .Page.Title }}; {{ .Count }}|{{ end }}
Category Funny: {{ $funny.Kind}}; {{ $funny.Data.Term }}: {{ range $funny.Pages }}{{ .RelPermalink }};|{{ end }}
Pag Num Pages: {{ len .Paginator.Pages }}
Pag Blog Num Pages: {{ len $blog.Paginator.Pages }}
Blog Num RegularPages: {{ len $blog.RegularPages }}
Blog Num Pages: {{ len $blog.Pages }}
Draft1: {{ if (.Site.GetPage "blog/subsection/draft") }}FOUND{{ end }}|
Draft2: {{ if (.Site.GetPage "blog/draftsection") }}FOUND{{ end }}|
Draft3: {{ if (.Site.GetPage "blog/draftsection/page") }}FOUND{{ end }}|
Draft4: {{ if (.Site.GetPage "blog/draftsection/sub") }}FOUND{{ end }}|
Draft5: {{ if (.Site.GetPage "blog/draftsection/sub/page") }}FOUND{{ end }}|
{{ define "print-page" }}{{ .Title }}|{{ .RelPermalink }}|{{ .Date.Format "2006-01-02" }}|Current Section: {{ .CurrentSection.SectionsPath }}|Resources: {{ range .Resources }}{{ .ResourceType }}: {{ .RelPermalink }}|{{ end }}{{ end }}
`)
b.Build(BuildCfg{})
// Note: Draft1-5 must all be empty — drafts and their descendants are
// not part of the site.
b.AssertFileContent("public/index.html",
`
Num Regular: 7
Main Sections: [blog]
Pag Num Pages: 7
Home: Hugo Home|/|2019-06-08|Current Section: |Resources:
Blog Section: Blogs|/blog/|2019-06-08|Current Section: blog|Resources:
Blog Sub Section: Page 3|/blog/subsection/|2019-06-03|Current Section: blog/subsection|Resources: json: /blog/subsection/subdata.json|
Page: Page 1|/blog/page1/|2019-06-01|Current Section: blog|Resources:
Bundle: Page 12|/blog/bundle/|0001-01-01|Current Section: blog|Resources: json: /blog/bundle/data.json|page: |
IsDescendant: true: true true: true true: true false: false
IsAncestor: true: true true: true true: true true: true false: false false: false false: false
FirstSection: /blog/ /blog/ / /blog/
InSection: true: true false: false
Next: /blog/page3/
NextInSection: /blog/page3/
Pages: /blog/page3/|/blog/subsection/|/blog/page2/|/blog/page1/|/blog/bundle/|
Sections: /blog/|/docs/|
Categories: /categories/funny/; funny; 9|
Category Terms: taxonomyTerm: /categories/funny/; funny; 9|
Category Funny: taxonomy; funny: /blog/subsection/page4/;|/blog/page3/;|/blog/subsection/;|/blog/page2/;|/blog/page1/;|/blog/subsection/page5/;|/docs/page6/;|/blog/bundle/;|;|
Pag Num Pages: 7
Pag Blog Num Pages: 4
Blog Num RegularPages: 4
Blog Num Pages: 5
Draft1: |
Draft2: |
Draft3: |
Draft4: |
Draft5: |
`)
}

View file

@ -13,188 +13,256 @@
package hugolib
import (
"strings"
"testing"
"fmt"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/helpers"
)
func TestDisableKindsNoneDisabled(t *testing.T) {
t.Parallel()
doTestDisableKinds(t)
}
func TestDisable(t *testing.T) {
c := qt.New(t)
func TestDisableKindsSomeDisabled(t *testing.T) {
t.Parallel()
doTestDisableKinds(t, page.KindSection, kind404)
}
func TestDisableKindsOneDisabled(t *testing.T) {
t.Parallel()
for _, kind := range allKinds {
if kind == page.KindPage {
// Turning off regular page generation have some side-effects
// not handled by the assertions below (no sections), so
// skip that for now.
continue
}
doTestDisableKinds(t, kind)
}
}
func TestDisableKindsAllDisabled(t *testing.T) {
t.Parallel()
doTestDisableKinds(t, allKinds...)
}
func doTestDisableKinds(t *testing.T, disabled ...string) {
siteConfigTemplate := `
newSitesBuilder := func(c *qt.C, disableKind string) *sitesBuilder {
config := fmt.Sprintf(`
baseURL = "http://example.com/blog"
enableRobotsTXT = true
disableKinds = %s
disableKinds = [%q]
`, disableKind)
paginate = 1
defaultContentLanguage = "en"
[Taxonomies]
tag = "tags"
category = "categories"
`
pageTemplate := `---
title: "%s"
tags:
%s
categories:
- Hugo
b := newTestSitesBuilder(c)
b.WithConfigFile("toml", config).WithContent("sect/page.md", `
---
title: Page
categories: ["mycat"]
tags: ["mytag"]
---
# Doc
`
disabledStr := "[]"
`, "sect/no-list.md", `
---
title: No List
_build:
list: false
---
`, "sect/no-render.md", `
---
title: No List
_build:
render: false
---
`, "sect/no-publishresources/index.md", `
---
title: No Publish Resources
_build:
publishResources: false
---
`, "sect/headlessbundle/index.md", `
---
title: Headless
headless: true
---
`)
b.WithSourceFile("content/sect/headlessbundle/data.json", "DATA")
b.WithSourceFile("content/sect/no-publishresources/data.json", "DATA")
return b
if len(disabled) > 0 {
disabledStr = strings.Replace(fmt.Sprintf("%#v", disabled), "[]string{", "[", -1)
disabledStr = strings.Replace(disabledStr, "}", "]", -1)
}
siteConfig := fmt.Sprintf(siteConfigTemplate, disabledStr)
b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
b.WithTemplates(
"index.html", "Home|{{ .Title }}|{{ .Content }}",
"_default/single.html", "Single|{{ .Title }}|{{ .Content }}",
"_default/list.html", "List|{{ .Title }}|{{ .Content }}",
"_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
"layouts/404.html", "Page Not Found",
)
b.WithContent(
"sect/p1.md", fmt.Sprintf(pageTemplate, "P1", "- tag1"),
"categories/_index.md", newTestPage("Category Terms", "2017-01-01", 10),
"tags/tag1/_index.md", newTestPage("Tag1 List", "2017-01-01", 10),
)
b.Build(BuildCfg{})
h := b.H
assertDisabledKinds(b, h.Sites[0], disabled...)
}
func assertDisabledKinds(b *sitesBuilder, s *Site, disabled ...string) {
assertDisabledKind(b,
func(isDisabled bool) bool {
if isDisabled {
return len(s.RegularPages()) == 0
}
return len(s.RegularPages()) > 0
}, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindHome)
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindHome, "public/index.html", "Home")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindSection, "sect")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindSection, "public/sect/index.html", "Sects")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomy, "tags", "tag1")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomyTerm, "tags")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomyTerm, "categories")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomy, "categories", "hugo")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
// The below have no page in any collection.
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindRobotsTXT, "public/robots.txt", "User-agent")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kind404, "public/404.html", "Page Not Found")
}
func assertDisabledKind(b *sitesBuilder, kindAssert func(bool) bool, disabled []string, kind, path, matcher string) {
isDisabled := stringSliceContains(kind, disabled...)
b.Assert(kindAssert(isDisabled), qt.Equals, true)
if kind == kindRSS && !isDisabled {
// If the home page is also disabled, there is not RSS to look for.
if stringSliceContains(page.KindHome, disabled...) {
isDisabled = true
}
}
if isDisabled {
// Path should not exist
fileExists, err := helpers.Exists(path, b.Fs.Destination)
getPage := func(b *sitesBuilder, ref string) page.Page {
b.Helper()
p, err := b.H.Sites[0].getPageNew(nil, ref)
b.Assert(err, qt.IsNil)
b.Assert(fileExists, qt.Equals, false)
} else {
b.AssertFileContent(path, matcher)
return p
}
getPageInSitePages := func(b *sitesBuilder, ref string) page.Page {
b.Helper()
for _, pages := range []page.Pages{b.H.Sites[0].Pages(), b.H.Sites[0].RegularPages()} {
for _, p := range pages {
if ref == p.(*pageState).sourceRef() {
return p
}
}
}
return nil
}
getPageInPagePages := func(p page.Page, ref string) page.Page {
for _, pages := range []page.Pages{p.Pages(), p.RegularPages(), p.Sections()} {
for _, p := range pages {
if ref == p.(*pageState).sourceRef() {
return p
}
}
}
return nil
}
disableKind := page.KindPage
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
s := b.H.Sites[0]
b.Assert(getPage(b, "/sect/page.md"), qt.IsNil)
b.Assert(b.CheckExists("public/sect/page/index.html"), qt.Equals, false)
b.Assert(getPageInSitePages(b, "/sect/page.md"), qt.IsNil)
b.Assert(getPageInPagePages(getPage(b, "/"), "/sect/page.md"), qt.IsNil)
// Also check the side effects
b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, false)
b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 0)
})
disableKind = page.KindTaxonomy
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
s := b.H.Sites[0]
b.Assert(b.CheckExists("public/categories/index.html"), qt.Equals, true)
b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, false)
b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 0)
b.Assert(getPage(b, "/categories"), qt.Not(qt.IsNil))
b.Assert(getPage(b, "/categories/mycat"), qt.IsNil)
})
disableKind = page.KindTaxonomyTerm
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
s := b.H.Sites[0]
b.Assert(b.CheckExists("public/categories/mycat/index.html"), qt.Equals, true)
b.Assert(b.CheckExists("public/categories/index.html"), qt.Equals, false)
b.Assert(len(s.Taxonomies()["categories"]), qt.Equals, 1)
b.Assert(getPage(b, "/categories/mycat"), qt.Not(qt.IsNil))
categories := getPage(b, "/categories")
b.Assert(categories, qt.Not(qt.IsNil))
b.Assert(categories.RelPermalink(), qt.Equals, "")
b.Assert(getPageInSitePages(b, "/categories"), qt.IsNil)
b.Assert(getPageInPagePages(getPage(b, "/"), "/categories"), qt.IsNil)
})
disableKind = page.KindHome
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/index.html"), qt.Equals, false)
home := getPage(b, "/")
b.Assert(home, qt.Not(qt.IsNil))
b.Assert(home.RelPermalink(), qt.Equals, "")
b.Assert(getPageInSitePages(b, "/"), qt.IsNil)
b.Assert(getPageInPagePages(home, "/"), qt.IsNil)
b.Assert(getPage(b, "/sect/page.md"), qt.Not(qt.IsNil))
})
disableKind = page.KindSection
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/sect/index.html"), qt.Equals, false)
sect := getPage(b, "/sect")
b.Assert(sect, qt.Not(qt.IsNil))
b.Assert(sect.RelPermalink(), qt.Equals, "")
b.Assert(getPageInSitePages(b, "/sect"), qt.IsNil)
home := getPage(b, "/")
b.Assert(getPageInPagePages(home, "/sect"), qt.IsNil)
b.Assert(home.OutputFormats(), qt.HasLen, 2)
page := getPage(b, "/sect/page.md")
b.Assert(page, qt.Not(qt.IsNil))
b.Assert(page.CurrentSection(), qt.Equals, sect)
b.Assert(getPageInPagePages(sect, "/sect/page.md"), qt.Not(qt.IsNil))
b.AssertFileContent("public/sitemap.xml", "sitemap")
b.AssertFileContent("public/index.xml", "rss")
})
disableKind = kindRSS
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/index.xml"), qt.Equals, false)
home := getPage(b, "/")
b.Assert(home.OutputFormats(), qt.HasLen, 1)
})
disableKind = kindSitemap
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/sitemap.xml"), qt.Equals, false)
})
disableKind = kind404
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/404.html"), qt.Equals, false)
})
disableKind = kindRobotsTXT
c.Run("Disable "+disableKind, func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.WithTemplatesAdded("robots.txt", "myrobots")
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/robots.txt"), qt.Equals, false)
})
c.Run("Headless bundle", func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/sect/headlessbundle/index.html"), qt.Equals, false)
b.Assert(b.CheckExists("public/sect/headlessbundle/data.json"), qt.Equals, true)
bundle := getPage(b, "/sect/headlessbundle/index.md")
b.Assert(bundle, qt.Not(qt.IsNil))
b.Assert(bundle.RelPermalink(), qt.Equals, "")
resource := bundle.Resources()[0]
b.Assert(resource.RelPermalink(), qt.Equals, "/blog/sect/headlessbundle/data.json")
b.Assert(bundle.OutputFormats(), qt.HasLen, 0)
b.Assert(bundle.AlternativeOutputFormats(), qt.HasLen, 0)
})
c.Run("Build config, no list", func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
ref := "/sect/no-list.md"
b.Assert(b.CheckExists("public/sect/no-list/index.html"), qt.Equals, true)
p := getPage(b, ref)
b.Assert(p, qt.Not(qt.IsNil))
b.Assert(p.RelPermalink(), qt.Equals, "/blog/sect/no-list/")
b.Assert(getPageInSitePages(b, ref), qt.IsNil)
sect := getPage(b, "/sect")
b.Assert(getPageInPagePages(sect, ref), qt.IsNil)
})
c.Run("Build config, no render", func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
ref := "/sect/no-render.md"
b.Assert(b.CheckExists("public/sect/no-render/index.html"), qt.Equals, false)
p := getPage(b, ref)
b.Assert(p, qt.Not(qt.IsNil))
b.Assert(p.RelPermalink(), qt.Equals, "")
b.Assert(p.OutputFormats(), qt.HasLen, 0)
b.Assert(getPageInSitePages(b, ref), qt.Not(qt.IsNil))
sect := getPage(b, "/sect")
b.Assert(getPageInPagePages(sect, ref), qt.Not(qt.IsNil))
})
c.Run("Build config, no publish resources", func(c *qt.C) {
b := newSitesBuilder(c, disableKind)
b.Build(BuildCfg{})
b.Assert(b.CheckExists("public/sect/no-publishresources/index.html"), qt.Equals, true)
b.Assert(b.CheckExists("public/sect/no-publishresources/data.json"), qt.Equals, false)
bundle := getPage(b, "/sect/no-publishresources/index.md")
b.Assert(bundle, qt.Not(qt.IsNil))
b.Assert(bundle.RelPermalink(), qt.Equals, "/blog/sect/no-publishresources/")
b.Assert(bundle.Resources(), qt.HasLen, 1)
resource := bundle.Resources()[0]
b.Assert(resource.RelPermalink(), qt.Equals, "/blog/sect/no-publishresources/data.json")
})
}

View file

@ -556,6 +556,7 @@ func (b *sourceFilesystemsBuilder) createModFs(
From: mount.Target,
To: filename,
ToBasedir: base,
Module: md.Module.Path(),
Meta: hugofs.FileMeta{
"watch": md.Watch(),
"mountWeight": mountWeight,

View file

@ -71,6 +71,11 @@ title: "Page"
module github.com/gohugoio/tests/testHugoModules
`)
b.WithSourceFile("go.sum", `
github.com/gohugoio/hugoTestModule2 v0.0.0-20200131160637-9657d7697877 h1:WLM2bQCKIWo04T6NsIWsX/Vtirhf0TnpY66xyqGlgVY=
github.com/gohugoio/hugoTestModule2 v0.0.0-20200131160637-9657d7697877/go.mod h1:CBFZS3khIAXKxReMwq0le8sEl/D8hcXmixlOHVv+Gd0=
`)
b.Build(BuildCfg{})

View file

@ -14,6 +14,7 @@
package hugolib
import (
"context"
"io"
"path/filepath"
"sort"
@ -28,8 +29,8 @@ import (
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/common/para"
"github.com/gohugoio/hugo/hugofs"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/source"
@ -77,11 +78,16 @@ type HugoSites struct {
// As loaded from the /data dirs
data map[string]interface{}
content *pageMaps
// Keeps track of bundle directories and symlinks to enable partial rebuilding.
ContentChanges *contentChangeMap
init *hugoSitesInit
workers *para.Workers
numWorkers int
*fatalErrorHandler
*testCounters
}
@ -175,7 +181,7 @@ func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
func (h *HugoSites) siteInfos() page.Sites {
infos := make(page.Sites, len(h.Sites))
for i, site := range h.Sites {
infos[i] = &site.Info
infos[i] = site.Info
}
return infos
}
@ -245,25 +251,22 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) page.Page {
for _, s := range h.Sites {
pos := s.rawAllPages.findPagePosByFilename(filename)
if pos == -1 {
continue
}
return s.rawAllPages[pos]
}
var p page.Page
// If not found already, this may be bundled in another content file.
dir := filepath.Dir(filename)
for _, s := range h.Sites {
pos := s.rawAllPages.findPagePosByFilnamePrefix(dir)
if pos == -1 {
continue
h.content.walkBundles(func(b *contentNode) bool {
if b.p == nil || b.fi == nil {
return false
}
return s.rawAllPages[pos]
}
return nil
if b.fi.Meta().Filename() == filename {
p = b.p
return true
}
return false
})
return p
}
// NewHugoSites creates a new collection of sites given the input sites, building
@ -282,11 +285,22 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
var contentChangeTracker *contentChangeMap
numWorkers := config.GetNumWorkerMultiplier()
if numWorkers > len(sites) {
numWorkers = len(sites)
}
var workers *para.Workers
if numWorkers > 1 {
workers = para.New(numWorkers)
}
h := &HugoSites{
running: cfg.Running,
multilingual: langConfig,
multihost: cfg.Cfg.GetBool("multihost"),
Sites: sites,
workers: workers,
numWorkers: numWorkers,
init: &hugoSitesInit{
data: lazy.New(),
layouts: lazy.New(),
@ -400,13 +414,27 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
return err
}
d.Site = &s.Info
d.Site = s.Info
siteConfig, err := loadSiteConfig(s.language)
if err != nil {
return errors.Wrap(err, "load site config")
}
s.siteConfigConfig = siteConfig
pm := &pageMap{
contentMap: newContentMap(contentMapConfig{
lang: s.Lang(),
taxonomyConfig: s.siteCfg.taxonomiesConfig.Values(),
taxonomyDisabled: !s.isEnabled(page.KindTaxonomy),
taxonomyTermDisabled: !s.isEnabled(page.KindTaxonomyTerm),
pageDisabled: !s.isEnabled(page.KindPage),
}),
s: s,
}
s.PageCollections = newPageCollections(pm)
s.siteRefLinker, err = newSiteRefLinker(s.language, s)
return err
}
@ -525,6 +553,26 @@ func (h *HugoSites) resetLogs() {
}
}
// withSite applies fn to every site in the collection. When a worker
// pool is configured the calls run concurrently and the first error,
// if any, is returned; otherwise the sites are visited serially and
// processing stops at the first error.
func (h *HugoSites) withSite(fn func(s *Site) error) error {
	if h.workers != nil {
		g, _ := h.workers.Start(context.Background())
		for _, site := range h.Sites {
			site := site // capture a per-iteration value for the closure
			g.Run(func() error {
				return fn(site)
			})
		}
		return g.Wait()
	}
	// No worker pool: run sequentially.
	for _, site := range h.Sites {
		if err := fn(site); err != nil {
			return err
		}
	}
	return nil
}
func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
oldLangs, _ := h.Cfg.Get("languagesSorted").(langs.Languages)
@ -567,7 +615,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
func (h *HugoSites) toSiteInfos() []*SiteInfo {
infos := make([]*SiteInfo, len(h.Sites))
for i, s := range h.Sites {
infos[i] = &s.Info
infos[i] = s.Info
}
return infos
}
@ -603,9 +651,6 @@ type BuildCfg struct {
// For regular builds, this will always return true.
// TODO(bep) rename/work this.
func (cfg *BuildCfg) shouldRender(p *pageState) bool {
if !p.render {
return false
}
if p.forceRender {
return true
}
@ -652,9 +697,21 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
}
func (h *HugoSites) removePageByFilename(filename string) {
for _, s := range h.Sites {
s.removePageFilename(filename)
}
h.content.withMaps(func(m *pageMap) error {
m.deleteBundleMatching(func(b *contentNode) bool {
if b.p == nil {
return false
}
if b.fi == nil {
return false
}
return b.fi.Meta().Filename() == filename
})
return nil
})
}
func (h *HugoSites) createPageCollections() error {
@ -683,19 +740,13 @@ func (h *HugoSites) createPageCollections() error {
}
func (s *Site) preparePagesForRender(isRenderingSite bool, idx int) error {
for _, p := range s.workAllPages {
if err := p.initOutputFormat(isRenderingSite, idx); err != nil {
return err
var err error
s.pageMap.withEveryBundlePage(func(p *pageState) bool {
if err = p.initOutputFormat(isRenderingSite, idx); err != nil {
return true
}
}
for _, p := range s.headlessPages {
if err := p.initOutputFormat(isRenderingSite, idx); err != nil {
return err
}
}
return false
})
return nil
}
@ -837,49 +888,60 @@ func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Page
}
func (h *HugoSites) resetPageState() {
for _, s := range h.Sites {
for _, p := range s.rawAllPages {
for _, po := range p.pageOutputs {
if po.cp == nil {
continue
}
po.cp.Reset()
}
h.content.walkBundles(func(n *contentNode) bool {
if n.p == nil {
return false
}
}
p := n.p
for _, po := range p.pageOutputs {
if po.cp == nil {
continue
}
po.cp.Reset()
}
return false
})
}
func (h *HugoSites) resetPageStateFromEvents(idset identity.Identities) {
for _, s := range h.Sites {
PAGES:
for _, p := range s.rawAllPages {
OUTPUTS:
for _, po := range p.pageOutputs {
if po.cp == nil {
continue
}
for id := range idset {
if po.cp.dependencyTracker.Search(id) != nil {
po.cp.Reset()
continue OUTPUTS
}
}
h.content.walkBundles(func(n *contentNode) bool {
if n.p == nil {
return false
}
p := n.p
OUTPUTS:
for _, po := range p.pageOutputs {
if po.cp == nil {
continue
}
for _, s := range p.shortcodeState.shortcodes {
for id := range idset {
if idm, ok := s.info.(identity.Manager); ok && idm.Search(id) != nil {
for _, po := range p.pageOutputs {
if po.cp != nil {
po.cp.Reset()
}
}
continue PAGES
}
for id := range idset {
if po.cp.dependencyTracker.Search(id) != nil {
po.cp.Reset()
continue OUTPUTS
}
}
}
}
if p.shortcodeState == nil {
return false
}
for _, s := range p.shortcodeState.shortcodes {
for id := range idset {
if idm, ok := s.info.(identity.Manager); ok && idm.Search(id) != nil {
for _, po := range p.pageOutputs {
if po.cp != nil {
po.cp.Reset()
}
}
return false
}
}
}
return false
})
}
// Used in partial reloading to determine if the change is in a bundle.

View file

@ -19,10 +19,7 @@ import (
"fmt"
"runtime/trace"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/output"
"golang.org/x/sync/errgroup"
"golang.org/x/sync/semaphore"
"github.com/pkg/errors"
@ -246,41 +243,7 @@ func (h *HugoSites) assemble(bcfg *BuildCfg) error {
return nil
}
numWorkers := config.GetNumWorkerMultiplier()
sem := semaphore.NewWeighted(int64(numWorkers))
g, ctx := errgroup.WithContext(context.Background())
for _, s := range h.Sites {
s := s
g.Go(func() error {
err := sem.Acquire(ctx, 1)
if err != nil {
return err
}
defer sem.Release(1)
if err := s.assemblePagesMap(s); err != nil {
return err
}
if err := s.pagesMap.assemblePageMeta(); err != nil {
return err
}
if err := s.pagesMap.assembleTaxonomies(s); err != nil {
return err
}
if err := s.createWorkAllPages(); err != nil {
return err
}
return nil
})
}
if err := g.Wait(); err != nil {
if err := h.content.AssemblePages(); err != nil {
return err
}
@ -301,8 +264,12 @@ func (h *HugoSites) render(config *BuildCfg) error {
if !config.PartialReRender {
h.renderFormats = output.Formats{}
for _, s := range h.Sites {
h.withSite(func(s *Site) error {
s.initRenderFormats()
return nil
})
for _, s := range h.Sites {
h.renderFormats = append(h.renderFormats, s.renderFormats...)
}
}

View file

@ -9,6 +9,7 @@ import (
"time"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/resources/page"
"github.com/fortytw2/leaktest"
@ -276,8 +277,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
c.Assert(len(doc4.Translations()), qt.Equals, 0)
// Taxonomies and their URLs
c.Assert(len(enSite.Taxonomies), qt.Equals, 1)
tags := enSite.Taxonomies["tags"]
c.Assert(len(enSite.Taxonomies()), qt.Equals, 1)
tags := enSite.Taxonomies()["tags"]
c.Assert(len(tags), qt.Equals, 2)
c.Assert(doc1en, qt.Equals, tags["tag1"][0].Page)
@ -357,8 +358,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
b.AssertFileContent("public/fr/sitemap.xml", "http://example.com/blog/fr/sect/doc1/")
// Check taxonomies
enTags := enSite.Taxonomies["tags"]
frTags := frSite.Taxonomies["plaques"]
enTags := enSite.Taxonomies()["tags"]
frTags := frSite.Taxonomies()["plaques"]
c.Assert(len(enTags), qt.Equals, 2, qt.Commentf("Tags in en: %v", enTags))
c.Assert(len(frTags), qt.Equals, 2, qt.Commentf("Tags in fr: %v", frTags))
c.Assert(enTags["tag1"], qt.Not(qt.IsNil))
@ -706,7 +707,7 @@ func checkContent(s *sitesBuilder, filename string, matches ...string) {
content := readDestination(s.T, s.Fs, filename)
for _, match := range matches {
if !strings.Contains(content, match) {
s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
s.Fatalf("No match for\n%q\nin content for %s\n%q\nDiff:\n%s", match, filename, content, htesting.DiffStrings(content, match))
}
}
}

View file

@ -25,13 +25,11 @@ import (
"github.com/mitchellh/mapstructure"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/hugofs/files"
@ -153,7 +151,6 @@ func (p *pageState) getPagesAndSections() page.Pages {
return b.getPagesAndSections()
}
// TODO(bep) cm add a test
func (p *pageState) RegularPages() page.Pages {
p.regularPagesInit.Do(func() {
var pages page.Pages
@ -189,13 +186,12 @@ func (p *pageState) Pages() page.Pages {
case page.KindSection, page.KindHome:
pages = p.getPagesAndSections()
case page.KindTaxonomy:
termInfo := p.bucket
plural := maps.GetString(termInfo.meta, "plural")
term := maps.GetString(termInfo.meta, "termKey")
taxonomy := p.s.Taxonomies[plural].Get(term)
b := p.treeRef.n
viewInfo := b.viewInfo
taxonomy := p.s.Taxonomies()[viewInfo.name.plural].Get(viewInfo.termKey)
pages = taxonomy.Pages()
case page.KindTaxonomyTerm:
pages = p.getPagesAndSections()
pages = p.bucket.getTaxonomies()
default:
pages = p.s.Pages()
}
@ -219,38 +215,35 @@ func (p *pageState) RawContent() string {
return string(p.source.parsed.Input()[start:])
}
// sortResources orders the page's resources stably: first by resource
// type, then by the default page sort for page resources, and by
// relative permalink for everything else. Within the same resource
// type, non-page resources sort before page resources.
func (p *pageState) sortResources() {
	sort.SliceStable(p.resources, func(i, j int) bool {
		ri, rj := p.resources[i], p.resources[j]
		if ri.ResourceType() != rj.ResourceType() {
			// The previous code returned true only for ri < rj and
			// fell through to the other criteria on ri > rj, which is
			// not a valid strict weak ordering for sort; compare the
			// types symmetrically instead.
			return ri.ResourceType() < rj.ResourceType()
		}
		p1, ok1 := ri.(page.Page)
		p2, ok2 := rj.(page.Page)
		if ok1 != ok2 {
			return ok2 // non-page resources sort before pages
		}
		if ok1 {
			return page.DefaultPageSort(p1, p2)
		}
		return ri.RelPermalink() < rj.RelPermalink()
	})
}
func (p *pageState) Resources() resource.Resources {
p.resourcesInit.Do(func() {
sort := func() {
sort.SliceStable(p.resources, func(i, j int) bool {
ri, rj := p.resources[i], p.resources[j]
if ri.ResourceType() < rj.ResourceType() {
return true
}
p1, ok1 := ri.(page.Page)
p2, ok2 := rj.(page.Page)
if ok1 != ok2 {
return ok2
}
if ok1 {
return page.DefaultPageSort(p1, p2)
}
return ri.RelPermalink() < rj.RelPermalink()
})
}
sort()
p.sortResources()
if len(p.m.resourcesMetadata) > 0 {
resources.AssignMetadata(p.m.resourcesMetadata, p.resources...)
sort()
p.sortResources()
}
})
return p.resources
}
@ -264,7 +257,7 @@ func (p *pageState) HasShortcode(name string) bool {
}
func (p *pageState) Site() page.Site {
return &p.s.Info
return p.s.Info
}
func (p *pageState) String() string {
@ -324,7 +317,7 @@ func (ps *pageState) initCommonProviders(pp pagePaths) error {
ps.OutputFormatsProvider = pp
ps.targetPathDescriptor = pp.targetPathDescriptor
ps.RefProvider = newPageRef(ps)
ps.SitesProvider = &ps.s.Info
ps.SitesProvider = ps.s.Info
return nil
}
@ -384,8 +377,8 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
section = sections[0]
}
case page.KindTaxonomyTerm, page.KindTaxonomy:
section = maps.GetString(p.bucket.meta, "singular")
b := p.getTreeRef().n
section = b.viewInfo.name.singular
default:
}
@ -641,10 +634,6 @@ func (p *pageState) getContentConverter() converter.Converter {
return p.m.contentConverter
}
func (p *pageState) addResources(r ...resource.Resource) {
p.resources = append(p.resources, r...)
}
func (p *pageState) mapContent(bucket *pagesMapBucket, meta *pageMeta) error {
s := p.shortcodeState
@ -665,6 +654,7 @@ func (p *pageState) mapContent(bucket *pagesMapBucket, meta *pageMeta) error {
// … it's safe to keep some "global" state
var currShortcode shortcode
var ordinal int
var frontMatterSet bool
Loop:
for {
@ -679,7 +669,7 @@ Loop:
p.s.BuildFlags.HasLateTemplate.CAS(false, true)
rn.AddBytes(it)
case it.IsFrontMatter():
f := metadecoders.FormatFromFrontMatterType(it.Type)
f := pageparser.FormatFromFrontMatterType(it.Type)
m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
if err != nil {
if fe, ok := err.(herrors.FileError); ok {
@ -692,6 +682,7 @@ Loop:
if err := meta.setMetadata(bucket, p, m); err != nil {
return err
}
frontMatterSet = true
next := iter.Peek()
if !next.IsDone() {
@ -779,6 +770,14 @@ Loop:
}
}
if !frontMatterSet {
// Page content without front matter. Assign default front matter from
// cascades etc.
if err := meta.setMetadata(bucket, p, nil); err != nil {
return err
}
}
p.cmap = rn
return nil
@ -856,12 +855,11 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
return err
}
if idx >= len(p.pageOutputs) {
panic(fmt.Sprintf("invalid page state for %q: got output format index %d, have %d", p.pathOrTitle(), idx, len(p.pageOutputs)))
if len(p.pageOutputs) == 1 {
idx = 0
}
p.pageOutput = p.pageOutputs[idx]
if p.pageOutput == nil {
panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx))
}
@ -901,13 +899,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
p.pageOutput.cp = cp
}
for _, r := range p.Resources().ByType(pageResourceType) {
rp := r.(*pageState)
if err := rp.shiftToOutputFormat(isRenderingSite, idx); err != nil {
return errors.Wrap(err, "failed to shift outputformat in Page resource")
}
}
return nil
}
@ -934,75 +925,6 @@ func (p *pageState) sourceRef() string {
return ""
}
// sourceRefs returns the lookup references for this page, starting
// with the primary sourceRef(). If the page is file-backed and its
// mounted path (slash-separated, rooted with "/") differs from the
// primary ref, that path is appended as a secondary ref.
func (p *pageState) sourceRefs() []string {
refs := []string{p.sourceRef()}
if !p.File().IsZero() {
meta := p.File().FileInfo().Meta()
path := meta.PathFile()
if path != "" {
ref := "/" + filepath.ToSlash(path)
// Avoid duplicating the primary ref.
if ref != refs[0] {
refs = append(refs, ref)
}
}
}
return refs
}
// pageStatePages is a slice of pages used for ordered page collections.
type pageStatePages []*pageState
// Implement sort.Interface; the ordering is the default page sort.
func (ps pageStatePages) Len() int { return len(ps) }
func (ps pageStatePages) Less(i, j int) bool { return page.DefaultPageSort(ps[i], ps[j]) }
func (ps pageStatePages) Swap(i, j int) { ps[i], ps[j] = ps[j], ps[i] }
// findPagePos returns the position in ps of the page whose source
// filename matches that of the given page, or -1 if none is found.
func (ps pageStatePages) findPagePos(page *pageState) int {
	want := page.File().Filename()
	for pos, candidate := range ps {
		if candidate.File().Filename() == want {
			return pos
		}
	}
	return -1
}
// findPagePosByFilename returns the position of the page with the
// given source filename, or -1 when no page matches.
func (ps pageStatePages) findPagePosByFilename(filename string) int {
	for idx := range ps {
		if ps[idx].File().Filename() == filename {
			return idx
		}
	}
	return -1
}
// findPagePosByFilnamePrefix returns the position of the page whose
// filename most closely matches the given prefix: among pages whose
// filename starts with prefix, the shortest filename wins (the first
// one seen on a tie). Returns -1 for an empty prefix or no match.
func (ps pageStatePages) findPagePosByFilnamePrefix(prefix string) int {
	if prefix == "" {
		return -1
	}
	best := -1
	bestDiff := -1
	for idx, p := range ps {
		name := p.File().Filename()
		if !strings.HasPrefix(name, prefix) {
			continue
		}
		diff := len(name) - len(prefix)
		if bestDiff == -1 || diff < bestDiff {
			bestDiff = diff
			best = idx
		}
	}
	return best
}
func (s *Site) sectionsFromFile(fi source.File) []string {
dirname := fi.Dir()

View file

@ -26,18 +26,40 @@ import (
"github.com/gohugoio/hugo/resources/resource"
)
// treeRefProvider is implemented by pages that know their position in
// the site's content tree.
type treeRefProvider interface {
getTreeRef() *contentTreeRef
}
// getTreeRef returns this page's reference into the content tree.
func (p *pageCommon) getTreeRef() *contentTreeRef {
return p.treeRef
}
// nextPrevProvider is implemented by pages carrying next/prev
// navigation state relative to the full page collection.
type nextPrevProvider interface {
getNextPrev() *nextPrev
}
// getNextPrev returns the page's next/prev navigation state.
func (p *pageCommon) getNextPrev() *nextPrev {
return p.posNextPrev
}
// nextPrevInSectionProvider is implemented by pages carrying next/prev
// navigation state scoped to their section.
type nextPrevInSectionProvider interface {
getNextPrevInSection() *nextPrev
}
// getNextPrevInSection returns the section-scoped next/prev state.
func (p *pageCommon) getNextPrevInSection() *nextPrev {
return p.posNextPrevSection
}
type pageCommon struct {
s *Site
m *pageMeta
bucket *pagesMapBucket
bucket *pagesMapBucket
treeRef *contentTreeRef
// Lazily initialized dependencies.
init *lazy.Init
metaInit sync.Once
metaInitFn func(bucket *pagesMapBucket) error
// All of these represents the common parts of a page.Page
maps.Scratcher
navigation.PageMenusProvider

View file

@ -16,8 +16,6 @@ package hugolib
import (
"sync"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/resources/page"
)
@ -38,26 +36,23 @@ func (p *pageData) Data() interface{} {
switch p.Kind() {
case page.KindTaxonomy:
bucket := p.bucket
meta := bucket.meta
plural := maps.GetString(meta, "plural")
singular := maps.GetString(meta, "singular")
b := p.treeRef.n
name := b.viewInfo.name
termKey := b.viewInfo.termKey
taxonomy := p.s.Taxonomies[plural].Get(maps.GetString(meta, "termKey"))
taxonomy := p.s.Taxonomies()[name.plural].Get(termKey)
p.data[singular] = taxonomy
p.data["Singular"] = meta["singular"]
p.data["Plural"] = plural
p.data["Term"] = meta["term"]
p.data[name.singular] = taxonomy
p.data["Singular"] = name.singular
p.data["Plural"] = name.plural
p.data["Term"] = b.viewInfo.term()
case page.KindTaxonomyTerm:
bucket := p.bucket
meta := bucket.meta
plural := maps.GetString(meta, "plural")
singular := maps.GetString(meta, "singular")
b := p.treeRef.n
name := b.viewInfo.name
p.data["Singular"] = singular
p.data["Plural"] = plural
p.data["Terms"] = p.s.Taxonomies[plural]
p.data["Singular"] = name.singular
p.data["Plural"] = name.plural
p.data["Terms"] = p.s.Taxonomies()[name.plural]
// keep the following just for legacy reasons
p.data["OrderedIndex"] = p.data["Terms"]
p.data["Index"] = p.data["Terms"]

View file

@ -61,7 +61,10 @@ type pageMeta struct {
// a fixed pageOutput.
standalone bool
bundleType string
draft bool // Only published when running with -D flag
buildConfig pagemeta.BuildConfig
bundleType files.ContentClass
// Params contains configuration defined in the params section of page frontmatter.
params map[string]interface{}
@ -85,8 +88,6 @@ type pageMeta struct {
aliases []string
draft bool
description string
keywords []string
@ -94,13 +95,6 @@ type pageMeta struct {
resource.Dates
// This is enabled if it is a leaf bundle (the "index.md" type) and it is marked as headless in front matter.
// Being headless means that
// 1. The page itself is not rendered to disk
// 2. It is not available in .Site.Pages etc.
// 3. But you can get it via .Site.GetPage
headless bool
// Set if this page is bundled inside another.
bundled bool
@ -160,7 +154,7 @@ func (p *pageMeta) Authors() page.AuthorList {
return al
}
func (p *pageMeta) BundleType() string {
func (p *pageMeta) BundleType() files.ContentClass {
return p.bundleType
}
@ -309,40 +303,53 @@ func (p *pageMeta) Weight() int {
return p.weight
}
func (pm *pageMeta) setMetadata(bucket *pagesMapBucket, p *pageState, frontmatter map[string]interface{}) error {
if frontmatter == nil && bucket.cascade == nil {
return errors.New("missing frontmatter data")
// mergeBucketCascades copies into dst every cascade key from src that
// dst does not already define; dst's own values always take
// precedence. dst's cascade map is created on demand.
func (pm *pageMeta) mergeBucketCascades(dst, src *pagesMapBucket) {
	if dst.cascade == nil {
		dst.cascade = make(map[string]interface{})
	}
	if src == nil || src.cascade == nil {
		return
	}
	for key, value := range src.cascade {
		if _, seen := dst.cascade[key]; !seen {
			dst.cascade[key] = value
		}
	}
}
func (pm *pageMeta) setMetadata(parentBucket *pagesMapBucket, p *pageState, frontmatter map[string]interface{}) error {
pm.params = make(maps.Params)
if frontmatter == nil && (parentBucket == nil || parentBucket.cascade == nil) {
return nil
}
if frontmatter != nil {
// Needed for case insensitive fetching of params values
maps.ToLower(frontmatter)
if p.IsNode() {
if p.bucket != nil {
// Check for any cascade define on itself.
if cv, found := frontmatter["cascade"]; found {
cvm := maps.ToStringMap(cv)
if bucket.cascade == nil {
bucket.cascade = cvm
} else {
for k, v := range cvm {
bucket.cascade[k] = v
}
}
}
}
if bucket != nil && bucket.cascade != nil {
for k, v := range bucket.cascade {
if _, found := frontmatter[k]; !found {
frontmatter[k] = v
}
p.bucket.cascade = maps.ToStringMap(cv)
}
}
} else {
frontmatter = make(map[string]interface{})
for k, v := range bucket.cascade {
}
var cascade map[string]interface{}
if p.bucket != nil {
if parentBucket != nil {
// Merge missing keys from parent into this.
pm.mergeBucketCascades(p.bucket, parentBucket)
}
cascade = p.bucket.cascade
} else if parentBucket != nil {
cascade = parentBucket.cascade
}
for k, v := range cascade {
if _, found := frontmatter[k]; !found {
frontmatter[k] = v
}
}
@ -379,6 +386,11 @@ func (pm *pageMeta) setMetadata(bucket *pagesMapBucket, p *pageState, frontmatte
p.s.Log.ERROR.Printf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
}
pm.buildConfig, err = pagemeta.DecodeBuildConfig(frontmatter["_build"])
if err != nil {
return err
}
var sitemapSet bool
var draft, published, isCJKLanguage *bool
@ -439,12 +451,15 @@ func (pm *pageMeta) setMetadata(bucket *pagesMapBucket, p *pageState, frontmatte
pm.keywords = cast.ToStringSlice(v)
pm.params[loki] = pm.keywords
case "headless":
// For now, only the leaf bundles ("index.md") can be headless (i.e. produce no output).
// We may expand on this in the future, but that gets more complex pretty fast.
if p.File().TranslationBaseName() == "index" {
pm.headless = cast.ToBool(v)
// Legacy setting for leaf bundles.
// This is since Hugo 0.63 handled in a more general way for all
// pages.
isHeadless := cast.ToBool(v)
pm.params[loki] = isHeadless
if p.File().TranslationBaseName() == "index" && isHeadless {
pm.buildConfig.List = false
pm.buildConfig.Render = false
}
pm.params[loki] = pm.headless
case "outputs":
o := cast.ToStringSlice(v)
if len(o) > 0 {
@ -594,7 +609,23 @@ func (pm *pageMeta) setMetadata(bucket *pagesMapBucket, p *pageState, frontmatte
return nil
}
func (p *pageMeta) applyDefaultValues(ps *pageState) error {
// noList reports whether this page is excluded from page collections
// (the inverse of the _build.list front matter setting).
func (p *pageMeta) noList() bool {
return !p.buildConfig.List
}
// noRender reports whether rendering is disabled for this page (the
// inverse of the _build.render front matter setting).
func (p *pageMeta) noRender() bool {
return !p.buildConfig.Render
}
func (p *pageMeta) applyDefaultValues(n *contentNode) error {
if p.buildConfig.IsZero() {
p.buildConfig, _ = pagemeta.DecodeBuildConfig(nil)
}
if !p.s.isEnabled(p.Kind()) {
(&p.buildConfig).Disable()
}
if p.markup == "" {
if !p.File().IsZero() {
// Fall back to file extension
@ -610,13 +641,21 @@ func (p *pageMeta) applyDefaultValues(ps *pageState) error {
case page.KindHome:
p.title = p.s.Info.title
case page.KindSection:
sectionName := helpers.FirstUpper(p.sections[0])
var sectionName string
if n != nil {
sectionName = n.rootSection()
} else {
sectionName = p.sections[0]
}
sectionName = helpers.FirstUpper(sectionName)
if p.s.Cfg.GetBool("pluralizeListTitles") {
p.title = inflect.Pluralize(sectionName)
} else {
p.title = sectionName
}
case page.KindTaxonomy:
// TODO(bep) improve
key := p.sections[len(p.sections)-1]
p.title = strings.Replace(p.s.titleFunc(key), "-", " ", -1)
case page.KindTaxonomyTerm:
@ -653,7 +692,7 @@ func (p *pageMeta) applyDefaultValues(ps *pageState) error {
markup = "markdown"
}
cp, err := p.newContentConverter(ps, markup, renderingConfigOverrides)
cp, err := p.newContentConverter(n.p, markup, renderingConfigOverrides)
if err != nil {
return err
}
@ -665,6 +704,9 @@ func (p *pageMeta) applyDefaultValues(ps *pageState) error {
}
func (p *pageMeta) newContentConverter(ps *pageState, markup string, renderingConfigOverrides map[string]interface{}) (converter.Converter, error) {
if ps == nil {
panic("no Page provided")
}
cp := p.s.ContentSpec.Converters.Get(markup)
if cp == nil {
return nil, errors.Errorf("no content renderer found for markup %q", p.markup)

View file

@ -22,15 +22,11 @@ import (
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
func newPageBase(metaProvider *pageMeta) (*pageState, error) {
@ -62,7 +58,8 @@ func newPageBase(metaProvider *pageMeta) (*pageState, error) {
InternalDependencies: s,
init: lazy.New(),
m: metaProvider,
s: s},
s: s,
},
}
siteAdapter := pageSiteAdapter{s: s, p: ps}
@ -95,7 +92,16 @@ func newPageBase(metaProvider *pageMeta) (*pageState, error) {
}
func newPageFromMeta(meta map[string]interface{}, metaProvider *pageMeta) (*pageState, error) {
// newPageBucket creates a pagesMapBucket owned by the given page.
func newPageBucket(p *pageState) *pagesMapBucket {
	bucket := &pagesMapBucket{}
	bucket.owner = p
	return bucket
}
func newPageFromMeta(
n *contentNode,
parentBucket *pagesMapBucket,
meta map[string]interface{},
metaProvider *pageMeta) (*pageState, error) {
if metaProvider.f == nil {
metaProvider.f = page.NewZeroFile(metaProvider.s.DistinctWarningLog)
}
@ -105,26 +111,20 @@ func newPageFromMeta(meta map[string]interface{}, metaProvider *pageMeta) (*page
return nil, err
}
initMeta := func(bucket *pagesMapBucket) error {
if meta != nil || bucket != nil {
if err := metaProvider.setMetadata(bucket, ps, meta); err != nil {
return ps.wrapError(err)
}
}
bucket := parentBucket
if err := metaProvider.applyDefaultValues(ps); err != nil {
return err
}
return nil
if ps.IsNode() {
ps.bucket = newPageBucket(ps)
}
if metaProvider.standalone {
initMeta(nil)
} else {
// Because of possible cascade keywords, we need to delay this
// until we have the complete page graph.
ps.metaInitFn = initMeta
if meta != nil || parentBucket != nil {
if err := metaProvider.setMetadata(bucket, ps, meta); err != nil {
return nil, ps.wrapError(err)
}
}
if err := metaProvider.applyDefaultValues(n); err != nil {
return nil, err
}
ps.init.Add(func() (interface{}, error) {
@ -138,19 +138,25 @@ func newPageFromMeta(meta map[string]interface{}, metaProvider *pageMeta) (*page
}
if ps.m.standalone {
ps.pageOutput = makeOut(ps.m.outputFormats()[0], true)
ps.pageOutput = makeOut(ps.m.outputFormats()[0], !ps.m.noRender())
} else {
ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
created := make(map[string]*pageOutput)
outputFormatsForPage := ps.m.outputFormats()
for i, f := range ps.s.h.renderFormats {
po, found := created[f.Name]
if !found {
_, shouldRender := outputFormatsForPage.GetByName(f.Name)
po = makeOut(f, shouldRender)
created[f.Name] = po
if !ps.m.noRender() {
ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
created := make(map[string]*pageOutput)
for i, f := range ps.s.h.renderFormats {
po, found := created[f.Name]
if !found {
_, shouldRender := outputFormatsForPage.GetByName(f.Name)
po = makeOut(f, shouldRender)
created[f.Name] = po
}
ps.pageOutputs[i] = po
}
ps.pageOutputs[i] = po
} else {
// We need one output format for potential resources to publish.
ps.pageOutputs = []*pageOutput{makeOut(outputFormatsForPage[0], false)}
}
}
@ -170,7 +176,7 @@ func newPageFromMeta(meta map[string]interface{}, metaProvider *pageMeta) (*page
func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) {
m.configuredOutputFormats = output.Formats{f}
m.standalone = true
p, err := newPageFromMeta(nil, m)
p, err := newPageFromMeta(nil, nil, nil, m)
if err != nil {
return nil, err
@ -184,108 +190,6 @@ func newPageStandalone(m *pageMeta, f output.Format) (*pageState, error) {
}
func newPageWithContent(f *fileInfo, s *Site, bundled bool, content resource.OpenReadSeekCloser) (*pageState, error) {
sections := s.sectionsFromFile(f)
kind := s.kindFromFileInfoOrSections(f, sections)
if kind == page.KindTaxonomy {
s.PathSpec.MakePathsSanitized(sections)
}
metaProvider := &pageMeta{kind: kind, sections: sections, bundled: bundled, s: s, f: f}
ps, err := newPageBase(metaProvider)
if err != nil {
return nil, err
}
gi, err := s.h.gitInfoForPage(ps)
if err != nil {
return nil, errors.Wrap(err, "failed to load Git data")
}
ps.gitInfo = gi
r, err := content()
if err != nil {
return nil, err
}
defer r.Close()
parseResult, err := pageparser.Parse(
r,
pageparser.Config{EnableEmoji: s.siteCfg.enableEmoji},
)
if err != nil {
return nil, err
}
ps.pageContent = pageContent{
source: rawPageContent{
parsed: parseResult,
posMainContent: -1,
posSummaryEnd: -1,
posBodyStart: -1,
},
}
ps.shortcodeState = newShortcodeHandler(ps, ps.s, nil)
ps.metaInitFn = func(bucket *pagesMapBucket) error {
if err := ps.mapContent(bucket, metaProvider); err != nil {
return ps.wrapError(err)
}
if err := metaProvider.applyDefaultValues(ps); err != nil {
return err
}
return nil
}
ps.init.Add(func() (interface{}, error) {
pp, err := newPagePaths(s, ps, metaProvider)
if err != nil {
return nil, err
}
// Prepare output formats for all sites.
ps.pageOutputs = make([]*pageOutput, len(ps.s.h.renderFormats))
created := make(map[string]*pageOutput)
outputFormatsForPage := ps.m.outputFormats()
for i, f := range ps.s.h.renderFormats {
if po, found := created[f.Name]; found {
ps.pageOutputs[i] = po
continue
}
_, render := outputFormatsForPage.GetByName(f.Name)
po := newPageOutput(ps, pp, f, render)
// Create a content provider for the first,
// we may be able to reuse it.
if i == 0 {
contentProvider, err := newPageContentOutput(ps, po)
if err != nil {
return nil, err
}
po.initContentProvider(contentProvider)
}
ps.pageOutputs[i] = po
created[f.Name] = po
}
if err := ps.initCommonProviders(pp); err != nil {
return nil, err
}
return nil, nil
})
return ps, nil
}
type pageDeprecatedWarning struct {
p *pageState
}

View file

@ -32,7 +32,7 @@ func newPageOutput(
ft, found := pp.targetPaths[f.Name]
if !found {
// Link to the main output format
ft = pp.targetPaths[pp.OutputFormats()[0].Format.Name]
ft = pp.targetPaths[pp.firstOutputFormat.Format.Name]
}
targetPathsProvider = ft
linksProvider = ft

View file

@ -33,15 +33,11 @@ func newPagePaths(
}
outputFormats := pm.outputFormats()
if len(outputFormats) == 0 {
outputFormats = pm.s.outputFormats[pm.Kind()]
}
if len(outputFormats) == 0 {
return pagePaths{}, nil
}
if pm.headless {
if pm.noRender() {
outputFormats = outputFormats[:1]
}
@ -55,9 +51,9 @@ func newPagePaths(
var relPermalink, permalink string
// If a page is headless or bundled in another, it will not get published
// on its own and it will have no links.
if !pm.headless && !pm.bundled {
// If a page is headless or marked as "no render", or bundled in another,
// it will not get published on its own and it will have no links.
if !pm.noRender() && !pm.bundled {
relPermalink = paths.RelPermalink(s.PathSpec)
permalink = paths.PermalinkForOutputFormat(s.PathSpec, f)
}
@ -77,8 +73,14 @@ func newPagePaths(
}
var out page.OutputFormats
if !pm.noRender() {
out = pageOutputFormats
}
return pagePaths{
outputFormats: pageOutputFormats,
outputFormats: out,
firstOutputFormat: pageOutputFormats[0],
targetPaths: targets,
targetPathDescriptor: targetPathDescriptor,
}, nil
@ -86,7 +88,8 @@ func newPagePaths(
}
type pagePaths struct {
outputFormats page.OutputFormats
outputFormats page.OutputFormats
firstOutputFormat page.OutputFormat
targetPaths map[string]targetPathsHolder
targetPathDescriptor page.TargetPathDescriptor

View file

@ -14,8 +14,10 @@
package hugolib
import (
"path"
"strings"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/page"
)
@ -28,17 +30,18 @@ func (pt pageTree) IsAncestor(other interface{}) (bool, error) {
return false, nil
}
pp, err := unwrapPage(other)
if err != nil || pp == nil {
return false, err
}
if pt.p.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
// A regular page is never its section's ancestor.
tp, ok := other.(treeRefProvider)
if !ok {
return false, nil
}
return helpers.HasStringsPrefix(pp.SectionsEntries(), pt.p.SectionsEntries()), nil
ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()
if !ref1.isSection() {
return false, nil
}
return strings.HasPrefix(ref2.key, ref1.key), nil
}
func (pt pageTree) CurrentSection() page.Page {
@ -55,35 +58,33 @@ func (pt pageTree) IsDescendant(other interface{}) (bool, error) {
if pt.p == nil {
return false, nil
}
pp, err := unwrapPage(other)
if err != nil || pp == nil {
return false, err
}
if pp.Kind() == page.KindPage && len(pt.p.SectionsEntries()) == len(pp.SectionsEntries()) {
// A regular page is never its section's descendant.
tp, ok := other.(treeRefProvider)
if !ok {
return false, nil
}
return helpers.HasStringsPrefix(pt.p.SectionsEntries(), pp.SectionsEntries()), nil
ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()
if !ref2.isSection() {
return false, nil
}
return strings.HasPrefix(ref1.key, ref2.key), nil
}
func (pt pageTree) FirstSection() page.Page {
p := pt.p
parent := p.Parent()
if types.IsNil(parent) || parent.IsHome() {
return p
ref := pt.p.getTreeRef()
key := ref.key
if !ref.isSection() {
key = path.Dir(key)
}
for {
current := parent
parent = parent.Parent()
if types.IsNil(parent) || parent.IsHome() {
return current
}
_, b := ref.m.getFirstSection(key)
if b == nil {
return nil
}
return b.p
}
func (pt pageTree) InSection(other interface{}) (bool, error) {
@ -91,16 +92,17 @@ func (pt pageTree) InSection(other interface{}) (bool, error) {
return false, nil
}
pp, err := unwrapPage(other)
if err != nil {
return false, err
}
if pp == nil {
tp, ok := other.(treeRefProvider)
if !ok {
return false, nil
}
return pp.CurrentSection().Eq(pt.p.CurrentSection()), nil
ref1, ref2 := pt.p.getTreeRef(), tp.getTreeRef()
s1, _ := ref1.getCurrentSection()
s2, _ := ref2.getCurrentSection()
return s1 == s2, nil
}
@ -109,15 +111,22 @@ func (pt pageTree) Page() page.Page {
}
func (pt pageTree) Parent() page.Page {
if pt.p.parent != nil {
return pt.p.parent
p := pt.p
if p.parent != nil {
return p.parent
}
if pt.p.bucket == nil || pt.p.bucket.parent == nil {
if pt.p.IsHome() {
return nil
}
return pt.p.bucket.parent.owner
_, b := p.getTreeRef().getSection()
if b == nil {
return nil
}
return b.p
}
func (pt pageTree) Sections() page.Pages {

View file

@ -23,7 +23,6 @@ var (
// This is all the kinds we can expect to find in .Site.Pages.
allKindsInPages = []string{page.KindPage, page.KindHome, page.KindSection, page.KindTaxonomy, page.KindTaxonomyTerm}
allKinds = append(allKindsInPages, []string{kindRSS, kindSitemap, kindRobotsTXT, kind404}...)
)
const (

View file

@ -481,7 +481,7 @@ categories: ["cool stuff"]
s := b.H.Sites[0]
checkDate := func(t time.Time, msg string) {
b.Assert(t.Year(), qt.Equals, 2017)
b.Assert(t.Year(), qt.Equals, 2017, qt.Commentf(msg))
}
checkDated := func(d resource.Dated, msg string) {
@ -524,7 +524,7 @@ date: 2018-01-15
b.Assert(len(b.H.Sites), qt.Equals, 1)
s := b.H.Sites[0]
b.Assert(s.getPage("/").Date().Year(), qt.Equals, 2017)
b.Assert(s.getPage("/").Date().Year(), qt.Equals, 2018)
b.Assert(s.getPage("/no-index").Date().Year(), qt.Equals, 2017)
b.Assert(s.getPage("/with-index-no-date").Date().IsZero(), qt.Equals, true)
b.Assert(s.getPage("/with-index-date").Date().Year(), qt.Equals, 2018)

View file

@ -20,6 +20,8 @@ import (
"strings"
"testing"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
@ -101,7 +103,7 @@ func TestPageBundlerSiteRegular(t *testing.T) {
c.Assert(len(s.RegularPages()), qt.Equals, 8)
singlePage := s.getPage(page.KindPage, "a/1.md")
c.Assert(singlePage.BundleType(), qt.Equals, "")
c.Assert(singlePage.BundleType(), qt.Equals, files.ContentClass(""))
c.Assert(singlePage, qt.Not(qt.IsNil))
c.Assert(s.getPage("page", "a/1"), qt.Equals, singlePage)
@ -148,12 +150,12 @@ func TestPageBundlerSiteRegular(t *testing.T) {
leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md")
c.Assert(leafBundle1, qt.Not(qt.IsNil))
c.Assert(leafBundle1.BundleType(), qt.Equals, "leaf")
c.Assert(leafBundle1.BundleType(), qt.Equals, files.ContentClassLeaf)
c.Assert(leafBundle1.Section(), qt.Equals, "b")
sectionB := s.getPage(page.KindSection, "b")
c.Assert(sectionB, qt.Not(qt.IsNil))
home, _ := s.Info.Home()
c.Assert(home.BundleType(), qt.Equals, "branch")
c.Assert(home.BundleType(), qt.Equals, files.ContentClassBranch)
// This is a root bundle and should live in the "home section"
// See https://github.com/gohugoio/hugo/issues/4332
@ -387,12 +389,10 @@ func TestMultilingualDisableLanguage(t *testing.T) {
c.Assert(len(s.Pages()), qt.Equals, 16)
// No nn pages
c.Assert(len(s.AllPages()), qt.Equals, 16)
for _, p := range s.rawAllPages {
s.pageMap.withEveryBundlePage(func(p *pageState) bool {
c.Assert(p.Language().Lang != "nn", qt.Equals, true)
}
for _, p := range s.AllPages() {
c.Assert(p.Language().Lang != "nn", qt.Equals, true)
}
return false
})
}
@ -549,7 +549,6 @@ HEADLESS {{< myShort >}}
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
c.Assert(len(s.RegularPages()), qt.Equals, 1)
c.Assert(len(s.headlessPages), qt.Equals, 1)
regular := s.getPage(page.KindPage, "a/index")
c.Assert(regular.RelPermalink(), qt.Equals, "/s1/")
@ -1147,18 +1146,15 @@ baseURL = "https://example.org"
defaultContentLanguage = "en"
defaultContentLanguageInSubDir = true
disableKinds = ["taxonomyTerm", "taxonomy"]
[languages]
[languages.nn]
languageName = "Nynorsk"
weight = 2
title = "Tittel på Nynorsk"
[languages.en]
title = "Title in English"
languageName = "English"
weight = 1
`
pageContent := func(id string) string {

View file

@ -17,43 +17,25 @@ import (
"fmt"
"path"
"path/filepath"
"sort"
"strings"
"sync"
"time"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/common/herrors"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/cache"
"github.com/gohugoio/hugo/resources/page"
)
// Used in the page cache to mark more than one hit for a given key.
var ambiguityFlag = &pageState{}
// PageCollections contains the page collections for a site.
type PageCollections struct {
pagesMap *pagesMap
// Includes absolute all pages (of all types), including drafts etc.
rawAllPages pageStatePages
// rawAllPages plus additional pages created during the build process.
workAllPages pageStatePages
// Includes headless bundles, i.e. bundles that produce no output for its content page.
headlessPages pageStatePages
pageMap *pageMap
// Lazy initialized page collections
pages *lazyPagesFactory
regularPages *lazyPagesFactory
allPages *lazyPagesFactory
allRegularPages *lazyPagesFactory
// The index for .Site.GetPage etc.
pageIndex *cache.Lazy
}
// Pages returns all pages.
@ -78,25 +60,6 @@ func (c *PageCollections) AllRegularPages() page.Pages {
return c.allRegularPages.get()
}
// Get initializes the index if not already done so, then
// looks up the given page ref, returns nil if no value found.
func (c *PageCollections) getFromCache(ref string) (page.Page, error) {
v, found, err := c.pageIndex.Get(ref)
if err != nil {
return nil, err
}
if !found {
return nil, nil
}
p := v.(page.Page)
if p != ambiguityFlag {
return p, nil
}
return nil, fmt.Errorf("page reference %q is ambiguous", ref)
}
type lazyPagesFactory struct {
pages page.Pages
@ -115,83 +78,19 @@ func newLazyPagesFactory(factory page.PagesFactory) *lazyPagesFactory {
return &lazyPagesFactory{factory: factory}
}
func newPageCollections() *PageCollections {
return newPageCollectionsFromPages(nil)
}
func newPageCollections(m *pageMap) *PageCollections {
if m == nil {
panic("must provide a pageMap")
}
func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
c := &PageCollections{rawAllPages: pages}
c := &PageCollections{pageMap: m}
c.pages = newLazyPagesFactory(func() page.Pages {
pages := make(page.Pages, len(c.workAllPages))
for i, p := range c.workAllPages {
pages[i] = p
}
return pages
return m.createListAllPages()
})
c.regularPages = newLazyPagesFactory(func() page.Pages {
return c.findPagesByKindInWorkPages(page.KindPage, c.workAllPages)
})
c.pageIndex = cache.NewLazy(func() (map[string]interface{}, error) {
index := make(map[string]interface{})
add := func(ref string, p page.Page) {
ref = strings.ToLower(ref)
existing := index[ref]
if existing == nil {
index[ref] = p
} else if existing != ambiguityFlag && existing != p {
index[ref] = ambiguityFlag
}
}
for _, pageCollection := range []pageStatePages{c.workAllPages, c.headlessPages} {
for _, p := range pageCollection {
if p.IsPage() {
sourceRefs := p.sourceRefs()
for _, ref := range sourceRefs {
add(ref, p)
}
sourceRef := sourceRefs[0]
// Ref/Relref supports this potentially ambiguous lookup.
add(p.File().LogicalName(), p)
translationBaseName := p.File().TranslationBaseName()
dir, _ := path.Split(sourceRef)
dir = strings.TrimSuffix(dir, "/")
if translationBaseName == "index" {
add(dir, p)
add(path.Base(dir), p)
} else {
add(translationBaseName, p)
}
// We need a way to get to the current language version.
pathWithNoExtensions := path.Join(dir, translationBaseName)
add(pathWithNoExtensions, p)
} else {
sourceRefs := p.sourceRefs()
for _, ref := range sourceRefs {
add(ref, p)
}
ref := p.SectionsPath()
// index the canonical, unambiguous virtual ref
// e.g. /section
// (this may already have been indexed above)
add("/"+ref, p)
}
}
}
return index, nil
return c.findPagesByKindIn(page.KindPage, c.pages.get())
})
return c
@ -249,64 +148,157 @@ func (c *PageCollections) getPage(typ string, sections ...string) page.Page {
return p
}
// Case insensitive page lookup.
// getPageRef resolves a Page from ref/relRef, with a slightly more comprehensive
// search path than getPageNew.
func (c *PageCollections) getPageRef(context page.Page, ref string) (page.Page, error) {
n, err := c.getContentNode(context, true, ref)
if err != nil || n == nil || n.p == nil {
return nil, err
}
return n.p, nil
}
func (c *PageCollections) getPageNew(context page.Page, ref string) (page.Page, error) {
var anError error
n, err := c.getContentNode(context, false, ref)
if err != nil || n == nil || n.p == nil {
return nil, err
}
return n.p, nil
}
ref = strings.ToLower(ref)
func (c *PageCollections) getSectionOrPage(ref string) (*contentNode, string) {
var n *contentNode
// Absolute (content root relative) reference.
if strings.HasPrefix(ref, "/") {
p, err := c.getFromCache(ref)
if err == nil && p != nil {
return p, nil
}
if err != nil {
anError = err
}
s, v, found := c.pageMap.sections.LongestPrefix(ref)
} else if context != nil {
if found {
n = v.(*contentNode)
}
if found && s == ref {
// A section
return n, ""
}
m := c.pageMap
filename := strings.TrimPrefix(strings.TrimPrefix(ref, s), "/")
langSuffix := "." + m.s.Lang()
// Trim both extension and any language code.
name := helpers.PathNoExt(filename)
name = strings.TrimSuffix(name, langSuffix)
// These are reserved bundle names and will always be stored by their owning
// folder name.
name = strings.TrimSuffix(name, "/index")
name = strings.TrimSuffix(name, "/_index")
if !found {
return nil, name
}
// Check if it's a section with filename provided.
if !n.p.File().IsZero() && n.p.File().LogicalName() == filename {
return n, name
}
return m.getPage(s, name), name
}
func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref string) (*contentNode, error) {
defer herrors.Recover()
ref = filepath.ToSlash(strings.ToLower(strings.TrimSpace(ref)))
if ref == "" {
ref = "/"
}
inRef := ref
var doSimpleLookup bool
if isReflink || context == nil {
// For Ref/Reflink and .Site.GetPage do simple name lookups for the potentially ambigous myarticle.md and /myarticle.md,
// but not when we get ./myarticle*, section/myarticle.
doSimpleLookup = ref[0] != '.' || ref[0] == '/' && strings.Count(ref, "/") == 1
}
if context != nil && !strings.HasPrefix(ref, "/") {
// Try the page-relative path.
var dir string
if !context.File().IsZero() {
dir = filepath.ToSlash(context.File().Dir())
var base string
if context.File().IsZero() {
base = context.SectionsPath()
} else {
dir = context.SectionsPath()
}
ppath := path.Join("/", strings.ToLower(dir), ref)
p, err := c.getFromCache(ppath)
if err == nil && p != nil {
return p, nil
}
if err != nil {
anError = err
base = filepath.ToSlash(filepath.Dir(context.File().FileInfo().Meta().Path()))
}
ref = path.Join("/", strings.ToLower(base), ref)
}
if !strings.HasPrefix(ref, "/") {
ref = "/" + ref
}
m := c.pageMap
// It's either a section, a page in a section or a taxonomy node.
// Start with the most likely:
n, name := c.getSectionOrPage(ref)
if n != nil {
return n, nil
}
if !strings.HasPrefix(inRef, "/") {
// Many people will have "post/foo.md" in their content files.
p, err := c.getFromCache("/" + ref)
if err == nil && p != nil {
return p, nil
if n, _ := c.getSectionOrPage("/" + inRef); n != nil {
return n, nil
}
}
// Check if it's a taxonomy node
s, v, found := m.taxonomies.LongestPrefix(ref)
if found {
if !m.onSameLevel(ref, s) {
return nil, nil
}
return v.(*contentNode), nil
}
getByName := func(s string) (*contentNode, error) {
n := m.pageReverseIndex.Get(s)
if n != nil {
if n == ambigousContentNode {
return nil, fmt.Errorf("page reference %q is ambiguous", ref)
}
return n, nil
}
return nil, nil
}
var module string
if context != nil && !context.File().IsZero() {
module = context.File().FileInfo().Meta().Module()
}
if module == "" && !c.pageMap.s.home.File().IsZero() {
module = c.pageMap.s.home.File().FileInfo().Meta().Module()
}
if module != "" {
n, err := getByName(module + ref)
if err != nil {
anError = err
return nil, err
}
if n != nil {
return n, nil
}
}
// Last try.
ref = strings.TrimPrefix(ref, "/")
p, err := c.getFromCache(ref)
if err != nil {
anError = err
if !doSimpleLookup {
return nil, nil
}
if p == nil && anError != nil {
return nil, wrapErr(errors.Wrap(anError, "failed to resolve ref"), context)
}
// Ref/relref supports this potentially ambigous lookup.
return getByName(name)
return p, nil
}
func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
@ -318,238 +310,3 @@ func (*PageCollections) findPagesByKindIn(kind string, inPages page.Pages) page.
}
return pages
}
func (c *PageCollections) findPagesByKind(kind string) page.Pages {
return c.findPagesByKindIn(kind, c.Pages())
}
func (c *PageCollections) findWorkPagesByKind(kind string) pageStatePages {
var pages pageStatePages
for _, p := range c.workAllPages {
if p.Kind() == kind {
pages = append(pages, p)
}
}
return pages
}
func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStatePages) page.Pages {
var pages page.Pages
for _, p := range inPages {
if p.Kind() == kind {
pages = append(pages, p)
}
}
return pages
}
func (c *PageCollections) addPage(page *pageState) {
c.rawAllPages = append(c.rawAllPages, page)
}
func (c *PageCollections) removePageFilename(filename string) {
if i := c.rawAllPages.findPagePosByFilename(filename); i >= 0 {
c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
}
func (c *PageCollections) removePage(page *pageState) {
if i := c.rawAllPages.findPagePos(page); i >= 0 {
c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
}
func (c *PageCollections) replacePage(page *pageState) {
// will find existing page that matches filepath and remove it
c.removePage(page)
c.addPage(page)
}
func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
if len(page.resources) > 0 {
page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
}
}
func (c *PageCollections) assemblePagesMap(s *Site) error {
c.pagesMap = newPagesMap(s)
rootSections := make(map[string]bool)
// Add all branch nodes first.
for _, p := range c.rawAllPages {
rootSections[p.Section()] = true
if p.IsPage() {
continue
}
c.pagesMap.addPage(p)
}
// Create missing home page and the first level sections if no
// _index provided.
s.home = c.pagesMap.getOrCreateHome()
for k := range rootSections {
c.pagesMap.createSectionIfNotExists(k)
}
// Attach the regular pages to their section.
for _, p := range c.rawAllPages {
if p.IsNode() {
continue
}
c.pagesMap.addPage(p)
}
return nil
}
func (c *PageCollections) createWorkAllPages() error {
c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages))
c.headlessPages = make(pageStatePages, 0)
var (
homeDates *resource.Dates
sectionDates *resource.Dates
siteLastmod time.Time
siteLastDate time.Time
sectionsParamId = "mainSections"
sectionsParamIdLower = strings.ToLower(sectionsParamId)
)
mainSections, mainSectionsFound := c.pagesMap.s.Info.Params()[sectionsParamIdLower]
var (
bucketsToRemove []string
rootBuckets []*pagesMapBucket
walkErr error
)
c.pagesMap.r.Walk(func(s string, v interface{}) bool {
bucket := v.(*pagesMapBucket)
parentBucket := c.pagesMap.parentBucket(s)
if parentBucket != nil {
if !mainSectionsFound && strings.Count(s, "/") == 1 && bucket.owner.IsSection() {
// Root section
rootBuckets = append(rootBuckets, bucket)
}
}
if bucket.owner.IsHome() {
if resource.IsZeroDates(bucket.owner) {
// Calculate dates from the page tree.
homeDates = &bucket.owner.m.Dates
}
}
sectionDates = nil
if resource.IsZeroDates(bucket.owner) {
sectionDates = &bucket.owner.m.Dates
}
if parentBucket != nil {
bucket.parent = parentBucket
if bucket.owner.IsSection() {
parentBucket.bucketSections = append(parentBucket.bucketSections, bucket)
}
}
if bucket.isEmpty() {
if bucket.owner.IsSection() && bucket.owner.File().IsZero() {
// Check for any nested section.
var hasDescendant bool
c.pagesMap.r.WalkPrefix(s, func(ss string, v interface{}) bool {
if s != ss {
hasDescendant = true
return true
}
return false
})
if !hasDescendant {
// This is an auto-created section with, now, nothing in it.
bucketsToRemove = append(bucketsToRemove, s)
return false
}
}
}
if !bucket.disabled {
c.workAllPages = append(c.workAllPages, bucket.owner)
}
if !bucket.view {
for _, p := range bucket.headlessPages {
ps := p.(*pageState)
ps.parent = bucket.owner
c.headlessPages = append(c.headlessPages, ps)
}
for _, p := range bucket.pages {
ps := p.(*pageState)
ps.parent = bucket.owner
c.workAllPages = append(c.workAllPages, ps)
if homeDates != nil {
homeDates.UpdateDateAndLastmodIfAfter(ps)
}
if sectionDates != nil {
sectionDates.UpdateDateAndLastmodIfAfter(ps)
}
if p.Lastmod().After(siteLastmod) {
siteLastmod = p.Lastmod()
}
if p.Date().After(siteLastDate) {
siteLastDate = p.Date()
}
}
}
return false
})
if walkErr != nil {
return walkErr
}
c.pagesMap.s.lastmod = siteLastmod
if !mainSectionsFound {
// Calculare main section
var (
maxRootBucketWeight int
maxRootBucket *pagesMapBucket
)
for _, b := range rootBuckets {
weight := len(b.pages) + (len(b.bucketSections) * 5)
if weight >= maxRootBucketWeight {
maxRootBucket = b
maxRootBucketWeight = weight
}
}
if maxRootBucket != nil {
// Try to make this as backwards compatible as possible.
mainSections = []string{maxRootBucket.owner.Section()}
}
}
c.pagesMap.s.Info.Params()[sectionsParamId] = mainSections
c.pagesMap.s.Info.Params()[sectionsParamIdLower] = mainSections
for _, key := range bucketsToRemove {
c.pagesMap.r.Delete(key)
}
sort.Sort(c.workAllPages)
return nil
}

View file

@ -70,43 +70,91 @@ func BenchmarkGetPage(b *testing.B) {
}
}
func BenchmarkGetPageRegular(b *testing.B) {
func createGetPageRegularBenchmarkSite(t testing.TB) *Site {
var (
c = qt.New(b)
c = qt.New(t)
cfg, fs = newTestCfg()
r = rand.New(rand.NewSource(time.Now().UnixNano()))
)
pc := func(title string) string {
return fmt.Sprintf(pageCollectionsPageTemplate, title)
}
for i := 0; i < 10; i++ {
for j := 0; j < 100; j++ {
content := fmt.Sprintf(pageCollectionsPageTemplate, fmt.Sprintf("Title%d_%d", i, j))
writeSource(b, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
content := pc(fmt.Sprintf("Title%d_%d", i, j))
writeSource(c, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
}
}
s := buildSingleSite(b, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
return buildSingleSite(c, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
pagePaths := make([]string, b.N)
}
for i := 0; i < b.N; i++ {
pagePaths[i] = path.Join(fmt.Sprintf("sect%d", r.Intn(10)), fmt.Sprintf("page%d.md", r.Intn(100)))
}
func TestBenchmarkGetPageRegular(t *testing.T) {
c := qt.New(t)
s := createGetPageRegularBenchmarkSite(t)
b.ResetTimer()
for i := 0; i < b.N; i++ {
page, _ := s.getPageNew(nil, pagePaths[i])
c.Assert(page, qt.Not(qt.IsNil))
for i := 0; i < 10; i++ {
pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i))
page, _ := s.getPageNew(nil, pp)
c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp))
}
}
type testCase struct {
func BenchmarkGetPageRegular(b *testing.B) {
r := rand.New(rand.NewSource(time.Now().UnixNano()))
b.Run("From root", func(b *testing.B) {
s := createGetPageRegularBenchmarkSite(b)
c := qt.New(b)
pagePaths := make([]string, b.N)
for i := 0; i < b.N; i++ {
pagePaths[i] = path.Join(fmt.Sprintf("/sect%d", r.Intn(10)), fmt.Sprintf("page%d.md", r.Intn(100)))
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
page, _ := s.getPageNew(nil, pagePaths[i])
c.Assert(page, qt.Not(qt.IsNil))
}
})
b.Run("Page relative", func(b *testing.B) {
s := createGetPageRegularBenchmarkSite(b)
c := qt.New(b)
allPages := s.RegularPages()
pagePaths := make([]string, b.N)
pages := make([]page.Page, b.N)
for i := 0; i < b.N; i++ {
pagePaths[i] = fmt.Sprintf("page%d.md", r.Intn(100))
pages[i] = allPages[r.Intn(len(allPages)/3)]
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
page, _ := s.getPageNew(pages[i], pagePaths[i])
c.Assert(page, qt.Not(qt.IsNil))
}
})
}
type getPageTest struct {
name string
kind string
context page.Page
path []string
pathVariants []string
expectedTitle string
}
func (t *testCase) check(p page.Page, err error, errorMsg string, c *qt.C) {
func (t *getPageTest) check(p page.Page, err error, errorMsg string, c *qt.C) {
c.Helper()
errorComment := qt.Commentf(errorMsg)
switch t.kind {
case "Ambiguous":
@ -130,117 +178,159 @@ func TestGetPage(t *testing.T) {
c = qt.New(t)
)
pc := func(title string) string {
return fmt.Sprintf(pageCollectionsPageTemplate, title)
}
for i := 0; i < 10; i++ {
for j := 0; j < 10; j++ {
content := fmt.Sprintf(pageCollectionsPageTemplate, fmt.Sprintf("Title%d_%d", i, j))
content := pc(fmt.Sprintf("Title%d_%d", i, j))
writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
}
}
content := fmt.Sprintf(pageCollectionsPageTemplate, "home page")
content := pc("home page")
writeSource(t, fs, filepath.Join("content", "_index.md"), content)
content = fmt.Sprintf(pageCollectionsPageTemplate, "about page")
content = pc("about page")
writeSource(t, fs, filepath.Join("content", "about.md"), content)
content = fmt.Sprintf(pageCollectionsPageTemplate, "section 3")
content = pc("section 3")
writeSource(t, fs, filepath.Join("content", "sect3", "_index.md"), content)
content = fmt.Sprintf(pageCollectionsPageTemplate, "UniqueBase")
writeSource(t, fs, filepath.Join("content", "sect3", "unique.md"), content)
writeSource(t, fs, filepath.Join("content", "sect3", "unique.md"), pc("UniqueBase"))
writeSource(t, fs, filepath.Join("content", "sect3", "Unique2.md"), pc("UniqueBase2"))
content = fmt.Sprintf(pageCollectionsPageTemplate, "another sect7")
content = pc("another sect7")
writeSource(t, fs, filepath.Join("content", "sect3", "sect7", "_index.md"), content)
content = fmt.Sprintf(pageCollectionsPageTemplate, "deep page")
content = pc("deep page")
writeSource(t, fs, filepath.Join("content", "sect3", "subsect", "deep.md"), content)
// Bundle variants
writeSource(t, fs, filepath.Join("content", "sect3", "b1", "index.md"), pc("b1 bundle"))
writeSource(t, fs, filepath.Join("content", "sect3", "index", "index.md"), pc("index bundle"))
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
sec3, err := s.getPageNew(nil, "/sect3")
c.Assert(err, qt.IsNil)
c.Assert(sec3, qt.Not(qt.IsNil))
tests := []testCase{
tests := []getPageTest{
// legacy content root relative paths
{page.KindHome, nil, []string{}, "home page"},
{page.KindPage, nil, []string{"about.md"}, "about page"},
{page.KindSection, nil, []string{"sect3"}, "section 3"},
{page.KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
{page.KindPage, nil, []string{"sect4/page2.md"}, "Title4_2"},
{page.KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
{page.KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
{page.KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{"Root relative, no slash, home", page.KindHome, nil, []string{""}, "home page"},
{"Root relative, no slash, root page", page.KindPage, nil, []string{"about.md", "ABOUT.md"}, "about page"},
{"Root relative, no slash, section", page.KindSection, nil, []string{"sect3"}, "section 3"},
{"Root relative, no slash, section page", page.KindPage, nil, []string{"sect3/page1.md"}, "Title3_1"},
{"Root relative, no slash, sub setion", page.KindSection, nil, []string{"sect3/sect7"}, "another sect7"},
{"Root relative, no slash, nested page", page.KindPage, nil, []string{"sect3/subsect/deep.md"}, "deep page"},
{"Root relative, no slash, OS slashes", page.KindPage, nil, []string{filepath.FromSlash("sect5/page3.md")}, "Title5_3"},
// shorthand refs (potentially ambiguous)
{page.KindPage, nil, []string{"unique.md"}, "UniqueBase"},
{"Ambiguous", nil, []string{"page1.md"}, ""},
{"Short ref, unique", page.KindPage, nil, []string{"unique.md", "unique"}, "UniqueBase"},
{"Short ref, unique, upper case", page.KindPage, nil, []string{"Unique2.md", "unique2.md", "unique2"}, "UniqueBase2"},
{"Short ref, ambiguous", "Ambiguous", nil, []string{"page1.md"}, ""},
// ISSUE: This is an ambiguous ref, but because we have to support the legacy
// content root relative paths without a leading slash, the lookup
// returns /sect7. This undermines ambiguity detection, but we have no choice.
//{"Ambiguous", nil, []string{"sect7"}, ""},
{page.KindSection, nil, []string{"sect7"}, "Sect7s"},
{"Section, ambigous", page.KindSection, nil, []string{"sect7"}, "Sect7s"},
// absolute paths
{page.KindHome, nil, []string{"/"}, "home page"},
{page.KindPage, nil, []string{"/about.md"}, "about page"},
{page.KindSection, nil, []string{"/sect3"}, "section 3"},
{page.KindPage, nil, []string{"/sect3/page1.md"}, "Title3_1"},
{page.KindPage, nil, []string{"/sect4/page2.md"}, "Title4_2"},
{page.KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
{page.KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
{page.KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{page.KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"}, //next test depends on this page existing
{"Absolute, home", page.KindHome, nil, []string{"/", ""}, "home page"},
{"Absolute, page", page.KindPage, nil, []string{"/about.md", "/about"}, "about page"},
{"Absolute, sect", page.KindSection, nil, []string{"/sect3"}, "section 3"},
{"Absolute, page in subsection", page.KindPage, nil, []string{"/sect3/page1.md", "/Sect3/Page1.md"}, "Title3_1"},
{"Absolute, section, subsection with same name", page.KindSection, nil, []string{"/sect3/sect7"}, "another sect7"},
{"Absolute, page, deep", page.KindPage, nil, []string{"/sect3/subsect/deep.md"}, "deep page"},
{"Absolute, page, OS slashes", page.KindPage, nil, []string{filepath.FromSlash("/sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{"Absolute, unique", page.KindPage, nil, []string{"/sect3/unique.md"}, "UniqueBase"},
{"Absolute, unique, case", page.KindPage, nil, []string{"/sect3/Unique2.md", "/sect3/unique2.md", "/sect3/unique2", "/sect3/Unique2"}, "UniqueBase2"},
//next test depends on this page existing
// {"NoPage", nil, []string{"/unique.md"}, ""}, // ISSUE #4969: this is resolving to /sect3/unique.md
{"NoPage", nil, []string{"/missing-page.md"}, ""},
{"NoPage", nil, []string{"/missing-section"}, ""},
{"Absolute, missing page", "NoPage", nil, []string{"/missing-page.md"}, ""},
{"Absolute, missing section", "NoPage", nil, []string{"/missing-section"}, ""},
// relative paths
{page.KindHome, sec3, []string{".."}, "home page"},
{page.KindHome, sec3, []string{"../"}, "home page"},
{page.KindPage, sec3, []string{"../about.md"}, "about page"},
{page.KindSection, sec3, []string{"."}, "section 3"},
{page.KindSection, sec3, []string{"./"}, "section 3"},
{page.KindPage, sec3, []string{"page1.md"}, "Title3_1"},
{page.KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
{page.KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
{page.KindSection, sec3, []string{"sect7"}, "another sect7"},
{page.KindSection, sec3, []string{"./sect7"}, "another sect7"},
{page.KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
{page.KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
{page.KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{page.KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
{"NoPage", sec3, []string{"./sect2"}, ""},
{"Dot relative, home", page.KindHome, sec3, []string{".."}, "home page"},
{"Dot relative, home, slash", page.KindHome, sec3, []string{"../"}, "home page"},
{"Dot relative about", page.KindPage, sec3, []string{"../about.md"}, "about page"},
{"Dot", page.KindSection, sec3, []string{"."}, "section 3"},
{"Dot slash", page.KindSection, sec3, []string{"./"}, "section 3"},
{"Page relative, no dot", page.KindPage, sec3, []string{"page1.md"}, "Title3_1"},
{"Page relative, dot", page.KindPage, sec3, []string{"./page1.md"}, "Title3_1"},
{"Up and down another section", page.KindPage, sec3, []string{"../sect4/page2.md"}, "Title4_2"},
{"Rel sect7", page.KindSection, sec3, []string{"sect7"}, "another sect7"},
{"Rel sect7 dot", page.KindSection, sec3, []string{"./sect7"}, "another sect7"},
{"Dot deep", page.KindPage, sec3, []string{"./subsect/deep.md"}, "deep page"},
{"Dot dot inner", page.KindPage, sec3, []string{"./subsect/../../sect7/page9.md"}, "Title7_9"},
{"Dot OS slash", page.KindPage, sec3, []string{filepath.FromSlash("../sect5/page3.md")}, "Title5_3"}, //test OS-specific path
{"Dot unique", page.KindPage, sec3, []string{"./unique.md"}, "UniqueBase"},
{"Dot sect", "NoPage", sec3, []string{"./sect2"}, ""},
//{"NoPage", sec3, []string{"sect2"}, ""}, // ISSUE: /sect3 page relative query is resolving to /sect2
// absolute paths ignore context
{page.KindHome, sec3, []string{"/"}, "home page"},
{page.KindPage, sec3, []string{"/about.md"}, "about page"},
{page.KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
{page.KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing
{"NoPage", sec3, []string{"/subsect/deep.md"}, ""},
{"Abs, ignore context, home", page.KindHome, sec3, []string{"/"}, "home page"},
{"Abs, ignore context, about", page.KindPage, sec3, []string{"/about.md"}, "about page"},
{"Abs, ignore context, page in section", page.KindPage, sec3, []string{"/sect4/page2.md"}, "Title4_2"},
{"Abs, ignore context, page subsect deep", page.KindPage, sec3, []string{"/sect3/subsect/deep.md"}, "deep page"}, //next test depends on this page existing
{"Abs, ignore context, page deep", "NoPage", sec3, []string{"/subsect/deep.md"}, ""},
// Taxonomies
{"Taxonomy term", page.KindTaxonomyTerm, nil, []string{"categories"}, "Categories"},
{"Taxonomy", page.KindTaxonomy, nil, []string{"categories/hugo", "categories/Hugo"}, "Hugo"},
// Bundle variants
{"Bundle regular", page.KindPage, nil, []string{"sect3/b1", "sect3/b1/index.md", "sect3/b1/index.en.md"}, "b1 bundle"},
{"Bundle index name", page.KindPage, nil, []string{"sect3/index/index.md", "sect3/index"}, "index bundle"},
}
for _, test := range tests {
errorMsg := fmt.Sprintf("Test case %s %v -> %s", test.context, test.path, test.expectedTitle)
c.Run(test.name, func(c *qt.C) {
errorMsg := fmt.Sprintf("Test case %v %v -> %s", test.context, test.pathVariants, test.expectedTitle)
// test legacy public Site.GetPage (which does not support page context relative queries)
if test.context == nil {
args := append([]string{test.kind}, test.path...)
page, err := s.Info.GetPage(args...)
test.check(page, err, errorMsg, c)
}
// test legacy public Site.GetPage (which does not support page context relative queries)
if test.context == nil {
for _, ref := range test.pathVariants {
args := append([]string{test.kind}, ref)
page, err := s.Info.GetPage(args...)
test.check(page, err, errorMsg, c)
}
}
// test new internal Site.getPageNew
var ref string
if len(test.path) == 1 {
ref = filepath.ToSlash(test.path[0])
} else {
ref = path.Join(test.path...)
}
page2, err := s.getPageNew(test.context, ref)
test.check(page2, err, errorMsg, c)
// test new internal Site.getPageNew
for _, ref := range test.pathVariants {
page2, err := s.getPageNew(test.context, ref)
test.check(page2, err, errorMsg, c)
}
})
}
}
// https://github.com/gohugoio/hugo/issues/6034
func TestGetPageRelative(t *testing.T) {
	b := newTestSitesBuilder(t)
	sections := []string{"what", "where", "who"}
	for i, section := range sections {
		// The last section's member page is a draft and must not be found.
		isDraft := i == 2
		b.WithContent(
			section+"/_index.md", fmt.Sprintf("---title: %s\n---", section),
			section+"/members.md", fmt.Sprintf("---title: members %s\ndraft: %t\n---", section, isDraft),
		)
	}

	b.WithTemplates("_default/list.html", `
{{ with .GetPage "members.md" }}
Members: {{ .Title }}
{{ else }}
NOT FOUND
{{ end }}
`)

	b.Build(BuildCfg{})

	b.AssertFileContent("public/what/index.html", `Members: members what`)
	b.AssertFileContent("public/where/index.html", `Members: members where`)
	b.AssertFileContent("public/who/index.html", `NOT FOUND`)
}

View file

@ -19,21 +19,14 @@ import (
"os"
pth "path"
"path/filepath"
"strings"
"reflect"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/parser/pageparser"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/resources"
"github.com/pkg/errors"
"golang.org/x/sync/errgroup"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/common/loggers"
@ -41,30 +34,32 @@ import (
"github.com/spf13/afero"
)
const (
walkIsRootFileMetaKey = "walkIsRootFileMetaKey"
)
func newPagesCollector(
sp *source.SourceSpec,
contentMap *pageMaps,
logger *loggers.Logger,
contentTracker *contentChangeMap,
proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector {
return &pagesCollector{
fs: sp.SourceFs,
proc: proc,
sp: sp,
logger: logger,
filenames: filenames,
tracker: contentTracker,
fs: sp.SourceFs,
contentMap: contentMap,
proc: proc,
sp: sp,
logger: logger,
filenames: filenames,
tracker: contentTracker,
}
}
func newPagesProcessor(h *HugoSites, sp *source.SourceSpec, partialBuild bool) *pagesProcessor {
return &pagesProcessor{
h: h,
sp: sp,
partialBuild: partialBuild,
numWorkers: config.GetNumWorkerMultiplier() * 3,
}
type contentDirKey struct {
dirname string
filename string
tp bundleDirType
}
type fileinfoBundle struct {
@ -90,6 +85,8 @@ type pagesCollector struct {
fs afero.Fs
logger *loggers.Logger
contentMap *pageMaps
// Ordered list (bundle headers first) used in partial builds.
filenames []string
@ -99,21 +96,78 @@ type pagesCollector struct {
proc pagesCollectorProcessorProvider
}
type contentDirKey struct {
dirname string
filename string
tp bundleDirType
// isCascadingEdit reports whether the changed directory represents a
// cascading edit — i.e. whether a front matter "cascade" section was
// added, removed or modified on the section (or taxonomy) node owning
// dir. If so, the caller must re-evaluate all of the node's
// descendants. The second return value is the located section key
// ("/" if the node was not found).
func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) {
	// This is either a section or a taxonomy node. Find it.
	prefix := cleanTreeKey(dir.dirname)

	section := "/"
	var isCascade bool

	c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool {
		if n.fi == nil || dir.filename != n.fi.Meta().Filename() {
			return false
		}

		f, err := n.fi.Meta().Open()
		if err != nil {
			// File may have been removed, assume a cascading edit.
			// A few false positives are not too bad.
			isCascade = true
			return true
		}

		pf, err := pageparser.ParseFrontMatterAndContent(f)
		f.Close()
		if err != nil {
			// Parse failure: be conservative and treat it as cascading.
			isCascade = true
			return true
		}

		if n.p == nil || n.p.bucket == nil {
			return true
		}

		section = s

		maps.ToLower(pf.FrontMatter)
		cascade1, ok := pf.FrontMatter["cascade"]
		// len on a nil map is 0, so no separate nil check is needed.
		hasCascade := len(n.p.bucket.cascade) > 0
		if !ok {
			isCascade = hasCascade
			return true
		}

		if !hasCascade {
			isCascade = true
			return true
		}

		isCascade = !reflect.DeepEqual(cascade1, n.p.bucket.cascade)
		return true
	})

	return isCascade, section
}
// Collect.
func (c *pagesCollector) Collect() error {
func (c *pagesCollector) Collect() (collectErr error) {
c.proc.Start(context.Background())
defer func() {
collectErr = c.proc.Wait()
}()
var collectErr error
if len(c.filenames) == 0 {
// Collect everything.
collectErr = c.collectDir("", false, nil)
} else {
for _, pm := range c.contentMap.pmaps {
pm.cfg.isRebuild = true
}
dirs := make(map[contentDirKey]bool)
for _, filename := range c.filenames {
dir, btype := c.tracker.resolveAndRemove(filename)
@ -121,9 +175,19 @@ func (c *pagesCollector) Collect() error {
}
for dir := range dirs {
for _, pm := range c.contentMap.pmaps {
pm.s.ResourceSpec.DeleteBySubstring(dir.dirname)
}
switch dir.tp {
case bundleLeaf, bundleBranch:
case bundleLeaf:
collectErr = c.collectDir(dir.dirname, true, nil)
case bundleBranch:
isCascading, section := c.isCascadingEdit(dir)
if isCascading {
c.contentMap.deleteSection(section)
}
collectErr = c.collectDir(dir.dirname, !isCascading, nil)
default:
// We always start from a directory.
collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool {
@ -138,185 +202,7 @@ func (c *pagesCollector) Collect() error {
}
err := c.proc.Wait()
if collectErr != nil {
return collectErr
}
return err
}
func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error {
fi, err := c.fs.Stat(dirname)
if err != nil {
if os.IsNotExist(err) {
// May have been deleted.
return nil
}
return err
}
handleDir := func(
btype bundleDirType,
dir hugofs.FileMetaInfo,
path string,
readdir []hugofs.FileMetaInfo) error {
if btype > bundleNot && c.tracker != nil {
c.tracker.add(path, btype)
}
if btype == bundleBranch {
if err := c.handleBundleBranch(readdir); err != nil {
return err
}
// A branch bundle is only this directory level, so keep walking.
return nil
} else if btype == bundleLeaf {
if err := c.handleBundleLeaf(dir, path, readdir); err != nil {
return err
}
return nil
}
if err := c.handleFiles(readdir...); err != nil {
return err
}
return nil
}
filter := func(fim hugofs.FileMetaInfo) bool {
if fim.Meta().SkipDir() {
return false
}
if c.sp.IgnoreFile(fim.Meta().Filename()) {
return false
}
if inFilter != nil {
return inFilter(fim)
}
return true
}
preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
var btype bundleDirType
filtered := readdir[:0]
for _, fi := range readdir {
if filter(fi) {
filtered = append(filtered, fi)
if c.tracker != nil {
// Track symlinks.
c.tracker.addSymbolicLinkMapping(fi)
}
}
}
readdir = filtered
// We merge language directories, so there can be duplicates, but they
// will be ordered, most important first.
var duplicates []int
seen := make(map[string]bool)
for i, fi := range readdir {
if fi.IsDir() {
continue
}
meta := fi.Meta()
class := meta.Classifier()
translationBase := meta.TranslationBaseNameWithExt()
key := pth.Join(meta.Lang(), translationBase)
if seen[key] {
duplicates = append(duplicates, i)
continue
}
seen[key] = true
var thisBtype bundleDirType
switch class {
case files.ContentClassLeaf:
thisBtype = bundleLeaf
case files.ContentClassBranch:
thisBtype = bundleBranch
}
// Folders with both index.md and _index.md type of files have
// undefined behaviour and can never work.
// The branch variant will win because of sort order, but log
// a warning about it.
if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype {
c.logger.WARN.Printf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename())
// Reclassify it so it will be handled as a content file inside the
// section, which is in line with the <= 0.55 behaviour.
meta["classifier"] = files.ContentClassContent
} else if thisBtype > bundleNot {
btype = thisBtype
}
}
if len(duplicates) > 0 {
for i := len(duplicates) - 1; i >= 0; i-- {
idx := duplicates[i]
readdir = append(readdir[:idx], readdir[idx+1:]...)
}
}
err := handleDir(btype, dir, path, readdir)
if err != nil {
return nil, err
}
if btype == bundleLeaf || partial {
return nil, filepath.SkipDir
}
// Keep walking.
return readdir, nil
}
var postHook hugofs.WalkHook
if c.tracker != nil {
postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
if c.tracker == nil {
// Nothing to do.
return readdir, nil
}
return readdir, nil
}
}
wfn := func(path string, info hugofs.FileMetaInfo, err error) error {
if err != nil {
return err
}
return nil
}
w := hugofs.NewWalkway(hugofs.WalkwayConfig{
Fs: c.fs,
Logger: c.logger,
Root: dirname,
Info: fi.(hugofs.FileMetaInfo),
HookPre: preHook,
HookPost: postHook,
WalkFn: wfn})
return w.Walk()
return
}
@ -432,11 +318,195 @@ func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaIn
return hugofs.NewFileMetaInfo(fi, cm)
}
// collectDir walks the content directory rooted at dirname and feeds
// what it finds to the processor, grouped into bundles where
// applicable. If partial is true, only this directory level is
// collected (used for rebuilds). inFilter, when non-nil, can veto
// individual entries.
func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error {
	fi, err := c.fs.Stat(dirname)
	if err != nil {
		if os.IsNotExist(err) {
			// May have been deleted.
			return nil
		}
		return err
	}

	// handleDir dispatches one directory level based on its bundle type.
	handleDir := func(
		btype bundleDirType,
		dir hugofs.FileMetaInfo,
		path string,
		readdir []hugofs.FileMetaInfo) error {

		// Register bundle dirs so later partial rebuilds can resolve them.
		if btype > bundleNot && c.tracker != nil {
			c.tracker.add(path, btype)
		}

		if btype == bundleBranch {
			if err := c.handleBundleBranch(readdir); err != nil {
				return err
			}
			// A branch bundle is only this directory level, so keep walking.
			return nil
		} else if btype == bundleLeaf {
			if err := c.handleBundleLeaf(dir, path, readdir); err != nil {
				return err
			}
			return nil
		}

		// Plain directory: handle each file individually.
		if err := c.handleFiles(readdir...); err != nil {
			return err
		}

		return nil
	}

	// filter excludes skipped dirs, ignored files and anything vetoed
	// by the caller-supplied inFilter.
	filter := func(fim hugofs.FileMetaInfo) bool {
		if fim.Meta().SkipDir() {
			return false
		}
		if c.sp.IgnoreFile(fim.Meta().Filename()) {
			return false
		}
		if inFilter != nil {
			return inFilter(fim)
		}
		return true
	}

	// preHook classifies a directory (leaf/branch/plain), deduplicates
	// merged language variants and hands the result to handleDir before
	// the walker descends.
	preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
		var btype bundleDirType

		filtered := readdir[:0]
		for _, fi := range readdir {
			if filter(fi) {
				filtered = append(filtered, fi)

				if c.tracker != nil {
					// Track symlinks.
					c.tracker.addSymbolicLinkMapping(fi)
				}
			}
		}

		walkRoot := dir.Meta().GetBool(walkIsRootFileMetaKey)
		readdir = filtered

		// We merge language directories, so there can be duplicates, but they
		// will be ordered, most important first.
		var duplicates []int
		seen := make(map[string]bool)

		for i, fi := range readdir {
			if fi.IsDir() {
				continue
			}

			meta := fi.Meta()
			if walkRoot {
				// Propagate the root marker so these files get re-rendered
				// even in fast render mode.
				meta[walkIsRootFileMetaKey] = true
			}
			class := meta.Classifier()
			translationBase := meta.TranslationBaseNameWithExt()
			key := pth.Join(meta.Lang(), translationBase)

			if seen[key] {
				duplicates = append(duplicates, i)
				continue
			}
			seen[key] = true

			var thisBtype bundleDirType

			switch class {
			case files.ContentClassLeaf:
				thisBtype = bundleLeaf
			case files.ContentClassBranch:
				thisBtype = bundleBranch
			}

			// Folders with both index.md and _index.md type of files have
			// undefined behaviour and can never work.
			// The branch variant will win because of sort order, but log
			// a warning about it.
			if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype {
				c.logger.WARN.Printf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename())
				// Reclassify it so it will be handled as a content file inside the
				// section, which is in line with the <= 0.55 behaviour.
				meta["classifier"] = files.ContentClassContent
			} else if thisBtype > bundleNot {
				btype = thisBtype
			}
		}

		if len(duplicates) > 0 {
			// Delete from the end so earlier indexes stay valid.
			for i := len(duplicates) - 1; i >= 0; i-- {
				idx := duplicates[i]
				readdir = append(readdir[:idx], readdir[idx+1:]...)
			}
		}

		err := handleDir(btype, dir, path, readdir)
		if err != nil {
			return nil, err
		}

		// Leaf bundles own everything below them; partial collects stop
		// at this level.
		if btype == bundleLeaf || partial {
			return nil, filepath.SkipDir
		}

		// Keep walking.
		return readdir, nil
	}

	var postHook hugofs.WalkHook
	if c.tracker != nil {
		// NOTE(review): this hook is a no-op — both branches return
		// readdir unchanged. Looks like a leftover; consider removing.
		postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
			if c.tracker == nil {
				// Nothing to do.
				return readdir, nil
			}

			return readdir, nil
		}
	}

	// The walk function itself only propagates walker errors; all real
	// work happens in preHook.
	wfn := func(path string, info hugofs.FileMetaInfo, err error) error {
		if err != nil {
			return err
		}

		return nil
	}

	fim := fi.(hugofs.FileMetaInfo)
	// Make sure the pages in this directory gets re-rendered,
	// even in fast render mode.
	fim.Meta()[walkIsRootFileMetaKey] = true

	w := hugofs.NewWalkway(hugofs.WalkwayConfig{
		Fs:       c.fs,
		Logger:   c.logger,
		Root:     dirname,
		Info:     fim,
		HookPre:  preHook,
		HookPost: postHook,
		WalkFn:   wfn})

	return w.Walk()
}
func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error {
// Maps bundles to its language.
bundles := pageBundles{}
var contentFiles []hugofs.FileMetaInfo
for _, fim := range readdir {
if fim.IsDir() {
@ -447,9 +517,7 @@ func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error
switch meta.Classifier() {
case files.ContentClassContent:
if err := c.handleFiles(fim); err != nil {
return err
}
contentFiles = append(contentFiles, fim)
default:
if err := c.addToBundle(fim, bundleBranch, bundles); err != nil {
return err
@ -458,7 +526,12 @@ func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error
}
return c.proc.Process(bundles)
// Make sure the section is created before its pages.
if err := c.proc.Process(bundles); err != nil {
return err
}
return c.handleFiles(contentFiles...)
}
@ -508,273 +581,6 @@ func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error {
return nil
}
// pagesCollectorProcessorProvider is the processing sink used by the
// pages collector: items are queued with Process between Start and
// Wait.
type pagesCollectorProcessorProvider interface {
	Process(item interface{}) error
	Start(ctx context.Context) context.Context
	Wait() error
}
// pagesProcessor turns collected files into pages and adds them to the
// sites, fanning the work out over numWorkers goroutines.
type pagesProcessor struct {
	h  *HugoSites
	sp *source.SourceSpec

	// Intake queue of collected items (bundles/files).
	itemChan  chan interface{}
	itemGroup *errgroup.Group

	// The output Pages
	pagesChan  chan *pageState
	pagesGroup *errgroup.Group

	numWorkers int

	// When set, finished pages replace existing pages instead of being added.
	partialBuild bool
}
// Process enqueues item for one of the worker goroutines started in
// Start. It never fails directly; processing errors are reported via
// the site error handler.
func (proc *pagesProcessor) Process(item interface{}) error {
	proc.itemChan <- item
	return nil
}
// Start spins up the processing pipeline: one goroutine collecting
// finished pages from pagesChan into their sites, and numWorkers
// goroutines draining itemChan. The returned context is cancelled on
// group failure.
func (proc *pagesProcessor) Start(ctx context.Context) context.Context {
	proc.pagesChan = make(chan *pageState, proc.numWorkers)
	proc.pagesGroup, ctx = errgroup.WithContext(ctx)
	proc.itemChan = make(chan interface{}, proc.numWorkers)
	proc.itemGroup, ctx = errgroup.WithContext(ctx)
	proc.pagesGroup.Go(func() error {
		for p := range proc.pagesChan {
			s := p.s
			p.forceRender = proc.partialBuild

			if p.forceRender {
				// Partial rebuild: swap the existing page in place.
				s.replacePage(p)
			} else {
				s.addPage(p)
			}
		}
		return nil
	})
	for i := 0; i < proc.numWorkers; i++ {
		proc.itemGroup.Go(func() error {
			for item := range proc.itemChan {
				select {
				case <-proc.h.Done():
					// Build aborted; stop processing.
					return nil
				default:
					if err := proc.process(item); err != nil {
						proc.h.SendError(err)
					}
				}
			}
			return nil
		})
	}

	return ctx
}
// Wait drains the pipeline: it closes the item channel, waits for the
// workers to finish, then closes the pages channel and waits for the
// page-collecting goroutine.
func (proc *pagesProcessor) Wait() error {
	close(proc.itemChan)
	workerErr := proc.itemGroup.Wait()
	close(proc.pagesChan)
	if workerErr != nil {
		return workerErr
	}
	return proc.pagesGroup.Wait()
}
// newPageFromBundle creates a page from the bundle's header file and
// attaches its resources: bundled content files become nested pages,
// all other files become plain resources.
func (proc *pagesProcessor) newPageFromBundle(b *fileinfoBundle) (*pageState, error) {
	p, err := proc.newPageFromFi(b.header, nil)
	if err != nil {
		return nil, err
	}

	if len(b.resources) > 0 {
		resources := make(resource.Resources, len(b.resources))

		for i, rfi := range b.resources {
			meta := rfi.Meta()
			classifier := meta.Classifier()
			var r resource.Resource
			switch classifier {
			case files.ContentClassContent:
				// A content file inside the bundle becomes a page resource
				// owned by the bundle page.
				rp, err := proc.newPageFromFi(rfi, p)
				if err != nil {
					return nil, err
				}
				// Resource path is relative to the bundle directory.
				rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.Path(), p.File().Dir()))

				r = rp

			case files.ContentClassFile:
				r, err = proc.newResource(rfi, p)
				if err != nil {
					return nil, err
				}
			default:
				panic(fmt.Sprintf("invalid classifier: %q", classifier))
			}

			resources[i] = r
		}

		p.addResources(resources...)
	}

	return p, nil
}
// newPageFromFi creates a page state for the given file. owner is the
// bundle owner page for bundled content, or nil for top level pages;
// it determines the target site and is recorded as the parent.
func (proc *pagesProcessor) newPageFromFi(fim hugofs.FileMetaInfo, owner *pageState) (*pageState, error) {
	fi, err := newFileInfo(proc.sp, fim)
	if err != nil {
		return nil, err
	}

	meta := fim.Meta()

	// Bundled pages inherit their owner's site; others resolve by language.
	var s *Site
	if owner == nil {
		s = proc.getSite(meta.Lang())
	} else {
		s = owner.s
	}

	open := func() (hugio.ReadSeekCloser, error) {
		return meta.Open()
	}

	p, err := newPageWithContent(fi, s, owner != nil, open)
	if err != nil {
		return nil, err
	}
	p.parent = owner

	return p, nil
}
// newResource creates a plain (non-page) resource for a file bundled
// with the owner page, making sure it is published to all of the
// owner's output format sub paths.
func (proc *pagesProcessor) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {

	// TODO(bep) consolidate with multihost logic + clean up
	outputFormats := owner.m.outputFormats()
	seen := make(map[string]bool)
	var targetBasePaths []string

	// Make sure bundled resources are published to all of the output formats'
	// sub paths.
	for _, f := range outputFormats {
		p := f.Path
		if seen[p] {
			continue
		}
		seen[p] = true
		targetBasePaths = append(targetBasePaths, p)
	}

	meta := fim.Meta()
	r := func() (hugio.ReadSeekCloser, error) {
		return meta.Open()
	}

	// Target filename relative to the owning bundle's directory.
	target := strings.TrimPrefix(meta.Path(), owner.File().Dir())

	return owner.s.ResourceSpec.New(
		resources.ResourceSourceDescriptor{
			TargetPaths:        owner.getTargetPaths,
			OpenReadSeekCloser: r,
			FileInfo:           fim,
			RelTargetFilename:  target,
			TargetBasePaths:    targetBasePaths,
		})
}
// getSite resolves the Site for the given language code, falling back
// to the first (default) site when lang is empty or unknown.
func (proc *pagesProcessor) getSite(lang string) *Site {
	if lang == "" {
		return proc.h.Sites[0]
	}

	for _, s := range proc.h.Sites {
		if s.Lang() == lang {
			return s
		}
	}

	// Unknown language: default to the first site.
	return proc.h.Sites[0]
}
// copyFile publishes a non-content file unchanged to its target
// location for the file's language site.
func (proc *pagesProcessor) copyFile(fim hugofs.FileMetaInfo) error {
	meta := fim.Meta()
	s := proc.getSite(meta.Lang())
	f, err := meta.Open()
	if err != nil {
		return errors.Wrap(err, "copyFile: failed to open")
	}
	// Close as soon as this function returns, even if later code
	// between open and publish grows an early return.
	defer f.Close()

	target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path())

	return s.publish(&s.PathSpec.ProcessingStats.Files, target, f)
}
// process dispatches one collected item: page bundles become pages
// (one per language variant), content files become pages, and plain
// files are copied through. Errors are reported via the site error
// handler rather than returned.
func (proc *pagesProcessor) process(item interface{}) error {
	// send forwards a created page to the collector goroutine, or
	// reports the creation error.
	send := func(p *pageState, err error) {
		if err != nil {
			proc.sendError(err)
		} else {
			proc.pagesChan <- p
		}
	}

	switch v := item.(type) {
	// Page bundles mapped to their language.
	case pageBundles:
		for _, bundle := range v {
			if proc.shouldSkip(bundle.header) {
				continue
			}
			send(proc.newPageFromBundle(bundle))
		}
	case hugofs.FileMetaInfo:
		if proc.shouldSkip(v) {
			return nil
		}
		meta := v.Meta()

		classifier := meta.Classifier()
		switch classifier {
		case files.ContentClassContent:
			send(proc.newPageFromFi(v, nil))
		case files.ContentClassFile:
			proc.sendError(proc.copyFile(v))
		default:
			panic(fmt.Sprintf("invalid classifier: %q", classifier))
		}
	default:
		panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
	}

	return nil
}
// sendError forwards a non-nil error to the site collection's error
// handler; nil errors are ignored.
func (proc *pagesProcessor) sendError(err error) {
	if err != nil {
		proc.h.SendError(err)
	}
}
// shouldSkip reports whether the file belongs to a disabled language.
func (proc *pagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool {
	lang := fim.Meta().Lang()
	return proc.sp.DisabledLanguages[lang]
}
func stringSliceContains(k string, values ...string) bool {
for _, v := range values {
if k == v {

View file

@ -19,8 +19,6 @@ import (
"path/filepath"
"testing"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/source"
@ -59,17 +57,11 @@ func TestPagesCapture(t *testing.T) {
t.Run("Collect", func(t *testing.T) {
c := qt.New(t)
proc := &testPagesCollectorProcessor{}
coll := newPagesCollector(sourceSpec, loggers.NewErrorLogger(), nil, proc)
coll := newPagesCollector(sourceSpec, nil, loggers.NewErrorLogger(), nil, proc)
c.Assert(coll.Collect(), qt.IsNil)
c.Assert(len(proc.items), qt.Equals, 4)
})
t.Run("error in Wait", func(t *testing.T) {
c := qt.New(t)
coll := newPagesCollector(sourceSpec, loggers.NewErrorLogger(), nil,
&testPagesCollectorProcessor{waitErr: errors.New("failed")})
c.Assert(coll.Collect(), qt.Not(qt.IsNil))
})
}
type testPagesCollectorProcessor struct {

View file

@ -1,474 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path"
"path/filepath"
"strings"
"sync"
"github.com/gohugoio/hugo/common/maps"
radix "github.com/armon/go-radix"
"github.com/spf13/cast"
"github.com/gohugoio/hugo/resources/page"
)
// newPagesMap creates an empty pagesMap for the given site.
func newPagesMap(s *Site) *pagesMap {
	return &pagesMap{
		s: s,
		r: radix.New(),
	}
}
type pagesMap struct {
r *radix.Tree
s *Site
}
// Get returns the bucket stored for key (after normalization), or nil
// if no such bucket exists.
func (m *pagesMap) Get(key string) *pagesMapBucket {
	v, found := m.r.Get(m.cleanKey(key))
	if !found {
		return nil
	}
	return v.(*pagesMapBucket)
}
// getKey returns the radix tree key for p: the file's directory when
// the page is file backed, otherwise its sections path.
func (m *pagesMap) getKey(p *pageState) string {
	if p.File().IsZero() {
		return m.cleanKey(p.SectionsPath())
	}
	return m.cleanKey(p.File().Dir())
}
// getOrCreateHome returns the home page, creating it (and its bucket
// at "/") on first use.
func (m *pagesMap) getOrCreateHome() *pageState {
	if b, found := m.r.Get("/"); found {
		return b.(*pagesMapBucket).owner
	}

	home := m.s.newPage(page.KindHome)
	m.addBucketFor("/", home, nil)
	return home
}
// initPageMeta runs p's metadata init function exactly once, passing
// it the owning bucket. Note that only the first call can observe an
// init error: err is a fresh local on every call and the sync.Once
// body does not re-run.
func (m *pagesMap) initPageMeta(p *pageState, bucket *pagesMapBucket) error {
	var err error
	p.metaInit.Do(func() {
		if p.metaInitFn != nil {
			err = p.metaInitFn(bucket)
		}
	})
	return err
}
// initPageMetaFor initializes metadata for the bucket at tree key
// prefix: the parent's cascade values are merged in first, then the
// owner page and (for non-view buckets) every regular page and page
// resource gets its meta init run. Finally, pages excluded from this
// build are dropped, with headless pages moved to their own list.
func (m *pagesMap) initPageMetaFor(prefix string, bucket *pagesMapBucket) error {
	parentBucket := m.parentBucket(prefix)

	m.mergeCascades(bucket, parentBucket)

	if err := m.initPageMeta(bucket.owner, bucket); err != nil {
		return err
	}

	if !bucket.view {
		for _, p := range bucket.pages {
			ps := p.(*pageState)
			if err := m.initPageMeta(ps, bucket); err != nil {
				return err
			}

			// Also init any bundled content pages held as resources.
			for _, p := range ps.resources.ByType(pageResourceType) {
				if err := m.initPageMeta(p.(*pageState), bucket); err != nil {
					return err
				}
			}
		}

		// Now that the metadata is initialized (with dates, draft set etc.)
		// we can remove the pages that we for some reason should not include
		// in this build.
		tmp := bucket.pages[:0]
		for _, x := range bucket.pages {
			if m.s.shouldBuild(x) {
				if x.(*pageState).m.headless {
					bucket.headlessPages = append(bucket.headlessPages, x)
				} else {
					tmp = append(tmp, x)
				}
			}
		}
		bucket.pages = tmp
	}

	return nil
}
// createSectionIfNotExists ensures a bucket exists for the given
// section, creating the section page on demand.
func (m *pagesMap) createSectionIfNotExists(section string) {
	key := m.cleanKey(section)
	if _, found := m.r.Get(key); found {
		return
	}

	kind := m.s.kindFromSectionPath(section)
	p := m.s.newPage(kind, section)
	m.addBucketFor(key, p, nil)
}
// addBucket creates (or replaces) the bucket for p at p's tree key.
func (m *pagesMap) addBucket(p *pageState) {
	m.addBucketFor(m.getKey(p), p, nil)
}
// addBucketFor creates a bucket for p at the given tree key and
// inserts it into the radix tree, also wiring the bucket back onto
// the page. meta carries optional node metadata (e.g. taxonomy term
// info). Returns the new bucket.
func (m *pagesMap) addBucketFor(key string, p *pageState, meta map[string]interface{}) *pagesMapBucket {
	// Taxonomy nodes are "views": their pages also live in other buckets.
	var isView bool
	switch p.Kind() {
	case page.KindTaxonomy, page.KindTaxonomyTerm:
		isView = true
	}

	disabled := !m.s.isEnabled(p.Kind())

	// Preserve any cascade already attached to the page.
	var cascade map[string]interface{}
	if p.bucket != nil {
		cascade = p.bucket.cascade
	}

	bucket := &pagesMapBucket{
		owner:    p,
		view:     isView,
		cascade:  cascade,
		meta:     meta,
		disabled: disabled,
	}
	p.bucket = bucket

	m.r.Insert(key, bucket)

	return bucket
}
// addPage adds p to the map. Non-regular pages get their own bucket;
// regular pages are appended to the closest ancestor bucket. No-op
// when the regular page kind is disabled.
func (m *pagesMap) addPage(p *pageState) {
	if !p.IsPage() {
		m.addBucket(p)
		return
	}

	if !m.s.isEnabled(page.KindPage) {
		return
	}

	key := m.getKey(p)
	_, v, found := m.r.LongestPrefix(key)
	if !found {
		panic(fmt.Sprintf("[BUG] bucket with key %q not found", key))
	}

	owner := v.(*pagesMapBucket)
	owner.pages = append(owner.pages, p)
}
// assemblePageMeta initializes page metadata for every bucket in the
// tree, aborting the walk on the first error.
func (m *pagesMap) assemblePageMeta() error {
	var walkErr error

	m.r.Walk(func(s string, v interface{}) bool {
		if err := m.initPageMetaFor(s, v.(*pagesMapBucket)); err != nil {
			walkErr = err
			return true // stop walking
		}
		return false
	})

	return walkErr
}
// assembleTaxonomies builds s.Taxonomies from the configured
// taxonomies and the pages' front matter. Term pages and buckets are
// created on demand, and the taxonomy node dates are updated from
// their member pages.
func (m *pagesMap) assembleTaxonomies(s *Site) error {
	s.Taxonomies = make(TaxonomyList)

	type bucketKey struct {
		plural  string
		termKey string
	}

	// Temporary cache.
	taxonomyBuckets := make(map[bucketKey]*pagesMapBucket)

	// First pass: make sure every configured taxonomy has a node page
	// and bucket, and pick up any term buckets already in the tree.
	for singular, plural := range s.siteCfg.taxonomiesConfig {
		s.Taxonomies[plural] = make(Taxonomy)
		bkey := bucketKey{
			plural: plural,
		}

		bucket := m.Get(plural)

		if bucket == nil {
			// Create the page and bucket
			n := s.newPage(page.KindTaxonomyTerm, plural)

			key := m.cleanKey(plural)
			bucket = m.addBucketFor(key, n, nil)
			if err := m.initPageMetaFor(key, bucket); err != nil {
				return err
			}
		}

		if bucket.meta == nil {
			bucket.meta = map[string]interface{}{
				"singular": singular,
				"plural":   plural,
			}
		}

		// Add it to the temporary cache.
		taxonomyBuckets[bkey] = bucket

		// Taxonomy entries used in page front matter will be picked up later,
		// but there may be some yet to be used.
		pluralPrefix := m.cleanKey(plural) + "/"
		m.r.WalkPrefix(pluralPrefix, func(k string, v interface{}) bool {
			tb := v.(*pagesMapBucket)
			termKey := strings.TrimPrefix(k, pluralPrefix)
			if tb.meta == nil {
				tb.meta = map[string]interface{}{
					"singular": singular,
					"plural":   plural,
					"term":     tb.owner.Title(),
					"termKey":  termKey,
				}
			}
			bucket.pages = append(bucket.pages, tb.owner)
			bkey.termKey = termKey
			taxonomyBuckets[bkey] = tb

			return false
		})
	}

	// addTaxonomy registers one (term, page) membership, creating the
	// term page/bucket on first sight of the term.
	addTaxonomy := func(singular, plural, term string, weight int, p page.Page) error {
		bkey := bucketKey{
			plural: plural,
		}

		termKey := s.getTaxonomyKey(term)

		// b1 is the taxonomy node bucket, b2 the term bucket.
		b1 := taxonomyBuckets[bkey]

		var b2 *pagesMapBucket
		bkey.termKey = termKey
		b, found := taxonomyBuckets[bkey]
		if found {
			b2 = b
		} else {
			// Create the page and bucket
			n := s.newTaxonomyPage(term, plural, termKey)
			meta := map[string]interface{}{
				"singular": singular,
				"plural":   plural,
				"term":     term,
				"termKey":  termKey,
			}
			key := m.cleanKey(path.Join(plural, termKey))
			b2 = m.addBucketFor(key, n, meta)
			if err := m.initPageMetaFor(key, b2); err != nil {
				return err
			}
			b1.pages = append(b1.pages, b2.owner)
			taxonomyBuckets[bkey] = b2
		}

		w := page.NewWeightedPage(weight, p, b2.owner)

		s.Taxonomies[plural].add(termKey, w)

		// Keep node dates in sync with their newest member page.
		b1.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
		b2.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)

		return nil
	}

	// Second pass: walk every non-view bucket and register the taxonomy
	// values found in its pages' front matter.
	m.r.Walk(func(k string, v interface{}) bool {
		b := v.(*pagesMapBucket)
		if b.view {
			return false
		}

		for singular, plural := range s.siteCfg.taxonomiesConfig {
			for _, p := range b.pages {
				vals := getParam(p, plural, false)

				w := getParamToLower(p, plural+"_weight")
				weight, err := cast.ToIntE(w)
				if err != nil {
					m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.Path())
					// weight will equal zero, so let the flow continue
				}

				if vals != nil {
					if v, ok := vals.([]string); ok {
						for _, idx := range v {
							if err := addTaxonomy(singular, plural, idx, weight, p); err != nil {
								m.s.Log.ERROR.Printf("Failed to add taxonomy %q for %q: %s", plural, p.Path(), err)
							}
						}
					} else if v, ok := vals.(string); ok {
						if err := addTaxonomy(singular, plural, v, weight, p); err != nil {
							m.s.Log.ERROR.Printf("Failed to add taxonomy %q for %q: %s", plural, p.Path(), err)
						}
					} else {
						m.s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.Path())
					}
				}
			}
		}
		return false
	})

	// Finally, sort each taxonomy's weighted page lists.
	for _, plural := range s.siteCfg.taxonomiesConfig {
		for k := range s.Taxonomies[plural] {
			s.Taxonomies[plural][k].Sort()
		}
	}

	return nil
}
// cleanKey normalizes key into the canonical tree form: lower-case,
// slash-separated, with exactly one leading and no trailing slash.
func (m *pagesMap) cleanKey(key string) string {
	normalized := strings.ToLower(filepath.ToSlash(key))
	return "/" + strings.Trim(normalized, "/")
}
// mergeCascades copies cascade values from the parent bucket b2 into
// b1, without overwriting keys already set in b1.
func (m *pagesMap) mergeCascades(b1, b2 *pagesMapBucket) {
	if b1.cascade == nil {
		b1.cascade = make(maps.Params)
	}
	if b2 == nil || b2.cascade == nil {
		return
	}
	for k, v := range b2.cascade {
		if _, exists := b1.cascade[k]; !exists {
			b1.cascade[k] = v
		}
	}
}
// parentBucket returns the closest ancestor bucket of prefix, or nil
// for the root ("/"). It panics when no ancestor exists, which would
// indicate a broken tree invariant.
func (m *pagesMap) parentBucket(prefix string) *pagesMapBucket {
	// The root has no parent.
	if prefix == "/" {
		return nil
	}
	parentKey := path.Dir(prefix)
	_, v, found := m.r.LongestPrefix(parentKey)
	if !found {
		panic(fmt.Sprintf("[BUG] parent bucket not found for %q", prefix))
	}
	return v.(*pagesMapBucket)
}
// withEveryPage invokes f for every page in the map: each bucket's
// owner page and, for non-view buckets, every regular page in it.
func (m *pagesMap) withEveryPage(f func(p *pageState)) {
	m.r.Walk(func(k string, v interface{}) bool {
		bucket := v.(*pagesMapBucket)
		f(bucket.owner)
		if bucket.view {
			return false
		}
		for _, p := range bucket.pages {
			f(p.(*pageState))
		}
		return false
	})
}
// pagesMapBucket holds the pages belonging to one branch node
// (section, home or taxonomy node) in the pages radix tree.
type pagesMapBucket struct {
	// Set if the pages in this bucket is also present in another bucket
	// (taxonomy views).
	view bool

	// Some additional metadata attached to this node (e.g. taxonomy
	// singular/plural/term info).
	meta map[string]interface{}

	// Cascading front matter, merged from ancestor buckets.
	cascade map[string]interface{}

	owner *pageState // The branch node

	// When disableKinds is enabled for this node.
	disabled bool

	// Used to navigate the sections tree
	parent         *pagesMapBucket
	bucketSections []*pagesMapBucket

	// Regular pages, lazily sorted (see getPages).
	pagesInit sync.Once
	pages     page.Pages

	// Pages with headless set; kept out of the pages list.
	headlessPages page.Pages

	// Lazily computed union of pages and sub section owner pages.
	pagesAndSectionsInit sync.Once
	pagesAndSections     page.Pages

	// Lazily computed owner pages of the sub sections.
	sectionsInit sync.Once
	sections     page.Pages
}
// isEmpty reports whether this bucket holds no pages, headless pages
// or sub sections.
func (b *pagesMapBucket) isEmpty() bool {
	return len(b.pages)+len(b.headlessPages)+len(b.bucketSections) == 0
}
// getPages returns the bucket's regular pages, sorted once (lazily) by
// the default page sort order.
func (b *pagesMapBucket) getPages() page.Pages {
	b.pagesInit.Do(func() { page.SortByDefault(b.pages) })
	return b.pages
}
// getPagesAndSections returns the bucket's regular pages plus the
// owner pages of its immediate sub sections, sorted by the default
// order. The result is computed once and cached.
func (b *pagesMapBucket) getPagesAndSections() page.Pages {
	b.pagesAndSectionsInit.Do(func() {
		all := make(page.Pages, 0, len(b.pages)+len(b.bucketSections))
		all = append(all, b.getPages()...)
		for _, section := range b.bucketSections {
			all = append(all, section.owner)
		}
		page.SortByDefault(all)
		b.pagesAndSections = all
	})
	return b.pagesAndSections
}
// getSections returns the owner pages of this bucket's immediate sub
// sections, sorted by the default order. Computed once and cached.
func (b *pagesMapBucket) getSections() page.Pages {
	b.sectionsInit.Do(func() {
		sections := make(page.Pages, len(b.bucketSections))
		for i, section := range b.bucketSections {
			sections[i] = section.owner
		}
		page.SortByDefault(sections)
		b.sections = sections
	})
	return b.sections
}

198
hugolib/pages_process.go Normal file
View file

@ -0,0 +1,198 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"context"
"fmt"
"path/filepath"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/pkg/errors"
"golang.org/x/sync/errgroup"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/hugofs"
)
// newPagesProcessor creates a pagesProcessor with one
// sitePagesProcessor per configured site/language.
func newPagesProcessor(h *HugoSites, sp *source.SourceSpec) *pagesProcessor {
	procs := make(map[string]pagesCollectorProcessorProvider, len(h.Sites))
	for _, s := range h.Sites {
		procs[s.Lang()] = &sitePagesProcessor{
			m:           s.pageMap,
			errorSender: s.h,
			itemChan:    make(chan interface{}, config.GetNumWorkerMultiplier()*2),
		}
	}
	return &pagesProcessor{procs: procs}
}
// pagesCollectorProcessorProvider is the processing sink used by the
// pages collector: items are queued with Process between Start and
// Wait.
type pagesCollectorProcessorProvider interface {
	Process(item interface{}) error
	Start(ctx context.Context) context.Context
	Wait() error
}
// pagesProcessor fans collected items out to one processor per
// language/Site.
type pagesProcessor struct {
	// Per language/Site
	procs map[string]pagesCollectorProcessorProvider
}
// Process routes item to the processor responsible for the item's
// language. Previously the per-language Process errors were silently
// discarded; they are now propagated to the caller.
func (proc *pagesProcessor) Process(item interface{}) error {
	switch v := item.(type) {
	// Page bundles mapped to their language.
	case pageBundles:
		for _, vv := range v {
			if err := proc.getProcFromFi(vv.header).Process(vv); err != nil {
				return err
			}
		}
	case hugofs.FileMetaInfo:
		return proc.getProcFromFi(v).Process(v)
	default:
		panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
	}
	return nil
}
// Start starts every per-language processor, threading the context
// through each so cancellation propagates.
func (proc *pagesProcessor) Start(ctx context.Context) context.Context {
	for _, p := range proc.procs {
		ctx = p.Start(ctx)
	}
	return ctx
}
// Wait shuts down all per-language processors and blocks until their
// queues are drained.
// NOTE(review): if several processors fail, only the error from the
// last one iterated is returned (map order is random) — confirm that
// losing the earlier errors is acceptable.
func (proc *pagesProcessor) Wait() error {
	var err error
	for _, p := range proc.procs {
		if e := p.Wait(); e != nil {
			err = e
		}
	}
	return err
}
// getProcFromFi returns the processor registered for the file's
// language, or a no-op processor when the language is unknown.
func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider {
	lang := fi.Meta().Lang()
	if p, found := proc.procs[lang]; found {
		return p
	}
	return defaultPageProcessor
}
// nopPageProcessor is a no-op pagesCollectorProcessorProvider, used as
// the fallback when no processor is registered for a language.
type nopPageProcessor int

// Process discards the item.
func (nopPageProcessor) Process(item interface{}) error { return nil }

// Start ignores the given context and hands back a fresh background one.
func (nopPageProcessor) Start(ctx context.Context) context.Context {
	return context.Background()
}

// Wait returns immediately; there is never anything to wait for.
func (nopPageProcessor) Wait() error { return nil }

// defaultPageProcessor is the shared fallback processor instance.
var defaultPageProcessor = new(nopPageProcessor)
// sitePagesProcessor processes collected items for a single site,
// serially, on a dedicated goroutine started in Start.
type sitePagesProcessor struct {
	m           *pageMap
	errorSender herrors.ErrorSender

	// Intake queue drained by the worker goroutine.
	itemChan  chan interface{}
	itemGroup *errgroup.Group
}
// Process enqueues item for asynchronous handling by the goroutine started
// in Start. It never fails itself; processing errors surface via Wait.
// Note: calling Process after Wait has closed itemChan panics (send on a
// closed channel).
func (p *sitePagesProcessor) Process(item interface{}) error {
	p.itemChan <- item
	return nil
}
// Start launches the single consumer goroutine that drains itemChan,
// processing each item in order. The returned context is cancelled on the
// first processing error (errgroup semantics).
func (p *sitePagesProcessor) Start(ctx context.Context) context.Context {
	var gctx context.Context
	p.itemGroup, gctx = errgroup.WithContext(ctx)
	drain := func() error {
		for item := range p.itemChan {
			if err := p.doProcess(item); err != nil {
				return err
			}
		}
		return nil
	}
	p.itemGroup.Go(drain)
	return gctx
}
// Wait closes the item channel — which terminates the consumer loop started
// in Start once the remaining items are drained — and then blocks until that
// goroutine finishes, returning its error, if any. Must not race with
// Process (a send on the closed channel would panic).
func (p *sitePagesProcessor) Wait() error {
	close(p.itemChan)
	return p.itemGroup.Wait()
}
// copyFile publishes a non-content file from the content directory verbatim
// to its language-specific target path.
//
// Fix: the defer of f.Close() was separated from the Open error check by
// unrelated work; it now sits in the idiomatic position immediately after
// the check, so no code path can slip in between and leak the handle.
func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error {
	meta := fim.Meta()
	f, err := meta.Open()
	if err != nil {
		return errors.Wrap(err, "copyFile: failed to open")
	}
	defer f.Close()
	s := p.m.s
	target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path())
	return s.publish(&s.PathSpec.ProcessingStats.Files, target, f)
}
// doProcess handles one collected item: bundles and content files are added
// to the Site's page map, plain files are copied to the publish directory,
// and files in disabled languages are skipped. Unknown item types or
// classifiers indicate a programmer error and panic.
func (p *sitePagesProcessor) doProcess(item interface{}) error {
	switch v := item.(type) {
	case *fileinfoBundle:
		return p.m.AddFilesBundle(v.header, v.resources...)
	case hugofs.FileMetaInfo:
		if p.shouldSkip(v) {
			return nil
		}
		switch classifier := v.Meta().Classifier(); classifier {
		case files.ContentClassContent:
			return p.m.AddFilesBundle(v)
		case files.ContentClassFile:
			return p.copyFile(v)
		default:
			panic(fmt.Sprintf("invalid classifier: %q", classifier))
		}
	default:
		panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
	}
}
// shouldSkip reports whether the file belongs to a disabled language and
// should therefore be ignored.
func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool {
	// TODO(ep) unify
	return p.m.s.SourceSpec.DisabledLanguages[fim.Meta().Lang()]
}

View file

@ -100,10 +100,10 @@ type Site struct {
*PageCollections
Taxonomies TaxonomyList
taxonomies TaxonomyList
Sections Taxonomy
Info SiteInfo
Info *SiteInfo
language *langs.Language
@ -163,9 +163,28 @@ type Site struct {
init *siteInit
}
// Taxonomies returns the site's assembled taxonomies, triggering the lazy
// taxonomy assembly on first access.
func (s *Site) Taxonomies() TaxonomyList {
	s.init.taxonomies.Do()
	return s.taxonomies
}
// taxonomiesConfig maps singular taxonomy names to their plural form as
// configured for the site.
type taxonomiesConfig map[string]string

// Values returns the configured taxonomies as viewNames, sorted by plural
// name so the ordering is deterministic despite map iteration.
func (t taxonomiesConfig) Values() []viewName {
	var vals []viewName
	for singular, plural := range t {
		vals = append(vals, viewName{singular: singular, plural: plural})
	}
	sort.Slice(vals, func(a, b int) bool {
		return vals[a].plural < vals[b].plural
	})
	return vals
}
type siteConfigHolder struct {
sitemap config.Sitemap
taxonomiesConfig map[string]string
taxonomiesConfig taxonomiesConfig
timeout time.Duration
hasCJKLanguage bool
enableEmoji bool
@ -176,12 +195,14 @@ type siteInit struct {
prevNext *lazy.Init
prevNextInSection *lazy.Init
menus *lazy.Init
taxonomies *lazy.Init
}
func (init *siteInit) Reset() {
init.prevNext.Reset()
init.prevNextInSection.Reset()
init.menus.Reset()
init.taxonomies.Reset()
}
func (s *Site) initInit(init *lazy.Init, pctx pageContext) bool {
@ -198,65 +219,87 @@ func (s *Site) prepareInits() {
var init lazy.Init
s.init.prevNext = init.Branch(func() (interface{}, error) {
regularPages := s.findWorkPagesByKind(page.KindPage)
regularPages := s.RegularPages()
for i, p := range regularPages {
if p.posNextPrev == nil {
np, ok := p.(nextPrevProvider)
if !ok {
continue
}
p.posNextPrev.nextPage = nil
p.posNextPrev.prevPage = nil
pos := np.getNextPrev()
if pos == nil {
continue
}
pos.nextPage = nil
pos.prevPage = nil
if i > 0 {
p.posNextPrev.nextPage = regularPages[i-1]
pos.nextPage = regularPages[i-1]
}
if i < len(regularPages)-1 {
p.posNextPrev.prevPage = regularPages[i+1]
pos.prevPage = regularPages[i+1]
}
}
return nil, nil
})
s.init.prevNextInSection = init.Branch(func() (interface{}, error) {
var rootSection []int
// TODO(bep) cm attach this to the bucket.
for i, p1 := range s.workAllPages {
if p1.IsPage() && p1.Section() == "" {
rootSection = append(rootSection, i)
}
if p1.IsSection() {
sectionPages := p1.RegularPages()
for i, p2 := range sectionPages {
p2s := p2.(*pageState)
if p2s.posNextPrevSection == nil {
continue
}
p2s.posNextPrevSection.nextPage = nil
p2s.posNextPrevSection.prevPage = nil
var sections page.Pages
s.home.treeRef.m.collectSectionsRecursiveIncludingSelf(s.home.treeRef.key, func(n *contentNode) {
sections = append(sections, n.p)
})
if i > 0 {
p2s.posNextPrevSection.nextPage = sectionPages[i-1]
}
setNextPrev := func(pas page.Pages) {
for i, p := range pas {
np, ok := p.(nextPrevInSectionProvider)
if !ok {
continue
}
if i < len(sectionPages)-1 {
p2s.posNextPrevSection.prevPage = sectionPages[i+1]
}
pos := np.getNextPrevInSection()
if pos == nil {
continue
}
pos.nextPage = nil
pos.prevPage = nil
if i > 0 {
pos.nextPage = pas[i-1]
}
if i < len(pas)-1 {
pos.prevPage = pas[i+1]
}
}
}
for i, j := range rootSection {
p := s.workAllPages[j]
if i > 0 {
p.posNextPrevSection.nextPage = s.workAllPages[rootSection[i-1]]
}
for _, sect := range sections {
treeRef := sect.(treeRefProvider).getTreeRef()
if i < len(rootSection)-1 {
p.posNextPrevSection.prevPage = s.workAllPages[rootSection[i+1]]
}
var pas page.Pages
treeRef.m.collectPages(treeRef.key+cmBranchSeparator, func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
setNextPrev(pas)
}
// The root section only goes one level down.
treeRef := s.home.getTreeRef()
var pas page.Pages
treeRef.m.collectPages(treeRef.key+cmBranchSeparator, func(c *contentNode) {
pas = append(pas, c.p)
})
page.SortByDefault(pas)
setNextPrev(pas)
return nil, nil
})
@ -265,6 +308,11 @@ func (s *Site) prepareInits() {
return nil, nil
})
s.init.taxonomies = init.Branch(func() (interface{}, error) {
err := s.pageMap.assembleTaxonomies()
return nil, err
})
}
type siteRenderingContext struct {
@ -279,14 +327,15 @@ func (s *Site) Menus() navigation.Menus {
func (s *Site) initRenderFormats() {
formatSet := make(map[string]bool)
formats := output.Formats{}
for _, p := range s.workAllPages {
for _, f := range p.m.configuredOutputFormats {
s.pageMap.pageTrees.WalkRenderable(func(s string, n *contentNode) bool {
for _, f := range n.p.m.configuredOutputFormats {
if !formatSet[f.Name] {
formats = append(formats, f)
formatSet[f.Name] = true
}
}
}
return false
})
// Add the per kind configured output formats
for _, kind := range allKindsInPages {
@ -345,8 +394,6 @@ func (s *Site) reset() *Site {
// newSite creates a new site with the given configuration.
func newSite(cfg deps.DepsCfg) (*Site, error) {
c := newPageCollections()
if cfg.Language == nil {
cfg.Language = langs.NewDefaultLanguage(cfg.Cfg)
}
@ -385,6 +432,17 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
return nil, err
}
if disabledKinds[kindRSS] {
// Legacy
tmp := siteOutputFormatsConfig[:0]
for _, x := range siteOutputFormatsConfig {
if !strings.EqualFold(x.Name, "rss") {
tmp = append(tmp, x)
}
}
siteOutputFormatsConfig = tmp
}
outputFormats, err := createSiteOutputFormats(siteOutputFormatsConfig, cfg.Language)
if err != nil {
return nil, err
@ -435,18 +493,23 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
}
s := &Site{
PageCollections: c,
language: cfg.Language,
disabledKinds: disabledKinds,
titleFunc: titleFunc,
relatedDocsHandler: page.NewRelatedDocsHandler(relatedContentConfig),
outputFormats: outputFormats,
rc: &siteRenderingContext{output.HTMLFormat},
outputFormatsConfig: siteOutputFormatsConfig,
mediaTypesConfig: siteMediaTypesConfig,
frontmatterHandler: frontMatterHandler,
language: cfg.Language,
disabledKinds: disabledKinds,
outputFormats: outputFormats,
outputFormatsConfig: siteOutputFormatsConfig,
mediaTypesConfig: siteMediaTypesConfig,
enableInlineShortcodes: cfg.Language.GetBool("enableInlineShortcodes"),
siteCfg: siteConfig,
titleFunc: titleFunc,
rc: &siteRenderingContext{output.HTMLFormat},
frontmatterHandler: frontMatterHandler,
relatedDocsHandler: page.NewRelatedDocsHandler(relatedContentConfig),
}
s.prepareInits()
@ -595,7 +658,7 @@ func (s *SiteInfo) Menus() navigation.Menus {
// TODO(bep) type
func (s *SiteInfo) Taxonomies() interface{} {
return s.s.Taxonomies
return s.s.Taxonomies()
}
func (s *SiteInfo) Params() maps.Params {
@ -734,7 +797,7 @@ func (s *siteRefLinker) refLink(ref string, source interface{}, relative bool, o
if refURL.Path != "" {
var err error
target, err = s.s.getPageNew(p, refURL.Path)
target, err = s.s.getPageRef(p, refURL.Path)
var pos text.Position
if err != nil || target == nil {
if p, ok := source.(text.Positioner); ok {
@ -988,7 +1051,7 @@ func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) erro
OutputFormats: site.outputFormatsConfig,
}
site.Deps, err = first.Deps.ForLanguage(depsCfg, func(d *deps.Deps) error {
d.Site = &site.Info
d.Site = site.Info
return nil
})
if err != nil {
@ -1189,7 +1252,7 @@ func (s *Site) initializeSiteInfo() error {
}
}
s.Info = SiteInfo{
s.Info = &SiteInfo{
title: lang.GetString("title"),
Author: lang.GetStringMap("author"),
Social: lang.GetStringMapString("social"),
@ -1231,11 +1294,17 @@ func (s *Site) eventToIdentity(e fsnotify.Event) (identity.PathIdentity, bool) {
func (s *Site) readAndProcessContent(filenames ...string) error {
sourceSpec := source.NewSourceSpec(s.PathSpec, s.BaseFs.Content.Fs)
proc := newPagesProcessor(s.h, sourceSpec, len(filenames) > 0)
proc := newPagesProcessor(s.h, sourceSpec)
c := newPagesCollector(sourceSpec, s.Log, s.h.ContentChanges, proc, filenames...)
c := newPagesCollector(sourceSpec, s.h.content, s.Log, s.h.ContentChanges, proc, filenames...)
return c.Collect()
if err := c.Collect(); err != nil {
return err
}
s.h.content = newPageMaps(s.h)
return nil
}
func (s *Site) getMenusFromConfig() navigation.Menus {
@ -1309,35 +1378,45 @@ func (s *Site) assembleMenus() {
sectionPagesMenu := s.Info.sectionPagesMenu
if sectionPagesMenu != "" {
for _, p := range s.workAllPages {
if p.Kind() == page.KindSection {
// From Hugo 0.22 we have nested sections, but until we get a
// feel of how that would work in this setting, let us keep
// this menu for the top level only.
id := p.Section()
if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
continue
}
me := navigation.MenuEntry{Identifier: id,
Name: p.LinkTitle(),
Weight: p.Weight(),
Page: p}
flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
s.pageMap.sections.Walk(func(s string, v interface{}) bool {
p := v.(*contentNode).p
if p.IsHome() {
return false
}
}
// From Hugo 0.22 we have nested sections, but until we get a
// feel of how that would work in this setting, let us keep
// this menu for the top level only.
id := p.Section()
if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
return false
}
me := navigation.MenuEntry{Identifier: id,
Name: p.LinkTitle(),
Weight: p.Weight(),
Page: p}
flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
return false
})
}
// Add menu entries provided by pages
for _, p := range s.workAllPages {
s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool {
p := n.p
for name, me := range p.pageMenus.menus() {
if _, ok := flat[twoD{name, me.KeyName()}]; ok {
s.SendError(p.wrapError(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name)))
err := p.wrapError(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))
s.Log.WARN.Println(err)
continue
}
flat[twoD{name, me.KeyName()}] = me
}
}
return false
})
// Create Children Menus First
for _, e := range flat {
@ -1410,15 +1489,17 @@ func (s *Site) resetBuildState(sourceChanged bool) {
s.init.Reset()
if sourceChanged {
s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
for _, p := range s.rawAllPages {
s.PageCollections = newPageCollections(s.pageMap)
s.pageMap.withEveryBundlePage(func(p *pageState) bool {
p.pagePages = &pagePages{}
p.parent = nil
p.Scratcher = maps.NewScratcher()
}
return false
})
} else {
s.pagesMap.withEveryPage(func(p *pageState) {
s.pageMap.withEveryBundlePage(func(p *pageState) bool {
p.Scratcher = maps.NewScratcher()
return false
})
}
}
@ -1613,6 +1694,7 @@ func (s *Site) kindFromFileInfoOrSections(fi *fileInfo, sections []string) strin
return s.kindFromSections(sections)
}
return page.KindPage
}
@ -1640,26 +1722,21 @@ func (s *Site) kindFromSectionPath(sectionPath string) string {
return page.KindSection
}
func (s *Site) newTaxonomyPage(title string, sections ...string) *pageState {
p, err := newPageFromMeta(
map[string]interface{}{"title": title},
&pageMeta{
s: s,
kind: page.KindTaxonomy,
sections: sections,
})
func (s *Site) newPage(
n *contentNode,
parentbBucket *pagesMapBucket,
kind, title string,
sections ...string) *pageState {
if err != nil {
panic(err)
m := map[string]interface{}{}
if title != "" {
m["title"] = title
}
return p
}
func (s *Site) newPage(kind string, sections ...string) *pageState {
p, err := newPageFromMeta(
map[string]interface{}{},
n,
parentbBucket,
m,
&pageMeta{
s: s,
kind: kind,

View file

@ -379,6 +379,29 @@ func TestBenchmarkSiteNew(b *testing.T) {
}
}
func TestBenchmarkSiteDeepContentEdit(t *testing.T) {
b := getBenchmarkSiteDeepContent(t).Running()
b.Build(BuildCfg{})
p := b.H.Sites[0].RegularPages()[12]
b.EditFiles(p.File().Filename(), fmt.Sprintf(`---
title: %s
---
Edited!!`, p.Title()))
counters := &testCounters{}
b.Build(BuildCfg{testCounters: counters})
// We currently rebuild all the language versions of the same content file.
// We could probably optimize that case, but it's not trivial.
b.Assert(int(counters.contentRenderCounter), qt.Equals, 4)
b.AssertFileContent("public"+p.RelPermalink()+"index.html", "Edited!!")
}
func BenchmarkSiteNew(b *testing.B) {
rnd := rand.New(rand.NewSource(32))
benchmarks := getBenchmarkSiteNewTestCases()

View file

@ -23,24 +23,35 @@ import (
)
func createDefaultOutputFormats(allFormats output.Formats, cfg config.Provider) map[string]output.Formats {
rssOut, _ := allFormats.GetByName(output.RSSFormat.Name)
rssOut, rssFound := allFormats.GetByName(output.RSSFormat.Name)
htmlOut, _ := allFormats.GetByName(output.HTMLFormat.Name)
robotsOut, _ := allFormats.GetByName(output.RobotsTxtFormat.Name)
sitemapOut, _ := allFormats.GetByName(output.SitemapFormat.Name)
return map[string]output.Formats{
defaultListTypes := output.Formats{htmlOut}
if rssFound {
defaultListTypes = append(defaultListTypes, rssOut)
}
m := map[string]output.Formats{
page.KindPage: {htmlOut},
page.KindHome: {htmlOut, rssOut},
page.KindSection: {htmlOut, rssOut},
page.KindTaxonomy: {htmlOut, rssOut},
page.KindTaxonomyTerm: {htmlOut, rssOut},
page.KindHome: defaultListTypes,
page.KindSection: defaultListTypes,
page.KindTaxonomy: defaultListTypes,
page.KindTaxonomyTerm: defaultListTypes,
// Below are for consistency. They are currently not used during rendering.
kindRSS: {rssOut},
kindSitemap: {sitemapOut},
kindRobotsTXT: {robotsOut},
kind404: {htmlOut},
}
// May be disabled
if rssFound {
m[kindRSS] = output.Formats{rssOut}
}
return m
}
func createSiteOutputFormats(allFormats output.Formats, cfg config.Provider) (map[string]output.Formats, error) {

View file

@ -77,22 +77,17 @@ func (s *Site) renderPages(ctx *siteRenderContext) error {
cfg := ctx.cfg
if !cfg.PartialReRender && ctx.outIdx == 0 && len(s.headlessPages) > 0 {
wg.Add(1)
go headlessPagesPublisher(s, wg)
}
L:
for _, page := range s.workAllPages {
if cfg.shouldRender(page) {
s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
if cfg.shouldRender(n.p) {
select {
case <-s.h.Done():
break L
return true
default:
pages <- page
pages <- n.p
}
}
}
return false
})
close(pages)
@ -107,15 +102,6 @@ L:
return nil
}
func headlessPagesPublisher(s *Site, wg *sync.WaitGroup) {
defer wg.Done()
for _, p := range s.headlessPages {
if err := p.renderResources(); err != nil {
s.SendError(p.errorf(err, "failed to render page resources"))
}
}
}
func pageRenderer(
ctx *siteRenderContext,
s *Site,
@ -126,15 +112,15 @@ func pageRenderer(
defer wg.Done()
for p := range pages {
f := p.outputFormat()
// TODO(bep) get rid of this odd construct. RSS is an output format.
if f.Name == "RSS" && !s.isEnabled(kindRSS) {
continue
if p.m.buildConfig.PublishResources {
if err := p.renderResources(); err != nil {
s.SendError(p.errorf(err, "failed to render page resources"))
continue
}
}
if err := p.renderResources(); err != nil {
s.SendError(p.errorf(err, "failed to render page resources"))
if !p.render {
// Nothing more to do for this page.
continue
}
@ -145,7 +131,7 @@ func pageRenderer(
}
if !found {
s.logMissingLayout("", p.Kind(), f.Name)
s.logMissingLayout("", p.Kind(), p.f.Name)
continue
}
@ -235,10 +221,6 @@ func (s *Site) renderPaginator(p *pageState, templ tpl.Template) error {
}
func (s *Site) render404() error {
if !s.isEnabled(kind404) {
return nil
}
p, err := newPageStandalone(&pageMeta{
s: s,
kind: kind404,
@ -253,6 +235,10 @@ func (s *Site) render404() error {
return err
}
if !p.render {
return nil
}
var d output.LayoutDescriptor
d.Kind = kind404
@ -274,10 +260,6 @@ func (s *Site) render404() error {
}
func (s *Site) renderSitemap() error {
if !s.isEnabled(kindSitemap) {
return nil
}
p, err := newPageStandalone(&pageMeta{
s: s,
kind: kindSitemap,
@ -291,6 +273,10 @@ func (s *Site) renderSitemap() error {
return err
}
if !p.render {
return nil
}
targetPath := p.targetPaths().TargetFilename
if targetPath == "" {
@ -303,10 +289,6 @@ func (s *Site) renderSitemap() error {
}
func (s *Site) renderRobotsTXT() error {
if !s.isEnabled(kindRobotsTXT) {
return nil
}
if !s.Cfg.GetBool("enableRobotsTXT") {
return nil
}
@ -324,6 +306,10 @@ func (s *Site) renderRobotsTXT() error {
return err
}
if !p.render {
return nil
}
templ := s.lookupLayouts("robots.txt", "_default/robots.txt", "_internal/_default/robots.txt")
return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "Robots Txt", p.targetPaths().TargetFilename, p, templ)
@ -332,15 +318,16 @@ func (s *Site) renderRobotsTXT() error {
// renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error {
for _, p := range s.workAllPages {
var err error
s.pageMap.pageTrees.WalkRenderable(func(ss string, n *contentNode) bool {
p := n.p
if len(p.Aliases()) == 0 {
continue
return false
}
for _, of := range p.OutputFormats() {
if !of.Format.IsHTML {
continue
return false
}
plink := of.Permalink()
@ -372,14 +359,16 @@ func (s *Site) renderAliases() error {
a = path.Join(lang, a)
}
if err := s.writeDestAlias(a, plink, f, p); err != nil {
return err
err = s.writeDestAlias(a, plink, f, p)
if err != nil {
return true
}
}
}
}
return false
})
return nil
return err
}
// renderMainLanguageRedirect creates a redirect to the main language home,

View file

@ -303,7 +303,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
c := qt.New(t)
sections := strings.Split(test.sections, ",")
p := s.getPage(page.KindSection, sections...)
c.Assert(p, qt.Not(qt.IsNil))
c.Assert(p, qt.Not(qt.IsNil), qt.Commentf(fmt.Sprint(sections)))
if p.Pages() != nil {
c.Assert(p.Data().(page.Data).Pages(), deepEqualsPages, p.Pages())

View file

@ -905,16 +905,16 @@ func TestWeightedTaxonomies(t *testing.T) {
writeSourcesToSource(t, "content", fs, sources...)
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
if s.Taxonomies["tags"]["a"][0].Page.Title() != "foo" {
t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title())
if s.Taxonomies()["tags"]["a"][0].Page.Title() != "foo" {
t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies()["tags"]["a"][0].Page.Title())
}
if s.Taxonomies["categories"]["d"][0].Page.Title() != "bar" {
t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.Title())
if s.Taxonomies()["categories"]["d"][0].Page.Title() != "bar" {
t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies()["categories"]["d"][0].Page.Title())
}
if s.Taxonomies["categories"]["e"][0].Page.Title() != "bza" {
t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.Title())
if s.Taxonomies()["categories"]["e"][0].Page.Title() != "bza" {
t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies()["categories"]["e"][0].Page.Title())
}
}
@ -1008,10 +1008,13 @@ func TestRefLinking(t *testing.T) {
//test empty link, as well as fragment only link
{"", "", true, ""},
} {
checkLinkCase(site, test.link, currentPage, test.relative, test.outputFormat, test.expected, t, i)
//make sure fragment links are also handled
checkLinkCase(site, test.link+"#intro", currentPage, test.relative, test.outputFormat, test.expected+"#intro", t, i)
t.Run(fmt.Sprint(i), func(t *testing.T) {
checkLinkCase(site, test.link, currentPage, test.relative, test.outputFormat, test.expected, t, i)
//make sure fragment links are also handled
checkLinkCase(site, test.link+"#intro", currentPage, test.relative, test.outputFormat, test.expected+"#intro", t, i)
})
}
// TODO: and then the failure cases.

View file

@ -50,7 +50,7 @@ YAML frontmatter with tags and categories taxonomy.`
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
st := make([]string, 0)
for _, t := range s.Taxonomies["tags"].ByCount() {
for _, t := range s.Taxonomies()["tags"].ByCount() {
st = append(st, t.Page().Title()+":"+t.Name)
}
@ -166,9 +166,10 @@ permalinkeds:
}
for taxonomy, count := range taxonomyTermPageCounts {
msg := qt.Commentf(taxonomy)
term := s.getPage(page.KindTaxonomyTerm, taxonomy)
b.Assert(term, qt.Not(qt.IsNil))
b.Assert(len(term.Pages()), qt.Equals, count, qt.Commentf(taxonomy))
b.Assert(term, qt.Not(qt.IsNil), msg)
b.Assert(len(term.Pages()), qt.Equals, count, msg)
for _, p := range term.Pages() {
b.Assert(p.Kind(), qt.Equals, page.KindTaxonomy)
@ -258,8 +259,18 @@ title: "This is S3s"
s := b.H.Sites[0]
ta := s.findPagesByKind(page.KindTaxonomy)
te := s.findPagesByKind(page.KindTaxonomyTerm)
filterbyKind := func(kind string) page.Pages {
var pages page.Pages
for _, p := range s.Pages() {
if p.Kind() == kind {
pages = append(pages, p)
}
}
return pages
}
ta := filterbyKind(page.KindTaxonomy)
te := filterbyKind(page.KindTaxonomyTerm)
b.Assert(len(te), qt.Equals, 4)
b.Assert(len(ta), qt.Equals, 7)
@ -353,9 +364,6 @@ categories: ["regular"]
}
// See https://github.com/gohugoio/hugo/issues/6222
// We need to revisit this once we figure out what to do with the
// draft etc _index pages, but for now we need to avoid the crash.
func TestTaxonomiesIndexDraft(t *testing.T) {
t.Parallel()
@ -366,9 +374,18 @@ title: "The Categories"
draft: true
---
This is the invisible content.
Content.
`)
`,
"page.md", `---
title: "The Page"
categories: ["cool"]
---
Content.
`,
)
b.WithTemplates("index.html", `
{{ range .Site.Pages }}
@ -378,7 +395,145 @@ This is the invisible content.
b.Build(BuildCfg{})
// We publish the index page, but the content will be empty.
b.AssertFileContent("public/index.html", " /categories/|The Categories|0||")
b.AssertFileContentFn("public/index.html", func(s string) bool {
return !strings.Contains(s, "categories")
})
}
// https://github.com/gohugoio/hugo/issues/6173
func TestTaxonomiesWithBundledResources(t *testing.T) {
b := newTestSitesBuilder(t)
b.WithTemplates("_default/list.html", `
List {{ .Title }}:
{{ range .Resources }}
Resource: {{ .RelPermalink }}|{{ .MediaType }}
{{ end }}
`)
b.WithContent("p1.md", `---
title: Page
categories: ["funny"]
---
`,
"categories/_index.md", "---\ntitle: Categories Page\n---",
"categories/data.json", "Category data",
"categories/funny/_index.md", "---\ntitle: Funnny Category\n---",
"categories/funny/funnydata.json", "Category funny data",
)
b.Build(BuildCfg{})
b.AssertFileContent("public/categories/index.html", `Resource: /categories/data.json|application/json`)
b.AssertFileContent("public/categories/funny/index.html", `Resource: /categories/funny/funnydata.json|application/json`)
}
func TestTaxonomiesRemoveOne(t *testing.T) {
b := newTestSitesBuilder(t).Running()
b.WithTemplates("index.html", `
{{ $cats := .Site.Taxonomies.categories.cats }}
{{ if $cats }}
Len cats: {{ len $cats }}
{{ range $cats }}
Cats:|{{ .Page.RelPermalink }}|
{{ end }}
{{ end }}
{{ $funny := .Site.Taxonomies.categories.funny }}
{{ if $funny }}
Len funny: {{ len $funny }}
{{ range $funny }}
Funny:|{{ .Page.RelPermalink }}|
{{ end }}
{{ end }}
`)
b.WithContent("p1.md", `---
title: Page
categories: ["funny", "cats"]
---
`, "p2.md", `---
title: Page2
categories: ["funny", "cats"]
---
`,
)
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", `
Len cats: 2
Len funny: 2
Cats:|/p1/|
Cats:|/p2/|
Funny:|/p1/|
Funny:|/p2/|`)
// Remove one category from one of the pages.
b.EditFiles("content/p1.md", `---
title: Page
categories: ["funny"]
---
`)
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", `
Len cats: 1
Len funny: 2
Cats:|/p2/|
Funny:|/p1/|
Funny:|/p2/|`)
}
//https://github.com/gohugoio/hugo/issues/6590
func TestTaxonomiesListPages(t *testing.T) {
b := newTestSitesBuilder(t)
b.WithTemplates("_default/list.html", `
{{ template "print-taxo" "categories.cats" }}
{{ template "print-taxo" "categories.funny" }}
{{ define "print-taxo" }}
{{ $node := index site.Taxonomies (split $ ".") }}
{{ if $node }}
Len {{ $ }}: {{ len $node }}
{{ range $node }}
{{ $ }}:|{{ .Page.RelPermalink }}|
{{ end }}
{{ else }}
{{ $ }} not found.
{{ end }}
{{ end }}
`)
b.WithContent("_index.md", `---
title: Home
categories: ["funny", "cats"]
---
`, "blog/p1.md", `---
title: Page1
categories: ["funny"]
---
`, "blog/_index.md", `---
title: Blog Section
categories: ["cats"]
---
`,
)
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", `
Len categories.cats: 2
categories.cats:|/blog/|
categories.cats:|/|
Len categories.funny: 2
categories.funny:|/|
categories.funny:|/blog/p1/|
`)
}

View file

@ -16,6 +16,7 @@ package hugolib
import (
"fmt"
"path/filepath"
"strings"
"testing"
"github.com/gohugoio/hugo/identity"
@ -656,23 +657,6 @@ func collectIdentities(set map[identity.Identity]bool, provider identity.Provide
}
}
func printRecursiveIdentities(level int, id identity.Provider) {
if level == 0 {
fmt.Println(id.GetIdentity(), "===>")
}
if ids, ok := id.(identity.IdentitiesProvider); ok {
level++
for _, id := range ids.GetIdentities() {
printRecursiveIdentities(level, id)
}
} else {
ident(level)
fmt.Println("ID", id)
}
}
func ident(n int) {
for i := 0; i < n; i++ {
fmt.Print(" ")
}
// ident returns an indentation prefix of level single spaces for debug
// output.
func ident(depth int) string {
	return strings.Repeat(" ", depth)
}

View file

@ -11,6 +11,8 @@ import (
"time"
"unicode/utf8"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/metadecoders"
@ -750,7 +752,7 @@ func (s *sitesBuilder) AssertObject(expected string, object interface{}) {
if expected != got {
fmt.Println(got)
diff := helpers.DiffStrings(expected, got)
diff := htesting.DiffStrings(expected, got)
s.Fatalf("diff:\n%s\nexpected\n%s\ngot\n%s", diff, expected, got)
}
}
@ -775,6 +777,12 @@ func (s *sitesBuilder) GetPage(ref string) page.Page {
return p
}
// GetPageRel resolves ref relative to the given page in the first site,
// failing the test if the lookup errors.
func (s *sitesBuilder) GetPageRel(p page.Page, ref string) page.Page {
	p, err := s.H.Sites[0].getPageNew(p, ref)
	s.Assert(err, qt.IsNil)
	return p
}
func newTestHelper(cfg config.Provider, fs *hugofs.Fs, t testing.TB) testHelper {
return testHelper{
Cfg: cfg,

View file

@ -21,7 +21,8 @@ func pagesToTranslationsMap(sites []*Site) map[string]page.Pages {
out := make(map[string]page.Pages)
for _, s := range sites {
for _, p := range s.workAllPages {
s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
p := n.p
// TranslationKey is implemented for all page types.
base := p.TranslationKey()
@ -32,7 +33,9 @@ func pagesToTranslationsMap(sites []*Site) map[string]page.Pages {
pageTranslations = append(pageTranslations, p)
out[base] = pageTranslations
}
return false
})
}
return out
@ -40,14 +43,15 @@ func pagesToTranslationsMap(sites []*Site) map[string]page.Pages {
func assignTranslationsToPages(allTranslations map[string]page.Pages, sites []*Site) {
for _, s := range sites {
for _, p := range s.workAllPages {
s.pageMap.pageTrees.Walk(func(ss string, n *contentNode) bool {
p := n.p
base := p.TranslationKey()
translations, found := allTranslations[base]
if !found {
continue
return false
}
p.setTranslations(translations)
}
return false
})
}
}

View file

@ -18,8 +18,6 @@ import (
"strings"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/parser/pageparser"
)
type Format string
@ -72,22 +70,6 @@ func FormatFromMediaType(m media.Type) Format {
return ""
}
// FormatFromFrontMatterType will return empty if not supported.
func FormatFromFrontMatterType(typ pageparser.ItemType) Format {
switch typ {
case pageparser.TypeFrontMatterJSON:
return JSON
case pageparser.TypeFrontMatterORG:
return ORG
case pageparser.TypeFrontMatterTOML:
return TOML
case pageparser.TypeFrontMatterYAML:
return YAML
default:
return ""
}
}
// FormatFromContentString tries to detect the format (JSON, YAML or TOML)
// in the given string.
// It return an empty string if no format could be detected.

View file

@ -18,8 +18,6 @@ import (
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/parser/pageparser"
qt "github.com/frankban/quicktest"
)
@ -57,22 +55,6 @@ func TestFormatFromMediaType(t *testing.T) {
}
}
func TestFormatFromFrontMatterType(t *testing.T) {
c := qt.New(t)
for _, test := range []struct {
typ pageparser.ItemType
expect Format
}{
{pageparser.TypeFrontMatterJSON, JSON},
{pageparser.TypeFrontMatterTOML, TOML},
{pageparser.TypeFrontMatterYAML, YAML},
{pageparser.TypeFrontMatterORG, ORG},
{pageparser.TypeIgnore, ""},
} {
c.Assert(FormatFromFrontMatterType(test.typ), qt.Equals, test.expect)
}
}
func TestFormatFromContentString(t *testing.T) {
t.Parallel()
c := qt.New(t)

View file

@ -22,6 +22,7 @@ import (
"io"
"io/ioutil"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/pkg/errors"
)
@ -43,6 +44,61 @@ func Parse(r io.Reader, cfg Config) (Result, error) {
return parseSection(r, cfg, lexIntroSection)
}
// ContentFrontMatter holds the result of splitting a content page: the raw
// content bytes, the decoded front matter map, and the format the front
// matter was written in.
type ContentFrontMatter struct {
	Content           []byte
	FrontMatter       map[string]interface{}
	FrontMatterFormat metadecoders.Format
}
// ParseFrontMatterAndContent is a convenience method to extract front matter
// and content from a content page. The walk stops at the first item after
// the front matter; everything from that item's position on is the content.
func ParseFrontMatterAndContent(r io.Reader) (ContentFrontMatter, error) {
	var cf ContentFrontMatter
	psr, err := Parse(r, Config{})
	if err != nil {
		return cf, err
	}
	var frontMatterSource []byte
	iter := psr.Iterator()
	walkFn := func(item Item) bool {
		// Order matters: the front matter must have been captured on an
		// earlier iteration before the remainder can be taken as content.
		if frontMatterSource != nil {
			// The rest is content.
			cf.Content = psr.Input()[item.Pos:]
			// Done
			return false
		} else if item.IsFrontMatter() {
			cf.FrontMatterFormat = FormatFromFrontMatterType(item.Type)
			frontMatterSource = item.Val
		}
		return true
	}
	iter.PeekWalk(walkFn)
	// NOTE(review): when no front matter is found, this passes a nil source
	// and the empty Format to UnmarshalToMap — assumes that combination is
	// tolerated downstream; confirm against metadecoders.
	cf.FrontMatter, err = metadecoders.Default.UnmarshalToMap(frontMatterSource, cf.FrontMatterFormat)
	return cf, err
}
// FormatFromFrontMatterType maps a parser front matter item type to its
// metadecoders format. Types that are not front matter yield the empty
// Format.
func FormatFromFrontMatterType(typ ItemType) metadecoders.Format {
	formats := map[ItemType]metadecoders.Format{
		TypeFrontMatterJSON: metadecoders.JSON,
		TypeFrontMatterORG:  metadecoders.ORG,
		TypeFrontMatterTOML: metadecoders.TOML,
		TypeFrontMatterYAML: metadecoders.YAML,
	}
	// The map's zero value is the empty Format, matching the original
	// switch default.
	return formats[typ]
}
// ParseMain parses starting with the main section. Used in tests.
func ParseMain(r io.Reader, cfg Config) (Result, error) {
return parseSection(r, cfg, lexMainSection)

View file

@ -16,6 +16,9 @@ package pageparser
import (
"strings"
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/parser/metadecoders"
)
func BenchmarkParse(b *testing.B) {
@ -69,3 +72,19 @@ This is some summary. This is some summary. This is some summary. This is some s
}
}
}
// TestFormatFromFrontMatterType verifies the mapping from front matter item
// types to metadecoders formats, including the empty format for types that
// are not front matter.
func TestFormatFromFrontMatterType(t *testing.T) {
	c := qt.New(t)
	for _, test := range []struct {
		typ    ItemType
		expect metadecoders.Format
	}{
		{TypeFrontMatterJSON, metadecoders.JSON},
		{TypeFrontMatterTOML, metadecoders.TOML},
		{TypeFrontMatterYAML, metadecoders.YAML},
		{TypeFrontMatterORG, metadecoders.ORG},
		{TypeIgnore, ""},
	} {
		c.Assert(FormatFromFrontMatterType(test.typ), qt.Equals, test.expect)
	}
}

View file

@ -35,32 +35,25 @@ type imageCache struct {
store map[string]*resourceAdapter
}
func (c *imageCache) isInCache(key string) bool {
c.mu.RLock()
_, found := c.store[c.normalizeKey(key)]
c.mu.RUnlock()
return found
}
func (c *imageCache) deleteByPrefix(prefix string) {
func (c *imageCache) deleteIfContains(s string) {
c.mu.Lock()
defer c.mu.Unlock()
prefix = c.normalizeKey(prefix)
s = c.normalizeKeyBase(s)
for k := range c.store {
if strings.HasPrefix(k, prefix) {
if strings.Contains(k, s) {
delete(c.store, k)
}
}
}
// The cache key is a lowecase path with Unix style slashes and it always starts with
// a leading slash.
func (c *imageCache) normalizeKey(key string) string {
// It is a path with Unix style slashes and it always starts with a leading slash.
key = filepath.ToSlash(key)
if !strings.HasPrefix(key, "/") {
key = "/" + key
}
return "/" + c.normalizeKeyBase(key)
}
return key
func (c *imageCache) normalizeKeyBase(key string) string {
return strings.Trim(strings.ToLower(filepath.ToSlash(key)), "/")
}
func (c *imageCache) clear() {
@ -74,6 +67,7 @@ func (c *imageCache) getOrCreate(
createImage func() (*imageResource, image.Image, error)) (*resourceAdapter, error) {
relTarget := parent.relTargetPathFromConfig(conf)
memKey := parent.relTargetPathForRel(relTarget.path(), false, false, false)
memKey = c.normalizeKey(memKey)
// For the file cache we want to generate and store it once if possible.
fileKeyPath := relTarget

View file

@ -598,6 +598,7 @@ func TestImageOperationsGolden(t *testing.T) {
}
resized, err := orig.Fill("400x200 center")
c.Assert(err, qt.IsNil)
for _, filter := range filters {
resized, err := resized.Filter(filter)

View file

@ -23,8 +23,8 @@ import (
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/related"
@ -133,7 +133,7 @@ type PageMetaProvider interface {
// BundleType returns the bundle type: "leaf", "branch" or an empty string if it is none.
// See https://gohugo.io/content-management/page-bundles/
BundleType() string
BundleType() files.ContentClass
// A configured description.
Description() string

View file

@ -20,6 +20,7 @@ import (
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/navigation"
@ -112,7 +113,7 @@ func MarshalPageToJSON(p Page) ([]byte, error) {
PublishDate time.Time
ExpiryDate time.Time
Aliases []string
BundleType string
BundleType files.ContentClass
Description string
Draft bool
IsHome bool

View file

@ -19,6 +19,8 @@ import (
"html/template"
"time"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs"
"github.com/bep/gitmap"
@ -83,7 +85,7 @@ func (p *nopPage) BaseFileName() string {
return ""
}
func (p *nopPage) BundleType() string {
func (p *nopPage) BundleType() files.ContentClass {
return ""
}

View file

@ -13,9 +13,59 @@
package pagemeta
import (
"github.com/mitchellh/mapstructure"
)
// URLPath holds the URL-related front matter fields for a page.
type URLPath struct {
	URL       string
	Permalink string
	Slug      string
	Section   string
}
// defaultBuildConfig is the build configuration applied when a page does
// not specify its own: list, render and publish resources, with set
// marking the value as initialized.
var defaultBuildConfig = BuildConfig{
	List:             true,
	Render:           true,
	PublishResources: true,
	set:              true,
}
// BuildConfig holds configuration options about how to handle a Page in Hugo's
// build process.
type BuildConfig struct {
	// Whether to add it to any of the page collections.
	// Note that the page can still be found with .Site.GetPage.
	List bool

	// Whether to render it.
	Render bool

	// Whether to publish its resources. These will still be published on demand,
	// but enabling this can be useful if the originals (e.g. images) are
	// never used.
	PublishResources bool

	set bool // true when explicitly decoded or defaulted; IsZero reports !set.
}
// Disable sets all options to their off value while marking the
// configuration as explicitly set.
func (b *BuildConfig) Disable() {
	*b = BuildConfig{set: true}
}
// IsZero reports whether this config has never been set, either
// explicitly or via the defaults.
func (b BuildConfig) IsZero() bool {
	return !b.set
}
// DecodeBuildConfig creates a BuildConfig from a raw front matter value,
// layering it on top of the defaults. A nil input returns the defaults
// unchanged.
func DecodeBuildConfig(m interface{}) (BuildConfig, error) {
	cfg := defaultBuildConfig
	if m == nil {
		return cfg, nil
	}
	if err := mapstructure.WeakDecode(m, &cfg); err != nil {
		return cfg, err
	}
	return cfg, nil
}

View file

@ -19,6 +19,8 @@ import (
"path/filepath"
"time"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/modules"
"github.com/bep/gitmap"
@ -133,7 +135,7 @@ func (p *testPage) BaseFileName() string {
panic("not implemented")
}
func (p *testPage) BundleType() string {
func (p *testPage) BundleType() files.ContentClass {
panic("not implemented")
}

View file

@ -129,15 +129,8 @@ func (r *Spec) ClearCaches() {
r.ResourceCache.clear()
}
func (r *Spec) DeleteCacheByPrefix(prefix string) {
r.imageCache.deleteByPrefix(prefix)
}
// TODO(bep) unify
func (r *Spec) IsInImageCache(key string) bool {
// This is used for cache pruning. We currently only have images, but we could
// imagine expanding on this.
return r.imageCache.isInCache(key)
func (r *Spec) DeleteBySubstring(s string) {
r.imageCache.deleteIfContains(s)
}
func (s *Spec) String() string {

View file

@ -111,6 +111,8 @@ func (n *Namespace) Eq(first interface{}, others ...interface{}) bool {
return vv.Float()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return vv.Uint()
case reflect.String:
return vv.String()
default:
return v
}

View file

@ -63,6 +63,8 @@ func (t tstEqerType1) String() string {
return string(t)
}
type stringType string
type tstCompareType int
const (
@ -390,6 +392,15 @@ func TestLessThanExtend(t *testing.T) {
}
// TestCase verifies that Eq treats a plain string and a user-defined
// string type with the same underlying value as equal.
// NOTE(review): New's bool flag presumably toggles case-sensitive
// comparison — confirm against the Namespace constructor.
func TestCase(t *testing.T) {
	c := qt.New(t)
	n := New(false)
	c.Assert(n.Eq("az", "az"), qt.Equals, true)
	c.Assert(n.Eq("az", stringType("az")), qt.Equals, true)
}
func TestStringType(t *testing.T) {
c := qt.New(t)
n := New(true)

View file

@ -16,8 +16,9 @@ package transform
import (
"testing"
"github.com/gohugoio/hugo/htesting"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/helpers"
"github.com/spf13/viper"
)
@ -99,7 +100,7 @@ title: Test Metadata
converted, err := ns.Remarshal(v1.format, v2.data)
c.Assert(err, qt.IsNil, fromTo)
diff := helpers.DiffStrings(v1.data, converted)
diff := htesting.DiffStrings(v1.data, converted)
if len(diff) > 0 {
t.Errorf("[%s] Expected \n%v\ngot\n%v\ndiff:\n%v", fromTo, v1.data, converted, diff)
}
@ -147,7 +148,7 @@ Hugo = "Rules"
c.Assert(err, qt.IsNil, fromTo)
}
diff := helpers.DiffStrings(expected, converted)
diff := htesting.DiffStrings(expected, converted)
if len(diff) > 0 {
t.Fatalf("[%s] Expected \n%v\ngot\n%v\ndiff:\n%v\n", fromTo, expected, converted, diff)
}