Make Page an interface

The main motivation for this commit is to add a `page.Page` interface to replace the very file-oriented `hugolib.Page` struct.
This is all a preparation step for issue #5074, "pages from other data sources".
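
As a rough sketch of what that means for consumers (simplified, assumed shapes only; the real `page.Page` interface in `resources/page` is much larger), call sites move from fields on a concrete, file-backed struct to accessor methods on an interface:

```go
// Simplified sketch only (assumed shapes); not the actual interface definition.
package sketch

// Page stands in for the new page.Page interface: consumers see methods,
// not struct fields.
type Page interface {
	Title() string
	RelPermalink() string
}

// Site stands in for the site abstraction the pages hang off of.
type Site interface {
	RegularPages() []Page
}

// titles works the same for file-backed pages, taxonomy pages, or pages
// from other data sources (issue #5074), because it only sees the interface.
func titles(s Site) []string {
	var out []string
	for _, p := range s.RegularPages() {
		out = append(out, p.Title())
	}
	return out
}
```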

But this also fixes a set of annoying limitations, especially related to custom output formats and shortcodes.

Most notable changes:

* The inner content of shortcodes using `{{%` as the outermost delimiter is now sent to the content renderer, e.g. Blackfriday.
  This means that any Markdown will partake in the global ToC and footnote context, etc.
* Custom output formats are now "fully virtualized". This removes many of the current limitations.
* The taxonomy list type now has a reference to the `Page` object.
  This improves the taxonomy template `.Title` situation and makes common template constructs much simpler.

See #5074
Fixes #5763
Fixes #5758
Fixes #5090
Fixes #5204
Fixes #4695
Fixes #5607
Fixes #5707
Fixes #5719
Fixes #3113
Fixes #5706
Fixes #5767
Fixes #5723
Fixes #5769
Fixes #5770
Fixes #5771
Fixes #5759
Fixes #5776
Fixes #5777
Fixes #5778
Bjørn Erik Pedersen 2019-01-02 12:33:26 +01:00
parent 44f5c1c14c
commit 597e418cb0
206 changed files with 14442 additions and 9679 deletions

2
benchbep.sh Executable file
View file

@ -0,0 +1,2 @@
gobench -package=./hugolib -bench="BenchmarkSiteBuilding/TOML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench
benchcmp -best 0.bench 1.bench

529
codegen/methods.go Normal file
View file

@ -0,0 +1,529 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) are based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package codegen contains helpers for code generation.
package codegen
import (
"fmt"
"go/ast"
"go/parser"
"go/token"
"os"
"path"
"path/filepath"
"reflect"
"regexp"
"sort"
"strings"
"sync"
)
// Make room for insertions
const weightWidth = 1000
// NewInspector creates a new Inspector given a source root.
func NewInspector(root string) *Inspector {
return &Inspector{ProjectRootDir: root}
}
// Inspector provides methods to help code generation. It uses a combination
// of reflection and source code AST to do the heavy lifting.
type Inspector struct {
ProjectRootDir string
init sync.Once
// Determines method order. Go's reflect sorts lexicographically, so
// we must parse the source to preserve this order.
methodWeight map[string]map[string]int
}
// MethodsFromTypes creates a method set from the include slice, excluding any
// method in exclude.
func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.Type) Methods {
c.parseSource()
var methods Methods
var excludes = make(map[string]bool)
if len(exclude) > 0 {
for _, m := range c.MethodsFromTypes(exclude, nil) {
excludes[m.Name] = true
}
}
// There may be overlapping interfaces in types. Do a simple check for now.
seen := make(map[string]bool)
nameAndPackage := func(t reflect.Type) (string, string) {
var name, pkg string
isPointer := t.Kind() == reflect.Ptr
if isPointer {
t = t.Elem()
}
pkgPrefix := ""
if pkgPath := t.PkgPath(); pkgPath != "" {
pkgPath = strings.TrimSuffix(pkgPath, "/")
_, shortPath := path.Split(pkgPath)
pkgPrefix = shortPath + "."
pkg = pkgPath
}
name = t.Name()
if name == "" {
// interface{}
name = t.String()
}
if isPointer {
pkgPrefix = "*" + pkgPrefix
}
name = pkgPrefix + name
return name, pkg
}
for _, t := range include {
for i := 0; i < t.NumMethod(); i++ {
m := t.Method(i)
if excludes[m.Name] || seen[m.Name] {
continue
}
seen[m.Name] = true
if m.PkgPath != "" {
// Not exported
continue
}
numIn := m.Type.NumIn()
ownerName, _ := nameAndPackage(t)
method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
for i := 0; i < numIn; i++ {
in := m.Type.In(i)
name, pkg := nameAndPackage(in)
if pkg != "" {
method.Imports = append(method.Imports, pkg)
}
method.In = append(method.In, name)
}
numOut := m.Type.NumOut()
if numOut > 0 {
for i := 0; i < numOut; i++ {
out := m.Type.Out(i)
name, pkg := nameAndPackage(out)
if pkg != "" {
method.Imports = append(method.Imports, pkg)
}
method.Out = append(method.Out, name)
}
}
methods = append(methods, method)
}
}
sort.SliceStable(methods, func(i, j int) bool {
mi, mj := methods[i], methods[j]
wi := c.methodWeight[mi.OwnerName][mi.Name]
wj := c.methodWeight[mj.OwnerName][mj.Name]
if wi == wj {
return mi.Name < mj.Name
}
return wi < wj
})
return methods
}
func (c *Inspector) parseSource() {
c.init.Do(func() {
if !strings.Contains(c.ProjectRootDir, "hugo") {
panic("dir must be set to the Hugo root")
}
c.methodWeight = make(map[string]map[string]int)
dirExcludes := regexp.MustCompile("docs|examples")
fileExcludes := regexp.MustCompile("autogen")
var filenames []string
filepath.Walk(c.ProjectRootDir, func(path string, info os.FileInfo, err error) error {
if info.IsDir() {
if dirExcludes.MatchString(info.Name()) {
return filepath.SkipDir
}
}
if !strings.HasSuffix(path, ".go") || fileExcludes.MatchString(path) {
return nil
}
filenames = append(filenames, path)
return nil
})
for _, filename := range filenames {
pkg := c.packageFromPath(filename)
fset := token.NewFileSet()
node, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
if err != nil {
panic(err)
}
ast.Inspect(node, func(n ast.Node) bool {
switch t := n.(type) {
case *ast.TypeSpec:
if t.Name.IsExported() {
switch it := t.Type.(type) {
case *ast.InterfaceType:
iface := pkg + "." + t.Name.Name
methodNames := collectMethodsRecursive(pkg, it.Methods.List)
weights := make(map[string]int)
weight := weightWidth
for _, name := range methodNames {
weights[name] = weight
weight += weightWidth
}
c.methodWeight[iface] = weights
}
}
}
return true
})
}
// Complement: expand entries that refer to embedded interfaces (recorded above
// as package.Name) into that interface's methods, keeping their relative order.
for _, v1 := range c.methodWeight {
for k2, w := range v1 {
if v, found := c.methodWeight[k2]; found {
for k3, v3 := range v {
v1[k3] = (v3 / weightWidth) + w
}
}
}
}
})
}
func (c *Inspector) packageFromPath(p string) string {
p = filepath.ToSlash(p)
base := path.Base(p)
if !strings.Contains(base, ".") {
return base
}
return path.Base(strings.TrimSuffix(p, base))
}
// Method holds enough information about a method to recreate it.
type Method struct {
// The interface we extracted this method from.
Owner reflect.Type
// String version of the above, in the form PACKAGE.NAME, e.g.
// page.Page
OwnerName string
// Method name.
Name string
// Imports needed to satisfy the method signature.
Imports []string
// Argument types, including any package prefix, e.g. string, int, interface{},
// net.IP
In []string
// Return types.
Out []string
}
// Declaration creates a method declaration (without any body) for the given receiver.
func (m Method) Declaration(receiver string) string {
return fmt.Sprintf("func (%s %s) %s%s %s", receiverShort(receiver), receiver, m.Name, m.inStr(), m.outStr())
}
// Delegate creates a delegate call string.
func (m Method) Delegate(receiver, delegate string) string {
ret := ""
if len(m.Out) > 0 {
ret = "return "
}
return fmt.Sprintf("%s%s.%s.%s%s", ret, receiverShort(receiver), delegate, m.Name, m.inOutStr())
}
func (m Method) String() string {
return m.Name + m.inStr() + " " + m.outStr() + "\n"
}
func (m Method) inOutStr() string {
if len(m.In) == 0 {
return "()"
}
args := make([]string, len(m.In))
for i := 0; i < len(args); i++ {
args[i] = fmt.Sprintf("arg%d", i)
}
return "(" + strings.Join(args, ", ") + ")"
}
func (m Method) inStr() string {
if len(m.In) == 0 {
return "()"
}
args := make([]string, len(m.In))
for i := 0; i < len(args); i++ {
args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
}
return "(" + strings.Join(args, ", ") + ")"
}
func (m Method) outStr() string {
if len(m.Out) == 0 {
return ""
}
if len(m.Out) == 1 {
return m.Out[0]
}
return "(" + strings.Join(m.Out, ", ") + ")"
}
// Methods represents a list of methods for one or more interfaces.
// The order matches the defined order in their source file(s).
type Methods []Method
// Imports returns a sorted list of package imports needed to satisfy the
// signatures of all methods.
func (m Methods) Imports() []string {
var pkgImports []string
for _, method := range m {
pkgImports = append(pkgImports, method.Imports...)
}
if len(pkgImports) > 0 {
pkgImports = uniqueNonEmptyStrings(pkgImports)
sort.Strings(pkgImports)
}
return pkgImports
}
// ToMarshalJSON creates a MarshalJSON method for these methods. Any method name
// matching any of the regexps in excludes will be ignored.
func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
var sb strings.Builder
r := receiverShort(receiver)
what := firstToUpper(trimAsterisk(receiver))
pgkName := path.Base(pkgPath)
fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
var methods Methods
var excludeRes = make([]*regexp.Regexp, len(excludes))
for i, exclude := range excludes {
excludeRes[i] = regexp.MustCompile(exclude)
}
for _, method := range m {
// Exclude methods with arguments and incompatible return values
if len(method.In) > 0 || len(method.Out) == 0 || len(method.Out) > 2 {
continue
}
if len(method.Out) == 2 {
if method.Out[1] != "error" {
continue
}
}
// Skip methods matching any of the exclude patterns.
var excluded bool
for _, re := range excludeRes {
if re.MatchString(method.Name) {
excluded = true
break
}
}
if excluded {
continue
}
methods = append(methods, method)
}
for _, method := range methods {
varn := varName(method.Name)
if len(method.Out) == 1 {
fmt.Fprintf(&sb, "\t%s := %s.%s()\n", varn, r, method.Name)
} else {
fmt.Fprintf(&sb, "\t%s, err := %s.%s()\n", varn, r, method.Name)
fmt.Fprint(&sb, "\tif err != nil {\n\t\treturn nil, err\n\t}\n")
}
}
fmt.Fprint(&sb, "\n\ts := struct {\n")
for _, method := range methods {
fmt.Fprintf(&sb, "\t\t%s %s\n", method.Name, typeName(method.Out[0], pgkName))
}
fmt.Fprint(&sb, "\n\t}{\n")
for _, method := range methods {
varn := varName(method.Name)
fmt.Fprintf(&sb, "\t\t%s: %s,\n", method.Name, varn)
}
fmt.Fprint(&sb, "\n\t}\n\n")
fmt.Fprint(&sb, "\treturn json.Marshal(&s)\n}")
pkgImports := append(methods.Imports(), "encoding/json")
if pkgPath != "" {
// Exclude self
for i, pkgImp := range pkgImports {
if pkgImp == pkgPath {
pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
}
}
}
return sb.String(), pkgImports
}
func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
var methodNames []string
for _, m := range f {
if m.Names != nil {
methodNames = append(methodNames, m.Names[0].Name)
continue
}
if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil {
// Embedded interface
methodNames = append(
methodNames,
collectMethodsRecursive(
pkg,
ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...)
} else {
// Embedded, but in a different file/package. Return the
// package.Name and deal with that later.
name := packageName(m.Type)
if !strings.Contains(name, ".") {
// Assume current package
name = pkg + "." + name
}
methodNames = append(methodNames, name)
}
}
return methodNames
}
func firstToLower(name string) string {
return strings.ToLower(name[:1]) + name[1:]
}
func firstToUpper(name string) string {
return strings.ToUpper(name[:1]) + name[1:]
}
func packageName(e ast.Expr) string {
switch tp := e.(type) {
case *ast.Ident:
return tp.Name
case *ast.SelectorExpr:
return fmt.Sprintf("%s.%s", packageName(tp.X), packageName(tp.Sel))
}
return ""
}
func receiverShort(receiver string) string {
return strings.ToLower(trimAsterisk(receiver))[:1]
}
func trimAsterisk(name string) string {
return strings.TrimPrefix(name, "*")
}
func typeName(name, pkg string) string {
return strings.TrimPrefix(name, pkg+".")
}
func uniqueNonEmptyStrings(s []string) []string {
var unique []string
set := map[string]interface{}{}
for _, val := range s {
if val == "" {
continue
}
if _, ok := set[val]; !ok {
unique = append(unique, val)
set[val] = val
}
}
return unique
}
func varName(name string) string {
name = firstToLower(name)
// Adjust some reserved keywords, see https://golang.org/ref/spec#Keywords
switch name {
case "type":
name = "typ"
case "package":
name = "pkg"
// Not reserved, but syntax highlighters have it as a keyword.
case "len":
name = "length"
}
return name
}
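
A hypothetical driver (not part of this commit) showing how the codegen package above could be used to generate a MarshalJSON helper for the new `page.Page` interface. The root path, receiver name `*pageState`, and output handling are illustrative assumptions only.

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/gohugoio/hugo/codegen"
	"github.com/gohugoio/hugo/resources/page"
)

func main() {
	// NewInspector needs a directory whose path contains "hugo" (see parseSource).
	c := codegen.NewInspector("/home/someone/dev/hugo")

	pageType := reflect.TypeOf((*page.Page)(nil)).Elem()
	methods := c.MethodsFromTypes([]reflect.Type{pageType}, nil)

	// ToMarshalJSON returns the generated Go source plus the imports it needs;
	// writing these into an autogenerated file is left out of this sketch.
	src, imports := methods.ToMarshalJSON("*pageState", "github.com/gohugoio/hugo/resources/page")

	fmt.Println(imports)
	fmt.Println(src)
}
```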

View file

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -11,13 +11,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
package codegen
import (
"github.com/gohugoio/hugo/resources/resource"
)
var (
_ resource.Resource = (*Page)(nil)
_ resource.Resource = (*PageOutput)(nil)
)
type IEmbed interface {
MethodEmbed3(s string) string
MethodEmbed1() string
MethodEmbed2()
}

100
codegen/methods_test.go Normal file
View file

@ -0,0 +1,100 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package codegen
import (
"fmt"
"net"
"os"
"reflect"
"testing"
"github.com/gohugoio/hugo/common/herrors"
"github.com/stretchr/testify/require"
)
func TestMethods(t *testing.T) {
var (
zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem()
zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
zeroI = reflect.TypeOf((*I)(nil)).Elem()
)
dir, _ := os.Getwd()
c := NewInspector(dir)
t.Run("MethodsFromTypes", func(t *testing.T) {
assert := require.New(t)
methods := c.MethodsFromTypes([]reflect.Type{zeroI}, nil)
methodsStr := fmt.Sprint(methods)
assert.Contains(methodsStr, "Method1(arg0 herrors.ErrorContext)")
assert.Contains(methodsStr, "Method7() interface {}")
assert.Contains(methodsStr, "Method0() string\n Method4() string")
assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string\n MethodEmbed1() string")
assert.Contains(methods.Imports(), "github.com/gohugoio/hugo/common/herrors")
})
t.Run("EmbedOnly", func(t *testing.T) {
assert := require.New(t)
methods := c.MethodsFromTypes([]reflect.Type{zeroIEOnly}, nil)
methodsStr := fmt.Sprint(methods)
assert.Contains(methodsStr, "MethodEmbed3(arg0 string) string")
})
t.Run("ToMarshalJSON", func(t *testing.T) {
assert := require.New(t)
m, pkg := c.MethodsFromTypes(
[]reflect.Type{zeroI},
[]reflect.Type{zeroIE}).ToMarshalJSON("*page", "page")
assert.Contains(m, "method6 := p.Method6()")
assert.Contains(m, "Method0: method0,")
assert.Contains(m, "return json.Marshal(&s)")
assert.Contains(pkg, "github.com/gohugoio/hugo/common/herrors")
assert.Contains(pkg, "encoding/json")
fmt.Println(pkg)
})
}
type I interface {
IEmbed
Method0() string
Method4() string
Method1(myerr herrors.ErrorContext)
Method3(myint int, mystring string)
Method5() (string, error)
Method6() *net.IP
Method7() interface{}
Method8() herrors.ErrorContext
method2()
method9() os.FileInfo
}
type IEOnly interface {
IEmbed
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -357,6 +357,13 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
c.changeDetector = changeDetector
}
if c.Cfg.GetBool("logPathWarnings") {
fs.Destination = hugofs.NewCreateCountingFs(fs.Destination)
}
// To debug hard-to-find path issues.
//fs.Destination = hugofs.NewStacktracerFs(fs.Destination, `fr/fr`)
err = c.initFs(fs)
if err != nil {
return

View file

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -23,7 +23,6 @@ import (
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/spf13/cobra"
"github.com/spf13/nitro"
)
type commandsBuilder struct {
@ -197,6 +196,12 @@ type hugoBuilderCommon struct {
gc bool
// Profile flags (for debugging of performance problems)
cpuprofile string
memprofile string
mutexprofile string
traceprofile string
// TODO(bep) var vs string
logging bool
verbose bool
@ -255,13 +260,22 @@ func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program")
cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations")
cmd.Flags().BoolP("path-warnings", "", false, "print warnings on duplicate target paths etc.")
cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
// Hide these for now.
cmd.Flags().MarkHidden("profile-cpu")
cmd.Flags().MarkHidden("profile-mem")
cmd.Flags().MarkHidden("profile-mutex")
cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"path/filepath"
"testing"
"github.com/gohugoio/hugo/common/types"
"github.com/spf13/cobra"
"github.com/spf13/viper"
@ -41,7 +43,7 @@ func TestExecute(t *testing.T) {
assert.NoError(resp.Err)
result := resp.Result
assert.True(len(result.Sites) == 1)
assert.True(len(result.Sites[0].RegularPages) == 1)
assert.True(len(result.Sites[0].RegularPages()) == 1)
}
func TestCommandsPersistentFlags(t *testing.T) {
@ -75,6 +77,7 @@ func TestCommandsPersistentFlags(t *testing.T) {
"--port=1366",
"--renderToDisk",
"--source=mysource",
"--path-warnings",
}, func(commands []cmder) {
var sc *serverCmd
for _, command := range commands {
@ -112,6 +115,9 @@ func TestCommandsPersistentFlags(t *testing.T) {
assert.True(cfg.GetBool("gc"))
// The flag is named path-warnings
assert.True(cfg.GetBool("logPathWarnings"))
// The flag is named i18n-warnings
assert.True(cfg.GetBool("logI18nWarnings"))
@ -183,8 +189,8 @@ func TestCommandsExecute(t *testing.T) {
}
for _, test := range tests {
hugoCmd := newCommandsBuilder().addAll().build().getCommand()
b := newCommandsBuilder().addAll().build()
hugoCmd := b.getCommand()
test.flags = append(test.flags, "--quiet")
hugoCmd.SetArgs(append(test.commands, test.flags...))
@ -200,6 +206,13 @@ func TestCommandsExecute(t *testing.T) {
assert.NoError(err, fmt.Sprintf("%v", test.commands))
}
// Assert that we have not left any development debug artifacts in
// the code.
if b.c != nil {
_, ok := b.c.destinationFs.(types.DevMarker)
assert.False(ok)
}
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"strings"
"time"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers"
@ -124,8 +126,8 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
site := h.Sites[0]
site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files")
for _, p := range site.AllPages {
site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files")
for _, p := range site.AllPages() {
if err := cc.convertAndSavePage(p, site, format); err != nil {
return err
}
@ -133,24 +135,24 @@ func (cc *convertCmd) convertContents(format metadecoders.Format) error {
return nil
}
func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
// The resources are not in .Site.AllPages.
for _, r := range p.Resources.ByType("page") {
if err := cc.convertAndSavePage(r.(*hugolib.Page), site, targetFormat); err != nil {
for _, r := range p.Resources().ByType("page") {
if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
return err
}
}
if p.Filename() == "" {
if p.File() == nil {
// No content file.
return nil
}
errMsg := fmt.Errorf("Error processing file %q", p.Path())
site.Log.INFO.Println("Attempting to convert", p.LogicalName())
site.Log.INFO.Println("Attempting to convert", p.File().Filename())
f, _ := p.File.(src.ReadableFile)
f, _ := p.File().(src.ReadableFile)
file, err := f.Open()
if err != nil {
site.Log.ERROR.Println(errMsg)
@ -186,7 +188,7 @@ func (cc *convertCmd) convertAndSavePage(p *hugolib.Page, site *hugolib.Site, ta
newContent.Write(pf.content)
newFilename := p.Filename()
newFilename := p.File().Filename()
if cc.outputDir != "" {
contentDir := strings.TrimSuffix(newFilename, p.Path())

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -18,11 +18,16 @@ package commands
import (
"fmt"
"io/ioutil"
"os/signal"
"runtime/pprof"
"runtime/trace"
"sort"
"sync/atomic"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/common/hugo"
"github.com/pkg/errors"
@ -214,6 +219,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
"themesDir",
"verbose",
"verboseLog",
"duplicateTargetPaths",
}
// Will set a value even if it is the default.
@ -235,6 +241,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
// Set some "config aliases"
setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false)
setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false)
}
@ -290,6 +297,7 @@ func (c *commandeer) fullBuild() error {
}
copyStaticFunc := func() error {
cnt, err := c.copyStatic()
if err != nil {
if !os.IsNotExist(err) {
@ -326,7 +334,7 @@ func (c *commandeer) fullBuild() error {
}
for _, s := range c.hugo.Sites {
s.ProcessingStats.Static = langCount[s.Language.Lang]
s.ProcessingStats.Static = langCount[s.Language().Lang]
}
if c.h.gc {
@ -344,9 +352,125 @@ func (c *commandeer) fullBuild() error {
}
func (c *commandeer) initCPUProfile() (func(), error) {
if c.h.cpuprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.cpuprofile)
if err != nil {
return nil, errors.Wrap(err, "failed to create CPU profile")
}
if err := pprof.StartCPUProfile(f); err != nil {
return nil, errors.Wrap(err, "failed to start CPU profile")
}
return func() {
pprof.StopCPUProfile()
f.Close()
}, nil
}
func (c *commandeer) initMemProfile() {
if c.h.memprofile == "" {
return
}
f, err := os.Create(c.h.memprofile)
if err != nil {
c.logger.ERROR.Println("could not create memory profile: ", err)
}
defer f.Close()
runtime.GC() // get up-to-date statistics
if err := pprof.WriteHeapProfile(f); err != nil {
c.logger.ERROR.Println("could not write memory profile: ", err)
}
}
func (c *commandeer) initTraceProfile() (func(), error) {
if c.h.traceprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.traceprofile)
if err != nil {
return nil, errors.Wrap(err, "failed to create trace file")
}
if err := trace.Start(f); err != nil {
return nil, errors.Wrap(err, "failed to start trace")
}
return func() {
trace.Stop()
f.Close()
}, nil
}
func (c *commandeer) initMutexProfile() (func(), error) {
if c.h.mutexprofile == "" {
return nil, nil
}
f, err := os.Create(c.h.mutexprofile)
if err != nil {
return nil, err
}
runtime.SetMutexProfileFraction(1)
return func() {
pprof.Lookup("mutex").WriteTo(f, 0)
f.Close()
}, nil
}
func (c *commandeer) initProfiling() (func(), error) {
stopCPUProf, err := c.initCPUProfile()
if err != nil {
return nil, err
}
defer c.initMemProfile()
stopMutexProf, err := c.initMutexProfile()
if err != nil {
return nil, err
}
stopTraceProf, err := c.initTraceProfile()
if err != nil {
return nil, err
}
return func() {
if stopCPUProf != nil {
stopCPUProf()
}
if stopMutexProf != nil {
stopMutexProf()
}
if stopTraceProf != nil {
stopTraceProf()
}
}, nil
}
func (c *commandeer) build() error {
defer c.timeTrack(time.Now(), "Total")
stopProfiling, err := c.initProfiling()
if err != nil {
return err
}
defer func() {
if stopProfiling != nil {
stopProfiling()
}
}()
if err := c.fullBuild(); err != nil {
return err
}
@ -356,6 +480,13 @@ func (c *commandeer) build() error {
fmt.Println()
c.hugo.PrintProcessingStats(os.Stdout)
fmt.Println()
if createCounter, ok := c.destinationFs.(hugofs.DuplicatesReporter); ok {
dupes := createCounter.ReportDuplicates()
if dupes != "" {
c.logger.WARN.Println("Duplicate target paths:", dupes)
}
}
}
if c.h.buildWatch {
@ -369,7 +500,7 @@ func (c *commandeer) build() error {
checkErr(c.Logger, err)
defer watcher.Close()
var sigs = make(chan os.Signal)
var sigs = make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
<-sigs
@ -381,6 +512,17 @@ func (c *commandeer) build() error {
func (c *commandeer) serverBuild() error {
defer c.timeTrack(time.Now(), "Total")
stopProfiling, err := c.initProfiling()
if err != nil {
return err
}
defer func() {
if stopProfiling != nil {
stopProfiling()
}
}()
if err := c.fullBuild(); err != nil {
return err
}
@ -474,11 +616,9 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6
}
c.logger.INFO.Println("syncing static files to", publishDir)
var err error
// because we are using a baseFs (to get the union right).
// set sync src to root
err = syncer.Sync(publishDir, helpers.FilePathSeparator)
err := syncer.Sync(publishDir, helpers.FilePathSeparator)
if err != nil {
return 0, err
}
@ -619,13 +759,6 @@ func (c *commandeer) getDirList() ([]string, error) {
return a, nil
}
func (c *commandeer) resetAndBuildSites() (err error) {
if !c.h.quiet {
c.logger.FEEDBACK.Println("Started building sites ...")
}
return c.hugo.Build(hugolib.BuildCfg{ResetState: true})
}
func (c *commandeer) buildSites() (err error) {
return c.hugo.Build(hugolib.BuildCfg{})
}
@ -973,7 +1106,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
navigate := c.Cfg.GetBool("navigateToChanged")
// We have fetched the same page above, but it may have
// changed.
var p *hugolib.Page
var p page.Page
if navigate {
if onePageName != "" {
@ -982,7 +1115,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
}
if p != nil {
livereload.NavigateToPathForPort(p.RelPermalink(), p.Site.ServerPort())
livereload.NavigateToPathForPort(p.RelPermalink(), p.Site().ServerPort())
} else {
livereload.ForceRefresh()
}
@ -1044,9 +1177,11 @@ func (c *commandeer) isThemeVsHugoVersionMismatch(fs afero.Fs) (dir string, mism
}
b, err := afero.ReadFile(fs, path)
if err != nil {
continue
}
tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML)
if err != nil {
continue
}

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -340,7 +340,7 @@ func copyDir(source string, dest string) error {
if err != nil {
return err
}
entries, err := ioutil.ReadDir(source)
entries, _ := ioutil.ReadDir(source)
for _, entry := range entries {
sfp := filepath.Join(source, entry.Name())
dfp := filepath.Join(dest, entry.Name())
@ -373,6 +373,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
return err
}
entries, err := ioutil.ReadDir(jekyllRoot)
if err != nil {
return err
}
for _, entry := range entries {
sfp := filepath.Join(jekyllRoot, entry.Name())
dfp := filepath.Join(dest, entry.Name())
@ -464,7 +468,7 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b
fs := hugofs.Os
if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
return fmt.Errorf("Failed to save file %q:", filename)
return fmt.Errorf("failed to save file %q: %s", filename, err)
}
return nil

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -20,6 +20,7 @@ import (
"time"
"github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cobra"
jww "github.com/spf13/jwalterweatherman"
)
@ -70,7 +71,7 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
for _, p := range sites.Pages() {
if p.IsDraft() {
jww.FEEDBACK.Println(filepath.Join(p.File.Dir(), p.File.LogicalName()))
jww.FEEDBACK.Println(filepath.Join(p.File().Dir(), p.File().LogicalName()))
}
}
@ -108,8 +109,8 @@ posted in the future.`,
defer writer.Flush()
for _, p := range sites.Pages() {
if p.IsFuture() {
err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.PublishDate.Format(time.RFC3339)})
if resource.IsFuture(p) {
err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.PublishDate().Format(time.RFC3339)})
if err != nil {
return newSystemError("Error writing future posts to stdout", err)
}
@ -149,11 +150,12 @@ expired.`,
defer writer.Flush()
for _, p := range sites.Pages() {
if p.IsExpired() {
err := writer.Write([]string{filepath.Join(p.File.Dir(), p.File.LogicalName()), p.ExpiryDate.Format(time.RFC3339)})
if resource.IsExpired(p) {
err := writer.Write([]string{filepath.Join(p.File().Dir(), p.File().LogicalName()), p.ExpiryDate().Format(time.RFC3339)})
if err != nil {
return newSystemError("Error writing expired posts to stdout", err)
}
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -62,7 +62,7 @@ func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
_, fs := newTestCfg()
n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777))
require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
require.NoError(t, n.doNewSite(fs, basepath, false))
}
@ -72,7 +72,7 @@ func TestDoNewSite_error_base_exists(t *testing.T) {
_, fs := newTestCfg()
n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777))
require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
_, err := fs.Source.Create(filepath.Join(basepath, "foo"))
require.NoError(t, err)
// Since the directory already exists and isn't empty, expect an error
@ -85,7 +85,7 @@ func TestDoNewSite_force_empty_dir(t *testing.T) {
_, fs := newTestCfg()
n := newNewSiteCmd()
require.NoError(t, fs.Source.MkdirAll(basepath, 777))
require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
require.NoError(t, n.doNewSite(fs, basepath, true))
@ -99,7 +99,7 @@ func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
contentPath := filepath.Join(basepath, "content")
require.NoError(t, fs.Source.MkdirAll(contentPath, 777))
require.NoError(t, fs.Source.MkdirAll(contentPath, 0777))
require.Error(t, n.doNewSite(fs, basepath, true))
}
@ -109,7 +109,7 @@ func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
n := newNewSiteCmd()
configPath := filepath.Join(basepath, "config.toml")
require.NoError(t, fs.Source.MkdirAll(basepath, 777))
require.NoError(t, fs.Source.MkdirAll(basepath, 0777))
_, err := fs.Source.Create(configPath)
require.NoError(t, err)

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -358,7 +358,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
if err := f.c.partialReRender(p); err != nil {
f.c.handleBuildErr(err, fmt.Sprintf("Failed to render %q", p))
if f.c.showErrorInBrowser {
http.Redirect(w, r, p, 301)
http.Redirect(w, r, p, http.StatusMovedPermanently)
return
}
}
@ -386,7 +386,7 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
return mu, u.String(), endpoint, nil
}
var logErrorRe = regexp.MustCompile("(?s)ERROR \\d{4}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2} ")
var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2} `)
func removeErrorPrefixFromLog(content string) string {
return logErrorRe.ReplaceAllLiteralString(content, "")
@ -403,7 +403,7 @@ func (c *commandeer) serve(s *serverCmd) error {
if isMultiHost {
for _, s := range c.hugo.Sites {
baseURLs = append(baseURLs, s.BaseURL.String())
roots = append(roots, s.Language.Lang)
roots = append(roots, s.Language().Lang)
}
} else {
s := c.hugo.Sites[0]
@ -430,7 +430,7 @@ func (c *commandeer) serve(s *serverCmd) error {
livereload.Initialize()
}
var sigs = make(chan os.Signal)
var sigs = make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
for i := range baseURLs {

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -92,9 +92,7 @@ func appendToInterfaceSlice(tov reflect.Value, from ...interface{}) ([]interface
tos = append(tos, tov.Index(i).Interface())
}
for _, v := range from {
tos = append(tos, v)
}
tos = append(tos, from...)
return tos, nil
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -75,11 +75,11 @@ func (p *tstSlicerIn2) Slice(in interface{}) (interface{}, error) {
}
func (p *tstSlicerIn1) Name() string {
return p.Name()
return p.name
}
func (p *tstSlicerIn2) Name() string {
return p.Name()
return p.name
}
func (p *tstSlicer) Slice(in interface{}) (interface{}, error) {

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -32,6 +32,7 @@ type ReadSeekCloser interface {
}
// ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close.
// TODO(bep) rename this and similar to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
type ReadSeekerNoOpCloser struct {
ReadSeeker
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -28,6 +28,24 @@ type Scratch struct {
mu sync.RWMutex
}
// Scratcher provides a scratching service.
type Scratcher interface {
Scratch() *Scratch
}
type scratcher struct {
s *Scratch
}
func (s scratcher) Scratch() *Scratch {
return s.s
}
// NewScratcher creates a new Scratcher.
func NewScratcher() Scratcher {
return scratcher{s: NewScratch()}
}
// Add will, for single values, add (using the + operator) the addend to the existing addend (if found).
// Supports numeric values and strings.
//

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -16,6 +16,7 @@ package types
import (
"fmt"
"reflect"
"github.com/spf13/cast"
)
@ -56,3 +57,24 @@ func NewKeyValuesStrings(key string, values ...string) KeyValues {
type Zeroer interface {
IsZero() bool
}
// IsNil reports whether v is nil.
func IsNil(v interface{}) bool {
if v == nil {
return true
}
value := reflect.ValueOf(v)
switch value.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
return value.IsNil()
}
return false
}
// DevMarker is a marker interface for types that should only be used during
// development.
type DevMarker interface {
DevOnly()
}

View file

@ -1,4 +1,4 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -40,3 +40,15 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string {
}
return cast.ToStringSlice(sd)
}
// SetBaseTestDefaults provides some common config defaults used in tests.
func SetBaseTestDefaults(cfg Provider) {
cfg.Set("resourceDir", "resources")
cfg.Set("contentDir", "content")
cfg.Set("dataDir", "data")
cfg.Set("i18nDir", "i18n")
cfg.Set("layoutDir", "layouts")
cfg.Set("assetDir", "assets")
cfg.Set("archetypeDir", "archetypes")
cfg.Set("publishDir", "public")
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -23,6 +23,7 @@ const (
disqusShortnameKey = "disqusshortname"
googleAnalyticsKey = "googleanalytics"
rssLimitKey = "rssLimit"
)
// Config is a privacy configuration for all the relevant services in Hugo.
@ -31,6 +32,7 @@ type Config struct {
GoogleAnalytics GoogleAnalytics
Instagram Instagram
Twitter Twitter
RSS RSS
}
// Disqus holds the functional configuration settings related to the Disqus template.
@ -61,6 +63,12 @@ type Twitter struct {
DisableInlineCSS bool
}
// RSS holds the functional configuration settings related to the RSS feeds.
type RSS struct {
// Limit the number of pages.
Limit int
}
// DecodeConfig creates a services Config from a given Hugo configuration.
func DecodeConfig(cfg config.Provider) (c Config, err error) {
m := cfg.GetStringMap(servicesConfigKey)
@ -76,5 +84,9 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) {
c.Disqus.Shortname = cfg.GetString(disqusShortnameKey)
}
if c.RSS.Limit == 0 {
c.RSS.Limit = cfg.GetInt(rssLimitKey)
}
return
}

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
package config
import (
"github.com/spf13/cast"
@ -25,21 +25,20 @@ type Sitemap struct {
Filename string
}
func parseSitemap(input map[string]interface{}) Sitemap {
sitemap := Sitemap{Priority: -1, Filename: "sitemap.xml"}
func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap {
for key, value := range input {
switch key {
case "changefreq":
sitemap.ChangeFreq = cast.ToString(value)
prototype.ChangeFreq = cast.ToString(value)
case "priority":
sitemap.Priority = cast.ToFloat64(value)
prototype.Priority = cast.ToFloat64(value)
case "filename":
sitemap.Filename = cast.ToString(value)
prototype.Filename = cast.ToString(value)
default:
jww.WARN.Printf("Unknown Sitemap field: %s\n", key)
}
}
return sitemap
return prototype
}

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -50,7 +50,7 @@ func NewContent(
if isDir {
langFs := hugofs.NewLanguageFs(s.Language.Lang, sites.LanguageSet(), archetypeFs)
langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs)
cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename)
if err != nil {
@ -113,7 +113,7 @@ func NewContent(
func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site {
for _, s := range sites.Sites {
if fi.Lang() == s.Language.Lang {
if fi.Lang() == s.Language().Lang {
return s
}
}
@ -245,7 +245,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
// Try the filename: my-post.en.md
for _, ss := range sites.Sites {
if strings.Contains(targetPath, "."+ss.Language.Lang+".") {
if strings.Contains(targetPath, "."+ss.Language().Lang+".") {
s = ss
break
}

7
deps/deps.go vendored
View file

@ -7,13 +7,14 @@ import (
"github.com/pkg/errors"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/metrics"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources"
@ -67,7 +68,7 @@ type Deps struct {
Language *langs.Language
// The site building.
Site hugo.Site
Site page.Site
// All the output formats available for the current site.
OutputFormatsConfig output.Formats
@ -325,7 +326,7 @@ type DepsCfg struct {
Language *langs.Language
// The Site in use
Site hugo.Site
Site page.Site
// The configuration to use.
Cfg config.Provider

View file

@ -79,8 +79,7 @@ See [`.Scratch`](/functions/scratch/) for page-scoped, writable variables.
: the page's *kind*. Possible return values are `page`, `home`, `section`, `taxonomy`, or `taxonomyTerm`. Note that there are also `RSS`, `sitemap`, `robotsTXT`, and `404` kinds, but these are only available during the rendering of each of these respective page's kind and therefore *not* available in any of the `Pages` collections.
.Language
: a language object that points to the language's definition in the site
`config`.
: a language object that points to the language's definition in the site `config`. `.Language.Lang` gives you the language code.
.Lastmod
: the date the content was last modified. `.Lastmod` pulls from the `lastmod` field in a content's front matter.
@ -93,10 +92,7 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
.LinkTitle
: access when creating links to the content. If set, Hugo will use the `linktitle` from the front matter before `title`.
.Next (deprecated)
: In older Hugo versions this pointer went the wrong direction. Please use `.PrevPage` instead.
.NextPage
.Next
: Pointer to the next [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .NextPage}}{{.NextPage.Permalink}}{{end}}`.
.NextInSection
@ -119,9 +115,6 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
: the Page content stripped of HTML as a `[]string` using Go's [`strings.Fields`](https://golang.org/pkg/strings/#Fields) to split `.Plain` into a slice.
.Prev (deprecated)
: In older Hugo versions this pointer went the wrong direction. Please use `.NextPage` instead.
.PrevPage
: Pointer to the previous [regular page](/variables/site/#site-pages) (sorted by Hugo's [default sort](/templates/lists#default-weight-date-linktitle-filepath)). Example: `{{if .PrevPage}}{{.PrevPage.Permalink}}{{end}}`.
.PrevInSection
@ -130,8 +123,8 @@ See also `.ExpiryDate`, `.Date`, `.PublishDate`, and [`.GitInfo`][gitinfo].
.PublishDate
: the date on which the content was or will be published; `.Publishdate` pulls from the `publishdate` field in a content's front matter. See also `.ExpiryDate`, `.Date`, and `.Lastmod`.
.RSSLink
: link to the taxonomies' RSS link.
.RSSLink (deprecated)
: link to the page's RSS feed. This is deprecated. You should instead do something like this: `{{ with .OutputFormats.Get "RSS" }}{{ .RelPermalink }}{{ end }}`.
.RawContent
: raw markdown content without the front matter. Useful with [remarkjs.com](

1
go.mod
View file

@ -44,7 +44,6 @@ require (
github.com/spf13/cobra v0.0.3
github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05
github.com/spf13/jwalterweatherman v1.1.0
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d
github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.3.2
github.com/stretchr/testify v1.3.0

2
go.sum
View file

@ -126,8 +126,6 @@ github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d h1:ihvj2nmx8eqWjlgNgdW6h0DyGJuq5GiwHadJkG0wXtQ=
github.com/spf13/nitro v0.0.0-20131003134307-24d7ef30a12d/go.mod h1:jU8A+8xL+6n1OX4XaZtCj4B3mIa64tULUsD6YegdpFo=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -57,7 +57,7 @@ type ContentSpec struct {
Highlight func(code, lang, optsStr string) (string, error)
defatultPygmentsOpts map[string]string
cfg config.Provider
Cfg config.Provider
}
// NewContentSpec returns a ContentSpec initialized
@ -73,7 +73,7 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) {
BuildExpired: cfg.GetBool("buildExpired"),
BuildDrafts: cfg.GetBool("buildDrafts"),
cfg: cfg,
Cfg: cfg,
}
// Highlighting setup
@ -382,7 +382,7 @@ func (c *ContentSpec) getMmarkHTMLRenderer(defaultFlags int, ctx *RenderingConte
return &HugoMmarkHTMLRenderer{
cs: c,
Renderer: mmark.HtmlRendererWithParameters(htmlFlags, "", "", renderParameters),
Cfg: c.cfg,
Cfg: c.Cfg,
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -24,7 +24,7 @@ import (
// Renders a codeblock using Blackfriday
func (c ContentSpec) render(input string) string {
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
render := c.getHTMLRenderer(0, ctx)
buf := &bytes.Buffer{}
@ -34,7 +34,7 @@ func (c ContentSpec) render(input string) string {
// Renders a codeblock using Mmark
func (c ContentSpec) renderWithMmark(input string) string {
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
render := c.getMmarkHTMLRenderer(0, ctx)
buf := &bytes.Buffer{}

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -181,7 +181,7 @@ func TestTruncateWordsByRune(t *testing.T) {
func TestGetHTMLRendererFlags(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
renderer := c.getHTMLRenderer(blackfriday.HTML_USE_XHTML, ctx)
flags := renderer.GetFlags()
if flags&blackfriday.HTML_USE_XHTML != blackfriday.HTML_USE_XHTML {
@ -210,7 +210,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
{blackfriday.HTML_SMARTYPANTS_LATEX_DASHES},
}
defaultFlags := blackfriday.HTML_USE_XHTML
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.AngledQuotes = true
ctx.Config.Fractions = true
ctx.Config.HrefTargetBlank = true
@ -235,7 +235,7 @@ func TestGetHTMLRendererAllFlags(t *testing.T) {
func TestGetHTMLRendererAnchors(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.DocumentID = "testid"
ctx.Config.PlainIDAnchors = false
@ -259,7 +259,7 @@ func TestGetHTMLRendererAnchors(t *testing.T) {
func TestGetMmarkHTMLRenderer(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.DocumentID = "testid"
ctx.Config.PlainIDAnchors = false
actualRenderer := c.getMmarkHTMLRenderer(0, ctx)
@ -283,7 +283,7 @@ func TestGetMmarkHTMLRenderer(t *testing.T) {
func TestGetMarkdownExtensionsMasksAreRemovedFromExtensions(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"headerId"}
ctx.Config.ExtensionsMask = []string{"noIntraEmphasis"}
@ -298,7 +298,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
testFlag int
}
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{""}
ctx.Config.ExtensionsMask = []string{""}
allExtensions := []data{
@ -330,7 +330,7 @@ func TestGetMarkdownExtensionsByDefaultAllExtensionsAreEnabled(t *testing.T) {
func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"definitionLists"}
ctx.Config.ExtensionsMask = []string{""}
@ -342,7 +342,7 @@ func TestGetMarkdownExtensionsAddingFlagsThroughRenderingContext(t *testing.T) {
func TestGetMarkdownRenderer(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n")
@ -353,7 +353,7 @@ func TestGetMarkdownRenderer(t *testing.T) {
func TestGetMarkdownRendererWithTOC(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{RenderTOC: true, Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{RenderTOC: true, Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.markdownRender(ctx)
expectedRenderedMarkdown := []byte("<nav>\n</nav>\n\n<p>testContent</p>\n")
@ -368,7 +368,7 @@ func TestGetMmarkExtensions(t *testing.T) {
testFlag int
}
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Config.Extensions = []string{"tables"}
ctx.Config.ExtensionsMask = []string{""}
allExtensions := []data{
@ -397,7 +397,7 @@ func TestGetMmarkExtensions(t *testing.T) {
func TestMmarkRender(t *testing.T) {
c := newTestContentSpec()
ctx := &RenderingContext{Cfg: c.cfg, Config: c.BlackFriday}
ctx := &RenderingContext{Cfg: c.Cfg, Config: c.BlackFriday}
ctx.Content = []byte("testContent")
actualRenderedMarkdown := c.mmarkRender(ctx)
expectedRenderedMarkdown := []byte("<p>testContent</p>\n")

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -92,7 +92,7 @@ func GuessType(in string) string {
return "org"
}
return "unknown"
return ""
}
// FirstUpper returns a string with the first character as upper case.
@ -325,12 +325,15 @@ func InitLoggers() {
// The idea is to remove an item in two Hugo releases to give users and theme authors
// plenty of time to fix their templates.
func Deprecated(object, item, alternative string, err bool) {
if !strings.HasSuffix(alternative, ".") {
alternative += "."
}
if err {
DistinctErrorLog.Printf("%s's %s is deprecated and will be removed in Hugo %s. %s", object, item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
} else {
// Make sure the users see this while avoiding build breakage. This will not lead to an os.Exit(-1)
DistinctFeedbackLog.Printf("WARNING: %s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
DistinctWarnLog.Printf("%s's %s is deprecated and will be removed in a future release. %s", object, item, alternative)
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -42,7 +42,7 @@ func TestGuessType(t *testing.T) {
{"html", "html"},
{"htm", "html"},
{"org", "org"},
{"excel", "unknown"},
{"excel", ""},
} {
result := GuessType(this.in)
if result != this.expect {
@ -166,6 +166,27 @@ var containsAdditionalTestData = []struct {
{"", []byte(""), false},
}
func TestSliceToLower(t *testing.T) {
t.Parallel()
tests := []struct {
value []string
expected []string
}{
{[]string{"a", "b", "c"}, []string{"a", "b", "c"}},
{[]string{"a", "B", "c"}, []string{"a", "b", "c"}},
{[]string{"A", "B", "C"}, []string{"a", "b", "c"}},
}
for _, test := range tests {
res := SliceToLower(test.value)
for i, val := range res {
if val != test.expected[i] {
t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
}
}
}
}
func TestReaderContains(t *testing.T) {
for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
result := ReaderContains(strings.NewReader(this.v1), this.v2)

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -86,6 +86,13 @@ func (p *PathSpec) MakePath(s string) string {
return p.UnicodeSanitize(s)
}
// MakePathsSanitized applies MakePathSanitized on every item in the slice
func (p *PathSpec) MakePathsSanitized(paths []string) {
for i, path := range paths {
paths[i] = p.MakePathSanitized(path)
}
}
// MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
func (p *PathSpec) MakePathSanitized(s string) string {
if p.DisablePathToLower {

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -56,7 +56,7 @@ type highlighters struct {
}
func newHiglighters(cs *ContentSpec) highlighters {
return highlighters{cs: cs, ignoreCache: cs.cfg.GetBool("ignoreCache"), cacheDir: cs.cfg.GetString("cacheDir")}
return highlighters{cs: cs, ignoreCache: cs.Cfg.GetBool("ignoreCache"), cacheDir: cs.Cfg.GetString("cacheDir")}
}
func (h highlighters) chromaHighlight(code, lang, optsStr string) (string, error) {

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -14,8 +14,13 @@
package htesting
import (
"html/template"
"time"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/resources/page"
"github.com/spf13/viper"
)
@ -28,6 +33,22 @@ func (t testSite) Hugo() hugo.Info {
return t.h
}
func (t testSite) ServerPort() int {
return 1313
}
func (testSite) LastChange() (t time.Time) {
return
}
func (t testSite) Title() string {
return "foo"
}
func (t testSite) Sites() page.Sites {
return nil
}
func (t testSite) IsServer() bool {
return false
}
@ -36,8 +57,36 @@ func (t testSite) Language() *langs.Language {
return t.l
}
func (t testSite) Pages() page.Pages {
return nil
}
func (t testSite) RegularPages() page.Pages {
return nil
}
func (t testSite) Menus() navigation.Menus {
return nil
}
func (t testSite) Taxonomies() interface{} {
return nil
}
func (t testSite) BaseURL() template.URL {
return ""
}
func (t testSite) Params() map[string]interface{} {
return nil
}
func (t testSite) Data() map[string]interface{} {
return nil
}
// NewTestHugoSite creates a new minimal test site.
func NewTestHugoSite() hugo.Site {
func NewTestHugoSite() page.Site {
return testSite{
h: hugo.NewInfo(hugo.EnvironmentProduction),
l: langs.NewLanguage("en", newTestConfig()),

View file

@ -0,0 +1,99 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"sort"
"strings"
"sync"
"github.com/spf13/afero"
)
// Reseter is implemented by some of the stateful filesystems.
type Reseter interface {
Reset()
}
// DuplicatesReporter reports on duplicate filenames.
type DuplicatesReporter interface {
ReportDuplicates() string
}
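// NewCreateCountingFs wraps the given filesystem, counting the filenames it creates or opens for writing.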
func NewCreateCountingFs(fs afero.Fs) afero.Fs {
return &createCountingFs{Fs: fs, fileCount: make(map[string]int)}
}
// ReportDuplicates reports filenames written more than once.
func (c *createCountingFs) ReportDuplicates() string {
c.mu.Lock()
defer c.mu.Unlock()
var dupes []string
for k, v := range c.fileCount {
if v > 1 {
dupes = append(dupes, fmt.Sprintf("%s (%d)", k, v))
}
}
if len(dupes) == 0 {
return ""
}
sort.Strings(dupes)
return strings.Join(dupes, ", ")
}
// createCountingFs counts filenames of created files or files opened
// for writing.
type createCountingFs struct {
afero.Fs
mu sync.Mutex
fileCount map[string]int
}
func (c *createCountingFs) Reset() {
c.mu.Lock()
defer c.mu.Unlock()
c.fileCount = make(map[string]int)
}
func (fs *createCountingFs) onCreate(filename string) {
fs.mu.Lock()
defer fs.mu.Unlock()
fs.fileCount[filename] = fs.fileCount[filename] + 1
}
func (fs *createCountingFs) Create(name string) (afero.File, error) {
f, err := fs.Fs.Create(name)
if err == nil {
fs.onCreate(name)
}
return f, err
}
func (fs *createCountingFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
f, err := fs.Fs.OpenFile(name, flag, perm)
if err == nil && isWrite(flag) {
fs.onCreate(name)
}
return f, err
}
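A small, self-contained usage sketch for the counting wrapper above (the in-memory afero filesystem and paths are just for illustration):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	fs := hugofs.NewCreateCountingFs(afero.NewMemMapFs())

	// Writing the same target twice counts as two creates.
	afero.WriteFile(fs, "public/index.html", []byte("a"), 0644)
	afero.WriteFile(fs, "public/index.html", []byte("b"), 0644)

	if r, ok := fs.(hugofs.DuplicatesReporter); ok {
		fmt.Println(r.ReportDuplicates()) // public/index.html (2)
	}
}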

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -15,6 +15,8 @@
package hugofs
import (
"os"
"github.com/gohugoio/hugo/config"
"github.com/spf13/afero"
)
@ -80,3 +82,7 @@ func getWorkingDirFs(base afero.Fs, cfg config.Provider) *afero.BasePathFs {
return nil
}
func isWrite(flag int) bool {
return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -67,10 +67,6 @@ func (fs *md5HashingFs) wrapFile(f afero.File) afero.File {
return &hashingFile{File: f, h: md5.New(), hashReceiver: fs.hashReceiver}
}
func isWrite(flag int) bool {
return flag&os.O_RDWR != 0 || flag&os.O_WRONLY != 0
}
func (fs *md5HashingFs) Name() string {
return "md5HashingFs"
}

70
hugofs/stacktracer_fs.go Normal file
View file

@ -0,0 +1,70 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"regexp"
"runtime"
"github.com/gohugoio/hugo/common/types"
"github.com/spf13/afero"
)
// Make sure we don't accidentally use this in the real Hugo.
var _ types.DevMarker = (*stacktracerFs)(nil)
// NewStacktracerFs wraps the given fs printing stack traces for file creates
// matching the given regexp pattern.
func NewStacktracerFs(fs afero.Fs, pattern string) afero.Fs {
return &stacktracerFs{Fs: fs, re: regexp.MustCompile(pattern)}
}
// stacktracerFs can be used in hard-to-debug development situations where
// you get some input and don't understand where it comes from.
type stacktracerFs struct {
afero.Fs
// Will print a stacktrace for every file create matching this pattern.
re *regexp.Regexp
}
func (fs *stacktracerFs) DevOnly() {
}
func (fs *stacktracerFs) onCreate(filename string) {
if fs.re.MatchString(filename) {
trace := make([]byte, 1500)
runtime.Stack(trace, true)
fmt.Printf("\n===========\n%q:\n%s\n", filename, trace)
}
}
func (fs *stacktracerFs) Create(name string) (afero.File, error) {
f, err := fs.Fs.Create(name)
if err == nil {
fs.onCreate(name)
}
return f, err
}
func (fs *stacktracerFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
f, err := fs.Fs.OpenFile(name, flag, perm)
if err == nil && isWrite(flag) {
fs.onCreate(name)
}
return f, err
}
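A rough usage sketch for the stack-tracing wrapper (development only, as the DevMarker assertion above signals); the pattern and paths are illustrative:

package main

import (
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	// Print a stack trace for every create of an index.html file.
	fs := hugofs.NewStacktracerFs(afero.NewMemMapFs(), `index\.html$`)

	afero.WriteFile(fs, "/public/posts/index.html", []byte("x"), 0644) // triggers a trace
	afero.WriteFile(fs, "/public/style.css", []byte("y"), 0644)        // does not
}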

View file

@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -26,6 +26,7 @@ import (
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/publisher"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/helpers"
@ -55,7 +56,12 @@ func newAliasHandler(t tpl.TemplateFinder, l *loggers.Logger, allowRoot bool) al
return aliasHandler{t, l, allowRoot}
}
func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (io.Reader, error) {
type aliasPage struct {
Permalink string
page.Page
}
func (a aliasHandler) renderAlias(isXHTML bool, permalink string, p page.Page) (io.Reader, error) {
t := "alias"
if isXHTML {
t = "alias-xhtml"
@ -75,12 +81,9 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
}
}
data := struct {
Permalink string
Page *Page
}{
data := aliasPage{
permalink,
page,
p,
}
buffer := new(bytes.Buffer)
@ -91,11 +94,11 @@ func (a aliasHandler) renderAlias(isXHTML bool, permalink string, page *Page) (i
return buffer, nil
}
func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p *Page) (err error) {
func (s *Site) writeDestAlias(path, permalink string, outputFormat output.Format, p page.Page) (err error) {
return s.publishDestAlias(false, path, permalink, outputFormat, p)
}
func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p *Page) (err error) {
func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFormat output.Format, p page.Page) (err error) {
handler := newAliasHandler(s.Tmpl, s.Log, allowRoot)
isXHTML := strings.HasSuffix(path, ".xhtml")
@ -126,19 +129,19 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, outputFo
func (a aliasHandler) targetPathAlias(src string) (string, error) {
originalAlias := src
if len(src) <= 0 {
return "", fmt.Errorf("Alias \"\" is an empty string")
return "", fmt.Errorf("alias \"\" is an empty string")
}
alias := filepath.Clean(src)
components := strings.Split(alias, helpers.FilePathSeparator)
if !a.allowRoot && alias == helpers.FilePathSeparator {
return "", fmt.Errorf("Alias \"%s\" resolves to website root directory", originalAlias)
return "", fmt.Errorf("alias \"%s\" resolves to website root directory", originalAlias)
}
// Validate against directory traversal
if components[0] == ".." {
return "", fmt.Errorf("Alias \"%s\" traverses outside the website root directory", originalAlias)
return "", fmt.Errorf("alias \"%s\" traverses outside the website root directory", originalAlias)
}
// Handle Windows file and directory naming restrictions
@ -171,7 +174,7 @@ func (a aliasHandler) targetPathAlias(src string) (string, error) {
for _, m := range msgs {
a.log.ERROR.Println(m)
}
return "", fmt.Errorf("Cannot create \"%s\": Windows filename restriction", originalAlias)
return "", fmt.Errorf("cannot create \"%s\": Windows filename restriction", originalAlias)
}
for _, m := range msgs {
a.log.INFO.Println(m)

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -50,7 +50,7 @@ func TestAlias(t *testing.T) {
b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 1)
require.Len(t, b.H.Sites[0].RegularPages(), 1)
// the real page
b.AssertFileContent("public/page/index.html", "For some moments the old man")

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -14,19 +14,13 @@
package hugolib
import (
"fmt"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/common/collections"
"github.com/gohugoio/hugo/resources/page"
)
var (
_ collections.Grouper = (*Page)(nil)
_ collections.Slicer = (*Page)(nil)
_ collections.Slicer = PageGroup{}
_ collections.Slicer = WeightedPage{}
_ resource.ResourcesConverter = Pages{}
_ collections.Grouper = (*pageState)(nil)
_ collections.Slicer = (*pageState)(nil)
)
// collections.Slicer implementations below. We keep these bridge implementations
@ -35,50 +29,8 @@ var (
// Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice.
func (p *Page) Slice(items interface{}) (interface{}, error) {
return toPages(items)
}
// Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice.
func (p PageGroup) Slice(in interface{}) (interface{}, error) {
switch items := in.(type) {
case PageGroup:
return items, nil
case []interface{}:
groups := make(PagesGroup, len(items))
for i, v := range items {
g, ok := v.(PageGroup)
if !ok {
return nil, fmt.Errorf("type %T is not a PageGroup", v)
}
groups[i] = g
}
return groups, nil
default:
return nil, fmt.Errorf("invalid slice type %T", items)
}
}
// Slice is not meant to be used externally. It's a bridge function
// for the template functions. See collections.Slice.
func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
switch items := in.(type) {
case WeightedPages:
return items, nil
case []interface{}:
weighted := make(WeightedPages, len(items))
for i, v := range items {
g, ok := v.(WeightedPage)
if !ok {
return nil, fmt.Errorf("type %T is not a WeightedPage", v)
}
weighted[i] = g
}
return weighted, nil
default:
return nil, fmt.Errorf("invalid slice type %T", items)
}
func (p *pageState) Slice(items interface{}) (interface{}, error) {
return page.ToPages(items)
}
// collections.Grouper implementations below
@ -86,19 +38,10 @@ func (p WeightedPage) Slice(in interface{}) (interface{}, error) {
// Group creates a PageGroup from a key and a Pages object
// This method is not meant for external use. It got its non-typed arguments to satisfy
// a very generic interface in the tpl package.
func (p *Page) Group(key interface{}, in interface{}) (interface{}, error) {
pages, err := toPages(in)
func (p *pageState) Group(key interface{}, in interface{}) (interface{}, error) {
pages, err := page.ToPages(in)
if err != nil {
return nil, err
}
return PageGroup{Key: key, Pages: pages}, nil
}
// ToResources wraps resource.ResourcesConverter
func (pages Pages) ToResources() resource.Resources {
r := make(resource.Resources, len(pages))
for i, p := range pages {
r[i] = p
}
return r
return page.PageGroup{Key: key, Pages: pages}, nil
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -40,7 +40,7 @@ title: "Page"
b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2)
require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html", "cool: 2")
}
@ -79,12 +79,12 @@ tags_weight: %d
b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2)
require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html",
"pages:2:hugolib.Pages:Page(/page1.md)/Page(/page2.md)",
"pageGroups:2:hugolib.PagesGroup:Page(/page1.md)/Page(/page2.md)",
`weightedPages:2::hugolib.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
"pages:2:page.Pages:Page(/page1.md)/Page(/page2.md)",
"pageGroups:2:page.PagesGroup:Page(/page1.md)/Page(/page2.md)",
`weightedPages:2::page.WeightedPages:[WeightedPage(10,"Page") WeightedPage(20,"Page")]`)
}
func TestAppendFunc(t *testing.T) {
@ -129,11 +129,11 @@ tags_weight: %d
b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 2)
require.Len(t, b.H.Sites[0].RegularPages(), 2)
b.AssertFileContent("public/index.html",
"pages:2:hugolib.Pages:Page(/page2.md)/Page(/page1.md)",
"appendPages:9:hugolib.Pages:home/page",
"pages:2:page.Pages:Page(/page2.md)/Page(/page1.md)",
"appendPages:9:page.Pages:home/page",
"appendStrings:[]string:[a b c d e]",
"appendStringsSlice:[]string:[a b c c d]",
"union:[]string:[a b c d e]",

View file

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -24,7 +24,6 @@ import (
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugolib/paths"
"github.com/pkg/errors"
_errors "github.com/pkg/errors"
@ -177,14 +176,6 @@ type configLoader struct {
ConfigSourceDescriptor
}
func (l configLoader) wrapFileInfoError(err error, fi os.FileInfo) error {
rfi, ok := fi.(hugofs.RealFilenameInfo)
if !ok {
return err
}
return l.wrapFileError(err, rfi.RealFilename())
}
func (l configLoader) loadConfig(configName string, v *viper.Viper) (string, error) {
baseDir := l.configFileDir()
var baseFilename string
@ -240,11 +231,6 @@ func (l configLoader) wrapFileError(err error, filename string) error {
return err
}
func (l configLoader) newRealBaseFs(path string) afero.Fs {
return hugofs.NewBasePathRealFilenameFs(afero.NewBasePathFs(l.Fs, path).(*afero.BasePathFs))
}
func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error) {
sourceFs := l.Fs
configDir := l.AbsConfigDir
@ -274,7 +260,7 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error)
for _, configDir := range configDirs {
err := afero.Walk(sourceFs, configDir, func(path string, fi os.FileInfo, err error) error {
if fi == nil {
if fi == nil || err != nil {
return nil
}
@ -616,8 +602,8 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("removePathAccents", false)
v.SetDefault("titleCaseStyle", "AP")
v.SetDefault("taxonomies", map[string]string{"tag": "tags", "category": "categories"})
v.SetDefault("permalinks", make(PermalinkOverrides, 0))
v.SetDefault("sitemap", Sitemap{Priority: -1, Filename: "sitemap.xml"})
v.SetDefault("permalinks", make(map[string]string))
v.SetDefault("sitemap", config.Sitemap{Priority: -1, Filename: "sitemap.xml"})
v.SetDefault("pygmentsStyle", "monokai")
v.SetDefault("pygmentsUseClasses", false)
v.SetDefault("pygmentsCodeFences", false)
@ -625,7 +611,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("pygmentsOptions", "")
v.SetDefault("disableLiveReload", false)
v.SetDefault("pluralizeListTitles", true)
v.SetDefault("preserveTaxonomyNames", false)
v.SetDefault("forceSyncStatic", false)
v.SetDefault("footnoteAnchorPrefix", "")
v.SetDefault("footnoteReturnLinkContents", "")

View file

@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -349,7 +349,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
s := buildSingleSiteExpected(t, false, expectBuildError, depsCfg, BuildCfg{SkipRender: true})
if !expectBuildError && !reflect.DeepEqual(expected, s.Data) {
if !expectBuildError && !reflect.DeepEqual(expected, s.h.Data()) {
// This disabled code detects the situation described in the WARNING message below.
// The situation seems to only occur for TOML data with integer values.
// Perhaps the TOML parser returns ints in another type.
@ -366,7 +366,7 @@ func doTestDataDirImpl(t *testing.T, dd dataDir, expected interface{}, configKey
}
*/
return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.Data)
return fmt.Sprintf("Expected data:\n%v got\n%v\n\nExpected type structure:\n%#[1]v got\n%#[2]v", expected, s.h.Data())
}
return

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -18,6 +18,8 @@ import (
"fmt"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/deps"
"github.com/spf13/afero"
@ -33,13 +35,13 @@ func TestDisableKindsNoneDisabled(t *testing.T) {
func TestDisableKindsSomeDisabled(t *testing.T) {
t.Parallel()
doTestDisableKinds(t, KindSection, kind404)
doTestDisableKinds(t, page.KindSection, kind404)
}
func TestDisableKindsOneDisabled(t *testing.T) {
t.Parallel()
for _, kind := range allKinds {
if kind == KindPage {
if kind == page.KindPage {
// Turning off regular page generation have some side-effects
// not handled by the assertions below (no sections), so
// skip that for now.
@ -124,64 +126,64 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
assertDisabledKind(th,
func(isDisabled bool) bool {
if isDisabled {
return len(s.RegularPages) == 0
return len(s.RegularPages()) == 0
}
return len(s.RegularPages) > 0
}, disabled, KindPage, "public/sect/p1/index.html", "Single|P1")
return len(s.RegularPages()) > 0
}, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindHome)
p := s.getPage(page.KindHome)
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindHome, "public/index.html", "Home")
}, disabled, page.KindHome, "public/index.html", "Home")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindSection, "sect")
p := s.getPage(page.KindSection, "sect")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindSection, "public/sect/index.html", "Sects")
}, disabled, page.KindSection, "public/sect/index.html", "Sects")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindTaxonomy, "tags", "tag1")
p := s.getPage(page.KindTaxonomy, "tags", "tag1")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
}, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindTaxonomyTerm, "tags")
p := s.getPage(page.KindTaxonomyTerm, "tags")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindTaxonomyTerm, "public/tags/index.html", "Tags")
}, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindTaxonomyTerm, "categories")
p := s.getPage(page.KindTaxonomyTerm, "categories")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
}, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
assertDisabledKind(th,
func(isDisabled bool) bool {
p := s.getPage(KindTaxonomy, "categories", "hugo")
p := s.getPage(page.KindTaxonomy, "categories", "hugo")
if isDisabled {
return p == nil
}
return p != nil
}, disabled, KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
}, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo")
// The below have no page in any collection.
assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>")
assertDisabledKind(th, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap")
@ -195,7 +197,7 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st
if kind == kindRSS && !isDisabled {
// If the home page is also disabled, there is no RSS to look for.
if stringSliceContains(KindHome, disabled...) {
if stringSliceContains(page.KindHome, disabled...) {
isDisabled = true
}
}

View file

@ -1,4 +1,4 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -20,6 +20,8 @@ import (
"strings"
"testing"
"github.com/spf13/cast"
"path/filepath"
"github.com/gohugoio/hugo/deps"
@ -67,9 +69,11 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
require.Len(t, s.RegularPages, 1)
require.Len(t, s.RegularPages(), 1)
output := string(s.RegularPages[0].content())
content, err := s.RegularPages()[0].Content()
require.NoError(t, err)
output := cast.ToString(content)
if !strings.Contains(output, expected) {
t.Errorf("Got\n%q\nExpected\n%q", output, expected)

View file

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -19,6 +19,7 @@ import (
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/resources/page"
)
type gitInfo struct {
@ -26,15 +27,12 @@ type gitInfo struct {
repo *gitmap.GitRepo
}
func (g *gitInfo) forPage(p *Page) (*gitmap.GitInfo, bool) {
if g == nil {
return nil, false
}
name := strings.TrimPrefix(filepath.ToSlash(p.Filename()), g.contentDir)
func (g *gitInfo) forPage(p page.Page) *gitmap.GitInfo {
name := strings.TrimPrefix(filepath.ToSlash(p.File().Filename()), g.contentDir)
name = strings.TrimPrefix(name, "/")
return g.repo.Files[name], true
return g.repo.Files[name]
}
func newGitInfo(cfg config.Provider) (*gitInfo, error) {

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -14,14 +14,24 @@
package hugolib
import (
"errors"
"io"
"path/filepath"
"sort"
"strings"
"sync"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/hugofs"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/source"
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/config"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/publisher"
@ -30,8 +40,10 @@ import (
"github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/i18n"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/tpl/tplimpl"
)
@ -48,17 +60,96 @@ type HugoSites struct {
// If this is running in the dev server.
running bool
// Render output formats for all sites.
renderFormats output.Formats
*deps.Deps
gitInfo *gitInfo
// As loaded from the /data dirs
data map[string]interface{}
// Keeps track of bundle directories and symlinks to enable partial rebuilding.
ContentChanges *contentChangeMap
// If enabled, keeps a revision map for all content.
gitInfo *gitInfo
init *hugoSitesInit
*fatalErrorHandler
}
func (h *HugoSites) siteInfos() SiteInfos {
infos := make(SiteInfos, len(h.Sites))
type fatalErrorHandler struct {
mu sync.Mutex
h *HugoSites
err error
done bool
donec chan bool // will be closed when done
}
// FatalError is used in some rare situations where it does not make sense to
// continue processing; it aborts as soon as possible and logs the error.
func (f *fatalErrorHandler) FatalError(err error) {
f.mu.Lock()
defer f.mu.Unlock()
if !f.done {
f.done = true
close(f.donec)
}
f.err = err
}
func (f *fatalErrorHandler) getErr() error {
f.mu.Lock()
defer f.mu.Unlock()
return f.err
}
func (f *fatalErrorHandler) Done() <-chan bool {
return f.donec
}
type hugoSitesInit struct {
// Loads the data from all of the /data folders.
data *lazy.Init
// Loads the Git info for all the pages if enabled.
gitInfo *lazy.Init
// Maps page translations.
translations *lazy.Init
}
func (h *hugoSitesInit) Reset() {
h.data.Reset()
h.gitInfo.Reset()
h.translations.Reset()
}
func (h *HugoSites) Data() map[string]interface{} {
if _, err := h.init.data.Do(); err != nil {
h.SendError(errors.Wrap(err, "failed to load data"))
return nil
}
return h.data
}
func (h *HugoSites) gitInfoForPage(p page.Page) (*gitmap.GitInfo, error) {
if _, err := h.init.gitInfo.Do(); err != nil {
return nil, err
}
if h.gitInfo == nil {
return nil, nil
}
return h.gitInfo.forPage(p), nil
}
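The lazy.Init fields above follow a run-once pattern: Add registers work, the first Do executes it, and Reset re-arms it for the next full rebuild. A rough, assumed sketch of that pattern in isolation:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/lazy"
)

func main() {
	data := lazy.New()

	// Register the (possibly expensive) loading step.
	data.Add(func() (interface{}, error) {
		fmt.Println("loading /data ...")
		return nil, nil
	})

	data.Do() // runs the registered function
	data.Do() // assumed to be a no-op: the work is already done

	data.Reset() // re-arm, as HugoSites.reset does between rebuilds
	data.Do()    // runs again
}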
func (h *HugoSites) siteInfos() page.Sites {
infos := make(page.Sites, len(h.Sites))
for i, site := range h.Sites {
infos[i] = &site.Info
}
@ -106,7 +197,7 @@ func (h *HugoSites) IsMultihost() bool {
func (h *HugoSites) LanguageSet() map[string]bool {
set := make(map[string]bool)
for _, s := range h.Sites {
set[s.Language.Lang] = true
set[s.language.Lang] = true
}
return set
}
@ -129,14 +220,14 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
func (h *HugoSites) langSite() map[string]*Site {
m := make(map[string]*Site)
for _, s := range h.Sites {
m[s.Language.Lang] = s
m[s.language.Lang] = s
}
return m
}
// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) *Page {
func (h *HugoSites) GetContentPage(filename string) page.Page {
for _, s := range h.Sites {
pos := s.rawAllPages.findPagePosByFilename(filename)
if pos == -1 {
@ -178,10 +269,40 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
running: cfg.Running,
multilingual: langConfig,
multihost: cfg.Cfg.GetBool("multihost"),
Sites: sites}
Sites: sites,
init: &hugoSitesInit{
data: lazy.New(),
gitInfo: lazy.New(),
translations: lazy.New(),
},
}
h.fatalErrorHandler = &fatalErrorHandler{
h: h,
donec: make(chan bool),
}
h.init.data.Add(func() (interface{}, error) {
err := h.loadData(h.PathSpec.BaseFs.Data.Fs)
return err, nil
})
h.init.translations.Add(func() (interface{}, error) {
if len(h.Sites) > 1 {
allTranslations := pagesToTranslationsMap(h.Sites)
assignTranslationsToPages(allTranslations, h.Sites)
}
return nil, nil
})
h.init.gitInfo.Add(func() (interface{}, error) {
err := h.loadGitInfo()
return nil, err
})
for _, s := range sites {
s.owner = h
s.h = h
}
if err := applyDeps(cfg, sites...); err != nil {
@ -197,14 +318,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
h.ContentChanges = contentChangeTracker
}
if err := h.initGitInfo(); err != nil {
return nil, err
}
return h, nil
}
func (h *HugoSites) initGitInfo() error {
func (h *HugoSites) loadGitInfo() error {
if h.Cfg.GetBool("enableGitInfo") {
gi, err := newGitInfo(h.Cfg)
if err != nil {
@ -247,16 +364,16 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
d.Site = &s.Info
siteConfig, err := loadSiteConfig(s.Language)
siteConfig, err := loadSiteConfig(s.language)
if err != nil {
return err
}
s.siteConfig = siteConfig
s.siteRefLinker, err = newSiteRefLinker(s.Language, s)
s.siteConfigConfig = siteConfig
s.siteRefLinker, err = newSiteRefLinker(s.language, s)
return err
}
cfg.Language = s.Language
cfg.Language = s.language
cfg.MediaTypes = s.mediaTypesConfig
cfg.OutputFormats = s.outputFormatsConfig
@ -347,11 +464,23 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
return sites, nil
}
// Reset resets the sites and template caches, making it ready for a full rebuild.
func (h *HugoSites) reset() {
for i, s := range h.Sites {
h.Sites[i] = s.reset()
// Reset resets the sites and template caches etc., making it ready for a full rebuild.
func (h *HugoSites) reset(config *BuildCfg) {
if config.ResetState {
for i, s := range h.Sites {
h.Sites[i] = s.reset()
if r, ok := s.Fs.Destination.(hugofs.Reseter); ok {
r.Reset()
}
}
}
h.fatalErrorHandler = &fatalErrorHandler{
h: h,
donec: make(chan bool),
}
h.init.Reset()
}
// resetLogs resets the log counters etc. Used to do a new build on the same sites.
@ -387,7 +516,7 @@ func (h *HugoSites) createSitesFromConfig(cfg config.Provider) error {
h.Sites = sites
for _, s := range sites {
s.owner = h
s.h = h
}
if err := applyDeps(depsCfg, sites...); err != nil {
@ -435,7 +564,10 @@ type BuildCfg struct {
// Note that a page does not have to have a content page / file.
// For regular builds, this will always return true.
// TODO(bep) rename/work this.
func (cfg *BuildCfg) shouldRender(p *Page) bool {
func (cfg *BuildCfg) shouldRender(p *pageState) bool {
if !p.render {
return false
}
if p.forceRender {
p.forceRender = false
return true
@ -445,15 +577,8 @@ func (cfg *BuildCfg) shouldRender(p *Page) bool {
return true
}
if cfg.RecentlyVisited[p.RelPermalink()] {
if cfg.PartialReRender {
_ = p.initMainOutputFormat()
}
return true
}
if cfg.whatChanged != nil && p.File != nil {
return cfg.whatChanged.files[p.File.Filename()]
if cfg.whatChanged != nil && p.File() != nil {
return cfg.whatChanged.files[p.File().Filename()]
}
return false
@ -477,100 +602,85 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
return nil
}
// TODO(bep) DRY
sitemapDefault := parseSitemap(h.Cfg.GetStringMap("sitemap"))
s := h.Sites[0]
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
sitemapDefault.Filename, h.toSiteInfos(), smLayouts...)
}
func (h *HugoSites) assignMissingTranslations() error {
// This looks heavy, but it should be a small number of nodes by now.
allPages := h.findAllPagesByKindNotIn(KindPage)
for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
nodes := h.findPagesByKindIn(nodeType, allPages)
// Assign translations
for _, t1 := range nodes {
for _, t2 := range nodes {
if t1.isNewTranslation(t2) {
t1.translations = append(t1.translations, t2)
}
}
}
}
// Now we can sort the translations.
for _, p := range allPages {
if len(p.translations) > 0 {
pageBy(languagePageSort).Sort(p.translations)
}
}
return nil
s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
}
// createMissingPages creates the home page, taxonomy pages etc. that aren't created as an
// effect of having a content file.
func (h *HugoSites) createMissingPages() error {
var newPages Pages
var newPages pageStatePages
for _, s := range h.Sites {
if s.isEnabled(KindHome) {
if s.isEnabled(page.KindHome) {
// home pages
home := s.findPagesByKind(KindHome)
if len(home) > 1 {
homes := s.findWorkPagesByKind(page.KindHome)
if len(homes) > 1 {
panic("Too many homes")
}
if len(home) == 0 {
n := s.newHomePage()
s.Pages = append(s.Pages, n)
newPages = append(newPages, n)
var home *pageState
if len(homes) == 0 {
home = s.newPage(page.KindHome)
s.workAllPages = append(s.workAllPages, home)
newPages = append(newPages, home)
} else {
home = homes[0]
}
s.home = home
}
// Will create content-less root sections.
newSections := s.assembleSections()
s.Pages = append(s.Pages, newSections...)
s.workAllPages = append(s.workAllPages, newSections...)
newPages = append(newPages, newSections...)
taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
// taxonomy list and terms pages
taxonomies := s.Language.GetStringMapString("taxonomies")
taxonomies := s.Language().GetStringMapString("taxonomies")
if len(taxonomies) > 0 {
taxonomyPages := s.findPagesByKind(KindTaxonomy)
taxonomyTermsPages := s.findPagesByKind(KindTaxonomyTerm)
taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
// Make them navigable from WeightedPage etc.
for _, p := range taxonomyPages {
p.getTaxonomyNodeInfo().TransferValues(p)
}
for _, p := range taxonomyTermsPages {
p.getTaxonomyNodeInfo().TransferValues(p)
}
for _, plural := range taxonomies {
if s.isEnabled(KindTaxonomyTerm) {
if taxonomyTermEnabled {
foundTaxonomyTermsPage := false
for _, p := range taxonomyTermsPages {
if p.sectionsPath() == plural {
if p.SectionsPath() == plural {
foundTaxonomyTermsPage = true
break
}
}
if !foundTaxonomyTermsPage {
n := s.newTaxonomyTermsPage(plural)
s.Pages = append(s.Pages, n)
n := s.newPage(page.KindTaxonomyTerm, plural)
n.getTaxonomyNodeInfo().TransferValues(n)
s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n)
}
}
if s.isEnabled(KindTaxonomy) {
for key := range s.Taxonomies[plural] {
foundTaxonomyPage := false
origKey := key
if taxonomyEnabled {
for termKey := range s.Taxonomies[plural] {
foundTaxonomyPage := false
if s.Info.preserveTaxonomyNames {
key = s.PathSpec.MakePathSanitized(key)
}
for _, p := range taxonomyPages {
sectionsPath := p.sectionsPath()
sectionsPath := p.SectionsPath()
if !strings.HasPrefix(sectionsPath, plural) {
continue
@ -579,20 +689,21 @@ func (h *HugoSites) createMissingPages() error {
singularKey := strings.TrimPrefix(sectionsPath, plural)
singularKey = strings.TrimPrefix(singularKey, "/")
// Some people may have /authors/MaxMustermann etc. as paths.
// p.sections contains the raw values from the file system.
// See https://github.com/gohugoio/hugo/issues/4238
singularKey = s.PathSpec.MakePathSanitized(singularKey)
if singularKey == key {
if singularKey == termKey {
foundTaxonomyPage = true
break
}
}
if !foundTaxonomyPage {
n := s.newTaxonomyPage(plural, origKey)
s.Pages = append(s.Pages, n)
info := s.taxonomyNodes.Get(plural, termKey)
if info == nil {
panic("no info found")
}
n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
info.TransferValues(n)
s.workAllPages = append(s.workAllPages, n)
newPages = append(newPages, n)
}
}
@ -601,24 +712,6 @@ func (h *HugoSites) createMissingPages() error {
}
}
if len(newPages) > 0 {
// This resorting is unfortunate, but it also needs to be sorted
// when sections are created.
first := h.Sites[0]
first.AllPages = append(first.AllPages, newPages...)
first.AllPages.sort()
for _, s := range h.Sites {
s.Pages.sort()
}
for i := 1; i < len(h.Sites); i++ {
h.Sites[i].AllPages = first.AllPages
}
}
return nil
}
@ -628,61 +721,58 @@ func (h *HugoSites) removePageByFilename(filename string) {
}
}
func (h *HugoSites) setupTranslations() {
func (h *HugoSites) createPageCollections() error {
for _, s := range h.Sites {
for _, p := range s.rawAllPages {
if p.Kind == kindUnknown {
p.Kind = p.kindFromSections()
}
if !p.s.isEnabled(p.Kind) {
if !s.isEnabled(p.Kind()) {
continue
}
shouldBuild := p.shouldBuild()
s.updateBuildStats(p)
shouldBuild := s.shouldBuild(p)
s.buildStats.update(p)
if shouldBuild {
if p.headless {
if p.m.headless {
s.headlessPages = append(s.headlessPages, p)
} else {
s.Pages = append(s.Pages, p)
s.workAllPages = append(s.workAllPages, p)
}
}
}
}
allPages := make(Pages, 0)
allPages := newLazyPagesFactory(func() page.Pages {
var pages page.Pages
for _, s := range h.Sites {
pages = append(pages, s.Pages()...)
}
page.SortByDefault(pages)
return pages
})
allRegularPages := newLazyPagesFactory(func() page.Pages {
return h.findPagesByKindIn(page.KindPage, allPages.get())
})
for _, s := range h.Sites {
allPages = append(allPages, s.Pages...)
s.PageCollections.allPages = allPages
s.PageCollections.allRegularPages = allRegularPages
}
allPages.sort()
for _, s := range h.Sites {
s.AllPages = allPages
}
// Pull over the collections from the master site
for i := 1; i < len(h.Sites); i++ {
h.Sites[i].Data = h.Sites[0].Data
}
if len(h.Sites) > 1 {
allTranslations := pagesToTranslationsMap(allPages)
assignTranslationsToPages(allTranslations, allPages)
}
return nil
}
func (s *Site) preparePagesForRender(start bool) error {
for _, p := range s.Pages {
if err := p.prepareForRender(start); err != nil {
func (s *Site) preparePagesForRender(idx int) error {
for _, p := range s.workAllPages {
if err := p.initOutputFormat(idx); err != nil {
return err
}
}
for _, p := range s.headlessPages {
if err := p.prepareForRender(start); err != nil {
if err := p.initOutputFormat(idx); err != nil {
return err
}
}
@ -691,62 +781,141 @@ func (s *Site) preparePagesForRender(start bool) error {
}
// Pages returns all pages for all sites.
func (h *HugoSites) Pages() Pages {
return h.Sites[0].AllPages
func (h *HugoSites) Pages() page.Pages {
return h.Sites[0].AllPages()
}
func handleShortcodes(p *PageWithoutContent, rawContentCopy []byte) ([]byte, error) {
if p.shortcodeState != nil && p.shortcodeState.contentShortcodes.Len() > 0 {
p.s.Log.DEBUG.Printf("Replace %d shortcodes in %q", p.shortcodeState.contentShortcodes.Len(), p.BaseFileName())
err := p.shortcodeState.executeShortcodesForDelta(p)
if err != nil {
return rawContentCopy, err
}
rawContentCopy, err = replaceShortcodeTokens(rawContentCopy, shortcodePlaceholderPrefix, p.shortcodeState.renderedShortcodes)
if err != nil {
p.s.Log.FATAL.Printf("Failed to replace shortcode tokens in %s:\n%s", p.BaseFileName(), err.Error())
func (h *HugoSites) loadData(fs afero.Fs) (err error) {
spec := source.NewSourceSpec(h.PathSpec, fs)
fileSystem := spec.NewFilesystem("")
h.data = make(map[string]interface{})
for _, r := range fileSystem.Files() {
if err := h.handleDataFile(r); err != nil {
return err
}
}
return rawContentCopy, nil
return
}
func (s *Site) updateBuildStats(page *Page) {
if page.IsDraft() {
s.draftCount++
func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
var current map[string]interface{}
f, err := r.Open()
if err != nil {
return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
}
defer f.Close()
// Crawl in data tree to insert data
current = h.data
keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
// The first path element is the virtual folder (typically theme name), which is
// not part of the key.
if len(keyParts) > 1 {
for _, key := range keyParts[1:] {
if key != "" {
if _, ok := current[key]; !ok {
current[key] = make(map[string]interface{})
}
current = current[key].(map[string]interface{})
}
}
}
if page.IsFuture() {
s.futureCount++
data, err := h.readData(r)
if err != nil {
return h.errWithFileContext(err, r)
}
if page.IsExpired() {
s.expiredCount++
if data == nil {
return nil
}
// filepath.Walk walks the files in lexical order, '/' comes before '.',
// so this warning can happen if:
// 1. A theme uses the same key; the main data folder wins.
// 2. A sub folder uses the same key; the sub folder wins.
higherPrecedentData := current[r.BaseFileName()]
switch data.(type) {
case nil:
// hear the crickets?
case map[string]interface{}:
switch higherPrecedentData.(type) {
case nil:
current[r.BaseFileName()] = data
case map[string]interface{}:
// merge maps: insert entries from data for keys that
// don't already exist in higherPrecedentData
higherPrecedentMap := higherPrecedentData.(map[string]interface{})
for key, value := range data.(map[string]interface{}) {
if _, exists := higherPrecedentMap[key]; exists {
h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
} else {
higherPrecedentMap[key] = value
}
}
default:
// can't merge: higherPrecedentData is not a map
h.Log.WARN.Printf("The %T data from '%s' overridden by "+
"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
}
case []interface{}:
if higherPrecedentData == nil {
current[r.BaseFileName()] = data
} else {
// we don't merge array data
h.Log.WARN.Printf("The %T data from '%s' overridden by "+
"higher precedence %T data already in the data tree", data, r.Path(), higherPrecedentData)
}
default:
h.Log.ERROR.Printf("unexpected data type %T in file %s", data, r.LogicalName())
}
return nil
}
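The precedence rule in handleDataFile boils down to: for map data, only keys missing from the higher-precedence map are copied in. A tiny standalone sketch of that rule with plain maps (the values are illustrative):

package main

import "fmt"

func main() {
	// Data already in the tree wins (e.g. the project's data/ directory).
	higher := map[string]interface{}{"title": "from project"}

	// Lower-precedence data (e.g. a theme) only fills in missing keys.
	lower := map[string]interface{}{"title": "from theme", "author": "theme author"}

	for k, v := range lower {
		if _, exists := higher[k]; !exists {
			higher[k] = v
		} // otherwise a warning is logged, as above
	}

	fmt.Println(higher) // map[author:theme author title:from project]
}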
func (h *HugoSites) findPagesByKindNotIn(kind string, inPages Pages) Pages {
return h.Sites[0].findPagesByKindNotIn(kind, inPages)
func (h *HugoSites) errWithFileContext(err error, f source.File) error {
rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
if !ok {
return err
}
realFilename := rfi.RealFilename()
err, _ = herrors.WithFileContextForFile(
err,
realFilename,
realFilename,
h.SourceSpec.Fs.Source,
herrors.SimpleLineMatcher)
return err
}
func (h *HugoSites) findPagesByKindIn(kind string, inPages Pages) Pages {
func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) {
file, err := f.Open()
if err != nil {
return nil, errors.Wrap(err, "readData: failed to open data file")
}
defer file.Close()
content := helpers.ReaderToBytes(file)
format := metadecoders.FormatFromString(f.Extension())
return metadecoders.Default.Unmarshal(content, format)
}
func (h *HugoSites) findPagesByKindIn(kind string, inPages page.Pages) page.Pages {
return h.Sites[0].findPagesByKindIn(kind, inPages)
}
func (h *HugoSites) findAllPagesByKind(kind string) Pages {
return h.findPagesByKindIn(kind, h.Sites[0].AllPages)
}
func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
}
func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
var pages Pages
func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages {
var pages page.Pages
for _, s := range h.Sites {
pages = append(pages, s.findPagesByShortcode(shortcode)...)
}

View file

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -15,7 +15,12 @@ package hugolib
import (
"bytes"
"context"
"fmt"
"runtime/trace"
"sort"
"github.com/gohugoio/hugo/output"
"errors"
@ -26,6 +31,9 @@ import (
// Build builds all sites. If filesystem events are provided,
// this is considered to be a potential partial rebuild.
func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
ctx, task := trace.NewTask(context.Background(), "Build")
defer task.End()
errCollector := h.StartErrorCollector()
errs := make(chan error)
@ -71,22 +79,36 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
return err
}
} else {
if err := h.init(conf); err != nil {
if err := h.initSites(conf); err != nil {
return err
}
}
if err := h.process(conf, events...); err != nil {
var err error
f := func() {
err = h.process(conf, events...)
}
trace.WithRegion(ctx, "process", f)
if err != nil {
return err
}
if err := h.assemble(conf); err != nil {
f = func() {
err = h.assemble(conf)
}
trace.WithRegion(ctx, "assemble", f)
if err != nil {
return err
}
return nil
}
prepareErr = prepare()
f := func() {
prepareErr = prepare()
}
trace.WithRegion(ctx, "prepare", f)
if prepareErr != nil {
h.SendError(prepareErr)
}
@ -94,7 +116,12 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
}
if prepareErr == nil {
if err := h.render(conf); err != nil {
var err error
f := func() {
err = h.render(conf)
}
trace.WithRegion(ctx, "render", f)
if err != nil {
h.SendError(err)
}
}
@ -120,6 +147,10 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
return err
}
if err := h.fatalErrorHandler.getErr(); err != nil {
return err
}
errorCount := h.Log.ErrorCounter.Count()
if errorCount > 0 {
return fmt.Errorf("logged %d error(s)", errorCount)
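The trace.NewTask and trace.WithRegion calls added above only produce data while an execution trace is being collected; a minimal, assumed way to capture and inspect one:

package main

import (
	"context"
	"os"
	"runtime/trace"
)

func main() {
	f, err := os.Create("trace.out")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	if err := trace.Start(f); err != nil {
		panic(err)
	}
	defer trace.Stop()

	ctx, task := trace.NewTask(context.Background(), "Build")
	defer task.End()

	trace.WithRegion(ctx, "process", func() {
		// ... the work being measured ...
	})
	// Inspect afterwards with: go tool trace trace.out
}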
@ -132,17 +163,8 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
// Build lifecycle methods below.
// The order listed matches the order of execution.
func (h *HugoSites) init(config *BuildCfg) error {
for _, s := range h.Sites {
if s.PageCollections == nil {
s.PageCollections = newPageCollections()
}
}
if config.ResetState {
h.reset()
}
func (h *HugoSites) initSites(config *BuildCfg) error {
h.reset(config)
if config.NewConfig != nil {
if err := h.createSitesFromConfig(config.NewConfig); err != nil {
@ -155,28 +177,22 @@ func (h *HugoSites) init(config *BuildCfg) error {
func (h *HugoSites) initRebuild(config *BuildCfg) error {
if config.NewConfig != nil {
return errors.New("Rebuild does not support 'NewConfig'.")
return errors.New("rebuild does not support 'NewConfig'")
}
if config.ResetState {
return errors.New("Rebuild does not support 'ResetState'.")
return errors.New("rebuild does not support 'ResetState'")
}
if !h.running {
return errors.New("Rebuild called when not in watch mode")
}
if config.whatChanged.source {
// This is for the non-renderable content pages (rarely used, I guess).
// We could maybe detect if this is really needed, but it should be
// pretty fast.
h.TemplateHandler().RebuildClone()
return errors.New("rebuild called when not in watch mode")
}
for _, s := range h.Sites {
s.resetBuildState()
}
h.reset(config)
h.resetLogs()
helpers.InitLoggers()
@ -203,14 +219,6 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
}
func (h *HugoSites) assemble(config *BuildCfg) error {
if config.whatChanged.source {
for _, s := range h.Sites {
s.createTaxonomiesEntries()
}
}
// TODO(bep) we could probably wait and do this in one go later
h.setupTranslations()
if len(h.Sites) > 1 {
// The first is initialized during process; initialize the rest
@ -221,47 +229,26 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
}
}
if err := h.createPageCollections(); err != nil {
return err
}
if config.whatChanged.source {
for _, s := range h.Sites {
if err := s.buildSiteMeta(); err != nil {
if err := s.assembleTaxonomies(); err != nil {
return err
}
}
}
// Create pages for the section pages etc. without a content file.
if err := h.createMissingPages(); err != nil {
return err
}
for _, s := range h.Sites {
for _, pages := range []Pages{s.Pages, s.headlessPages} {
for _, p := range pages {
// May have been set in front matter
if len(p.outputFormats) == 0 {
p.outputFormats = s.outputFormats[p.Kind]
}
if p.headless {
// headless = 1 output format only
p.outputFormats = p.outputFormats[:1]
}
for _, r := range p.Resources.ByType(pageResourceType) {
r.(*Page).outputFormats = p.outputFormats
}
if err := p.initPaths(); err != nil {
return err
}
}
}
s.assembleMenus()
s.refreshPageCaches()
s.setupSitePages()
}
if err := h.assignMissingTranslations(); err != nil {
return err
sort.Stable(s.workAllPages)
}
return nil
@ -269,42 +256,60 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
}
func (h *HugoSites) render(config *BuildCfg) error {
siteRenderContext := &siteRenderContext{cfg: config, multihost: h.multihost}
if !config.PartialReRender {
h.renderFormats = output.Formats{}
for _, s := range h.Sites {
s.initRenderFormats()
h.renderFormats = append(h.renderFormats, s.renderFormats...)
}
}
i := 0
for _, s := range h.Sites {
for i, rf := range s.renderFormats {
for _, s2 := range h.Sites {
// We render site by site, but since the content is lazily rendered
// and a site can "borrow" content from other sites, every site
// needs this set.
s2.rc = &siteRenderingContext{Format: rf}
for siteOutIdx, renderFormat := range s.renderFormats {
siteRenderContext.outIdx = siteOutIdx
siteRenderContext.sitesOutIdx = i
i++
isRenderingSite := s == s2
select {
case <-h.Done():
return nil
default:
// For the non-renderable pages, we use the content itself as
// template and we may have to re-parse and execute it for
// each output format.
h.TemplateHandler().RebuildClone()
if !config.PartialReRender {
if err := s2.preparePagesForRender(isRenderingSite && i == 0); err != nil {
return err
for _, s2 := range h.Sites {
// We render site by site, but since the content is lazily rendered
// and a site can "borrow" content from other sites, every site
// needs this set.
s2.rc = &siteRenderingContext{Format: renderFormat}
if !config.PartialReRender {
if err := s2.preparePagesForRender(siteRenderContext.sitesOutIdx); err != nil {
return err
}
}
}
}
if !config.SkipRender {
if config.PartialReRender {
if err := s.renderPages(config); err != nil {
return err
}
} else {
if err := s.render(config, i); err != nil {
return err
if !config.SkipRender {
if config.PartialReRender {
if err := s.renderPages(siteRenderContext); err != nil {
return err
}
} else {
if err := s.render(siteRenderContext); err != nil {
return err
}
}
}
}
}
}
if !config.SkipRender {

View file

@ -7,6 +7,9 @@ import (
"runtime"
"strings"
"testing"
"time"
"github.com/fortytw2/leaktest"
"github.com/gohugoio/hugo/common/herrors"
"github.com/stretchr/testify/require"
@ -20,25 +23,24 @@ type testSiteBuildErrorAsserter struct {
func (t testSiteBuildErrorAsserter) getFileError(err error) *herrors.ErrorWithFileContext {
t.assert.NotNil(err, t.name)
ferr := herrors.UnwrapErrorWithFileContext(err)
t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, trace()))
t.assert.NotNil(ferr, fmt.Sprintf("[%s] got %T: %+v\n%s", t.name, err, err, stackTrace()))
return ferr
}
func (t testSiteBuildErrorAsserter) assertLineNumber(lineNumber int, err error) {
fe := t.getFileError(err)
t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, trace()))
t.assert.Equal(lineNumber, fe.Position().LineNumber, fmt.Sprintf("[%s] got => %s\n%s", t.name, fe, stackTrace()))
}
func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) {
// The error message will contain filenames with OS slashes. Normalize before compare.
e1, e2 = filepath.ToSlash(e1), filepath.ToSlash(e2)
t.assert.Contains(e2, e1, trace())
t.assert.Contains(e2, e1, stackTrace())
}
func TestSiteBuildErrors(t *testing.T) {
t.Parallel()
assert := require.New(t)
const (
yamlcontent = "yamlcontent"
@ -88,9 +90,9 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertCreateError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber)
assert.Equal(1, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer)
a.assert.Equal(5, fe.Position().LineNumber)
a.assert.Equal(1, fe.Position().ColumnNumber)
a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:1\": parse failed: template: _default/single.html:5: unexpected \"}\" in operand", fe.Error())
},
@ -103,9 +105,9 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer)
a.assert.Equal(5, fe.Position().LineNumber)
a.assert.Equal(14, fe.Position().ColumnNumber)
a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
},
@ -118,9 +120,9 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber)
assert.Equal("go-html-template", fe.ChromaLexer)
a.assert.Equal(5, fe.Position().LineNumber)
a.assert.Equal(14, fe.Position().ColumnNumber)
a.assert.Equal("go-html-template", fe.ChromaLexer)
a.assertErrorMessage("\"layouts/_default/single.html:5:14\": execute of template failed", fe.Error())
},
@ -143,8 +145,8 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(7, fe.Position().LineNumber)
assert.Equal("md", fe.ChromaLexer)
a.assert.Equal(7, fe.Position().LineNumber)
a.assert.Equal("md", fe.ChromaLexer)
// Make sure that it contains both the content file and template
a.assertErrorMessage(`content/myyaml.md:7:10": failed to render shortcode "sc"`, fe.Error())
a.assertErrorMessage(`shortcodes/sc.html:4:22: executing "shortcodes/sc.html" at <.Page.Titles>: can't evaluate`, fe.Error())
@ -158,10 +160,10 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(7, fe.Position().LineNumber)
assert.Equal(14, fe.Position().ColumnNumber)
assert.Equal("md", fe.ChromaLexer)
a.assertErrorMessage("\"content/myyaml.md:7:14\": failed to extract shortcode: template for shortcode \"nono\" not found", fe.Error())
a.assert.Equal(7, fe.Position().LineNumber)
a.assert.Equal(10, fe.Position().ColumnNumber)
a.assert.Equal("md", fe.ChromaLexer)
a.assertErrorMessage(`"content/myyaml.md:7:10": failed to extract shortcode: template for shortcode "nono" not found`, fe.Error())
},
},
{
@ -182,8 +184,8 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(6, fe.Position().LineNumber)
assert.Equal("toml", fe.ErrorContext.ChromaLexer)
a.assert.Equal(6, fe.Position().LineNumber)
a.assert.Equal("toml", fe.ErrorContext.ChromaLexer)
},
},
@ -196,8 +198,8 @@ func TestSiteBuildErrors(t *testing.T) {
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
fe := a.getFileError(err)
assert.Equal(3, fe.Position().LineNumber)
assert.Equal("json", fe.ErrorContext.ChromaLexer)
a.assert.Equal(3, fe.Position().LineNumber)
a.assert.Equal("json", fe.ErrorContext.ChromaLexer)
},
},
@ -210,42 +212,43 @@ func TestSiteBuildErrors(t *testing.T) {
},
assertBuildError: func(a testSiteBuildErrorAsserter, err error) {
assert.Error(err)
a.assert.Error(err)
// This is fixed in latest Go source
if regexp.MustCompile("devel|12").MatchString(runtime.Version()) {
fe := a.getFileError(err)
assert.Equal(5, fe.Position().LineNumber)
assert.Equal(21, fe.Position().ColumnNumber)
a.assert.Equal(5, fe.Position().LineNumber)
a.assert.Equal(21, fe.Position().ColumnNumber)
} else {
assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
a.assert.Contains(err.Error(), `execute of template failed: panic in Execute`)
}
},
},
}
for _, test := range tests {
errorAsserter := testSiteBuildErrorAsserter{
assert: assert,
name: test.name,
}
b := newTestSitesBuilder(t).WithSimpleConfigFile()
f := func(fileType, content string) string {
if fileType != test.fileType {
return content
t.Run(test.name, func(t *testing.T) {
assert := require.New(t)
errorAsserter := testSiteBuildErrorAsserter{
assert: assert,
name: test.name,
}
return test.fileFixer(content)
}
b := newTestSitesBuilder(t).WithSimpleConfigFile()
b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
f := func(fileType, content string) string {
if fileType != test.fileType {
return content
}
return test.fileFixer(content)
}
b.WithTemplatesAdded("layouts/shortcodes/sc.html", f(shortcode, `SHORTCODE L1
SHORTCODE L2
SHORTCODE L3:
SHORTCODE L4: {{ .Page.Title }}
`))
b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
b.WithTemplatesAdded("layouts/_default/baseof.html", f(base, `BASEOF L1
BASEOF L2
BASEOF L3
BASEOF L4{{ if .Title }}{{ end }}
@ -253,7 +256,7 @@ BASEOF L4{{ if .Title }}{{ end }}
BASEOF L6
`))
b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
b.WithTemplatesAdded("layouts/_default/single.html", f(single, `{{ define "main" }}
SINGLE L2:
SINGLE L3:
SINGLE L4:
@ -261,7 +264,7 @@ SINGLE L5: {{ .Title }} {{ .Content }}
{{ end }}
`))
b.WithContent("myyaml.md", f(yamlcontent, `---
b.WithContent("myyaml.md", f(yamlcontent, `---
title: "The YAML"
---
@ -275,7 +278,7 @@ The end.
`))
b.WithContent("mytoml.md", f(tomlcontent, `+++
b.WithContent("mytoml.md", f(tomlcontent, `+++
title = "The TOML"
p1 = "v"
p2 = "v"
@ -288,7 +291,7 @@ Some content.
`))
b.WithContent("myjson.md", f(jsoncontent, `{
b.WithContent("myjson.md", f(jsoncontent, `{
"title": "This is a title",
"description": "This is a description."
}
@ -298,26 +301,30 @@ Some content.
`))
createErr := b.CreateSitesE()
if test.assertCreateError != nil {
test.assertCreateError(errorAsserter, createErr)
} else {
assert.NoError(createErr)
}
if createErr == nil {
buildErr := b.BuildE(BuildCfg{})
if test.assertBuildError != nil {
test.assertBuildError(errorAsserter, buildErr)
createErr := b.CreateSitesE()
if test.assertCreateError != nil {
test.assertCreateError(errorAsserter, createErr)
} else {
assert.NoError(buildErr)
assert.NoError(createErr)
}
}
if createErr == nil {
buildErr := b.BuildE(BuildCfg{})
if test.assertBuildError != nil {
test.assertBuildError(errorAsserter, buildErr)
} else {
assert.NoError(buildErr)
}
}
})
}
}
// https://github.com/gohugoio/hugo/issues/5375
func TestSiteBuildTimeout(t *testing.T) {
if !isCI() {
defer leaktest.CheckTimeout(t, 10*time.Second)()
}
b := newTestSitesBuilder(t)
b.WithConfigFile("toml", `
@ -342,6 +349,6 @@ title: "A page"
}
b.CreateSites().Build(BuildCfg{})
b.CreateSites().BuildFail(BuildCfg{})
}


@ -1,16 +1,16 @@
package hugolib
import (
"bytes"
"fmt"
"strings"
"testing"
"html/template"
"os"
"path/filepath"
"time"
"github.com/gohugoio/hugo/resources/page"
"github.com/fortytw2/leaktest"
"github.com/fsnotify/fsnotify"
"github.com/gohugoio/hugo/helpers"
@ -66,8 +66,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
assert.Equal("/blog/en/foo", enSite.PathSpec.RelURL("foo", true))
doc1en := enSite.RegularPages[0]
doc1fr := frSite.RegularPages[0]
doc1en := enSite.RegularPages()[0]
doc1fr := frSite.RegularPages()[0]
enPerm := doc1en.Permalink()
enRelPerm := doc1en.RelPermalink()
@ -100,7 +100,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
// Check list pages
b.AssertFileContent(pathMod("public/fr/sect/index.html"), "List", "Bonjour")
b.AssertFileContent("public/en/sect/index.html", "List", "Hello")
b.AssertFileContent(pathMod("public/fr/plaques/frtag1/index.html"), "Taxonomy List", "Bonjour")
b.AssertFileContent(pathMod("public/fr/plaques/FRtag1/index.html"), "Taxonomy List", "Bonjour")
b.AssertFileContent("public/en/tags/tag1/index.html", "Taxonomy List", "Hello")
// Check sitemaps
@ -126,8 +126,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
pathMod(`<atom:link href="http://example.com/blog/fr/sect/index.xml"`))
b.AssertFileContent("public/en/sect/index.xml", `<atom:link href="http://example.com/blog/en/sect/index.xml"`)
b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/index.xml"),
pathMod(`<atom:link href="http://example.com/blog/fr/plaques/frtag1/index.xml"`))
pathMod("public/fr/plaques/FRtag1/index.xml"),
pathMod(`<atom:link href="http://example.com/blog/fr/plaques/FRtag1/index.xml"`))
b.AssertFileContent("public/en/tags/tag1/index.xml", `<atom:link href="http://example.com/blog/en/tags/tag1/index.xml"`)
// Check paginators
@ -140,12 +140,12 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
b.AssertFileContent(pathMod("public/fr/sect/page/2/index.html"), "List Page 2", "Bonjour", pathMod("http://example.com/blog/fr/sect/"))
b.AssertFileContent("public/en/sect/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/sect/")
b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/page/1/index.html"),
pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/frtag1/"`))
pathMod("public/fr/plaques/FRtag1/page/1/index.html"),
pathMod(`refresh" content="0; url=http://example.com/blog/fr/plaques/FRtag1/"`))
b.AssertFileContent("public/en/tags/tag1/page/1/index.html", `refresh" content="0; url=http://example.com/blog/en/tags/tag1/"`)
b.AssertFileContent(
pathMod("public/fr/plaques/frtag1/page/2/index.html"), "List Page 2", "Bonjour",
pathMod("http://example.com/blog/fr/plaques/frtag1/"))
pathMod("public/fr/plaques/FRtag1/page/2/index.html"), "List Page 2", "Bonjour",
pathMod("http://example.com/blog/fr/plaques/FRtag1/"))
b.AssertFileContent("public/en/tags/tag1/page/2/index.html", "List Page 2", "Hello", "http://example.com/blog/en/tags/tag1/")
// nn (Nynorsk) and nb (Bokmål) have custom pagePath: side ("page" in Norwegian)
b.AssertFileContent("public/nn/side/1/index.html", `refresh" content="0; url=http://example.com/blog/nn/"`)
@ -183,12 +183,12 @@ p1 = "p1en"
assert.Len(sites, 2)
nnSite := sites[0]
nnHome := nnSite.getPage(KindHome)
nnHome := nnSite.getPage(page.KindHome)
assert.Len(nnHome.AllTranslations(), 2)
assert.Len(nnHome.Translations(), 1)
assert.True(nnHome.IsTranslated())
enHome := sites[1].getPage(KindHome)
enHome := sites[1].getPage(page.KindHome)
p1, err := enHome.Param("p1")
assert.NoError(err)
@ -199,9 +199,7 @@ p1 = "p1en"
assert.Equal("p1nn", p1)
}
//
func TestMultiSitesBuild(t *testing.T) {
t.Parallel()
for _, config := range []struct {
content string
@ -211,7 +209,11 @@ func TestMultiSitesBuild(t *testing.T) {
{multiSiteYAMLConfigTemplate, "yml"},
{multiSiteJSONConfigTemplate, "json"},
} {
doTestMultiSitesBuild(t, config.content, config.suffix)
t.Run(config.suffix, func(t *testing.T) {
t.Parallel()
doTestMultiSitesBuild(t, config.content, config.suffix)
})
}
}
@ -228,64 +230,51 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
// Check site config
for _, s := range sites {
require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.Title)
require.True(t, s.Info.defaultContentLanguageInSubdir, s.Info.title)
require.NotNil(t, s.disabledKinds)
}
gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
require.NotNil(t, gp1)
require.Equal(t, "doc1", gp1.title)
require.Equal(t, "doc1", gp1.Title())
gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
require.Nil(t, gp2)
enSite := sites[0]
enSiteHome := enSite.getPage(KindHome)
enSiteHome := enSite.getPage(page.KindHome)
require.True(t, enSiteHome.IsTranslated())
require.Equal(t, "en", enSite.Language.Lang)
require.Equal(t, "en", enSite.language.Lang)
assert.Equal(5, len(enSite.RegularPages))
assert.Equal(32, len(enSite.AllPages))
assert.Equal(5, len(enSite.RegularPages()))
assert.Equal(32, len(enSite.AllPages()))
doc1en := enSite.RegularPages[0]
permalink := doc1en.Permalink()
require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
// Check 404s
b.AssertFileContent("public/en/404.html", "404|en|404 Page not found")
b.AssertFileContent("public/fr/404.html", "404|fr|404 Page not found")
doc2 := enSite.RegularPages[1]
permalink = doc2.Permalink()
require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
// Check robots.txt
b.AssertFileContent("public/en/robots.txt", "robots|en|")
b.AssertFileContent("public/nn/robots.txt", "robots|nn|")
doc3 := enSite.RegularPages[2]
permalink = doc3.Permalink()
// Note that /superbob is a custom URL set in frontmatter.
// We respect that URL literally (it can be /search.json)
// and do not do any language code prefixing.
require.Equal(t, "http://example.com/blog/superbob/", permalink, "invalid doc3 permalink")
require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3")
b.AssertFileContent("public/superbob/index.html", "doc3|Hello|en")
require.Equal(t, doc2.PrevPage, doc3, "doc3 should follow doc2, in .PrevPage")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Permalink: http://example.com/blog/en/sect/doc1-slug/")
b.AssertFileContent("public/en/sect/doc2/index.html", "Permalink: http://example.com/blog/en/sect/doc2/")
b.AssertFileContent("public/superbob/index.html", "Permalink: http://example.com/blog/superbob/")
doc2 := enSite.RegularPages()[1]
doc3 := enSite.RegularPages()[2]
require.Equal(t, doc2.Prev(), doc3, "doc3 should follow doc2, in .PrevPage")
doc1en := enSite.RegularPages()[0]
doc1fr := doc1en.Translations()[0]
permalink = doc1fr.Permalink()
require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink")
b.AssertFileContent("public/fr/sect/doc1/index.html", "Permalink: http://example.com/blog/fr/sect/doc1/")
require.Equal(t, doc1en.Translations()[0], doc1fr, "doc1-en should have doc1-fr as translation")
require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation")
require.Equal(t, "fr", doc1fr.Language().Lang)
doc4 := enSite.AllPages[4]
permalink = doc4.Permalink()
require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink")
require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL())
doc4 := enSite.AllPages()[4]
require.Len(t, doc4.Translations(), 0, "found translations for doc4")
doc5 := enSite.AllPages[5]
permalink = doc5.Permalink()
require.Equal(t, "http://example.com/blog/fr/somewhere/else/doc5/", permalink, "invalid doc5 permalink")
// Taxonomies and their URLs
require.Len(t, enSite.Taxonomies, 1, "should have 1 taxonomy")
tags := enSite.Taxonomies["tags"]
@ -294,12 +283,13 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
frSite := sites[1]
require.Equal(t, "fr", frSite.Language.Lang)
require.Len(t, frSite.RegularPages, 4, "should have 3 pages")
require.Len(t, frSite.AllPages, 32, "should have 32 total pages (including translations and nodes)")
require.Equal(t, "fr", frSite.language.Lang)
require.Len(t, frSite.RegularPages(), 4, "should have 4 pages")
require.Len(t, frSite.AllPages(), 32, "should have 32 total pages (including translations and nodes)")
for _, frenchPage := range frSite.RegularPages {
require.Equal(t, "fr", frenchPage.Lang())
for _, frenchPage := range frSite.RegularPages() {
p := frenchPage
require.Equal(t, "fr", p.Language().Lang)
}
// See https://github.com/gohugoio/hugo/issues/4285
@ -307,10 +297,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
// isn't ideal in a multilingual setup. You want a way to get the current language version if available.
// Now you can do lookups with translation base name to get that behaviour.
// Let us test all the regular page variants:
getPageDoc1En := enSite.getPage(KindPage, filepath.ToSlash(doc1en.Path()))
getPageDoc1EnBase := enSite.getPage(KindPage, "sect/doc1")
getPageDoc1Fr := frSite.getPage(KindPage, filepath.ToSlash(doc1fr.Path()))
getPageDoc1FrBase := frSite.getPage(KindPage, "sect/doc1")
getPageDoc1En := enSite.getPage(page.KindPage, filepath.ToSlash(doc1en.File().Path()))
getPageDoc1EnBase := enSite.getPage(page.KindPage, "sect/doc1")
getPageDoc1Fr := frSite.getPage(page.KindPage, filepath.ToSlash(doc1fr.File().Path()))
getPageDoc1FrBase := frSite.getPage(page.KindPage, "sect/doc1")
require.Equal(t, doc1en, getPageDoc1En)
require.Equal(t, doc1fr, getPageDoc1Fr)
require.Equal(t, doc1en, getPageDoc1EnBase)
@ -328,35 +318,36 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello", "LingoDefault")
// Check node translations
homeEn := enSite.getPage(KindHome)
homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
require.Equal(t, "nn", homeEn.Translations()[1].Lang())
require.Equal(t, "På nynorsk", homeEn.Translations()[1].title)
require.Equal(t, "nb", homeEn.Translations()[2].Lang())
require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix)
require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
require.Equal(t, "nn", homeEn.Translations()[1].Language().Lang)
require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title())
require.Equal(t, "nb", homeEn.Translations()[2].Language().Lang)
require.Equal(t, "På bokmål", homeEn.Translations()[2].Title(), configSuffix)
require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)
sectFr := frSite.getPage(KindSection, "sect")
sectFr := frSite.getPage(page.KindSection, "sect")
require.NotNil(t, sectFr)
require.Equal(t, "fr", sectFr.Lang())
require.Equal(t, "fr", sectFr.Language().Lang)
require.Len(t, sectFr.Translations(), 1)
require.Equal(t, "en", sectFr.Translations()[0].Lang())
require.Equal(t, "Sects", sectFr.Translations()[0].title)
require.Equal(t, "en", sectFr.Translations()[0].Language().Lang)
require.Equal(t, "Sects", sectFr.Translations()[0].Title())
nnSite := sites[2]
require.Equal(t, "nn", nnSite.Language.Lang)
taxNn := nnSite.getPage(KindTaxonomyTerm, "lag")
require.Equal(t, "nn", nnSite.language.Lang)
taxNn := nnSite.getPage(page.KindTaxonomyTerm, "lag")
require.NotNil(t, taxNn)
require.Len(t, taxNn.Translations(), 1)
require.Equal(t, "nb", taxNn.Translations()[0].Lang())
require.Equal(t, "nb", taxNn.Translations()[0].Language().Lang)
taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal")
taxTermNn := nnSite.getPage(page.KindTaxonomy, "lag", "sogndal")
require.NotNil(t, taxTermNn)
require.Equal(t, taxTermNn, nnSite.getPage(page.KindTaxonomy, "LAG", "SOGNDAL"))
require.Len(t, taxTermNn.Translations(), 1)
require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
require.Equal(t, "nb", taxTermNn.Translations()[0].Language().Lang)
// Check sitemap(s)
b.AssertFileContent("public/sitemap.xml",
@ -371,59 +362,53 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
require.Len(t, enTags, 2, fmt.Sprintf("Tags in en: %v", enTags))
require.Len(t, frTags, 2, fmt.Sprintf("Tags in fr: %v", frTags))
require.NotNil(t, enTags["tag1"])
require.NotNil(t, frTags["frtag1"])
b.AssertFileContent("public/fr/plaques/frtag1/index.html", "Frtag1|Bonjour|http://example.com/blog/fr/plaques/frtag1/")
b.AssertFileContent("public/en/tags/tag1/index.html", "Tag1|Hello|http://example.com/blog/en/tags/tag1/")
require.NotNil(t, frTags["FRtag1"])
b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")
// Check Blackfriday config
require.True(t, strings.Contains(string(doc1fr.content()), "&laquo;"), string(doc1fr.content()))
require.False(t, strings.Contains(string(doc1en.content()), "&laquo;"), string(doc1en.content()))
require.True(t, strings.Contains(string(doc1en.content()), "&ldquo;"), string(doc1en.content()))
// Check that the drafts etc. are not built/processed/rendered.
assertShouldNotBuild(t, b.H)
require.True(t, strings.Contains(content(doc1fr), "&laquo;"), content(doc1fr))
require.False(t, strings.Contains(content(doc1en), "&laquo;"), content(doc1en))
require.True(t, strings.Contains(content(doc1en), "&ldquo;"), content(doc1en))
// en and nn have custom site menus
require.Len(t, frSite.Menus, 0, "fr: "+configSuffix)
require.Len(t, enSite.Menus, 1, "en: "+configSuffix)
require.Len(t, nnSite.Menus, 1, "nn: "+configSuffix)
require.Len(t, frSite.Menus(), 0, "fr: "+configSuffix)
require.Len(t, enSite.Menus(), 1, "en: "+configSuffix)
require.Len(t, nnSite.Menus(), 1, "nn: "+configSuffix)
require.Equal(t, "Home", enSite.Menus["main"].ByName()[0].Name)
require.Equal(t, "Heim", nnSite.Menus["main"].ByName()[0].Name)
// Issue #1302
require.Equal(t, template.URL(""), enSite.RegularPages[0].RSSLink())
require.Equal(t, "Home", enSite.Menus()["main"].ByName()[0].Name)
require.Equal(t, "Heim", nnSite.Menus()["main"].ByName()[0].Name)
// Issue #3108
prevPage := enSite.RegularPages[0].PrevPage
prevPage := enSite.RegularPages()[0].Prev()
require.NotNil(t, prevPage)
require.Equal(t, KindPage, prevPage.Kind)
require.Equal(t, page.KindPage, prevPage.Kind())
for {
if prevPage == nil {
break
}
require.Equal(t, KindPage, prevPage.Kind)
prevPage = prevPage.PrevPage
require.Equal(t, page.KindPage, prevPage.Kind())
prevPage = prevPage.Prev()
}
// Check bundles
bundleFr := frSite.getPage(KindPage, "bundles/b1/index.md")
b.AssertFileContent("public/fr/bundles/b1/index.html", "RelPermalink: /blog/fr/bundles/b1/|")
bundleFr := frSite.getPage(page.KindPage, "bundles/b1/index.md")
require.NotNil(t, bundleFr)
require.Equal(t, "/blog/fr/bundles/b1/", bundleFr.RelPermalink())
require.Equal(t, 1, len(bundleFr.Resources))
logoFr := bundleFr.Resources.GetMatch("logo*")
require.Equal(t, 1, len(bundleFr.Resources()))
logoFr := bundleFr.Resources().GetMatch("logo*")
require.NotNil(t, logoFr)
require.Equal(t, "/blog/fr/bundles/b1/logo.png", logoFr.RelPermalink())
b.AssertFileContent("public/fr/bundles/b1/index.html", "Resources: image/png: /blog/fr/bundles/b1/logo.png")
b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
bundleEn := enSite.getPage(KindPage, "bundles/b1/index.en.md")
bundleEn := enSite.getPage(page.KindPage, "bundles/b1/index.en.md")
require.NotNil(t, bundleEn)
require.Equal(t, "/blog/en/bundles/b1/", bundleEn.RelPermalink())
require.Equal(t, 1, len(bundleEn.Resources))
logoEn := bundleEn.Resources.GetMatch("logo*")
b.AssertFileContent("public/en/bundles/b1/index.html", "RelPermalink: /blog/en/bundles/b1/|")
require.Equal(t, 1, len(bundleEn.Resources()))
logoEn := bundleEn.Resources().GetMatch("logo*")
require.NotNil(t, logoEn)
require.Equal(t, "/blog/en/bundles/b1/logo.png", logoEn.RelPermalink())
b.AssertFileContent("public/en/bundles/b1/index.html", "Resources: image/png: /blog/en/bundles/b1/logo.png")
b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
}
@ -442,13 +427,13 @@ func TestMultiSitesRebuild(t *testing.T) {
sites := b.H.Sites
fs := b.Fs
b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
b.AssertFileContent("public/en/sect/doc2/index.html", "Single: doc2|Hello|en|", "\n\n<h1 id=\"doc2\">doc2</h1>\n\n<p><em>some content</em>")
enSite := sites[0]
frSite := sites[1]
assert.Len(enSite.RegularPages, 5)
assert.Len(frSite.RegularPages, 4)
assert.Len(enSite.RegularPages(), 5)
assert.Len(frSite.RegularPages(), 4)
// Verify translations
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Hello")
@ -458,6 +443,10 @@ func TestMultiSitesRebuild(t *testing.T) {
b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn)
assert.Len(homeEn.Translations(), 3)
contentFs := b.H.BaseFs.Content.Fs
for i, this := range []struct {
@ -478,15 +467,15 @@ func TestMultiSitesRebuild(t *testing.T) {
},
[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 4, "1 en removed")
assert.Len(enSite.RegularPages(), 4, "1 en removed")
// Check build stats
require.Equal(t, 1, enSite.draftCount, "Draft")
require.Equal(t, 1, enSite.futureCount, "Future")
require.Equal(t, 1, enSite.expiredCount, "Expired")
require.Equal(t, 0, frSite.draftCount, "Draft")
require.Equal(t, 1, frSite.futureCount, "Future")
require.Equal(t, 1, frSite.expiredCount, "Expired")
require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
},
},
{
@ -501,12 +490,12 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6)
assert.Len(enSite.AllPages, 34)
assert.Len(frSite.RegularPages, 5)
require.Equal(t, "new_fr_1", frSite.RegularPages[3].title)
require.Equal(t, "new_en_2", enSite.RegularPages[0].title)
require.Equal(t, "new_en_1", enSite.RegularPages[1].title)
assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages(), 5)
require.Equal(t, "new_fr_1", frSite.RegularPages()[3].Title())
require.Equal(t, "new_en_2", enSite.RegularPages()[0].Title())
require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
rendered := readDestination(t, fs, "public/en/new1/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@ -521,7 +510,7 @@ func TestMultiSitesRebuild(t *testing.T) {
},
[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6)
assert.Len(enSite.RegularPages(), 6)
doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
@ -539,8 +528,8 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6, "Rename")
require.Equal(t, "new_en_1", enSite.RegularPages[1].title)
assert.Len(enSite.RegularPages(), 6, "Rename")
require.Equal(t, "new_en_1", enSite.RegularPages()[1].Title())
rendered := readDestination(t, fs, "public/en/new1renamed/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
}},
@ -554,9 +543,9 @@ func TestMultiSitesRebuild(t *testing.T) {
},
[]fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6)
assert.Len(enSite.AllPages, 34)
assert.Len(frSite.RegularPages, 5)
assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages(), 5)
doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
},
@ -571,18 +560,18 @@ func TestMultiSitesRebuild(t *testing.T) {
},
[]fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6)
assert.Len(enSite.AllPages, 34)
assert.Len(frSite.RegularPages, 5)
assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages(), 5)
docEn := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, fs, "public/fr/sect/doc1/index.html")
require.True(t, strings.Contains(docFr, "Salut"), "No Salut")
homeEn := enSite.getPage(KindHome)
homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn)
assert.Len(homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
require.Equal(t, "fr", homeEn.Translations()[0].Language().Lang)
},
},
@ -595,9 +584,9 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
},
func(t *testing.T) {
assert.Len(enSite.RegularPages, 6)
assert.Len(enSite.AllPages, 34)
assert.Len(frSite.RegularPages, 5)
assert.Len(enSite.RegularPages(), 6)
assert.Len(enSite.AllPages(), 34)
assert.Len(frSite.RegularPages(), 5)
b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Modified Shortcode: Salut")
b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Modified Shortcode: Hello")
},
@ -617,23 +606,6 @@ func TestMultiSitesRebuild(t *testing.T) {
this.assertFunc(t)
}
// Check that the drafts etc. are not built/processed/rendered.
assertShouldNotBuild(t, b.H)
}
func assertShouldNotBuild(t *testing.T, sites *HugoSites) {
s := sites.Sites[0]
for _, p := range s.rawAllPages {
// No HTML when not processed
require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, []byte("</")), p.BaseFileName()+": "+string(p.workContent))
require.Equal(t, p.shouldBuild(), p.content() != "", fmt.Sprintf("%v:%v", p.content(), p.shouldBuild()))
require.Equal(t, p.shouldBuild(), p.content() != "", p.BaseFileName())
}
}
func TestAddNewLanguage(t *testing.T) {
@ -671,31 +643,32 @@ title = "Svenska"
enSite := sites.Sites[0]
svSite := sites.Sites[1]
frSite := sites.Sites[2]
require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang)
require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
require.True(t, enSite.language.Lang == "en", enSite.language.Lang)
require.True(t, svSite.language.Lang == "sv", svSite.language.Lang)
require.True(t, frSite.language.Lang == "fr", frSite.language.Lang)
homeEn := enSite.getPage(KindHome)
homeEn := enSite.getPage(page.KindHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())
require.Len(t, enSite.RegularPages, 5)
require.Len(t, frSite.RegularPages, 4)
require.Equal(t, "sv", homeEn.Translations()[0].Language().Lang)
require.Len(t, enSite.RegularPages(), 5)
require.Len(t, frSite.RegularPages(), 4)
// Verify Swedish site
require.Len(t, svSite.RegularPages, 1)
svPage := svSite.RegularPages[0]
require.Len(t, svSite.RegularPages(), 1)
svPage := svSite.RegularPages()[0]
require.Equal(t, "Swedish Contentfile", svPage.title)
require.Equal(t, "sv", svPage.Lang())
require.Equal(t, "Swedish Contentfile", svPage.Title())
require.Equal(t, "sv", svPage.Language().Lang)
require.Len(t, svPage.Translations(), 2)
require.Len(t, svPage.AllTranslations(), 3)
require.Equal(t, "en", svPage.Translations()[0].Lang())
require.Equal(t, "en", svPage.Translations()[0].Language().Lang)
// Regular pages have no children
require.Len(t, svPage.Pages, 0)
require.Len(t, svPage.data["Pages"], 0)
require.Len(t, svPage.Pages(), 0)
require.Len(t, svPage.Data().(page.Data).Pages(), 0)
}
@ -782,12 +755,12 @@ Some text. Some more text.
content = append(content, []string{"s2/_index.md", fmt.Sprintf(contentTempl, defaultOutputs, fmt.Sprintf("S %d", 2), 2, true)}...)
b.WithSimpleConfigFile()
b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}`)
b.WithTemplates("layouts/_default/single.html", `Single: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
b.WithTemplates("layouts/_default/myview.html", `View: {{ len .Content }}`)
b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}`)
b.WithTemplates("layouts/_default/single.json", `Single JSON: {{ .Content }}|RelPermalink: {{ .RelPermalink }}|Permalink: {{ .Permalink }}`)
b.WithTemplates("layouts/_default/list.html", `
Page: {{ .Paginator.PageNumber }}
P: {{ path.Join .Path }}
P: {{ with .File }}{{ path.Join .Path }}{{ end }}
List: {{ len .Paginator.Pages }}|List Content: {{ len .Content }}
{{ $shuffled := where .Site.RegularPages "Params.multioutput" true | shuffle }}
{{ $first5 := $shuffled | first 5 }}
@ -810,7 +783,7 @@ END
if i%10 == 0 {
section = "s2"
}
checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), 8343, contentMatchers...)
checkContent(b, fmt.Sprintf("public/%s/page%d/index.html", section, i), contentMatchers...)
}
}
@ -819,48 +792,158 @@ END
if i%10 == 0 {
section = "s2"
}
checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), 8348, contentMatchers...)
checkContent(b, fmt.Sprintf("public/%s/page%d/index.json", section, i), contentMatchers...)
}
checkContent(b, "public/s1/index.html", 184, "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n")
checkContent(b, "public/s2/index.html", 184, "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND")
checkContent(b, "public/index.html", 181, "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND")
checkContent(b, "public/s1/index.html", "P: s1/_index.md\nList: 10|List Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335\n\nRender 1: View: 8335\n\nRender 2: View: 8335\n\nRender 3: View: 8335\n\nRender 4: View: 8335\n\nEND\n")
checkContent(b, "public/s2/index.html", "P: s2/_index.md\nList: 10|List Content: 8335", "Render 4: View: 8335\n\nEND")
checkContent(b, "public/index.html", "P: _index.md\nList: 10|List Content: 8335", "4: View: 8335\n\nEND")
// Chek paginated pages
// Check paginated pages
for i := 2; i <= 9; i++ {
checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), 181, fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND")
checkContent(b, fmt.Sprintf("public/page/%d/index.html", i), fmt.Sprintf("Page: %d", i), "Content: 8335\n\n\nL1: 500 L2: 5\n\nRender 0: View: 8335", "Render 4: View: 8335\n\nEND")
}
}
func checkContent(s *sitesBuilder, filename string, length int, matches ...string) {
func checkContent(s *sitesBuilder, filename string, matches ...string) {
content := readDestination(s.T, s.Fs, filename)
for _, match := range matches {
if !strings.Contains(content, match) {
s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
}
}
if len(content) != length {
s.Fatalf("got %d expected %d", len(content), length)
}
func TestTranslationsFromContentToNonContent(t *testing.T) {
b := newTestSitesBuilder(t)
b.WithConfigFile("toml", `
baseURL = "http://example.com/"
defaultContentLanguage = "en"
[languages]
[languages.en]
weight = 10
contentDir = "content/en"
[languages.nn]
weight = 20
contentDir = "content/nn"
`)
b.WithContent("en/mysection/_index.md", `
---
Title: My Section
---
`)
b.WithContent("en/_index.md", `
---
Title: My Home
---
`)
b.WithContent("en/categories/mycat/_index.md", `
---
Title: My MyCat
---
`)
b.WithContent("en/categories/_index.md", `
---
Title: My categories
---
`)
for _, lang := range []string{"en", "nn"} {
b.WithContent(lang+"/mysection/page.md", `
---
Title: My Page
categories: ["mycat"]
---
`)
}
b.Build(BuildCfg{})
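// Every kind of node below (home, section, taxonomy list and term) should exist in
// both language sites, even though only the English site provides content files for
// them, and the two variants should be linked as each other's translations.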
for _, path := range []string{
"/",
"/mysection",
"/categories",
"/categories/mycat",
} {
t.Run(path, func(t *testing.T) {
assert := require.New(t)
s1, _ := b.H.Sites[0].getPageNew(nil, path)
s2, _ := b.H.Sites[1].getPageNew(nil, path)
assert.NotNil(s1)
assert.NotNil(s2)
assert.Equal(1, len(s1.Translations()))
assert.Equal(1, len(s2.Translations()))
assert.Equal(s2, s1.Translations()[0])
assert.Equal(s1, s2.Translations()[0])
m1 := s1.Translations().MergeByLanguage(s2.Translations())
m2 := s2.Translations().MergeByLanguage(s1.Translations())
assert.Equal(1, len(m1))
assert.Equal(1, len(m2))
})
}
}
// https://github.com/gohugoio/hugo/issues/5777
func TestTableOfContentsInShortcodes(t *testing.T) {
t.Parallel()
b := newMultiSiteTestDefaultBuilder(t)
b.WithTemplatesAdded("layouts/shortcodes/toc.html", tocShortcode)
b.WithTemplatesAdded("layouts/shortcodes/wrapper.html", "{{ .Inner }}")
b.WithContent("post/simple.en.md", tocPageSimple)
b.WithContent("post/variants1.en.md", tocPageVariants1)
b.WithContent("post/variants2.en.md", tocPageVariants2)
b.WithContent("post/withSCInHeading.en.md", tocPageWithShortcodesInHeadings)
b.CreateSites().Build(BuildCfg{})
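// The toc shortcode below emits the TableOfContents twice per invocation, so each
// use of it should contribute two <nav id="TableOfContents"> blocks to the output.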
b.AssertFileContent("public/en/post/simple/index.html", tocPageSimpleExpected)
b.AssertFileContent("public/en/post/simple/index.html",
tocPageSimpleExpected,
// Make sure it is inserted twice
`TOC1: <nav id="TableOfContents">`,
`TOC2: <nav id="TableOfContents">`,
)
b.AssertFileContentFn("public/en/post/variants1/index.html", func(s string) bool {
return strings.Count(s, "TableOfContents") == 4
})
b.AssertFileContentFn("public/en/post/variants2/index.html", func(s string) bool {
return strings.Count(s, "TableOfContents") == 6
})
b.AssertFileContent("public/en/post/withSCInHeading/index.html", tocPageWithShortcodesInHeadingsExpected)
}
var tocShortcode = `
{{ .Page.TableOfContents }}
TOC1: {{ .Page.TableOfContents }}
TOC2: {{ .Page.TableOfContents }}
`
func TestSelfReferencedContentInShortcode(t *testing.T) {
@ -901,6 +984,41 @@ Even more text.
Lorem ipsum...
`
var tocPageVariants1 = `---
title: tocTest
publishdate: "2000-01-01"
---
Variant 1:
{{% wrapper %}}
{{< toc >}}
{{% /wrapper %}}
# Heading 1
Variant 3:
{{% toc %}}
`
var tocPageVariants2 = `---
title: tocTest
publishdate: "2000-01-01"
---
Variant 1:
{{% wrapper %}}
{{< toc >}}
{{% /wrapper %}}
# Heading 1
Variant 2:
{{< wrapper >}}
{{< toc >}}
{{< /wrapper >}}
Variant 3:
{{% toc %}}
`
var tocPageSimpleExpected = `<nav id="TableOfContents">
<ul>
<li><a href="#1">Heading 1</a>
@ -958,6 +1076,7 @@ paginate = 1
disablePathToLower = true
defaultContentLanguage = "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir = {{ .DefaultContentLanguageInSubdir }}
enableRobotsTXT = true
[permalinks]
other = "/somewhere/else/:filename"
@ -1015,6 +1134,7 @@ disablePathToLower: true
paginate: 1
defaultContentLanguage: "{{ .DefaultContentLanguage }}"
defaultContentLanguageInSubdir: {{ .DefaultContentLanguageInSubdir }}
enableRobotsTXT: true
permalinks:
other: "/somewhere/else/:filename"
@ -1073,6 +1193,7 @@ var multiSiteJSONConfigTemplate = `
"disablePathToLower": true,
"defaultContentLanguage": "{{ .DefaultContentLanguage }}",
"defaultContentLanguageInSubdir": true,
"enableRobotsTXT": true,
"permalinks": {
"other": "/somewhere/else/:filename"
},
@ -1170,7 +1291,23 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
b, err := afero.ReadFile(fs, filename)
if err != nil {
// Print some debug info
root := strings.Split(filename, helpers.FilePathSeparator)[0]
hadSlash := strings.HasPrefix(filename, helpers.FilePathSeparator)
start := 0
if hadSlash {
start = 1
}
end := start + 1
parts := strings.Split(filename, helpers.FilePathSeparator)
if parts[start] == "work" {
end++
}
root := filepath.Join(parts[start:end]...)
if hadSlash {
root = helpers.FilePathSeparator + root
}
helpers.PrintFs(fs, root, os.Stdout)
Fatalf(t, "Failed to read file: %s", err)
}
@ -1262,8 +1399,8 @@ NOTE: slug should be used as URL
title: doc1
weight: 1
plaques:
- frtag1
- frtag2
- FRtag1
- FRtag2
publishdate: "2000-01-04"
---
# doc1
@ -1293,7 +1430,7 @@ aliases: [/en/al/alias1,/al/alias2/]
tags:
- tag2
- tag1
url: /superbob
url: /superbob/
---
# doc3
*some content*
@ -1303,7 +1440,7 @@ NOTE: third 'en' doc, should trigger pagination on home page.
title: doc4
weight: 4
plaques:
- frtag1
- FRtag1
publishdate: "2000-01-05"
---
# doc4


@ -3,6 +3,8 @@ package hugolib
import (
"testing"
"github.com/gohugoio/hugo/resources/page"
"github.com/stretchr/testify/require"
)
@ -55,7 +57,7 @@ languageName = "Nynorsk"
s1 := b.H.Sites[0]
s1h := s1.getPage(KindHome)
s1h := s1.getPage(page.KindHome)
assert.True(s1h.IsTranslated())
assert.Len(s1h.Translations(), 2)
assert.Equal("https://example.com/docs/", s1h.Permalink())
@ -66,9 +68,8 @@ languageName = "Nynorsk"
// For multihost, we never want any content in the root.
//
// check url in front matter:
pageWithURLInFrontMatter := s1.getPage(KindPage, "sect/doc3.en.md")
pageWithURLInFrontMatter := s1.getPage(page.KindPage, "sect/doc3.en.md")
assert.NotNil(pageWithURLInFrontMatter)
assert.Equal("/superbob", pageWithURLInFrontMatter.URL())
assert.Equal("/docs/superbob/", pageWithURLInFrontMatter.RelPermalink())
b.AssertFileContent("public/en/superbob/index.html", "doc3|Hello|en")
@ -78,7 +79,7 @@ languageName = "Nynorsk"
s2 := b.H.Sites[1]
s2h := s2.getPage(KindHome)
s2h := s2.getPage(page.KindHome)
assert.Equal("https://example.fr/", s2h.Permalink())
b.AssertFileContent("public/fr/index.html", "French Home Page", "String Resource: /docs/text/pipes.txt")
@ -94,22 +95,19 @@ languageName = "Nynorsk"
// Check bundles
bundleEn := s1.getPage(KindPage, "bundles/b1/index.en.md")
bundleEn := s1.getPage(page.KindPage, "bundles/b1/index.en.md")
require.NotNil(t, bundleEn)
require.Equal(t, "/docs/bundles/b1/", bundleEn.RelPermalink())
require.Equal(t, 1, len(bundleEn.Resources))
logoEn := bundleEn.Resources.GetMatch("logo*")
require.NotNil(t, logoEn)
require.Equal(t, "/docs/bundles/b1/logo.png", logoEn.RelPermalink())
b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
require.Equal(t, 1, len(bundleEn.Resources()))
bundleFr := s2.getPage(KindPage, "bundles/b1/index.md")
b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data")
b.AssertFileContent("public/en/bundles/b1/index.html", " image/png: /docs/bundles/b1/logo.png")
bundleFr := s2.getPage(page.KindPage, "bundles/b1/index.md")
require.NotNil(t, bundleFr)
require.Equal(t, "/bundles/b1/", bundleFr.RelPermalink())
require.Equal(t, 1, len(bundleFr.Resources))
logoFr := bundleFr.Resources.GetMatch("logo*")
require.NotNil(t, logoFr)
require.Equal(t, "/bundles/b1/logo.png", logoFr.RelPermalink())
require.Equal(t, 1, len(bundleFr.Resources()))
b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data")
b.AssertFileContent("public/fr/bundles/b1/index.html", " image/png: /bundles/b1/logo.png")
}

303
hugolib/hugo_smoke_test.go Normal file

@ -0,0 +1,303 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestSmoke(t *testing.T) {
t.Parallel()
assert := require.New(t)
const configFile = `
baseURL = "https://example.com"
title = "Simple Site"
rssLimit = 3
defaultContentLanguage = "en"
enableRobotsTXT = true
[languages]
[languages.en]
weight = 1
title = "In English"
[languages.no]
weight = 2
title = "På norsk"
[params]
hugo = "Rules!"
[outputs]
home = ["HTML", "JSON", "CSV", "RSS"]
`
const pageContentAndSummaryDivider = `---
title: Page with outputs
hugo: "Rocks!"
outputs: ["HTML", "JSON"]
tags: [ "hugo" ]
aliases: [ "/a/b/c" ]
---
This is summary.
<!--more-->
This is content with some shortcodes.
Shortcode 1: {{< sc >}}.
Shortcode 2: {{< sc >}}.
`
const pageContentWithMarkdownShortcodes = `---
title: Page with markdown shortcode
hugo: "Rocks!"
outputs: ["HTML", "JSON"]
---
This is summary.
<!--more-->
This is content[^a].
# Header above
{{% markdown-shortcode %}}
# Header inside
Some **markdown**.[^b]
{{% /markdown-shortcode %}}
# Heder below
Some more content[^c].
Footnotes:
[^a]: Fn 1
[^b]: Fn 2
[^c]: Fn 3
`
var pageContentAutoSummary = strings.Replace(pageContentAndSummaryDivider, "<!--more-->", "", 1)
b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
b.WithTemplatesAdded("shortcodes/markdown-shortcode.html", `
Some **Markdown** in shortcode.
{{ .Inner }}
`)
b.WithTemplatesAdded("shortcodes/markdown-shortcode.json", `
Some **Markdown** in JSON shortcode.
{{ .Inner }}
`)
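// Create eleven blog pages: even-numbered pages use an explicit summary divider and
// also get a Norwegian translation, while odd-numbered pages rely on the auto summary.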
for i := 1; i <= 11; i++ {
if i%2 == 0 {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAndSummaryDivider)
b.WithContent(fmt.Sprintf("blog/page%d.no.md", i), pageContentAndSummaryDivider)
} else {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), pageContentAutoSummary)
}
}
for i := 1; i <= 5; i++ {
// Root section pages
b.WithContent(fmt.Sprintf("root%d.md", i), pageContentAutoSummary)
}
// https://github.com/gohugoio/hugo/issues/4695
b.WithContent("blog/markyshort.md", pageContentWithMarkdownShortcodes)
// Add one bundle
b.WithContent("blog/mybundle/index.md", pageContentAndSummaryDivider)
b.WithContent("blog/mybundle/mydata.csv", "Bundled CSV")
const (
commonPageTemplate = `|{{ .Kind }}|{{ .Title }}|{{ .Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
commonPaginatorTemplate = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
commonListTemplateNoPaginator = `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
commonListTemplate = commonPaginatorTemplate + `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
commonShortcodeTemplate = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
prevNextTemplate = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
prevNextInSectionTemplate = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`
paramsTemplate = `|Params: {{ .Params.hugo }}`
treeNavTemplate = `|CurrentSection: {{ .CurrentSection }}`
)
b.WithTemplates(
"_default/list.html", "HTML: List"+commonPageTemplate+commonListTemplate+"|First Site: {{ .Sites.First.Title }}",
"_default/list.json", "JSON: List"+commonPageTemplate+commonListTemplateNoPaginator,
"_default/list.csv", "CSV: List"+commonPageTemplate+commonListTemplateNoPaginator,
"_default/single.html", "HTML: Single"+commonPageTemplate+prevNextTemplate+prevNextInSectionTemplate+treeNavTemplate,
"_default/single.json", "JSON: Single"+commonPageTemplate,
// For .Render test
"_default/li.html", `HTML: LI|{{ strings.Contains .Content "HTML: Shortcode: sc" }}`+paramsTemplate,
"_default/li.json", `JSON: LI|{{ strings.Contains .Content "JSON: Shortcode: sc" }}`+paramsTemplate,
"_default/li.csv", `CSV: LI|{{ strings.Contains .Content "CSV: Shortcode: sc" }}`+paramsTemplate,
"404.html", "{{ .Kind }}|{{ .Title }}|Page not found",
"shortcodes/sc.html", "HTML: Shortcode: "+commonShortcodeTemplate,
"shortcodes/sc.json", "JSON: Shortcode: "+commonShortcodeTemplate,
"shortcodes/sc.csv", "CSV: Shortcode: "+commonShortcodeTemplate,
)
b.CreateSites().Build(BuildCfg{})
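// Spot-check a regular page in the HTML output: shortcode output, summary,
// prev/next navigation and the current section.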
b.AssertFileContent("public/blog/page1/index.html",
"This is content with some shortcodes.",
"Page with outputs",
"Pages: Pages(0)",
"RelPermalink: /blog/page1/|",
"Shortcode 1: HTML: Shortcode: |sc|0|||WordCount: 0.",
"Shortcode 2: HTML: Shortcode: |sc|1|||WordCount: 0.",
"Prev: /blog/page10/|Next: /blog/mybundle/",
"PrevInSection: /blog/page10/|NextInSection: /blog/mybundle/",
"Summary: This is summary.",
"CurrentSection: Page(/blog)",
)
b.AssertFileContent("public/blog/page1/index.json",
"JSON: Single|page|Page with outputs|",
"SON: Shortcode: |sc|0||")
b.AssertFileContent("public/index.html",
"home|In English",
"Site params: Rules",
"Pages: Pages(18)|Data Pages: Pages(18)",
"Paginator: 1",
"First Site: In English",
"RelPermalink: /",
)
b.AssertFileContent("public/no/index.html", "home|På norsk", "RelPermalink: /no/")
// Check RSS
rssHome := b.FileContent("public/index.xml")
assert.Contains(rssHome, `<atom:link href="https://example.com/index.xml" rel="self" type="application/rss+xml" />`)
assert.Equal(3, strings.Count(rssHome, "<item>")) // rssLimit = 3
// .Render should use template/content from the current output format
// even if that output format isn't configured for that page.
b.AssertFileContent(
"public/index.json",
"Render 0: page|JSON: LI|false|Params: Rocks!",
)
b.AssertFileContent(
"public/index.html",
"Render 0: page|HTML: LI|false|Params: Rocks!|",
)
b.AssertFileContent(
"public/index.csv",
"Render 0: page|CSV: LI|false|Params: Rocks!|",
)
// Check bundled resources
b.AssertFileContent(
"public/blog/mybundle/index.html",
"Resources: 1",
)
// Check pages in root section
b.AssertFileContent(
"public/root3/index.html",
"Single|page|Page with outputs|root3.md|",
"Prev: /root4/|Next: /root2/|PrevInSection: /root4/|NextInSection: /root2/",
)
b.AssertFileContent(
"public/root3/index.json", "Shortcode 1: JSON:")
// Paginators
b.AssertFileContent("public/page/1/index.html", `rel="canonical" href="https://example.com/"`)
b.AssertFileContent("public/page/2/index.html", "HTML: List|home|In English|", "Paginator: 2")
// 404
b.AssertFileContent("public/404.html", "404|404 Page not found")
// Sitemaps
b.AssertFileContent("public/en/sitemap.xml", "<loc>https://example.com/blog/</loc>")
b.AssertFileContent("public/no/sitemap.xml", `hreflang="no"`)
b.AssertFileContent("public/sitemap.xml", "<loc>https://example.com/en/sitemap.xml</loc>", "<loc>https://example.com/no/sitemap.xml</loc>")
// robots.txt
b.AssertFileContent("public/robots.txt", `User-agent: *`)
// Aliases
b.AssertFileContent("public/a/b/c/index.html", `refresh`)
// Markdown vs shortcodes
// Check that all footnotes are grouped (even those from inside the shortcode)
b.AssertFileContentRe("public/blog/markyshort/index.html", `Footnotes:.*<ol>.*Fn 1.*Fn 2.*Fn 3.*</ol>`)
}
// https://github.com/golang/go/issues/30286
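// TestDataRace renders many pages across several output formats from shared
// templates; it is intended to be run under the race detector to catch
// concurrent-access regressions.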
func TestDataRace(t *testing.T) {
const page = `
---
title: "The Page"
outputs: ["HTML", "JSON"]
---
The content.
`
b := newTestSitesBuilder(t).WithSimpleConfigFile()
for i := 1; i <= 50; i++ {
b.WithContent(fmt.Sprintf("blog/page%d.md", i), page)
}
b.WithContent("_index.md", `
---
title: "The Home"
outputs: ["HTML", "JSON", "CSV", "RSS"]
---
The content.
`)
commonTemplate := `{{ .Data.Pages }}`
b.WithTemplatesAdded("_default/single.html", "HTML Single: "+commonTemplate)
b.WithTemplatesAdded("_default/list.html", "HTML List: "+commonTemplate)
b.CreateSites().Build(BuildCfg{})
}


@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -19,6 +19,8 @@ import (
"path/filepath"
"testing"
"github.com/gohugoio/hugo/resources/page"
"github.com/stretchr/testify/require"
)
@ -99,15 +101,19 @@ Content.
section := "sect"
var contentRoot = func(lang string) string {
contentRoot := "content/main"
switch lang {
case "nn":
contentRoot = "content/norsk"
return "content/norsk"
case "sv":
contentRoot = "content/svensk"
return "content/svensk"
default:
return "content/main"
}
return contentRoot + "/" + section
}
var contentSectionRoot = func(lang string) string {
return contentRoot(lang) + "/" + section
}
for _, lang := range []string{"en", "nn", "sv"} {
@ -124,7 +130,7 @@ Content.
}
base := fmt.Sprintf("p-%s-%d", lang, j)
slug := fmt.Sprintf("%s", base)
slug := base
langID := ""
if lang == "sv" && j%4 == 0 {
@ -139,7 +145,7 @@ Content.
slug += langID
contentRoot := contentRoot(lang)
contentRoot := contentSectionRoot(lang)
filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID))
contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j))
@ -148,7 +154,7 @@ Content.
// Put common translations in all of them
for i, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang)
contentRoot := contentSectionRoot(lang)
slug := fmt.Sprintf("common_%s", lang)
@ -173,7 +179,7 @@ Content.
// Add a bundle with some images
for i, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang)
contentRoot := contentSectionRoot(lang)
slug := fmt.Sprintf("bundle_%s", lang)
filename := filepath.Join(contentRoot, "mybundle", "index.md")
contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i))
@ -190,11 +196,20 @@ Content.
}
// Add some static files inside the content dir
// https://github.com/gohugoio/hugo/issues/5759
for _, lang := range []string{"en", "nn", "sv"} {
contentRoot := contentRoot(lang)
for i := 0; i < 2; i++ {
filename := filepath.Join(contentRoot, "mystatic", fmt.Sprintf("file%d.yaml", i))
contentFiles = append(contentFiles, filename, lang)
}
}
b := newTestSitesBuilder(t)
b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites()
_ = os.Stdout
//printFs(b.H.BaseFs.ContentFs, "/", os.Stdout)
b.Build(BuildCfg{})
@ -204,11 +219,14 @@ Content.
nnSite := b.H.Sites[1]
svSite := b.H.Sites[2]
//dumpPages(nnSite.RegularPages...)
assert.Equal(12, len(nnSite.RegularPages))
assert.Equal(13, len(enSite.RegularPages))
b.AssertFileContent("/my/project/public/en/mystatic/file1.yaml", "en")
b.AssertFileContent("/my/project/public/nn/mystatic/file1.yaml", "nn")
assert.Equal(10, len(svSite.RegularPages))
//dumpPages(nnSite.RegularPages...)
assert.Equal(12, len(nnSite.RegularPages()))
assert.Equal(13, len(enSite.RegularPages()))
assert.Equal(10, len(svSite.RegularPages()))
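// The same content path should resolve to a language-specific page in each site.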
svP2, err := svSite.getPageNew(nil, "/sect/page2.md")
assert.NoError(err)
@ -217,9 +235,9 @@ Content.
enP2, err := enSite.getPageNew(nil, "/sect/page2.md")
assert.NoError(err)
assert.Equal("en", enP2.Lang())
assert.Equal("sv", svP2.Lang())
assert.Equal("nn", nnP2.Lang())
assert.Equal("en", enP2.Language().Lang)
assert.Equal("sv", svP2.Language().Lang)
assert.Equal("nn", nnP2.Language().Lang)
content, _ := nnP2.Content()
assert.Contains(content, "SVP3-REF: https://example.org/sv/sect/p-sv-3/")
@ -241,10 +259,10 @@ Content.
assert.NoError(err)
assert.Equal("https://example.org/nn/sect/p-nn-3/", nnP3Ref)
for i, p := range enSite.RegularPages {
for i, p := range enSite.RegularPages() {
j := i + 1
msg := fmt.Sprintf("Test %d", j)
assert.Equal("en", p.Lang(), msg)
assert.Equal("en", p.Language().Lang, msg)
assert.Equal("sect", p.Section())
if j < 9 {
if j%4 == 0 {
@ -256,20 +274,20 @@ Content.
}
// Check bundles
bundleEn := enSite.RegularPages[len(enSite.RegularPages)-1]
bundleNn := nnSite.RegularPages[len(nnSite.RegularPages)-1]
bundleSv := svSite.RegularPages[len(svSite.RegularPages)-1]
bundleEn := enSite.RegularPages()[len(enSite.RegularPages())-1]
bundleNn := nnSite.RegularPages()[len(nnSite.RegularPages())-1]
bundleSv := svSite.RegularPages()[len(svSite.RegularPages())-1]
assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink())
assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink())
assert.Equal(4, len(bundleEn.Resources))
assert.Equal(4, len(bundleNn.Resources))
assert.Equal(4, len(bundleSv.Resources))
assert.Equal(4, len(bundleEn.Resources()))
assert.Equal(4, len(bundleNn.Resources()))
assert.Equal(4, len(bundleSv.Resources()))
assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources.GetMatch("logo*").RelPermalink())
assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources.GetMatch("logo*").RelPermalink())
assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources.GetMatch("logo*").RelPermalink())
b.AssertFileContent("/my/project/public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png")
b.AssertFileContent("/my/project/public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png")
b.AssertFileContent("/my/project/public/sv/sect/mybundle/index.html", "image/png: /sv/sect/mybundle/logo.png")
b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv")
b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn")
@ -278,9 +296,9 @@ Content.
b.AssertFileContent("/my/project/public/sv/sect/mybundle/logo.png", "PNG Data")
b.AssertFileContent("/my/project/public/nn/sect/mybundle/logo.png", "PNG Data")
nnSect := nnSite.getPage(KindSection, "sect")
nnSect := nnSite.getPage(page.KindSection, "sect")
assert.NotNil(nnSect)
assert.Equal(12, len(nnSect.Pages))
assert.Equal(12, len(nnSect.Pages()))
nnHome, _ := nnSite.Info.Home()
assert.Equal("/nn/", nnHome.RelPermalink())


@ -1,60 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
// An Image contains metadata for images + image sitemaps
// https://support.google.com/webmasters/answer/178636?hl=en
type Image struct {
// The URL of the image. In some cases, the image URL may not be on the
// same domain as your main site. This is fine, as long as both domains
// are verified in Webmaster Tools. If, for example, you use a
// content delivery network (CDN) to host your images, make sure that the
// hosting site is verified in Webmaster Tools OR that you submit your
// sitemap using robots.txt. In addition, make sure that your robots.txt
// file doesn't disallow the crawling of any content you want indexed.
URL string
Title string
Caption string
AltText string
// The geographic location of the image. For example,
// <image:geo_location>Limerick, Ireland</image:geo_location>.
GeoLocation string
// A URL to the license of the image.
License string
}
// A Video contains metadata for videos + video sitemaps
// https://support.google.com/webmasters/answer/80471?hl=en
type Video struct {
ThumbnailLoc string
Title string
Description string
ContentLoc string
PlayerLoc string
Duration string
ExpirationDate string
Rating string
ViewCount string
PublicationDate string
FamilyFriendly string
Restriction string
GalleryLoc string
Price string
RequiresSubscription string
Uploader string
Live string
}


@ -1,4 +1,4 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -83,9 +83,9 @@ Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
s := h.Sites[0]
require.Len(t, s.Menus, 2)
require.Len(t, s.Menus(), 2)
p1 := s.RegularPages[0].Menus()
p1 := s.RegularPages()[0].Menus()
// There is only one menu in the page, but it is "member of" 2
require.Len(t, p1, 1)


@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -17,13 +17,10 @@ import (
"testing"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
)
func TestMinifyPublisher(t *testing.T) {
t.Parallel()
assert := require.New(t)
v := viper.New()
v.Set("minify", true)
@ -43,29 +40,24 @@ func TestMinifyPublisher(t *testing.T) {
<body id="home">
<h1>{{ .Page.Title }}</h1>
<h1>{{ .Title }}</h1>
<p>{{ .Permalink }}</p>
</body>
</html>
`
b := newTestSitesBuilder(t)
b.WithViper(v).WithContent("page.md", pageWithAlias)
b.WithTemplates("_default/list.html", htmlTemplate, "_default/single.html", htmlTemplate, "alias.html", htmlTemplate)
b.WithViper(v).WithTemplatesAdded("layouts/index.html", htmlTemplate)
b.CreateSites().Build(BuildCfg{})
assert.Equal(1, len(b.H.Sites))
require.Len(t, b.H.Sites[0].RegularPages, 1)
// Check minification
// HTML
b.AssertFileContent("public/page/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate all you really need…</title><link rel=stylesheet href=css/style.css></head><body id=home><h1>Has Alias</h1></body></html>")
// HTML alias. Note the custom template which does no redirect.
b.AssertFileContent("public/foo/bar/index.html", "<!doctype html><html lang=en><head><meta charset=utf-8><title>HTML5 boilerplate ")
b.AssertFileContent("public/index.html", "<!doctype html>")
// RSS
b.AssertFileContent("public/index.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><rss version=\"2.0\" xmlns:atom=\"http://www.w3.org/2005/Atom\"><channel><title/><link>https://example.org/</link>")
// Sitemap
b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>https://example.org/</loc><priority>0</priority></url><url>")
b.AssertFileContent("public/sitemap.xml", "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\"?><urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\" xmlns:xhtml=\"http://www.w3.org/1999/xhtml\"><url><loc>h")
}

View file

@ -1,4 +1,4 @@
// Copyright 2016-present The Hugo Authors. All rights reserved.
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -62,10 +62,10 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
languages := make(langs.Languages, len(sites))
for i, s := range sites {
if s.Language == nil {
return nil, errors.New("Missing language for site")
if s.language == nil {
return nil, errors.New("missing language for site")
}
languages[i] = s.Language
languages[i] = s.language
}
defaultLang := cfg.GetString("defaultContentLanguage")
@ -78,19 +78,15 @@ func newMultiLingualFromSites(cfg config.Provider, sites ...*Site) (*Multilingua
}
func newMultiLingualForLanguage(language *langs.Language) *Multilingual {
languages := langs.Languages{language}
return &Multilingual{Languages: languages, DefaultLang: language}
}
func (ml *Multilingual) enabled() bool {
return len(ml.Languages) > 1
}
func (s *Site) multilingualEnabled() bool {
if s.owner == nil {
if s.h == nil {
return false
}
return s.owner.multilingual != nil && s.owner.multilingual.enabled()
return s.h.multilingual != nil && s.h.multilingual.enabled()
}
func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (langs.Languages, error) {

View file

@ -1,99 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"sync"
)
type orderedMap struct {
sync.RWMutex
keys []interface{}
m map[interface{}]interface{}
}
func newOrderedMap() *orderedMap {
return &orderedMap{m: make(map[interface{}]interface{})}
}
func newOrderedMapFromStringMapString(m map[string]string) *orderedMap {
om := newOrderedMap()
for k, v := range m {
om.Add(k, v)
}
return om
}
func (m *orderedMap) Add(k, v interface{}) {
m.Lock()
defer m.Unlock()
_, found := m.m[k]
if found {
panic(fmt.Sprintf("%v already added", v))
}
m.m[k] = v
m.keys = append(m.keys, k)
}
func (m *orderedMap) Get(k interface{}) (interface{}, bool) {
m.RLock()
defer m.RUnlock()
v, found := m.m[k]
return v, found
}
func (m *orderedMap) Contains(k interface{}) bool {
m.RLock()
defer m.RUnlock()
_, found := m.m[k]
return found
}
func (m *orderedMap) Keys() []interface{} {
m.RLock()
defer m.RUnlock()
return m.keys
}
func (m *orderedMap) Len() int {
m.RLock()
defer m.RUnlock()
return len(m.keys)
}
// Some shortcuts for known types.
func (m *orderedMap) getShortcode(k interface{}) *shortcode {
v, found := m.Get(k)
if !found {
return nil
}
return v.(*shortcode)
}
func (m *orderedMap) getShortcodeRenderer(k interface{}) func() (string, error) {
v, found := m.Get(k)
if !found {
return nil
}
return v.(func() (string, error))
}
func (m *orderedMap) getString(k interface{}) string {
v, found := m.Get(k)
if !found {
return ""
}
return v.(string)
}

View file

@ -1,69 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"sync"
"testing"
"github.com/stretchr/testify/require"
)
func TestOrderedMap(t *testing.T) {
t.Parallel()
assert := require.New(t)
m := newOrderedMap()
m.Add("b", "vb")
m.Add("c", "vc")
m.Add("a", "va")
b, f1 := m.Get("b")
assert.True(f1)
assert.Equal(b, "vb")
assert.True(m.Contains("b"))
assert.False(m.Contains("e"))
assert.Equal([]interface{}{"b", "c", "a"}, m.Keys())
}
func TestOrderedMapConcurrent(t *testing.T) {
t.Parallel()
assert := require.New(t)
var wg sync.WaitGroup
m := newOrderedMap()
for i := 1; i < 20; i++ {
wg.Add(1)
go func(id int) {
defer wg.Done()
key := fmt.Sprintf("key%d", id)
val := key + "val"
m.Add(key, val)
v, found := m.Get(key)
assert.True(found)
assert.Equal(v, val)
assert.True(m.Contains(key))
assert.True(m.Len() > 0)
assert.True(len(m.Keys()) > 0)
}(i)
}
wg.Wait()
}

File diff suppressed because it is too large

112
hugolib/page__common.go Normal file
View file

@ -0,0 +1,112 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/bep/gitmap"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/lazy"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
)
type pageCommon struct {
s *Site
m *pageMeta
// Lazily initialized dependencies.
init *lazy.Init
// All of these represent the common parts of a page.Page.
maps.Scratcher
navigation.PageMenusProvider
page.AuthorProvider
page.PageRenderProvider
page.AlternativeOutputFormatsProvider
page.ChildCareProvider
page.FileProvider
page.GetPageProvider
page.GitInfoProvider
page.InSectionPositioner
page.OutputFormatsProvider
page.PageMetaProvider
page.Positioner
page.RawContentProvider
page.RelatedKeywordsProvider
page.RefProvider
page.ShortcodeInfoProvider
page.SitesProvider
page.DeprecatedWarningPageMethods
page.TranslationsProvider
page.TreeProvider
resource.LanguageProvider
resource.ResourceDataProvider
resource.ResourceMetaProvider
resource.ResourceParamsProvider
resource.ResourceTypesProvider
resource.TranslationKeyProvider
compare.Eqer
// Describes how paths and URLs for this page and its descendants
// should look.
targetPathDescriptor page.TargetPathDescriptor
layoutDescriptor output.LayoutDescriptor
layoutDescriptorInit sync.Once
// The parsed page content.
pageContent
// Set if the Git info feature is enabled and this page is in a Git repo.
gitInfo *gitmap.GitInfo
// Positional navigation
posNextPrev *nextPrev
posNextPrevSection *nextPrev
// Menus
pageMenus *pageMenus
// Internal use
page.InternalDependencies
// The children. Regular pages will have none.
pages page.Pages
pagesInit sync.Once
// Any bundled resources
resources resource.Resources
resourcesInit sync.Once
translations page.Pages
allTranslations page.Pages
// Calculated and cached translation mapping key.
translationKey string
translationKeyInit sync.Once
// Will only be set for sections and regular pages.
parent *pageState
// Will only be set for section pages and the home page.
subSections page.Pages
// Set in fast render mode to force render a given page.
forceRender bool
}
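pageCommon assembles most of what page.Page requires by embedding many small provider interfaces. A minimal, self-contained sketch of that composition technique, with made-up provider names that are not part of this commit:

package main

import "fmt"

// TitleProvider and WeightProvider stand in for the many small provider
// interfaces embedded in pageCommon.
type TitleProvider interface{ Title() string }
type WeightProvider interface{ Weight() int }

// Page is the composed interface, analogous to page.Page being assembled
// from provider interfaces.
type Page interface {
    TitleProvider
    WeightProvider
}

type titleImpl struct{ title string }

func (t titleImpl) Title() string { return t.title }

type weightImpl struct{ weight int }

func (w weightImpl) Weight() int { return w.weight }

// pageImpl satisfies Page purely by embedding the provider implementations.
type pageImpl struct {
    titleImpl
    weightImpl
}

func main() {
    var p Page = pageImpl{titleImpl{"Home"}, weightImpl{10}}
    fmt.Println(p.Title(), p.Weight()) // Home 10
}

Swapping out a single provider implementation changes one behaviour without touching the rest of the struct.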

135
hugolib/page__content.go Normal file
View file

@ -0,0 +1,135 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/parser/pageparser"
)
var (
internalSummaryDividerBase = "HUGOMORE42"
internalSummaryDividerBaseBytes = []byte(internalSummaryDividerBase)
internalSummaryDividerPre = []byte("\n\n" + internalSummaryDividerBase + "\n\n")
)
// The content-related items on a Page.
type pageContent struct {
renderable bool
selfLayout string
truncated bool
cmap *pageContentMap
shortcodeState *shortcodeHandler
source rawPageContent
}
// contentToRender returns the content to be processed by Blackfriday or similar.
func (p pageContent) contentToRender(renderedShortcodes map[string]string) []byte {
source := p.source.parsed.Input()
c := make([]byte, 0, len(source)+(len(source)/10))
for _, it := range p.cmap.items {
switch v := it.(type) {
case pageparser.Item:
c = append(c, source[v.Pos:v.Pos+len(v.Val)]...)
case pageContentReplacement:
c = append(c, v.val...)
case *shortcode:
if v.doMarkup || !p.renderable {
// Insert the rendered shortcode.
renderedShortcode, found := renderedShortcodes[v.placeholder]
if !found {
// This should never happen.
panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
}
c = append(c, []byte(renderedShortcode)...)
} else {
// Insert the placeholder so we can insert the content after
// markdown processing.
c = append(c, []byte(v.placeholder)...)
}
default:
panic(fmt.Sprintf("unkown item type %T", it))
}
}
return c
}
func (p pageContent) selfLayoutForOutput(f output.Format) string {
if p.selfLayout == "" {
return ""
}
return p.selfLayout + f.Name
}
type rawPageContent struct {
hasSummaryDivider bool
// The AST of the parsed page. Contains information about:
// shortcodes, front matter, summary indicators.
parsed pageparser.Result
// Returns the position in bytes after any front matter.
posMainContent int
// These are set if we're able to determine this from the source.
posSummaryEnd int
posBodyStart int
}
type pageContentReplacement struct {
val []byte
source pageparser.Item
}
type pageContentMap struct {
// Set if the page has at least one shortcode with doMarkup set; if not, we can skip any pre-rendering of shortcodes.
hasMarkdownShortcode bool
// Indicates whether we must do placeholder replacements.
hasNonMarkdownShortcode bool
// *shortcode, pageContentReplacement or pageparser.Item
items []interface{}
}
func (p *pageContentMap) AddBytes(item pageparser.Item) {
p.items = append(p.items, item)
}
func (p *pageContentMap) AddReplacement(val []byte, source pageparser.Item) {
p.items = append(p.items, pageContentReplacement{val: val, source: source})
}
func (p *pageContentMap) AddShortcode(s *shortcode) {
p.items = append(p.items, s)
if s.doMarkup {
p.hasMarkdownShortcode = true
} else {
p.hasNonMarkdownShortcode = true
}
}
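contentToRender above walks the item slice and interleaves raw source, precomputed replacements, and shortcode output or placeholders. A reduced, standalone sketch of that assembly loop with simplified stand-in types (the real code also forces inlining when the page itself is not renderable):

package main

import "fmt"

// Stand-ins for pageparser.Item, pageContentReplacement and *shortcode.
type sourceItem string
type replacementItem string
type shortcodeItem struct {
    placeholder string
    doMarkup    bool
}

// assemble mirrors the shape of contentToRender: source and replacements are
// copied through, markdown shortcodes are inlined before the content renderer
// runs, and other shortcodes keep their placeholder until after rendering.
func assemble(items []interface{}, rendered map[string]string) []byte {
    var c []byte
    for _, it := range items {
        switch v := it.(type) {
        case sourceItem:
            c = append(c, []byte(v)...)
        case replacementItem:
            c = append(c, []byte(v)...)
        case shortcodeItem:
            if v.doMarkup {
                c = append(c, rendered[v.placeholder]...)
            } else {
                c = append(c, v.placeholder...)
            }
        default:
            panic(fmt.Sprintf("unknown item type %T", it))
        }
    }
    return c
}

func main() {
    items := []interface{}{
        sourceItem("# Title\n\n"),
        shortcodeItem{placeholder: "PLACEHOLDER-1", doMarkup: true},
        sourceItem("\n\nThe rest."),
    }
    fmt.Println(string(assemble(items, map[string]string{"PLACEHOLDER-1": "**from shortcode**"})))
}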

70
hugolib/page__data.go Normal file
View file

@ -0,0 +1,70 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/gohugoio/hugo/resources/page"
)
type pageData struct {
*pageState
dataInit sync.Once
data page.Data
}
func (p *pageData) Data() interface{} {
p.dataInit.Do(func() {
p.data = make(page.Data)
if p.Kind() == page.KindPage {
return
}
switch p.Kind() {
case page.KindTaxonomy:
termInfo := p.getTaxonomyNodeInfo()
pluralInfo := termInfo.parent
singular := pluralInfo.singular
plural := pluralInfo.plural
term := termInfo.term
taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)
p.data[singular] = taxonomy
p.data["Singular"] = singular
p.data["Plural"] = plural
p.data["Term"] = term
case page.KindTaxonomyTerm:
info := p.getTaxonomyNodeInfo()
plural := info.plural
singular := info.singular
p.data["Singular"] = singular
p.data["Plural"] = plural
p.data["Terms"] = p.s.Taxonomies[plural]
// Keep the following just for legacy reasons.
p.data["OrderedIndex"] = p.data["Terms"]
p.data["Index"] = p.data["Terms"]
}
// Assign the function to the map to make sure it is lazily initialized
p.data["pages"] = p.Pages
})
return p.data
}
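Data() is built exactly once per page and keyed on the page Kind. A compact, self-contained sketch of the same sync.Once pattern, with simplified kinds and values that are only illustrative:

package main

import (
    "fmt"
    "sync"
)

type lazyData struct {
    kind     string
    dataInit sync.Once
    data     map[string]interface{}
}

// Data builds the map exactly once, no matter how many templates ask for it.
func (d *lazyData) Data() map[string]interface{} {
    d.dataInit.Do(func() {
        d.data = make(map[string]interface{})
        switch d.kind {
        case "taxonomy":
            d.data["Singular"] = "tag"
            d.data["Plural"] = "tags"
        case "taxonomyTerm":
            d.data["Terms"] = []string{"a", "b"}
        }
    })
    return d.data
}

func main() {
    d := &lazyData{kind: "taxonomy"}
    fmt.Println(d.Data()["Plural"]) // built on first access, cached after
}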

74
hugolib/page__menus.go Normal file
View file

@ -0,0 +1,74 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"sync"
"github.com/gohugoio/hugo/navigation"
)
type pageMenus struct {
p *pageState
q navigation.MenyQueryProvider
pmInit sync.Once
pm navigation.PageMenus
}
func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
p.p.s.init.menus.Do()
p.init()
return p.q.HasMenuCurrent(menuID, me)
}
func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
p.p.s.init.menus.Do()
p.init()
return p.q.IsMenuCurrent(menuID, inme)
}
func (p *pageMenus) Menus() navigation.PageMenus {
// There is a reverse dependency here: initMenus will, on its first run,
// build the site menus and update any relevant pages.
p.p.s.init.menus.Do()
return p.menus()
}
func (p *pageMenus) menus() navigation.PageMenus {
p.init()
return p.pm
}
func (p *pageMenus) init() {
p.pmInit.Do(func() {
p.q = navigation.NewMenuQueryProvider(
p.p.s.Info.sectionPagesMenu,
p,
p.p.s,
p.p,
)
var err error
p.pm, err = navigation.PageMenusFromPage(p.p)
if err != nil {
p.p.s.Log.ERROR.Println(p.p.wrapError(err))
}
})
}
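The exported methods above always run the site-wide menu init before the page-level init. A small, standalone sketch of that two-level lazy initialization, using illustrative names rather than the hugolib ones:

package main

import (
    "fmt"
    "sync"
)

type site struct {
    menusInit sync.Once
    menus     map[string][]string
}

// initMenus builds the site menus once, regardless of how many pages ask.
func (s *site) initMenus() {
    s.menusInit.Do(func() {
        s.menus = map[string][]string{"main": {"Home", "About"}}
    })
}

type pageMenuState struct {
    s      *site
    pmInit sync.Once
    pm     []string
}

// Menus makes sure the site-level init has run before the page-level lookup.
func (p *pageMenuState) Menus() []string {
    p.s.initMenus()
    p.pmInit.Do(func() {
        p.pm = p.s.menus["main"]
    })
    return p.pm
}

func main() {
    s := &site{}
    p := &pageMenuState{s: s}
    fmt.Println(p.Menus()) // [Home About]
}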

652
hugolib/page__meta.go Normal file
View file

@ -0,0 +1,652 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"path"
"regexp"
"strings"
"time"
"github.com/gohugoio/hugo/related"
"github.com/gohugoio/hugo/source"
"github.com/markbates/inflect"
"github.com/mitchellh/mapstructure"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/page/pagemeta"
"github.com/gohugoio/hugo/resources/resource"
"github.com/spf13/cast"
)
var cjkRe = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
type pageMeta struct {
// kind is the discriminator that identifies the different page types
// in the different page collections. This can, as an example, be used
// to filter regular pages, find sections etc.
// Kind will, for the pages available to the templates, be one of:
// page, home, section, taxonomy and taxonomyTerm.
// It is of string type to make it easy to reason about in
// the templates.
kind string
// This is a standalone page not part of any page collection. These
// include sitemap, robotsTXT and similar. It will have no pageOutputs, but
// a fixed pageOutput.
standalone bool
bundleType string
// Params contains configuration defined in the params section of page frontmatter.
params map[string]interface{}
title string
linkTitle string
resourcePath string
weight int
markup string
contentType string
// Whether the content is in a CJK language.
isCJKLanguage bool
layout string
aliases []string
draft bool
description string
keywords []string
urlPaths pagemeta.URLPath
resource.Dates
// This is enabled if it is a leaf bundle (the "index.md" type) and it is marked as headless in front matter.
// Being headless means that
// 1. The page itself is not rendered to disk
// 2. It is not available in .Site.Pages etc.
// 3. But you can get it via .Site.GetPage
headless bool
// A key that maps to translation(s) of this page. This value is fetched
// from the page front matter.
translationKey string
// From front matter.
configuredOutputFormats output.Formats
// This is the raw front matter metadata that is going to be assigned to
// the page's bundled Resources.
resourcesMetadata []map[string]interface{}
f source.File
sections []string
// Sitemap overrides from front matter.
sitemap config.Sitemap
s *Site
renderingConfig *helpers.BlackFriday
}
func (p *pageMeta) Aliases() []string {
return p.aliases
}
func (p *pageMeta) Author() page.Author {
authors := p.Authors()
for _, author := range authors {
return author
}
return page.Author{}
}
func (p *pageMeta) Authors() page.AuthorList {
authorKeys, ok := p.params["authors"]
if !ok {
return page.AuthorList{}
}
authors := authorKeys.([]string)
if len(authors) < 1 || len(p.s.Info.Authors) < 1 {
return page.AuthorList{}
}
al := make(page.AuthorList)
for _, author := range authors {
a, ok := p.s.Info.Authors[author]
if ok {
al[author] = a
}
}
return al
}
func (p *pageMeta) BundleType() string {
return p.bundleType
}
func (p *pageMeta) Description() string {
return p.description
}
func (p *pageMeta) Lang() string {
return p.s.Lang()
}
func (p *pageMeta) Draft() bool {
return p.draft
}
func (p *pageMeta) File() source.File {
return p.f
}
func (p *pageMeta) IsHome() bool {
return p.Kind() == page.KindHome
}
func (p *pageMeta) Keywords() []string {
return p.keywords
}
func (p *pageMeta) Kind() string {
return p.kind
}
func (p *pageMeta) Layout() string {
return p.layout
}
func (p *pageMeta) LinkTitle() string {
if p.linkTitle != "" {
return p.linkTitle
}
return p.Title()
}
func (p *pageMeta) Name() string {
if p.resourcePath != "" {
return p.resourcePath
}
return p.Title()
}
func (p *pageMeta) IsNode() bool {
return !p.IsPage()
}
func (p *pageMeta) IsPage() bool {
return p.Kind() == page.KindPage
}
// Param is a convenience method to do lookups in Page's and Site's Params map,
// in that order.
//
// This method is also implemented on SiteInfo.
// TODO(bep) interface
func (p *pageMeta) Param(key interface{}) (interface{}, error) {
return resource.Param(p, p.s.Info.Params(), key)
}
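Param checks the page-level params first and falls back to the site-level params. A standalone sketch of that lookup order; lookupParam is a hypothetical helper for illustration, not resource.Param itself:

package main

import (
    "fmt"
    "strings"
)

// lookupParam returns the page-level value when present, otherwise the
// site-level value, mirroring the order pageMeta.Param documents.
func lookupParam(pageParams, siteParams map[string]interface{}, key string) (interface{}, bool) {
    k := strings.ToLower(key)
    if v, found := pageParams[k]; found {
        return v, true
    }
    v, found := siteParams[k]
    return v, found
}

func main() {
    pageParams := map[string]interface{}{"author": "Jane"}
    siteParams := map[string]interface{}{"author": "Default", "license": "MIT"}

    v, _ := lookupParam(pageParams, siteParams, "Author")
    fmt.Println(v) // Jane: the page value wins
    v, _ = lookupParam(pageParams, siteParams, "license")
    fmt.Println(v) // MIT: falls back to the site value
}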
func (p *pageMeta) Params() map[string]interface{} {
return p.params
}
func (p *pageMeta) Path() string {
if p.File() != nil {
return p.File().Path()
}
return p.SectionsPath()
}
// RelatedKeywords implements the related.Document interface needed for fast page searches.
func (p *pageMeta) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
v, err := p.Param(cfg.Name)
if err != nil {
return nil, err
}
return cfg.ToKeywords(v)
}
func (p *pageMeta) IsSection() bool {
return p.Kind() == page.KindSection
}
func (p *pageMeta) Section() string {
if p.IsHome() {
return ""
}
if p.IsNode() {
if len(p.sections) == 0 {
// May be a sitemap or similar.
return ""
}
return p.sections[0]
}
if p.File() != nil {
return p.File().Section()
}
panic("invalid page state")
}
func (p *pageMeta) SectionsEntries() []string {
return p.sections
}
func (p *pageMeta) SectionsPath() string {
return path.Join(p.SectionsEntries()...)
}
func (p *pageMeta) Sitemap() config.Sitemap {
return p.sitemap
}
func (p *pageMeta) Title() string {
return p.title
}
func (p *pageMeta) Type() string {
if p.contentType != "" {
return p.contentType
}
if x := p.Section(); x != "" {
return x
}
return "page"
}
func (p *pageMeta) Weight() int {
return p.weight
}
func (pm *pageMeta) setMetadata(p *pageState, frontmatter map[string]interface{}) error {
if frontmatter == nil {
return errors.New("missing frontmatter data")
}
pm.params = make(map[string]interface{})
// Needed for case-insensitive fetching of params values
maps.ToLower(frontmatter)
var mtime time.Time
if p.File().FileInfo() != nil {
mtime = p.File().FileInfo().ModTime()
}
var gitAuthorDate time.Time
if p.gitInfo != nil {
gitAuthorDate = p.gitInfo.AuthorDate
}
descriptor := &pagemeta.FrontMatterDescriptor{
Frontmatter: frontmatter,
Params: pm.params,
Dates: &pm.Dates,
PageURLs: &pm.urlPaths,
BaseFilename: p.File().ContentBaseName(),
ModTime: mtime,
GitAuthorDate: gitAuthorDate,
}
// Handle the date separately
// TODO(bep) we need to "do more" in this area so this can be split up and
// more easily tested without the Page, but the coupling is strong.
err := pm.s.frontmatterHandler.HandleDates(descriptor)
if err != nil {
p.s.Log.ERROR.Printf("Failed to handle dates for page %q: %s", p.pathOrTitle(), err)
}
var sitemapSet bool
var draft, published, isCJKLanguage *bool
for k, v := range frontmatter {
loki := strings.ToLower(k)
if loki == "published" { // Intentionally undocumented
vv, err := cast.ToBoolE(v)
if err == nil {
published = &vv
}
// published may also be a date
continue
}
if pm.s.frontmatterHandler.IsDateKey(loki) {
continue
}
switch loki {
case "title":
pm.title = cast.ToString(v)
pm.params[loki] = pm.title
case "linktitle":
pm.linkTitle = cast.ToString(v)
pm.params[loki] = pm.linkTitle
case "description":
pm.description = cast.ToString(v)
pm.params[loki] = pm.description
case "slug":
// Don't start or end with a -
pm.urlPaths.Slug = strings.Trim(cast.ToString(v), "-")
pm.params[loki] = pm.Slug()
case "url":
if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
return fmt.Errorf("only relative URLs are supported, %v provided", url)
}
pm.urlPaths.URL = cast.ToString(v)
pm.params[loki] = pm.urlPaths.URL
case "type":
pm.contentType = cast.ToString(v)
pm.params[loki] = pm.contentType
case "keywords":
pm.keywords = cast.ToStringSlice(v)
pm.params[loki] = pm.keywords
case "headless":
// For now, only the leaf bundles ("index.md") can be headless (i.e. produce no output).
// We may expand on this in the future, but that gets more complex pretty fast.
if p.File().TranslationBaseName() == "index" {
pm.headless = cast.ToBool(v)
}
pm.params[loki] = pm.headless
case "outputs":
o := cast.ToStringSlice(v)
if len(o) > 0 {
// Output formats are explicitly set in front matter; use those.
outFormats, err := p.s.outputFormatsConfig.GetByNames(o...)
if err != nil {
p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
} else {
pm.configuredOutputFormats = outFormats
pm.params[loki] = outFormats
}
}
case "draft":
draft = new(bool)
*draft = cast.ToBool(v)
case "layout":
pm.layout = cast.ToString(v)
pm.params[loki] = pm.layout
case "markup":
pm.markup = cast.ToString(v)
pm.params[loki] = pm.markup
case "weight":
pm.weight = cast.ToInt(v)
pm.params[loki] = pm.weight
case "aliases":
pm.aliases = cast.ToStringSlice(v)
for _, alias := range pm.aliases {
if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
return fmt.Errorf("only relative aliases are supported, %v provided", alias)
}
}
pm.params[loki] = pm.aliases
case "sitemap":
p.m.sitemap = config.DecodeSitemap(p.s.siteCfg.sitemap, cast.ToStringMap(v))
pm.params[loki] = p.m.sitemap
sitemapSet = true
case "iscjklanguage":
isCJKLanguage = new(bool)
*isCJKLanguage = cast.ToBool(v)
case "translationkey":
pm.translationKey = cast.ToString(v)
pm.params[loki] = pm.translationKey
case "resources":
var resources []map[string]interface{}
handled := true
switch vv := v.(type) {
case []map[interface{}]interface{}:
for _, vvv := range vv {
resources = append(resources, cast.ToStringMap(vvv))
}
case []map[string]interface{}:
resources = append(resources, vv...)
case []interface{}:
for _, vvv := range vv {
switch vvvv := vvv.(type) {
case map[interface{}]interface{}:
resources = append(resources, cast.ToStringMap(vvvv))
case map[string]interface{}:
resources = append(resources, vvvv)
}
}
default:
handled = false
}
if handled {
pm.params[loki] = resources
pm.resourcesMetadata = resources
break
}
fallthrough
default:
// If not one of the explicit values, store in Params
switch vv := v.(type) {
case bool:
pm.params[loki] = vv
case string:
pm.params[loki] = vv
case int64, int32, int16, int8, int:
pm.params[loki] = vv
case float64, float32:
pm.params[loki] = vv
case time.Time:
pm.params[loki] = vv
default: // handle array of strings as well
switch vvv := vv.(type) {
case []interface{}:
if len(vvv) > 0 {
switch vvv[0].(type) {
case map[interface{}]interface{}: // Properly parse structured arrays from YAML-based front matter
pm.params[loki] = vvv
case map[string]interface{}: // Properly parse structured arrays from JSON-based front matter
pm.params[loki] = vvv
case []interface{}:
pm.params[loki] = vvv
default:
a := make([]string, len(vvv))
for i, u := range vvv {
a[i] = cast.ToString(u)
}
pm.params[loki] = a
}
} else {
pm.params[loki] = []string{}
}
default:
pm.params[loki] = vv
}
}
}
}
if !sitemapSet {
pm.sitemap = p.s.siteCfg.sitemap
}
pm.markup = helpers.GuessType(pm.markup)