Mirror of https://github.com/gohugoio/hugo.git
✨ Implement Page bundling and image handling
This commit is not the smallest in Hugo's history. Some highlights include:

* Page bundles (for complete articles, keeping images and content together etc.).
* Bundled images can be processed in as many versions/sizes as you need with the three methods `Resize`, `Fill` and `Fit`.
* Processed images are cached inside `resources/_gen/images` (default) in your project.
* Symbolic links (both files and dirs) are now allowed anywhere inside /content.
* A new table-based build summary.
* The "Total in nn ms" now reports the total including the handling of the files inside /static. So if it now reports more than you're used to, it is just **more real** and probably faster than before (see below).

A site building benchmark run compared to `v0.31.1` shows that this should be slightly faster and use less memory:

```bash
▶ ./benchSite.sh "TOML,num_langs=.*,num_root_sections=5,num_pages=(500|1000),tags_per_page=5,shortcodes,render"

benchmark                                                                                                      old ns/op     new ns/op     delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   101785785     78067944      -23.30%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  185481057     149159919     -19.58%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   103149918     85679409      -16.94%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  203515478     169208775     -16.86%

benchmark                                                                                                      old allocs    new allocs    delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   532464        391539        -26.47%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  1056549       772702        -26.87%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   555974        406630        -26.86%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  1086545       789922        -27.30%

benchmark                                                                                                      old bytes     new bytes     delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   53243246      43598155      -18.12%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  105811617     86087116      -18.64%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4   54558852      44545097      -18.35%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4  106903858     86978413      -18.64%
```

Fixes #3651
Closes #3158
Fixes #1014
Closes #2021
Fixes #1240
Updates #3757
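For orientation, the first bullet above refers to content laid out like this; the tree below is an illustrative sketch (the directory and file names are examples, not taken from this commit):

```
content/
  post/
    my-article/        <- a page bundle
      index.md         <- the bundle's content file
      sunset.jpg       <- a bundled image, available as a page resource
```

Everything inside `my-article/` travels with the page, which is what lets the new `Resize`, `Fill` and `Fit` methods operate on images that belong to a specific piece of content.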
Parent: 02f2735f68
Commit: 3cdf19e9b7

85 changed files with 5791 additions and 3287 deletions
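Before the file-by-file diff, here is a minimal template sketch of the three image methods named in the commit message. It assumes a bundled image and a resource lookup helper such as `GetByPrefix`; the exact lookup API is an assumption for illustration and may differ between Hugo versions:

```go-html-template
{{/* Illustrative sketch: assumes this page is a bundle containing sunset.jpg. */}}
{{ $img := .Resources.GetByPrefix "sunset" }}
{{ with $img }}
  {{ $resized := .Resize "600x" }}  {{/* scale to 600px wide, keep aspect ratio */}}
  {{ $filled := .Fill "600x400" }}  {{/* scale and crop to exactly 600x400 */}}
  {{ $fitted := .Fit "600x400" }}   {{/* scale down to fit within 600x400 */}}
  <img src="{{ $resized.RelPermalink }}" alt="Sunset">
{{ end }}
```

The processed variants are written to the `resources/_gen/images` cache mentioned above, so subsequent builds can reuse them instead of re-encoding.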
Gopkg.lock (generated): 119 changed lines
```diff
@@ -21,7 +21,13 @@
 [[projects]]
   name = "github.com/alecthomas/chroma"
-  packages = [".","formatters","formatters/html","lexers","styles"]
+  packages = [
+    ".",
+    "formatters",
+    "formatters/html",
+    "lexers",
+    "styles"
+  ]
   revision = "v0.2.0"
 
 [[projects]]
@@ -54,21 +60,26 @@
   version = "v1.1.0"
 
 [[projects]]
   branch = "master"
-  name = "github.com/dchest/cssmin"
+  name = "github.com/disintegration/imaging"
   packages = ["."]
-  revision = "fb8d9b44afdc258bfff6052d3667521babcb2239"
+  revision = "v1.2.4"
 
 [[projects]]
   name = "github.com/dlclark/regexp2"
-  packages = [".","syntax"]
+  packages = [
+    ".",
+    "syntax"
+  ]
   revision = "487489b64fb796de2e55f4e8a4ad1e145f80e957"
   version = "v1.1.6"
 
 [[projects]]
   branch = "master"
   name = "github.com/eknkc/amber"
-  packages = [".","parser"]
+  packages = [
+    ".",
+    "parser"
+  ]
   revision = "cdade1c073850f4ffc70a829e31235ea6892853b"
 
 [[projects]]
@@ -104,7 +115,17 @@
 [[projects]]
   branch = "master"
   name = "github.com/hashicorp/hcl"
-  packages = [".","hcl/ast","hcl/parser","hcl/scanner","hcl/strconv","hcl/token","json/parser","json/scanner","json/token"]
+  packages = [
+    ".",
+    "hcl/ast",
+    "hcl/parser",
+    "hcl/scanner",
+    "hcl/strconv",
+    "hcl/token",
+    "json/parser",
+    "json/scanner",
+    "json/token"
+  ]
   revision = "23c074d0eceb2b8a5bfdbb271ab780cde70f05a8"
 
 [[projects]]
@@ -115,7 +136,10 @@
 
 [[projects]]
   name = "github.com/jdkato/prose"
-  packages = ["internal/util","transform"]
+  packages = [
+    "internal/util",
+    "transform"
+  ]
   revision = "20d3663d4bc9dd10d75abcde9d92e04b4861c674"
   version = "v1.1.0"
 
@@ -133,7 +157,10 @@
 
 [[projects]]
   name = "github.com/magefile/mage"
-  packages = ["mg","sh"]
+  packages = [
+    "mg",
+    "sh"
+  ]
   revision = "2f974307b636f59c13b88704cf350a4772fef271"
   version = "v1.0.2"
 
@@ -149,6 +176,12 @@
   packages = ["."]
   revision = "54ffb37507cd7d2ccd4768855e84f9ff6c4352b6"
 
+[[projects]]
+  name = "github.com/mattn/go-runewidth"
+  packages = ["."]
+  revision = "9e777a8366cce605130a531d2cd6363d07ad7317"
+  version = "v0.0.2"
+
 [[projects]]
   name = "github.com/miekg/mmark"
   packages = ["."]
@@ -163,10 +196,20 @@
 
 [[projects]]
   name = "github.com/nicksnyder/go-i18n"
-  packages = ["i18n/bundle","i18n/language","i18n/translation"]
+  packages = [
+    "i18n/bundle",
+    "i18n/language",
+    "i18n/translation"
+  ]
   revision = "0dc1626d56435e9d605a29875701721c54bc9bbd"
   version = "v1.10.0"
 
+[[projects]]
+  branch = "master"
+  name = "github.com/olekukonko/tablewriter"
+  packages = ["."]
+  revision = "65fec0d89a572b4367094e2058d3ebe667de3b60"
+
 [[projects]]
   name = "github.com/pelletier/go-toml"
   packages = ["."]
@@ -193,7 +236,10 @@
 
 [[projects]]
   name = "github.com/spf13/afero"
-  packages = [".","mem"]
+  packages = [
+    ".",
+    "mem"
+  ]
   revision = "8d919cbe7e2627e417f3e45c3c0e489a5b7e2536"
   version = "v1.0.0"
 
@@ -206,7 +252,10 @@
 [[projects]]
   branch = "master"
   name = "github.com/spf13/cobra"
-  packages = [".","doc"]
+  packages = [
+    ".",
+    "doc"
+  ]
   revision = "7b2c5ac9fc04fc5efafb60700713d4fa609b777b"
 
 [[projects]]
@@ -241,7 +290,10 @@
 
 [[projects]]
   name = "github.com/stretchr/testify"
-  packages = ["assert","require"]
+  packages = [
+    "assert",
+    "require"
+  ]
   revision = "69483b4bd14f5845b5a1e55bca19e954e827f1d0"
   version = "v1.1.4"
 
@@ -254,15 +306,32 @@
 [[projects]]
   branch = "master"
   name = "golang.org/x/image"
-  packages = ["riff","vp8","vp8l","webp"]
+  packages = [
+    "bmp",
+    "riff",
+    "tiff",
+    "tiff/lzw",
+    "vp8",
+    "vp8l",
+    "webp"
+  ]
   revision = "f7e31b4ea2e3413ab91b4e7d2dc83e5f8d19a44c"
 
 [[projects]]
   branch = "master"
   name = "golang.org/x/net"
-  packages = ["idna"]
+  packages = [
+    "context",
+    "idna"
+  ]
   revision = "cd69bc3fc700721b709c3a59e16e24c67b58f6ff"
 
+[[projects]]
+  branch = "master"
+  name = "golang.org/x/sync"
+  packages = ["errgroup"]
+  revision = "fd80eb99c8f653c847d294a001bdf2a3a6f768f5"
+
 [[projects]]
   branch = "master"
   name = "golang.org/x/sys"
@@ -272,7 +341,23 @@
 [[projects]]
   branch = "master"
   name = "golang.org/x/text"
-  packages = ["collate","collate/build","internal/colltab","internal/gen","internal/tag","internal/triegen","internal/ucd","language","secure/bidirule","transform","unicode/bidi","unicode/cldr","unicode/norm","unicode/rangetable","width"]
+  packages = [
+    "collate",
+    "collate/build",
+    "internal/colltab",
+    "internal/gen",
+    "internal/tag",
+    "internal/triegen",
+    "internal/ucd",
+    "language",
+    "secure/bidirule",
+    "transform",
+    "unicode/bidi",
+    "unicode/cldr",
+    "unicode/norm",
+    "unicode/rangetable",
+    "width"
+  ]
   revision = "c01e4764d870b77f8abe5096ee19ad20d80e8075"
 
 [[projects]]
@@ -284,6 +369,6 @@
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "d75b02c8a7c7d724120447dd438e7bef140d0f4d4986adda52eabbfe3db8271a"
+  inputs-digest = "2d9c34c260bc26814a0635c93009daeb9d8ffa56c29c0cff6827ae2d3e9ef96d"
   solver-name = "gps-cdcl"
   solver-version = 1
```
```diff
@@ -21,8 +21,8 @@
   revision = "v1.1.0"
 
 [[constraint]]
-  branch = "master"
-  name = "github.com/dchest/cssmin"
+  name = "github.com/disintegration/imaging"
+  revision = "v1.2.4"
 
 [[constraint]]
   name = "github.com/magefile/mage"
@@ -116,6 +116,10 @@
   name = "github.com/stretchr/testify"
   version = "1.1.4"
 
+[[constraint]]
+  branch = "master"
+  name = "github.com/olekukonko/tablewriter"
+
 [[constraint]]
   name = "github.com/yosssi/ace"
   version = "0.0.5"
```
```diff
@@ -48,12 +48,7 @@ func init() {
 }
 
 func benchmark(cmd *cobra.Command, args []string) error {
-    cfg, err := InitializeConfig(benchmarkCmd)
-    if err != nil {
-        return err
-    }
-
-    c, err := newCommandeer(cfg)
+    c, err := InitializeConfig(false, nil, benchmarkCmd)
     if err != nil {
         return err
     }
@@ -84,7 +79,7 @@ func benchmark(cmd *cobra.Command, args []string) error {
 
     t := time.Now()
     for i := 0; i < benchmarkTimes; i++ {
-        if err = c.resetAndBuildSites(false); err != nil {
+        if err = c.resetAndBuildSites(); err != nil {
             return err
         }
     }
```
```diff
@@ -18,6 +18,7 @@ import (
     "github.com/gohugoio/hugo/deps"
     "github.com/gohugoio/hugo/helpers"
     "github.com/gohugoio/hugo/hugofs"
+    src "github.com/gohugoio/hugo/source"
 )
 
 type commandeer struct {
@@ -25,7 +26,10 @@ type commandeer struct {
     pathSpec    *helpers.PathSpec
     visitedURLs *types.EvictingStringQueue
 
+    staticDirsConfig []*src.Dirs
+
     serverPorts []int
+    languages   helpers.Languages
 
     configured bool
 }
@@ -44,10 +48,6 @@ func (c *commandeer) PathSpec() *helpers.PathSpec {
     return c.pathSpec
 }
 
-func (c *commandeer) languages() helpers.Languages {
-    return c.Cfg.Get("languagesSorted").(helpers.Languages)
-}
-
 func (c *commandeer) initFs(fs *hugofs.Fs) error {
     c.DepsCfg.Fs = fs
     ps, err := helpers.NewPathSpec(fs, c.Cfg)
@@ -55,18 +55,26 @@ func (c *commandeer) initFs(fs *hugofs.Fs) error {
         return err
     }
     c.pathSpec = ps
+
+    dirsConfig, err := c.createStaticDirsConfig()
+    if err != nil {
+        return err
+    }
+    c.staticDirsConfig = dirsConfig
+
     return nil
 }
 
-func newCommandeer(cfg *deps.DepsCfg) (*commandeer, error) {
-    l := cfg.Language
-    if l == nil {
-        l = helpers.NewDefaultLanguage(cfg.Cfg)
-    }
-    ps, err := helpers.NewPathSpec(cfg.Fs, l)
-    if err != nil {
-        return nil, err
-    }
+func newCommandeer(cfg *deps.DepsCfg, running bool) (*commandeer, error) {
+    cfg.Running = running
+
+    var languages helpers.Languages
+
+    if l, ok := cfg.Cfg.Get("languagesSorted").(helpers.Languages); ok {
+        languages = l
+    }
 
-    return &commandeer{DepsCfg: cfg, pathSpec: ps, visitedURLs: types.NewEvictingStringQueue(10)}, nil
+    c := &commandeer{DepsCfg: cfg, languages: languages, visitedURLs: types.NewEvictingStringQueue(10)}
+
+    return c, nil
 }
```
```diff
@@ -14,12 +14,15 @@
 package commands
 
 import (
     "errors"
     "fmt"
+    "path/filepath"
     "time"
 
+    src "github.com/gohugoio/hugo/source"
+
     "github.com/gohugoio/hugo/hugolib"
 
-    "path/filepath"
-
     "github.com/gohugoio/hugo/parser"
     "github.com/spf13/cast"
     "github.com/spf13/cobra"
@@ -78,81 +81,103 @@ func init() {
 }
 
 func convertContents(mark rune) error {
-    cfg, err := InitializeConfig()
+    if outputDir == "" && !unsafe {
+        return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
+    }
+
+    c, err := InitializeConfig(false, nil)
     if err != nil {
         return err
     }
 
-    h, err := hugolib.NewHugoSites(*cfg)
+    h, err := hugolib.NewHugoSites(*c.DepsCfg)
     if err != nil {
         return err
     }
 
+    if err := h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
+        return err
+    }
+
     site := h.Sites[0]
 
-    if err = site.Initialise(); err != nil {
-        return err
-    }
-
-    if site.Source == nil {
-        panic("site.Source not set")
-    }
-    if len(site.Source.Files()) < 1 {
-        return errors.New("No source files found")
-    }
-
-    contentDir := site.PathSpec.AbsPathify(site.Cfg.GetString("contentDir"))
-    site.Log.FEEDBACK.Println("processing", len(site.Source.Files()), "content files")
-    for _, file := range site.Source.Files() {
-        site.Log.INFO.Println("Attempting to convert", file.LogicalName())
-        page, err := site.NewPage(file.LogicalName())
-        if err != nil {
+    site.Log.FEEDBACK.Println("processing", len(site.AllPages), "content files")
+    for _, p := range site.AllPages {
+        if err := convertAndSavePage(p, site, mark); err != nil {
             return err
         }
+    }
+    return nil
+}
 
-        psr, err := parser.ReadFrom(file.Contents)
-        if err != nil {
-            site.Log.ERROR.Println("Error processing file:", file.Path())
-            return err
-        }
-        metadata, err := psr.Metadata()
-        if err != nil {
-            site.Log.ERROR.Println("Error processing file:", file.Path())
-            return err
-        }
-
-        // better handling of dates in formats that don't have support for them
-        if mark == parser.FormatToLeadRune("json") || mark == parser.FormatToLeadRune("yaml") || mark == parser.FormatToLeadRune("toml") {
-            newMetadata := cast.ToStringMap(metadata)
-            for k, v := range newMetadata {
-                switch vv := v.(type) {
-                case time.Time:
-                    newMetadata[k] = vv.Format(time.RFC3339)
-                }
-            }
-            metadata = newMetadata
-        }
-
-        page.SetDir(filepath.Join(contentDir, file.Dir()))
-        page.SetSourceContent(psr.Content())
-        if err = page.SetSourceMetaData(metadata, mark); err != nil {
-            site.Log.ERROR.Printf("Failed to set source metadata for file %q: %s. For more info see https://github.com/gohugoio/hugo/issues/2458", page.FullFilePath(), err)
-            continue
-        }
-
-        if outputDir != "" {
-            if err = page.SaveSourceAs(filepath.Join(outputDir, page.FullFilePath())); err != nil {
-                return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err)
-            }
-        } else {
-            if unsafe {
-                if err = page.SaveSource(); err != nil {
-                    return fmt.Errorf("Failed to save file %q: %s", page.FullFilePath(), err)
-                }
-            } else {
-                site.Log.FEEDBACK.Println("Unsafe operation not allowed, use --unsafe or set a different output path")
-            }
-        }
-    }
-    return nil
-}
+func convertAndSavePage(p *hugolib.Page, site *hugolib.Site, mark rune) error {
+    // The resources are not in .Site.AllPages.
+    for _, r := range p.Resources.ByType("page") {
+        if err := convertAndSavePage(r.(*hugolib.Page), site, mark); err != nil {
+            return err
+        }
+    }
+
+    if p.Filename() == "" {
+        // No content file.
+        return nil
+    }
+
+    site.Log.INFO.Println("Attempting to convert", p.LogicalName())
+    newPage, err := site.NewPage(p.LogicalName())
+    if err != nil {
+        return err
+    }
+
+    f, _ := p.File.(src.ReadableFile)
+    file, err := f.Open()
+    if err != nil {
+        site.Log.ERROR.Println("Error reading file:", p.Path())
+        file.Close()
+        return nil
+    }
+
+    psr, err := parser.ReadFrom(file)
+    if err != nil {
+        site.Log.ERROR.Println("Error processing file:", p.Path())
+        file.Close()
+        return err
+    }
+
+    file.Close()
+
+    metadata, err := psr.Metadata()
+    if err != nil {
+        site.Log.ERROR.Println("Error processing file:", p.Path())
+        return err
+    }
+
+    // better handling of dates in formats that don't have support for them
+    if mark == parser.FormatToLeadRune("json") || mark == parser.FormatToLeadRune("yaml") || mark == parser.FormatToLeadRune("toml") {
+        newMetadata := cast.ToStringMap(metadata)
+        for k, v := range newMetadata {
+            switch vv := v.(type) {
+            case time.Time:
+                newMetadata[k] = vv.Format(time.RFC3339)
+            }
+        }
+        metadata = newMetadata
+    }
+
+    newPage.SetSourceContent(psr.Content())
+    if err = newPage.SetSourceMetaData(metadata, mark); err != nil {
+        site.Log.ERROR.Printf("Failed to set source metadata for file %q: %s. For more info see https://github.com/gohugoio/hugo/issues/2458", newPage.FullFilePath(), err)
+        return nil
+    }
+
+    newFilename := p.Filename()
+    if outputDir != "" {
+        newFilename = filepath.Join(outputDir, p.Dir(), newPage.LogicalName())
+    }
+
+    if err = newPage.SaveSourceAs(newFilename); err != nil {
+        return fmt.Errorf("Failed to save file %q: %s", newFilename, err)
+    }
+
+    return nil
+}
```
commands/hugo.go: 478 changed lines
```diff
@@ -18,6 +18,10 @@ package commands
 import (
     "fmt"
     "io/ioutil"
+    "sort"
+    "sync/atomic"
+
+    "golang.org/x/sync/errgroup"
 
     "github.com/gohugoio/hugo/hugofs"
 
@@ -58,6 +62,13 @@
 // provide a cleaner external API, but until then, this is it.
 var Hugo *hugolib.HugoSites
 
+const (
+    ansiEsc    = "\u001B"
+    clearLine  = "\r\033[K"
+    hideCursor = ansiEsc + "[?25l"
+    showCursor = ansiEsc + "[?25h"
+)
+
 // Reset resets Hugo ready for a new full build. This is mainly only useful
 // for benchmark testing etc. via the CLI commands.
 func Reset() error {
@@ -116,18 +127,20 @@ built with love by spf13 and friends in Go.
 
 Complete documentation is available at http://gohugo.io/.`,
     RunE: func(cmd *cobra.Command, args []string) error {
-        cfg, err := InitializeConfig()
-        if err != nil {
-            return err
+        cfgInit := func(c *commandeer) error {
+            if buildWatch {
+                c.Set("disableLiveReload", true)
+            }
+            return nil
         }
 
-        c, err := newCommandeer(cfg)
+        c, err := InitializeConfig(buildWatch, cfgInit)
         if err != nil {
             return err
         }
 
         if buildWatch {
-            cfg.Cfg.Set("disableLiveReload", true)
             c.watchConfig()
         }
 
@@ -149,6 +162,7 @@ var (
 )
 
 var (
+    gc         bool
     baseURL    string
     cacheDir   string
     contentDir string
@@ -201,6 +215,7 @@ func AddCommands() {
     genCmd.AddCommand(genmanCmd)
     genCmd.AddCommand(createGenDocsHelper().cmd)
+    genCmd.AddCommand(createGenChromaStyles().cmd)
 
 }
 
 // initHugoBuilderFlags initializes all common flags, typically used by the
@@ -240,6 +255,7 @@ func initHugoBuildCommonFlags(cmd *cobra.Command) {
     cmd.Flags().Bool("canonifyURLs", false, "if true, all relative URLs will be canonicalized using baseURL")
     cmd.Flags().StringVarP(&baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. http://spf13.com/")
     cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
+    cmd.Flags().BoolVar(&gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
 
     cmd.Flags().BoolVar(&nitro.AnalysisOn, "stepAnalysis", false, "display memory and timing of different steps of the program")
     cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
@@ -285,7 +301,7 @@ func init() {
 }
 
 // InitializeConfig initializes a config file with sensible default configuration flags.
-func InitializeConfig(subCmdVs ...*cobra.Command) (*deps.DepsCfg, error) {
+func InitializeConfig(running bool, doWithCommandeer func(c *commandeer) error, subCmdVs ...*cobra.Command) (*commandeer, error) {
 
     var cfg *deps.DepsCfg = &deps.DepsCfg{}
 
@@ -294,13 +310,13 @@
 
     config, err := hugolib.LoadConfig(osFs, source, cfgFile)
     if err != nil {
-        return cfg, err
+        return nil, err
     }
 
     // Init file systems. This may be changed at a later point.
     cfg.Cfg = config
 
-    c, err := newCommandeer(cfg)
+    c, err := newCommandeer(cfg, running)
     if err != nil {
         return nil, err
     }
@@ -309,23 +325,29 @@
         c.initializeFlags(cmdV)
     }
 
+    if baseURL != "" {
+        config.Set("baseURL", baseURL)
+    }
+
+    if doWithCommandeer != nil {
+        if err := doWithCommandeer(c); err != nil {
+            return nil, err
+        }
+    }
+
     if len(disableKinds) > 0 {
         c.Set("disableKinds", disableKinds)
     }
 
     logger, err := createLogger(cfg.Cfg)
     if err != nil {
-        return cfg, err
+        return nil, err
     }
 
     cfg.Logger = logger
 
     config.Set("logI18nWarnings", logI18nWarnings)
 
-    if baseURL != "" {
-        config.Set("baseURL", baseURL)
-    }
-
     if !config.GetBool("relativeURLs") && config.GetString("baseURL") == "" {
         cfg.Logger.ERROR.Println("No 'baseURL' set in configuration or as a flag. Features like page menus will not work without one.")
     }
@@ -350,17 +372,6 @@
     }
     config.Set("workingDir", dir)
 
-    fs := hugofs.NewFrom(osFs, config)
-
-    // Hugo writes the output to memory instead of the disk.
-    // This is only used for benchmark testing. Cause the content is only visible
-    // in memory.
-    if renderToMemory {
-        fs.Destination = new(afero.MemMapFs)
-        // Rendering to memoryFS, publish to Root regardless of publishDir.
-        c.Set("publishDir", "/")
-    }
-
     if contentDir != "" {
         config.Set("contentDir", contentDir)
     }
@@ -373,6 +384,17 @@
         config.Set("cacheDir", cacheDir)
     }
 
+    fs := hugofs.NewFrom(osFs, config)
+
+    // Hugo writes the output to memory instead of the disk.
+    // This is only used for benchmark testing. Cause the content is only visible
+    // in memory.
+    if config.GetBool("renderToMemory") {
+        fs.Destination = new(afero.MemMapFs)
+        // Rendering to memoryFS, publish to Root regardless of publishDir.
+        config.Set("publishDir", "/")
+    }
+
     cacheDir = config.GetString("cacheDir")
     if cacheDir != "" {
         if helpers.FilePathSeparator != cacheDir[len(cacheDir)-1:] {
@@ -397,7 +419,7 @@
     themeDir := c.PathSpec().GetThemeDir()
     if themeDir != "" {
         if _, err := cfg.Fs.Source.Stat(themeDir); os.IsNotExist(err) {
-            return cfg, newSystemError("Unable to find theme Directory:", themeDir)
+            return nil, newSystemError("Unable to find theme Directory:", themeDir)
         }
     }
 
@@ -408,7 +430,7 @@
             helpers.CurrentHugoVersion.ReleaseVersion(), minVersion)
     }
 
-    return cfg, nil
+    return c, nil
 
 }
 
@@ -482,17 +504,17 @@ func (c *commandeer) initializeFlags(cmd *cobra.Command) {
         "templateMetricsHints",
     }
 
-    // Remove these in Hugo 0.23.
+    // Remove these in Hugo 0.33.
     if cmd.Flags().Changed("disable404") {
-        helpers.Deprecated("command line", "--disable404", "Use --disableKinds=404", false)
+        helpers.Deprecated("command line", "--disable404", "Use --disableKinds=404", true)
     }
 
     if cmd.Flags().Changed("disableRSS") {
-        helpers.Deprecated("command line", "--disableRSS", "Use --disableKinds=RSS", false)
+        helpers.Deprecated("command line", "--disableRSS", "Use --disableKinds=RSS", true)
    }
 
     if cmd.Flags().Changed("disableSitemap") {
-        helpers.Deprecated("command line", "--disableSitemap", "Use --disableKinds=sitemap", false)
+        helpers.Deprecated("command line", "--disableSitemap", "Use --disableKinds=sitemap", true)
     }
 
     for _, key := range persFlagKeys {
@@ -525,16 +547,71 @@ func (c *commandeer) watchConfig() {
     })
 }
 
+func (c *commandeer) fullBuild(watches ...bool) error {
+    var (
+        g         errgroup.Group
+        langCount map[string]uint64
+    )
+
+    if !quiet {
+        fmt.Print(hideCursor + "Building sites … ")
+        defer func() {
+            fmt.Print(showCursor + clearLine)
+        }()
+    }
+
+    g.Go(func() error {
+        cnt, err := c.copyStatic()
+        if err != nil {
+            return fmt.Errorf("Error copying static files: %s", err)
+        }
+        langCount = cnt
+        return nil
+    })
+
+    g.Go(func() error {
+        if err := c.buildSites(); err != nil {
+            return fmt.Errorf("Error building site: %s", err)
+        }
+
+        return nil
+    })
+
+    if err := g.Wait(); err != nil {
+        return err
+    }
+
+    for _, s := range Hugo.Sites {
+        s.ProcessingStats.Static = langCount[s.Language.Lang]
+    }
+
+    if gc {
+        count, err := Hugo.GC()
+        if err != nil {
+            return err
+        }
+        for _, s := range Hugo.Sites {
+            // We have no way of knowing what site the garbage belonged to.
+            s.ProcessingStats.Cleaned = uint64(count)
+        }
+    }
+
+    return nil
+
+}
+
 func (c *commandeer) build(watches ...bool) error {
-    if err := c.copyStatic(); err != nil {
-        return fmt.Errorf("Error copying static files: %s", err)
+    defer c.timeTrack(time.Now(), "Total")
+
+    if err := c.fullBuild(watches...); err != nil {
+        return err
     }
-    watch := false
-    if len(watches) > 0 && watches[0] {
-        watch = true
-    }
-    if err := c.buildSites(buildWatch || watch); err != nil {
-        return fmt.Errorf("Error building site: %s", err)
+
+    // TODO(bep) Feedback?
+    if !quiet {
+        fmt.Println()
+        Hugo.PrintProcessingStats(os.Stdout)
+        fmt.Println()
     }
 
     if buildWatch {
@@ -550,62 +627,101 @@ func (c *commandeer) build(watches ...bool) error {
     return nil
 }
 
-func (c *commandeer) copyStatic() error {
+func (c *commandeer) copyStatic() (map[string]uint64, error) {
     return c.doWithPublishDirs(c.copyStaticTo)
 }
 
-func (c *commandeer) doWithPublishDirs(f func(dirs *src.Dirs, publishDir string) error) error {
-    publishDir := c.PathSpec().AbsPathify(c.Cfg.GetString("publishDir")) + helpers.FilePathSeparator
-    // If root, remove the second '/'
-    if publishDir == "//" {
-        publishDir = helpers.FilePathSeparator
-    }
+func (c *commandeer) createStaticDirsConfig() ([]*src.Dirs, error) {
+    var dirsConfig []*src.Dirs
 
-    languages := c.languages()
-
-    if !languages.IsMultihost() {
+    if !c.languages.IsMultihost() {
         dirs, err := src.NewDirs(c.Fs, c.Cfg, c.DepsCfg.Logger)
         if err != nil {
-            return err
+            return nil, err
         }
-        return f(dirs, publishDir)
-    }
-
-    for _, l := range languages {
-        dir := filepath.Join(publishDir, l.Lang)
-        dirs, err := src.NewDirs(c.Fs, l, c.DepsCfg.Logger)
-        if err != nil {
-            return err
-        }
-        if err := f(dirs, dir); err != nil {
-            return err
+        dirsConfig = append(dirsConfig, dirs)
+    } else {
+        for _, l := range c.languages {
+            dirs, err := src.NewDirs(c.Fs, l, c.DepsCfg.Logger)
+            if err != nil {
+                return nil, err
+            }
+            dirsConfig = append(dirsConfig, dirs)
         }
     }
 
-    return nil
+    return dirsConfig, nil
+
 }
 
-func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) error {
+func (c *commandeer) doWithPublishDirs(f func(dirs *src.Dirs, publishDir string) (uint64, error)) (map[string]uint64, error) {
+
+    langCount := make(map[string]uint64)
+
+    for _, dirs := range c.staticDirsConfig {
+
+        cnt, err := f(dirs, c.pathSpec.PublishDir)
+        if err != nil {
+            return langCount, err
+        }
+
+        if dirs.Language == nil {
+            // Not multihost
+            for _, l := range c.languages {
+                langCount[l.Lang] = cnt
+            }
+        } else {
+            langCount[dirs.Language.Lang] = cnt
+        }
+
+    }
+
+    return langCount, nil
+}
+
+type countingStatFs struct {
+    afero.Fs
+    statCounter uint64
+}
+
+func (fs *countingStatFs) Stat(name string) (os.FileInfo, error) {
+    f, err := fs.Fs.Stat(name)
+    if err == nil {
+        if !f.IsDir() {
+            atomic.AddUint64(&fs.statCounter, 1)
+        }
+    }
+    return f, err
+}
+
+func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) (uint64, error) {
+
+    // If root, remove the second '/'
+    if publishDir == "//" {
+        publishDir = helpers.FilePathSeparator
+    }
+
+    if dirs.Language != nil {
+        // Multihost setup.
+        publishDir = filepath.Join(publishDir, dirs.Language.Lang)
+    }
+
     staticSourceFs, err := dirs.CreateStaticFs()
     if err != nil {
-        return err
+        return 0, err
     }
 
     if staticSourceFs == nil {
        c.Logger.WARN.Println("No static directories found to sync")
-        return nil
+        return 0, nil
     }
 
+    fs := &countingStatFs{Fs: staticSourceFs}
+
     syncer := fsync.NewSyncer()
     syncer.NoTimes = c.Cfg.GetBool("noTimes")
     syncer.NoChmod = c.Cfg.GetBool("noChmod")
-    syncer.SrcFs = staticSourceFs
+    syncer.SrcFs = fs
     syncer.DestFs = c.Fs.Destination
     // Now that we are using a unionFs for the static directories
     // We can effectively clean the publishDir on initial sync
@@ -622,12 +738,30 @@ func (c *commandeer) copyStaticTo(dirs *src.Dirs, publishDir string) error {
 
     // because we are using a baseFs (to get the union right).
     // set sync src to root
-    return syncer.Sync(publishDir, helpers.FilePathSeparator)
+    err = syncer.Sync(publishDir, helpers.FilePathSeparator)
+    if err != nil {
+        return 0, err
+    }
+
+    // Sync runs Stat 3 times for every source file (which sounds much)
+    numFiles := fs.statCounter / 3
+
+    return numFiles, err
+}
+
+func (c *commandeer) timeTrack(start time.Time, name string) {
+    elapsed := time.Since(start)
+    c.Logger.FEEDBACK.Printf("%s in %v ms", name, int(1000*elapsed.Seconds()))
 }
 
 // getDirList provides NewWatcher() with a list of directories to watch for changes.
 func (c *commandeer) getDirList() ([]string, error) {
     var a []string
+
+    // To handle nested symlinked content dirs
+    var seen = make(map[string]bool)
+    var nested []string
+
     dataDir := c.PathSpec().AbsPathify(c.Cfg.GetString("dataDir"))
     i18nDir := c.PathSpec().AbsPathify(c.Cfg.GetString("i18nDir"))
     staticSyncer, err := newStaticSyncer(c)
@@ -638,86 +772,121 @@ func (c *commandeer) getDirList() ([]string, error) {
     layoutDir := c.PathSpec().GetLayoutDirPath()
     staticDirs := staticSyncer.d.AbsStaticDirs
 
-    walker := func(path string, fi os.FileInfo, err error) error {
-        if err != nil {
-            if path == dataDir && os.IsNotExist(err) {
-                c.Logger.WARN.Println("Skip dataDir:", err)
-                return nil
-            }
-
-            if path == i18nDir && os.IsNotExist(err) {
-                c.Logger.WARN.Println("Skip i18nDir:", err)
-                return nil
-            }
-
-            if path == layoutDir && os.IsNotExist(err) {
-                c.Logger.WARN.Println("Skip layoutDir:", err)
-                return nil
-            }
-
-            if os.IsNotExist(err) {
-                for _, staticDir := range staticDirs {
-                    if path == staticDir && os.IsNotExist(err) {
-                        c.Logger.WARN.Println("Skip staticDir:", err)
-                    }
-                }
-                // Ignore.
-                return nil
-            }
-
-            c.Logger.ERROR.Println("Walker: ", err)
-            return nil
-        }
-
-        // Skip .git directories.
-        // Related to https://github.com/gohugoio/hugo/issues/3468.
-        if fi.Name() == ".git" {
-            return nil
-        }
-
-        if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
-            link, err := filepath.EvalSymlinks(path)
-            if err != nil {
-                c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
-                return nil
-            }
-            linkfi, err := c.Fs.Source.Stat(link)
-            if err != nil {
-                c.Logger.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
-                return nil
-            }
-            if !linkfi.Mode().IsRegular() {
-                c.Logger.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", path)
-            }
-            return nil
-        }
-
-        if fi.IsDir() {
-            if fi.Name() == ".git" ||
-                fi.Name() == "node_modules" || fi.Name() == "bower_components" {
-                return filepath.SkipDir
-            }
-            a = append(a, path)
-        }
-        return nil
-    }
+    newWalker := func(allowSymbolicDirs bool) func(path string, fi os.FileInfo, err error) error {
+        return func(path string, fi os.FileInfo, err error) error {
+            if err != nil {
+                if path == dataDir && os.IsNotExist(err) {
+                    c.Logger.WARN.Println("Skip dataDir:", err)
+                    return nil
+                }
+
+                if path == i18nDir && os.IsNotExist(err) {
+                    c.Logger.WARN.Println("Skip i18nDir:", err)
+                    return nil
+                }
+
+                if path == layoutDir && os.IsNotExist(err) {
+                    c.Logger.WARN.Println("Skip layoutDir:", err)
+                    return nil
+                }
+
+                if os.IsNotExist(err) {
+                    for _, staticDir := range staticDirs {
+                        if path == staticDir && os.IsNotExist(err) {
+                            c.Logger.WARN.Println("Skip staticDir:", err)
+                        }
+                    }
+                    // Ignore.
+                    return nil
+                }
+
+                c.Logger.ERROR.Println("Walker: ", err)
+                return nil
+            }
+
+            // Skip .git directories.
+            // Related to https://github.com/gohugoio/hugo/issues/3468.
+            if fi.Name() == ".git" {
+                return nil
+            }
+
+            if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
+                link, err := filepath.EvalSymlinks(path)
+                if err != nil {
+                    c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
+                    return nil
+                }
+                linkfi, err := helpers.LstatIfOs(c.Fs.Source, link)
+                if err != nil {
+                    c.Logger.ERROR.Printf("Cannot stat %q: %s", link, err)
+                    return nil
+                }
+                if !allowSymbolicDirs && !linkfi.Mode().IsRegular() {
+                    c.Logger.ERROR.Printf("Symbolic links for directories not supported, skipping %q", path)
+                    return nil
+                }
+
+                if allowSymbolicDirs && linkfi.IsDir() {
+                    // afero.Walk will not walk symbolic links, so we need to do it.
+                    if !seen[path] {
+                        seen[path] = true
+                        nested = append(nested, path)
+                    }
+                    return nil
+                }
+
+                fi = linkfi
+            }
+
+            if fi.IsDir() {
+                if fi.Name() == ".git" ||
+                    fi.Name() == "node_modules" || fi.Name() == "bower_components" {
+                    return filepath.SkipDir
+                }
+                a = append(a, path)
+            }
+            return nil
+        }
+    }
+
+    symLinkWalker := newWalker(true)
+    regularWalker := newWalker(false)
 
     // SymbolicWalk will log any ERRORs
-    _ = helpers.SymbolicWalk(c.Fs.Source, dataDir, walker)
-    _ = helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), walker)
-    _ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, walker)
-    _ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, walker)
+    _ = helpers.SymbolicWalk(c.Fs.Source, dataDir, regularWalker)
+    _ = helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), symLinkWalker)
+    _ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, regularWalker)
+    _ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, regularWalker)
     for _, staticDir := range staticDirs {
-        _ = helpers.SymbolicWalk(c.Fs.Source, staticDir, walker)
+        _ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
     }
 
     if c.PathSpec().ThemeSet() {
         themesDir := c.PathSpec().GetThemeDir()
-        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), walker)
-        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), walker)
-        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "data"), walker)
+        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "layouts"), regularWalker)
+        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "i18n"), regularWalker)
+        _ = helpers.SymbolicWalk(c.Fs.Source, filepath.Join(themesDir, "data"), regularWalker)
+    }
+
+    if len(nested) > 0 {
+        for {
+
+            toWalk := nested
+            nested = nested[:0]
+
+            for _, d := range toWalk {
+                _ = helpers.SymbolicWalk(c.Fs.Source, d, symLinkWalker)
+            }
+
+            if len(nested) == 0 {
+                break
+            }
+        }
     }
 
+    a = helpers.UniqueStrings(a)
+    sort.Strings(a)
+
     return a, nil
 }
@@ -728,17 +897,17 @@ func (c *commandeer) recreateAndBuildSites(watching bool) (err error) {
     if !quiet {
         c.Logger.FEEDBACK.Println("Started building sites ...")
     }
-    return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true, Watching: watching, PrintStats: !quiet})
+    return Hugo.Build(hugolib.BuildCfg{CreateSitesFromConfig: true})
 }
 
-func (c *commandeer) resetAndBuildSites(watching bool) (err error) {
+func (c *commandeer) resetAndBuildSites() (err error) {
     if err = c.initSites(); err != nil {
         return
     }
     if !quiet {
         c.Logger.FEEDBACK.Println("Started building sites ...")
     }
-    return Hugo.Build(hugolib.BuildCfg{ResetState: true, Watching: watching, PrintStats: !quiet})
+    return Hugo.Build(hugolib.BuildCfg{ResetState: true})
 }
 
 func (c *commandeer) initSites() error {
@@ -755,17 +924,16 @@ func (c *commandeer) initSites() error {
     return nil
 }
 
-func (c *commandeer) buildSites(watching bool) (err error) {
+func (c *commandeer) buildSites() (err error) {
     if err := c.initSites(); err != nil {
         return err
     }
-    if !quiet {
-        c.Logger.FEEDBACK.Println("Started building sites ...")
-    }
-    return Hugo.Build(hugolib.BuildCfg{Watching: watching, PrintStats: !quiet})
+    return Hugo.Build(hugolib.BuildCfg{})
 }
 
 func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
+    defer c.timeTrack(time.Now(), "Total")
+
     if err := c.initSites(); err != nil {
         return err
     }
@@ -776,7 +944,7 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
         // Make sure we always render the home page
         visited[home] = true
     }
-    return Hugo.Build(hugolib.BuildCfg{PrintStats: !quiet, Watching: true, RecentlyVisited: visited}, events...)
+    return Hugo.Build(hugolib.BuildCfg{RecentlyVisited: visited}, events...)
 }
 
 // newWatcher creates a new watcher to watch filesystem events.
@@ -818,6 +986,37 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error {
             staticEvents := []fsnotify.Event{}
             dynamicEvents := []fsnotify.Event{}
 
+            // Special handling for symbolic links inside /content.
+            filtered := []fsnotify.Event{}
+            for _, ev := range evs {
+                // Check the most specific first, i.e. files.
+                contentMapped := Hugo.ContentChanges.GetSymbolicLinkMappings(ev.Name)
+                if len(contentMapped) > 0 {
+                    for _, mapped := range contentMapped {
+                        filtered = append(filtered, fsnotify.Event{Name: mapped, Op: ev.Op})
+                    }
+                    continue
+                }
+
+                // Check for any symbolic directory mapping.
+
+                dir, name := filepath.Split(ev.Name)
+
+                contentMapped = Hugo.ContentChanges.GetSymbolicLinkMappings(dir)
+
+                if len(contentMapped) == 0 {
+                    filtered = append(filtered, ev)
+                    continue
+                }
+
+                for _, mapped := range contentMapped {
+                    mappedFilename := filepath.Join(mapped, name)
+                    filtered = append(filtered, fsnotify.Event{Name: mappedFilename, Op: ev.Op})
+                }
+            }
+
+            evs = filtered
+
             for _, ev := range evs {
                 ext := filepath.Ext(ev.Name)
                 baseName := filepath.Base(ev.Name)
@@ -894,7 +1093,7 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error {
 
             if c.Cfg.GetBool("forceSyncStatic") {
                 c.Logger.FEEDBACK.Printf("Syncing all static files\n")
-                err := c.copyStatic()
+                _, err := c.copyStatic()
                 if err != nil {
                     utils.StopOnErr(c.Logger, err, "Error copying static files to publish dir")
                 }
@@ -932,8 +1131,9 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error {
             }
 
             }
+
             c.Logger.FEEDBACK.Println("\nChange detected, rebuilding site")
-            const layout = "2006-01-02 15:04 -0700"
+            const layout = "2006-01-02 15:04:05.000 -0700"
             c.Logger.FEEDBACK.Println(time.Now().Format(layout))
 
             if err := c.rebuildSites(dynamicEvents); err != nil {
@@ -950,6 +1150,7 @@ func (c *commandeer) newWatcher(serve bool, dirList ...string) error {
                 if onePageName != "" {
                     p = Hugo.GetContentPage(onePageName)
                 }
+
             }
 
             if p != nil {
@@ -978,6 +1179,9 @@ func pickOneWriteOrCreatePath(events []fsnotify.Event) string {
     name := ""
 
+    // Some editors (for example notepad.exe on Windows) trigger a change
+    // both for directory and file. So we pick the longest path, which should
+    // be the file itself.
     for _, ev := range events {
         if (ev.Op&fsnotify.Write == fsnotify.Write || ev.Op&fsnotify.Create == fsnotify.Create) && len(ev.Name) > len(name) {
             name = ev.Name
```
```diff
@@ -468,7 +468,6 @@ func convertJekyllPost(s *hugolib.Site, path, relPath, targetDir string, draft b
         return err
     }
 
-    page.SetDir(targetParentDir)
     page.SetSourceContent([]byte(content))
     page.SetSourceMetaData(newmetadata, parser.FormatToLeadRune("yaml"))
     page.SaveSourceAs(targetFile)
```
```diff
@@ -43,20 +43,16 @@ var listDraftsCmd = &cobra.Command{
     Short: "List all drafts",
     Long:  `List all of the drafts in your content directory.`,
     RunE: func(cmd *cobra.Command, args []string) error {
-
-        cfg, err := InitializeConfig()
+        cfgInit := func(c *commandeer) error {
+            c.Set("buildDrafts", true)
+            return nil
+        }
+        c, err := InitializeConfig(false, cfgInit)
         if err != nil {
             return err
         }
 
-        c, err := newCommandeer(cfg)
-        if err != nil {
-            return err
-        }
-
-        c.Set("buildDrafts", true)
-
-        sites, err := hugolib.NewHugoSites(*cfg)
+        sites, err := hugolib.NewHugoSites(*c.DepsCfg)
 
         if err != nil {
             return newSystemError("Error creating sites", err)
@@ -84,20 +80,16 @@ var listFutureCmd = &cobra.Command{
     Long: `List all of the posts in your content directory which will be
 posted in the future.`,
     RunE: func(cmd *cobra.Command, args []string) error {
-
-        cfg, err := InitializeConfig()
+        cfgInit := func(c *commandeer) error {
+            c.Set("buildFuture", true)
+            return nil
+        }
+        c, err := InitializeConfig(false, cfgInit)
         if err != nil {
             return err
         }
 
-        c, err := newCommandeer(cfg)
-        if err != nil {
-            return err
-        }
-
-        c.Set("buildFuture", true)
-
-        sites, err := hugolib.NewHugoSites(*cfg)
+        sites, err := hugolib.NewHugoSites(*c.DepsCfg)
 
         if err != nil {
             return newSystemError("Error creating sites", err)
@@ -125,20 +117,16 @@ var listExpiredCmd = &cobra.Command{
     Long: `List all of the posts in your content directory which has already
 expired.`,
     RunE: func(cmd *cobra.Command, args []string) error {
-
-        cfg, err := InitializeConfig()
+        cfgInit := func(c *commandeer) error {
+            c.Set("buildExpired", true)
+            return nil
+        }
+        c, err := InitializeConfig(false, cfgInit)
         if err != nil {
             return err
         }
 
-        c, err := newCommandeer(cfg)
-        if err != nil {
-            return err
-        }
-
-        c.Set("buildExpired", true)
-
-        sites, err := hugolib.NewHugoSites(*cfg)
+        sites, err := hugolib.NewHugoSites(*c.DepsCfg)
 
         if err != nil {
             return newSystemError("Error creating sites", err)
```
```diff
@@ -33,7 +33,7 @@ func init() {
 }
 
 func printConfig(cmd *cobra.Command, args []string) error {
-    cfg, err := InitializeConfig(configCmd)
+    cfg, err := InitializeConfig(false, nil, configCmd)
 
     if err != nil {
         return err
```
```diff
@@ -86,21 +86,19 @@ as you see fit.`,
 
 // NewContent adds new content to a Hugo site.
 func NewContent(cmd *cobra.Command, args []string) error {
-    cfg, err := InitializeConfig()
+    cfgInit := func(c *commandeer) error {
+        if cmd.Flags().Changed("editor") {
+            c.Set("newContentEditor", contentEditor)
+        }
+        return nil
+    }
+
+    c, err := InitializeConfig(false, cfgInit)
 
     if err != nil {
         return err
     }
 
-    c, err := newCommandeer(cfg)
-    if err != nil {
-        return err
-    }
-
-    if cmd.Flags().Changed("editor") {
-        c.Set("newContentEditor", contentEditor)
-    }
-
     if len(args) < 1 {
         return newUserError("path needs to be provided")
     }
@@ -115,6 +113,8 @@ func NewContent(cmd *cobra.Command, args []string) error {
         kind = contentType
     }
 
+    cfg := c.DepsCfg
+
     ps, err := helpers.NewPathSpec(cfg.Fs, cfg.Cfg)
     if err != nil {
         return err
@@ -130,7 +130,7 @@ func NewContent(cmd *cobra.Command, args []string) error {
         return nil, err
     }
 
-    if err := Hugo.Build(hugolib.BuildCfg{SkipRender: true, PrintStats: false}); err != nil {
+    if err := Hugo.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
         return nil, err
     }
 
@@ -240,7 +240,7 @@ func NewSite(cmd *cobra.Command, args []string) error {
 
 // NewTheme creates a new Hugo theme.
 func NewTheme(cmd *cobra.Command, args []string) error {
-    cfg, err := InitializeConfig()
+    c, err := InitializeConfig(false, nil)
 
     if err != nil {
         return err
@@ -250,14 +250,11 @@ func NewTheme(cmd *cobra.Command, args []string) error {
         return newUserError("theme name needs to be provided")
     }
 
-    c, err := newCommandeer(cfg)
-    if err != nil {
-        return err
-    }
-
     createpath := c.PathSpec().AbsPathify(filepath.Join(c.Cfg.GetString("themesDir"), args[0]))
     jww.INFO.Println("creating theme at", createpath)
 
+    cfg := c.DepsCfg
+
     if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x {
         return newUserError(createpath, "already exists")
     }
@@ -375,7 +372,11 @@ func newContentPathSection(path string) (string, string) {
     var section string
     // assume the first directory is the section (kind)
     if strings.Contains(createpath[1:], helpers.FilePathSeparator) {
-        section = helpers.GuessSection(createpath)
+        parts := strings.Split(strings.TrimPrefix(createpath, helpers.FilePathSeparator), helpers.FilePathSeparator)
+        if len(parts) > 0 {
+            section = parts[0]
+        }
+
     }
 
     return createpath, section
```
```diff
@@ -110,109 +110,94 @@ func init() {
 }
 
 func server(cmd *cobra.Command, args []string) error {
-    cfg, err := InitializeConfig(serverCmd)
-    if err != nil {
-        return err
+    // If a Destination is provided via flag write to disk
+    if destination != "" {
+        renderToDisk = true
     }
 
-    c, err := newCommandeer(cfg)
-    if err != nil {
-        return err
-    }
-
-    if cmd.Flags().Changed("disableLiveReload") {
-        c.Set("disableLiveReload", disableLiveReload)
-    }
-
-    if cmd.Flags().Changed("navigateToChanged") {
-        c.Set("navigateToChanged", navigateToChanged)
-    }
-
-    if cmd.Flags().Changed("disableFastRender") {
-        c.Set("disableFastRender", disableFastRender)
-    }
-
-    if serverWatch {
-        c.Set("watch", true)
-    }
-
-    if c.Cfg.GetBool("watch") {
-        serverWatch = true
-        c.watchConfig()
-    }
-
-    languages := c.languages()
-    serverPorts := make([]int, 1)
-
-    if languages.IsMultihost() {
-        if !serverAppend {
-            return newSystemError("--appendPort=false not supported when in multihost mode")
-        }
-        serverPorts = make([]int, len(languages))
-    }
-
-    currentServerPort := serverPort
-
-    for i := 0; i < len(serverPorts); i++ {
-        l, err := net.Listen("tcp", net.JoinHostPort(serverInterface, strconv.Itoa(currentServerPort)))
-        if err == nil {
-            l.Close()
-            serverPorts[i] = currentServerPort
-        } else {
-            if i == 0 && serverCmd.Flags().Changed("port") {
-                // port set explicitly by user -- he/she probably meant it!
-                return newSystemErrorF("Server startup failed: %s", err)
-            }
-            jww.ERROR.Println("port", serverPort, "already in use, attempting to use an available port")
-            sp, err := helpers.FindAvailablePort()
-            if err != nil {
-                return newSystemError("Unable to find alternative port to use:", err)
-            }
-            serverPorts[i] = sp.Port
-        }
-
-        currentServerPort = serverPorts[i] + 1
-    }
-
-    c.serverPorts = serverPorts
-
-    c.Set("port", serverPort)
-    if liveReloadPort != -1 {
-        c.Set("liveReloadPort", liveReloadPort)
-    } else {
-        c.Set("liveReloadPort", serverPorts[0])
-    }
-
-    if languages.IsMultihost() {
-        for i, language := range languages {
-            baseURL, err = fixURL(language, baseURL, serverPorts[i])
-            if err != nil {
-                return err
-            }
-            language.Set("baseURL", baseURL)
-        }
-    } else {
-        baseURL, err = fixURL(c.Cfg, baseURL, serverPorts[0])
-        if err != nil {
-            return err
-        }
-        c.Set("baseURL", baseURL)
+    cfgInit := func(c *commandeer) error {
+        c.Set("renderToMemory", !renderToDisk)
+        if cmd.Flags().Changed("navigateToChanged") {
+            c.Set("navigateToChanged", navigateToChanged)
+        }
+        if cmd.Flags().Changed("disableLiveReload") {
+            c.Set("disableLiveReload", disableLiveReload)
+        }
+        if cmd.Flags().Changed("disableFastRender") {
+            c.Set("disableFastRender", disableFastRender)
+        }
+        if serverWatch {
+            c.Set("watch", true)
+        }
+
+        serverPorts := make([]int, 1)
+
+        if c.languages.IsMultihost() {
+            if !serverAppend {
+                return newSystemError("--appendPort=false not supported when in multihost mode")
+            }
+            serverPorts = make([]int, len(c.languages))
+        }
+
+        currentServerPort := serverPort
+
+        for i := 0; i < len(serverPorts); i++ {
+            l, err := net.Listen("tcp", net.JoinHostPort(serverInterface, strconv.Itoa(currentServerPort)))
+            if err == nil {
+                l.Close()
+                serverPorts[i] = currentServerPort
+            } else {
+                if i == 0 && serverCmd.Flags().Changed("port") {
+                    // port set explicitly by user -- he/she probably meant it!
+                    return newSystemErrorF("Server startup failed: %s", err)
+                }
+                jww.ERROR.Println("port", serverPort, "already in use, attempting to use an available port")
+                sp, err := helpers.FindAvailablePort()
+                if err != nil {
+                    return newSystemError("Unable to find alternative port to use:", err)
+                }
+                serverPorts[i] = sp.Port
+            }
+
+            currentServerPort = serverPorts[i] + 1
+        }
+
+        c.serverPorts = serverPorts
+
+        c.Set("port", serverPort)
+        if liveReloadPort != -1 {
+            c.Set("liveReloadPort", liveReloadPort)
+        } else {
+            c.Set("liveReloadPort", serverPorts[0])
+        }
+
+        if c.languages.IsMultihost() {
+            for i, language := range c.languages {
+                baseURL, err := fixURL(language, baseURL, serverPorts[i])
+                if err != nil {
+                    return err
+                }
+                language.Set("baseURL", baseURL)
+            }
+        } else {
+            baseURL, err := fixURL(c.Cfg, baseURL, serverPorts[0])
+            if err != nil {
+                return err
+            }
+            c.Set("baseURL", baseURL)
+        }
+
+        return nil
+
     }
 
     if err := memStats(); err != nil {
         jww.ERROR.Println("memstats error:", err)
     }
 
-    // If a Destination is provided via flag write to disk
-    if destination != "" {
-        renderToDisk = true
-    }
-
-    // Hugo writes the output to memory instead of the disk
-    if !renderToDisk {
-        cfg.Fs.Destination = new(afero.MemMapFs)
-        // Rendering to memoryFS, publish to Root regardless of publishDir.
-        c.Set("publishDir", "/")
+    c, err := InitializeConfig(true, cfgInit, serverCmd)
+    if err != nil {
+        return err
     }
 
     if err := c.build(serverWatch); err != nil {
@@ -223,6 +208,10 @@ func server(cmd *cobra.Command, args []string) error {
         s.RegisterMediaTypes()
     }
 
+    if serverWatch {
+        c.watchConfig()
+    }
+
     // Watch runs its own server as part of the routine
     if serverWatch {
 
```
```diff
@@ -44,15 +44,20 @@ func (s *staticSyncer) isStatic(path string) bool {
 func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
     c := s.c
 
-    syncFn := func(dirs *src.Dirs, publishDir string) error {
+    syncFn := func(dirs *src.Dirs, publishDir string) (uint64, error) {
         staticSourceFs, err := dirs.CreateStaticFs()
         if err != nil {
-            return err
+            return 0, err
         }
 
+        if dirs.Language != nil {
+            // Multihost setup
+            publishDir = filepath.Join(publishDir, dirs.Language.Lang)
+        }
+
         if staticSourceFs == nil {
             c.Logger.WARN.Println("No static directories found to sync")
-            return nil
+            return 0, nil
         }
 
         syncer := fsync.NewSyncer()
@@ -127,9 +132,10 @@ func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
             }
         }
 
-        return nil
+        return 0, nil
     }
 
-    return c.doWithPublishDirs(syncFn)
+    _, err := c.doWithPublishDirs(syncFn)
+    return err
+
 }
```
|
|
|
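The sync callback now returns a count alongside the error so the caller can aggregate it. A hedged sketch of that callback shape; `forEachDir` here is a hypothetical stand-in for Hugo's `doWithPublishDirs` driver:

```go
package main

import "fmt"

// syncFn is the callback shape used above: each invocation reports how
// many files it handled, and the driver sums the counts.
type syncFn func(dir string) (uint64, error)

// forEachDir stands in for doWithPublishDirs: run fn once per target
// directory and aggregate the per-directory counts.
func forEachDir(dirs []string, fn syncFn) (uint64, error) {
	var total uint64
	for _, d := range dirs {
		n, err := fn(d)
		if err != nil {
			return total, err
		}
		total += n
	}
	return total, nil
}

func main() {
	total, err := forEachDir([]string{"public", "public/en"}, func(dir string) (uint64, error) {
		fmt.Println("syncing", dir)
		return 1, nil // pretend one file was synced
	})
	fmt.Println("synced:", total, "err:", err)
}
```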
@@ -36,7 +36,7 @@ If the content's draft status is 'False', nothing is done.`,
// to false and setting its publish date to now. If the specified content is
// not a draft, it will log an error.
func Undraft(cmd *cobra.Command, args []string) error {
-	cfg, err := InitializeConfig()
+	c, err := InitializeConfig(false, nil)

	if err != nil {
		return err

@@ -46,6 +46,8 @@ func Undraft(cmd *cobra.Command, args []string) error {
		return newUserError("a piece of content needs to be specified")
	}

+	cfg := c.DepsCfg
+
	location := args[0]
	// open the file
	f, err := cfg.Fs.Source.Open(location)
@@ -45,7 +45,7 @@ type ArchetypeFileData struct {

	// The target content file. Note that the .Content will be empty, as that
	// has not been created yet.
-	*source.File
+	source.File
}

const (

@@ -82,7 +82,7 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, kind, targetPath, archetypeFile
	)

	sp := source.NewSourceSpec(s.Deps.Cfg, s.Deps.Fs)
-	f := sp.NewFile(targetPath)
+	f := sp.NewFileInfo("", targetPath, nil)

	data := ArchetypeFileData{
		Type: kind,
10 deps/deps.go vendored

@@ -10,6 +10,7 @@ import (
	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/metrics"
	"github.com/gohugoio/hugo/output"
	"github.com/gohugoio/hugo/source"
	"github.com/gohugoio/hugo/tpl"
	jww "github.com/spf13/jwalterweatherman"
)

@@ -33,6 +34,9 @@ type Deps struct {
	// The ContentSpec to use
	*helpers.ContentSpec `json:"-"`

+	// The SourceSpec to use
+	SourceSpec *source.SourceSpec `json:"-"`
+
	// The configuration to use
	Cfg config.Provider `json:"-"`

@@ -122,6 +126,8 @@ func New(cfg DepsCfg) (*Deps, error) {
		return nil, err
	}

+	sp := source.NewSourceSpec(cfg.Language, fs)
+
	d := &Deps{
		Fs:  fs,
		Log: logger,

@@ -130,6 +136,7 @@ func New(cfg DepsCfg) (*Deps, error) {
		WithTemplate: cfg.WithTemplate,
		PathSpec:     ps,
		ContentSpec:  contentSpec,
+		SourceSpec:   sp,
		Cfg:          cfg.Language,
		Language:     cfg.Language,
	}

@@ -194,4 +201,7 @@ type DepsCfg struct {

	// i18n handling.
	TranslationProvider ResourceProvider
+
+	// Whether we are in running (server) mode
+	Running bool
}
@@ -47,6 +47,10 @@ type ContentSpec struct {
	// SummaryLength is the length of the summary that Hugo extracts from a content.
	summaryLength int

+	BuildFuture  bool
+	BuildExpired bool
+	BuildDrafts  bool
+
	Highlight            func(code, lang, optsStr string) (string, error)
	defatultPygmentsOpts map[string]string

@@ -62,6 +66,9 @@ func NewContentSpec(cfg config.Provider) (*ContentSpec, error) {
		footnoteAnchorPrefix:       cfg.GetString("footnoteAnchorPrefix"),
		footnoteReturnLinkContents: cfg.GetString("footnoteReturnLinkContents"),
		summaryLength:              cfg.GetInt("summaryLength"),
+		BuildFuture:                cfg.GetBool("buildFuture"),
+		BuildExpired:               cfg.GetBool("buildExpired"),
+		BuildDrafts:                cfg.GetBool("buildDrafts"),

		cfg: cfg,
	}
@@ -19,9 +19,12 @@ import (
	"strings"
	"testing"

+	"github.com/spf13/viper"
+
	"github.com/miekg/mmark"
	"github.com/russross/blackfriday"
	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
)

const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"

@@ -73,6 +76,25 @@ func TestBytesToHTML(t *testing.T) {
	assert.Equal(t, template.HTML("dobedobedo"), BytesToHTML([]byte("dobedobedo")))
}

+func TestNewContentSpec(t *testing.T) {
+	cfg := viper.New()
+	assert := require.New(t)
+
+	cfg.Set("summaryLength", 32)
+	cfg.Set("buildFuture", true)
+	cfg.Set("buildExpired", true)
+	cfg.Set("buildDrafts", true)
+
+	spec, err := NewContentSpec(cfg)
+
+	assert.NoError(err)
+	assert.Equal(32, spec.summaryLength)
+	assert.True(spec.BuildFuture)
+	assert.True(spec.BuildExpired)
+	assert.True(spec.BuildDrafts)
+
+}
+
var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20)

func BenchmarkTestTruncateWordsToWholeSentence(b *testing.B) {
@@ -365,8 +365,8 @@ func SliceToLower(s []string) []string {
	return l
}

-// Md5String takes a string and returns its MD5 hash.
-func Md5String(f string) string {
+// MD5String takes a string and returns its MD5 hash.
+func MD5String(f string) string {
	h := md5.New()
	h.Write([]byte(f))
	return hex.EncodeToString(h.Sum([]byte{}))
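The renamed `MD5String` is a thin wrapper over `crypto/md5`; a standalone equivalent for reference:

```go
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// md5String reproduces helpers.MD5String: the hex-encoded MD5 of the input.
func md5String(f string) string {
	h := md5.New()
	h.Write([]byte(f))
	return hex.EncodeToString(h.Sum(nil))
}

func main() {
	fmt.Println(md5String("foo")) // acbd18db4cc2f85cedef654fccc4a4d8
}
```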
@@ -53,7 +53,16 @@ func (l *Language) String() string {

// NewLanguage creates a new language.
func NewLanguage(lang string, cfg config.Provider) *Language {
-	return &Language{Lang: lang, Cfg: cfg, params: make(map[string]interface{})}
+	params := make(map[string]interface{})
+	// Merge with global config.
+	globalParams := cfg.GetStringMap("params")
+	for k, v := range globalParams {
+		if _, ok := params[k]; !ok {
+			params[k] = v
+		}
+	}
+	l := &Language{Lang: lang, Cfg: cfg, params: params}
+	return l
}

// NewDefaultLanguage creates the default language for a config.Provider.

@@ -88,17 +97,6 @@ func (l Languages) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

-// Params returns language-specific params merged with the global params.
-func (l *Language) Params() map[string]interface{} {
-	l.paramsInit.Do(func() {
-		// Merge with global config.
-		// TODO(bep) consider making this part of a constructor func.
-
-		globalParams := l.Cfg.GetStringMap("params")
-		for k, v := range globalParams {
-			if _, ok := l.params[k]; !ok {
-				l.params[k] = v
-			}
-		}
-	})
-	return l.params
-}
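Merging the global params into the per-language map at construction time replaces the old lazy `sync.Once` merge. The merge rule is simple: language-level keys win. A standalone sketch of that rule:

```go
package main

import "fmt"

// mergeParams copies global keys into the language params unless the
// language already defines them -- the same rule NewLanguage applies.
func mergeParams(lang, global map[string]interface{}) {
	for k, v := range global {
		if _, ok := lang[k]; !ok {
			lang[k] = v
		}
	}
}

func main() {
	global := map[string]interface{}{"author": "site-wide", "logo": "logo.png"}
	nn := map[string]interface{}{"author": "nn-override"}
	mergeParams(nn, global)
	fmt.Println(nn["author"], nn["logo"]) // nn-override logo.png
}
```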
@@ -277,6 +277,12 @@ func Ext(in string) string {
	return ext
}

+// FileAndExt takes a path and returns the file and extension separated,
+// the extension including the delimiter, i.e. ".md".
+func FileAndExt(in string) (string, string) {
+	return fileAndExt(in, fpb)
+}
+
// Filename takes a path, strips out the extension,
// and returns the name of the file.
func Filename(in string) (name string) {

@@ -348,40 +354,6 @@ func GetRelativePath(path, base string) (final string, err error) {
	return name, nil
}

-// GuessSection returns the section given a source path.
-// A section is the part between the root slash and the second slash
-// or before the first slash.
-func GuessSection(in string) string {
-	parts := strings.Split(in, FilePathSeparator)
-	// This will include an empty entry before and after paths with leading and trailing slashes
-	// eg... /sect/one/ -> ["", "sect", "one", ""]
-
-	// Needs to have at least a value and a slash
-	if len(parts) < 2 {
-		return ""
-	}
-
-	// If it doesn't have a leading slash and value and file or trailing slash, then return ""
-	if parts[0] == "" && len(parts) < 3 {
-		return ""
-	}
-
-	// strip leading slash
-	if parts[0] == "" {
-		parts = parts[1:]
-	}
-
-	// if first directory is "content", return second directory
-	if parts[0] == "content" {
-		if len(parts) > 2 {
-			return parts[1]
-		}
-		return ""
-	}
-
-	return parts[0]
-}
-
// PathPrep prepares the path using the uglify setting to create paths on
// either the form /section/name/index.html or /section/name.html.
func PathPrep(ugly bool, in string) string {

@@ -504,7 +476,7 @@ func SymbolicWalk(fs afero.Fs, root string, walker filepath.WalkFunc) error {
}

func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
-	fileInfo, err := lstatIfOs(fs, path)
+	fileInfo, err := LstatIfOs(fs, path)
	realPath := path

	if err != nil {

@@ -516,7 +488,7 @@ func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
	if err != nil {
		return nil, "", fmt.Errorf("Cannot read symbolic link '%s', error was: %s", path, err)
	}
-	fileInfo, err = lstatIfOs(fs, link)
+	fileInfo, err = LstatIfOs(fs, link)
	if err != nil {
		return nil, "", fmt.Errorf("Cannot stat '%s', error was: %s", link, err)
	}

@@ -539,7 +511,7 @@ func GetRealPath(fs afero.Fs, path string) (string, error) {

// Code copied from Afero's path.go
// if the filesystem is OsFs use Lstat, else use fs.Stat
-func lstatIfOs(fs afero.Fs, path string) (info os.FileInfo, err error) {
+func LstatIfOs(fs afero.Fs, path string) (info os.FileInfo, err error) {
	_, ok := fs.(*afero.OsFs)
	if ok {
		info, err = os.Lstat(path)
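The newly exported `LstatIfOs` uses `Lstat` on the real OS filesystem (so symlinks are not followed) and plain `Stat` everywhere else. A small usage sketch of the same decision against afero:

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/afero"
)

// lstatIfOs makes the same choice LstatIfOs makes: Lstat on the real OS
// filesystem, fs.Stat otherwise (e.g. an in-memory test filesystem).
func lstatIfOs(fs afero.Fs, path string) (os.FileInfo, error) {
	if _, ok := fs.(*afero.OsFs); ok {
		return os.Lstat(path)
	}
	return fs.Stat(path)
}

func main() {
	memFs := afero.NewMemMapFs()
	afero.WriteFile(memFs, "/content/post.md", []byte("hello"), 0644)

	fi, err := lstatIfOs(memFs, "/content/post.md")
	if err != nil {
		fmt.Println("stat failed:", err)
		return
	}
	fmt.Println(fi.Name(), fi.Size())
}
```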
@@ -638,40 +638,6 @@ func TestFileAndExt(t *testing.T) {

}

-func TestGuessSection(t *testing.T) {
-	type test struct {
-		input, expected string
-	}
-
-	data := []test{
-		{"/", ""},
-		{"", ""},
-		{"/content", ""},
-		{"content/", ""},
-		{"/content/", ""}, // /content/ is a special case. It will never be the section
-		{"/blog", ""},
-		{"/blog/", "blog"},
-		{"blog", ""},
-		{"content/blog", ""},
-		{"/content/blog/", "blog"},
-		{"/content/blog", ""}, // Lack of trailing slash indicates 'blog' is not a directory.
-		{"content/blog/", "blog"},
-		{"/contents/myblog/", "contents"},
-		{"/contents/yourblog", "contents"},
-		{"/contents/ourblog/", "contents"},
-		{"/content/myblog/", "myblog"},
-		{"/content/yourblog", ""},
-		{"/content/ourblog/", "ourblog"},
-	}
-
-	for i, d := range data {
-		expected := GuessSection(filepath.FromSlash(d.input))
-		if d.expected != expected {
-			t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, expected)
-		}
-	}
-}

func TestPathPrep(t *testing.T) {

}
@@ -15,6 +15,7 @@ package helpers

import (
	"fmt"
	"strings"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/hugofs"

@@ -31,6 +32,7 @@ type PathSpec struct {
	canonifyURLs bool

	language *Language
	//StatsCounter *siteSta

	// pagination path handling
	paginatePath string

@@ -38,10 +40,12 @@ type PathSpec struct {
	theme string

	// Directories
	contentDir string
	themesDir  string
	layoutDir  string
	workingDir string
	staticDirs []string
	PublishDir string

	// The PathSpec looks up its config settings in both the current language
	// and then in the global Viper config.

@@ -52,6 +56,8 @@ type PathSpec struct {
	defaultContentLanguage string
	multilingual           bool

	ProcessingStats *ProcessingStats

	// The file systems to use
	Fs *hugofs.Fs

@@ -79,6 +85,11 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) (*PathSpec, error) {
		staticDirs = append(staticDirs, getStringOrStringSlice(cfg, "staticDir", i)...)
	}

	var lang string
	if l, ok := cfg.(*Language); ok {
		lang = l.Lang
	}

	ps := &PathSpec{
		Fs:  fs,
		Cfg: cfg,

@@ -91,13 +102,23 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) (*PathSpec, error) {
		defaultContentLanguage: cfg.GetString("defaultContentLanguage"),
		paginatePath:           cfg.GetString("paginatePath"),
		BaseURL:                baseURL,
		contentDir:             cfg.GetString("contentDir"),
		themesDir:              cfg.GetString("themesDir"),
		layoutDir:              cfg.GetString("layoutDir"),
		workingDir:             cfg.GetString("workingDir"),
		staticDirs:             staticDirs,
		theme:                  cfg.GetString("theme"),
		ProcessingStats:        NewProcessingStats(lang),
	}

	publishDir := ps.AbsPathify(cfg.GetString("publishDir")) + FilePathSeparator
	// If root, remove the second '/'
	if publishDir == "//" {
		publishDir = FilePathSeparator
	}

	ps.PublishDir = publishDir

	if language, ok := cfg.(*Language); ok {
		ps.language = language
	}

@@ -129,6 +150,11 @@ func (p *PathSpec) PaginatePath() string {
	return p.paginatePath
}

// ContentDir returns the configured contentDir.
func (p *PathSpec) ContentDir() string {
	return p.contentDir
}

// WorkingDir returns the configured workingDir.
func (p *PathSpec) WorkingDir() string {
	return p.workingDir

@@ -153,3 +179,13 @@ func (p *PathSpec) Theme() string {
func (p *PathSpec) ThemesDir() string {
	return p.themesDir
}

// PermalinkForBaseURL creates a permalink from the given link and baseURL.
func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string {
	link = strings.TrimPrefix(link, "/")
	if !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return baseURL + link

}
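`PermalinkForBaseURL` is pure string plumbing, so its behavior is easy to pin down with a standalone copy of the logic:

```go
package main

import (
	"fmt"
	"strings"
)

// permalinkForBaseURL reproduces PathSpec.PermalinkForBaseURL: trim the
// leading slash from the link, ensure baseURL ends with one, concatenate.
func permalinkForBaseURL(link, baseURL string) string {
	link = strings.TrimPrefix(link, "/")
	if !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return baseURL + link
}

func main() {
	fmt.Println(permalinkForBaseURL("/blog/post/", "https://example.com"))
	// Output: https://example.com/blog/post/
}
```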
116 helpers/processing_stats.go Normal file

@@ -0,0 +1,116 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package helpers

import (
	"io"
	"strconv"
	"sync/atomic"

	"github.com/olekukonko/tablewriter"
)

type ProcessingStats struct {
	Name string

	Pages           uint64
	PaginatorPages  uint64
	Static          uint64
	ProcessedImages uint64
	Files           uint64
	Aliases         uint64
	Sitemaps        uint64
	Cleaned         uint64
}

type processingStatsTitleVal struct {
	name string
	val  uint64
}

func (s *ProcessingStats) toVals() []processingStatsTitleVal {
	return []processingStatsTitleVal{
		processingStatsTitleVal{"Pages", s.Pages},
		processingStatsTitleVal{"Paginator pages", s.PaginatorPages},
		processingStatsTitleVal{"Non-page files", s.Files},
		processingStatsTitleVal{"Static files", s.Static},
		processingStatsTitleVal{"Processed images", s.ProcessedImages},
		processingStatsTitleVal{"Aliases", s.Aliases},
		processingStatsTitleVal{"Sitemaps", s.Sitemaps},
		processingStatsTitleVal{"Cleaned", s.Cleaned},
	}
}

func NewProcessingStats(name string) *ProcessingStats {
	return &ProcessingStats{Name: name}
}

func (s *ProcessingStats) Incr(counter *uint64) {
	atomic.AddUint64(counter, 1)
}

func (s *ProcessingStats) Add(counter *uint64, amount int) {
	atomic.AddUint64(counter, uint64(amount))
}

func (s *ProcessingStats) Table(w io.Writer) {
	titleVals := s.toVals()
	data := make([][]string, len(titleVals))
	for i, tv := range titleVals {
		data[i] = []string{tv.name, strconv.Itoa(int(tv.val))}
	}

	table := tablewriter.NewWriter(w)

	table.AppendBulk(data)
	table.SetHeader([]string{"", s.Name})
	table.SetBorder(false)
	table.Render()

}

func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
	names := make([]string, len(stats)+1)

	var data [][]string

	for i := 0; i < len(stats); i++ {
		stat := stats[i]
		names[i+1] = stat.Name

		titleVals := stat.toVals()

		if i == 0 {
			data = make([][]string, len(titleVals))
		}

		for j, tv := range titleVals {
			if i == 0 {
				data[j] = []string{tv.name, strconv.Itoa(int(tv.val))}
			} else {
				data[j] = append(data[j], strconv.Itoa(int(tv.val)))
			}

		}

	}

	table := tablewriter.NewWriter(w)

	table.AppendBulk(data)
	table.SetHeader(names)
	table.SetBorder(false)
	table.Render()

}
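This is the machinery behind the new table-based build summary. A minimal sketch of how it might be driven, using only the functions shown above (one value column per language, as `PrintProcessingStats` does per site):

```go
package main

import (
	"os"

	"github.com/gohugoio/hugo/helpers"
)

func main() {
	// One stats column per language.
	en := helpers.NewProcessingStats("en")
	nn := helpers.NewProcessingStats("nn")

	en.Add(&en.Pages, 10)
	en.Incr(&en.Sitemaps)
	nn.Add(&nn.Pages, 6)

	// Renders a single table with "en" and "nn" value columns.
	helpers.ProcessingStatsTable(os.Stdout, en, nn)
}
```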
@@ -109,7 +109,7 @@ func (s *Site) publishDestAlias(allowRoot bool, path, permalink string, p *Page)
		return err
	}

-	return s.publish(targetPath, aliasContent)
+	return s.publish(&s.PathSpec.ProcessingStats.Aliases, targetPath, aliasContent)

}
@@ -51,7 +51,9 @@ func TestAlias(t *testing.T) {
	writeSource(t, fs, filepath.Join("content", "page.md"), pageWithAlias)
	writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), basicTemplate)

-	buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+
+	require.Len(t, s.rawAllPages, 1)

	// the real page
	th.assertFileContent(filepath.Join("public", "page", "index.html"), "For some moments the old man")
@@ -64,22 +64,21 @@ func LoadConfig(fs afero.Fs, relativeSourcePath, configFilename string) (*viper.

	v.RegisterAlias("indexes", "taxonomies")

-	// Remove these in Hugo 0.23.
+	// Remove these in Hugo 0.33.
	if v.IsSet("disable404") {
-		helpers.Deprecated("site config", "disable404", "Use disableKinds=[\"404\"]", false)
+		helpers.Deprecated("site config", "disable404", "Use disableKinds=[\"404\"]", true)
	}

	if v.IsSet("disableRSS") {
-		helpers.Deprecated("site config", "disableRSS", "Use disableKinds=[\"RSS\"]", false)
+		helpers.Deprecated("site config", "disableRSS", "Use disableKinds=[\"RSS\"]", true)
	}

	if v.IsSet("disableSitemap") {
-		// NOTE: Do not remove this until Hugo 0.24, ERROR in 0.23.
-		helpers.Deprecated("site config", "disableSitemap", "Use disableKinds= [\"sitemap\"]", false)
+		helpers.Deprecated("site config", "disableSitemap", "Use disableKinds= [\"sitemap\"]", true)
	}

	if v.IsSet("disableRobotsTXT") {
-		helpers.Deprecated("site config", "disableRobotsTXT", "Use disableKinds= [\"robotsTXT\"]", false)
+		helpers.Deprecated("site config", "disableRobotsTXT", "Use disableKinds= [\"robotsTXT\"]", true)
	}

	if err := loadDefaultSettingsFor(v); err != nil {

@@ -176,6 +175,7 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
	v.SetDefault("contentDir", "content")
	v.SetDefault("layoutDir", "layouts")
	v.SetDefault("staticDir", "static")
+	v.SetDefault("resourceDir", "resources")
	v.SetDefault("archetypeDir", "archetypes")
	v.SetDefault("publishDir", "public")
	v.SetDefault("dataDir", "data")
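The deprecation/default dance above in miniature: `SetDefault` only applies when the user has not set the key, and `IsSet` is what gates the deprecation warnings. A small viper sketch:

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.SetDefault("resourceDir", "resources")
	v.SetDefault("publishDir", "public")

	// Simulate a user config that still uses a deprecated key.
	v.Set("disable404", true)

	if v.IsSet("disable404") {
		fmt.Println(`deprecated: disable404, use disableKinds=["404"]`)
	}
	fmt.Println(v.GetString("resourceDir")) // resources (the default kicks in)
}
```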
@@ -27,16 +27,15 @@ import (
	jww "github.com/spf13/jwalterweatherman"

	"github.com/gohugoio/hugo/parser"
-	"github.com/gohugoio/hugo/source"
	"github.com/stretchr/testify/require"
)

func TestDataDirJSON(t *testing.T) {
	t.Parallel()

-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("data/test/foo.json"), Content: []byte(`{ "bar": "foofoo" }`)},
-		{Name: filepath.FromSlash("data/test.json"), Content: []byte(`{ "hello": [ { "world": "foo" } ] }`)},
+	sources := [][2]string{
+		{filepath.FromSlash("data/test/foo.json"), `{ "bar": "foofoo" }`},
+		{filepath.FromSlash("data/test.json"), `{ "hello": [ { "world": "foo" } ] }`},
	}

	expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`))

@@ -51,8 +50,8 @@ func TestDataDirJSON(t *testing.T) {
func TestDataDirToml(t *testing.T) {
	t.Parallel()

-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("data/test/kung.toml"), Content: []byte("[foo]\nbar = 1")},
+	sources := [][2]string{
+		{"data/test/kung.toml", "[foo]\nbar = 1"},
	}

	expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))

@@ -67,12 +66,12 @@ func TestDataDirToml(t *testing.T) {
func TestDataDirYAMLWithOverridenValue(t *testing.T) {
	t.Parallel()

-	sources := []source.ByteSource{
+	sources := [][2]string{
		// filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
-		{Name: filepath.FromSlash("data/a.yaml"), Content: []byte("a: 1")},
-		{Name: filepath.FromSlash("data/test/v1.yaml"), Content: []byte("v1-2: 2")},
-		{Name: filepath.FromSlash("data/test/v2.yaml"), Content: []byte("v2:\n- 2\n- 3")},
-		{Name: filepath.FromSlash("data/test.yaml"), Content: []byte("v1: 1")},
+		{filepath.FromSlash("data/a.yaml"), "a: 1"},
+		{filepath.FromSlash("data/test/v1.yaml"), "v1-2: 2"},
+		{filepath.FromSlash("data/test/v2.yaml"), "v2:\n- 2\n- 3"},
+		{filepath.FromSlash("data/test.yaml"), "v1: 1"},
	}

	expected := map[string]interface{}{"a": map[string]interface{}{"a": 1},

@@ -85,10 +84,10 @@ func TestDataDirYAMLWithOverridenValue(t *testing.T) {
func TestDataDirMultipleSources(t *testing.T) {
	t.Parallel()

-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("data/test/first.toml"), Content: []byte("bar = 1")},
-		{Name: filepath.FromSlash("themes/mytheme/data/test/first.toml"), Content: []byte("bar = 2")},
-		{Name: filepath.FromSlash("data/test/second.toml"), Content: []byte("tender = 2")},
+	sources := [][2]string{
+		{filepath.FromSlash("data/test/first.toml"), "bar = 1"},
+		{filepath.FromSlash("themes/mytheme/data/test/first.toml"), "bar = 2"},
+		{filepath.FromSlash("data/test/second.toml"), "tender = 2"},
	}

	expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))

@@ -98,7 +97,7 @@ func TestDataDirMultipleSources(t *testing.T) {

}

-func doTestDataDir(t *testing.T, expected interface{}, sources []source.ByteSource, configKeyValues ...interface{}) {
+func doTestDataDir(t *testing.T, expected interface{}, sources [][2]string, configKeyValues ...interface{}) {
	var (
		cfg, fs = newTestCfg()
	)
109 hugolib/fileInfo.go Normal file

@@ -0,0 +1,109 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"os"
	"strings"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/source"
)

// fileInfo implements the File and ReadableFile interface.
var (
	_ source.File         = (*fileInfo)(nil)
	_ source.ReadableFile = (*fileInfo)(nil)
)

type fileInfo struct {
	bundleTp bundleDirType
	source.ReadableFile
	overriddenLang string
}

func (fi *fileInfo) Lang() string {
	if fi.overriddenLang != "" {
		return fi.overriddenLang
	}
	return fi.ReadableFile.Lang()
}

func (fi *fileInfo) isOwner() bool {
	return fi.bundleTp > bundleNot
}

func isContentFile(filename string) bool {
	return contentFileExtensionsSet[strings.TrimPrefix(helpers.Ext(filename), ".")]
}

func (fi *fileInfo) isContentFile() bool {
	return contentFileExtensionsSet[fi.Ext()]
}

func newFileInfo(sp *source.SourceSpec, baseDir, filename string, fi os.FileInfo, tp bundleDirType) *fileInfo {

	baseFi := sp.NewFileInfo(baseDir, filename, fi)
	f := &fileInfo{
		bundleTp:     tp,
		ReadableFile: baseFi,
	}

	return f

}

type bundleDirType int

const (
	bundleNot bundleDirType = iota

	// All from here are bundles in one form or another.
	bundleLeaf
	bundleBranch
)

// Returns the given file's name's bundle type and whether it is a content
// file or not.
func classifyBundledFile(name string) (bundleDirType, bool) {
	if !isContentFile(name) {
		return bundleNot, false
	}
	if strings.HasPrefix(name, "_index.") {
		return bundleBranch, true
	}

	if strings.HasPrefix(name, "index.") {
		return bundleLeaf, true
	}

	return bundleNot, true
}

func (b bundleDirType) String() string {
	switch b {
	case bundleNot:
		return "Not a bundle"
	case bundleLeaf:
		return "Regular bundle"
	case bundleBranch:
		return "Branch bundle"
	}

	return ""
}

func (b bundleDirType) isBundle() bool {
	return b > bundleNot
}
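Bundle classification in action, replicated standalone (same logic as `classifyBundledFile` above, with a trimmed-down content-extension set for the demo):

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// A reduced stand-in for Hugo's contentFileExtensionsSet.
var contentExts = map[string]bool{"md": true, "html": true, "org": true}

func isContentFile(name string) bool {
	return contentExts[strings.TrimPrefix(filepath.Ext(name), ".")]
}

// classify mirrors classifyBundledFile: _index.* starts a branch bundle,
// index.* a leaf bundle, and any other content file is not a bundle owner.
func classify(name string) (string, bool) {
	if !isContentFile(name) {
		return "not a bundle", false
	}
	if strings.HasPrefix(name, "_index.") {
		return "branch bundle", true
	}
	if strings.HasPrefix(name, "index.") {
		return "leaf bundle", true
	}
	return "not a bundle", true
}

func main() {
	for _, n := range []string{"_index.md", "index.md", "post.md", "logo.png"} {
		tp, isContent := classify(n)
		fmt.Printf("%-10s -> %s (content file: %v)\n", n, tp, isContent)
	}
}
```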
61 hugolib/fileInfo_test.go Normal file

@@ -0,0 +1,61 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"testing"

	"path/filepath"

	"github.com/gohugoio/hugo/source"
	"github.com/stretchr/testify/require"
)

func TestBundleFileInfo(t *testing.T) {
	t.Parallel()

	assert := require.New(t)
	cfg, fs := newTestBundleSourcesMultilingual(t)
	sourceSpec := source.NewSourceSpec(cfg, fs)

	for _, this := range []struct {
		filename string
		check    func(f *fileInfo)
	}{
		{"/path/to/file.md", func(fi *fileInfo) {
			assert.Equal("md", fi.Ext())
			assert.Equal("en", fi.Lang())
			assert.False(fi.isOwner())
			assert.True(fi.isContentFile())
		}},
		{"/path/to/file.JPG", func(fi *fileInfo) {
			assert.Equal("jpg", fi.Ext())
			assert.False(fi.isContentFile())
		}},
		{"/path/to/file.nn.png", func(fi *fileInfo) {
			assert.Equal("png", fi.Ext())
			assert.Equal("nn", fi.Lang())
			assert.Equal("file", fi.TranslationBaseName())
			assert.False(fi.isContentFile())
		}},
	} {
		fi := newFileInfo(
			sourceSpec,
			filepath.FromSlash("/work/base"),
			filepath.FromSlash(this.filename),
			nil, bundleNot)
		this.check(fi)
	}

}
@@ -1,65 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"github.com/gohugoio/hugo/source"
)

// Handler is used for processing files of a specific type.
type Handler interface {
	FileConvert(*source.File, *Site) HandledResult
	PageConvert(*Page) HandledResult
	Read(*source.File, *Site) HandledResult
	Extensions() []string
}

// Handle identifies functionality associated with certain file extensions.
type Handle struct {
	extensions []string
}

// Extensions returns a list of extensions.
func (h Handle) Extensions() []string {
	return h.extensions
}

// HandledResult describes the results of a file handling operation.
type HandledResult struct {
	page *Page
	file *source.File
	err  error
}

// HandledResult is an error
func (h HandledResult) Error() string {
	if h.err != nil {
		if h.page != nil {
			return "Error: " + h.err.Error() + " for " + h.page.File.LogicalName()
		}
		if h.file != nil {
			return "Error: " + h.err.Error() + " for " + h.file.LogicalName()
		}
	}
	return h.err.Error()
}

func (h HandledResult) String() string {
	return h.Error()
}

// Page returns the affected page.
func (h HandledResult) Page() *Page {
	return h.page
}

@@ -1,59 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"

	"github.com/dchest/cssmin"
	"github.com/gohugoio/hugo/source"
)

func init() {
	RegisterHandler(new(cssHandler))
	RegisterHandler(new(defaultHandler))
}

type basicFileHandler Handle

func (h basicFileHandler) Read(f *source.File, s *Site) HandledResult {
	return HandledResult{file: f}
}

func (h basicFileHandler) PageConvert(*Page) HandledResult {
	return HandledResult{}
}

type defaultHandler struct{ basicFileHandler }

func (h defaultHandler) Extensions() []string { return []string{"*"} }
func (h defaultHandler) FileConvert(f *source.File, s *Site) HandledResult {
	err := s.publish(f.Path(), f.Contents)
	if err != nil {
		return HandledResult{err: err}
	}
	return HandledResult{file: f}
}

type cssHandler struct{ basicFileHandler }

func (h cssHandler) Extensions() []string { return []string{"css"} }
func (h cssHandler) FileConvert(f *source.File, s *Site) HandledResult {
	x := cssmin.Minify(f.Bytes())
	err := s.publish(f.Path(), bytes.NewReader(x))
	if err != nil {
		return HandledResult{err: err}
	}
	return HandledResult{file: f}
}

@@ -1,128 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"errors"

	"fmt"

	"github.com/gohugoio/hugo/source"
)

var handlers []Handler

// MetaHandler abstracts reading and converting functionality of a Handler.
type MetaHandler interface {
	// Read the Files in and register
	Read(*source.File, *Site, HandleResults)

	// Generic Convert Function with coordination
	Convert(interface{}, *Site, HandleResults)

	Handle() Handler
}

// HandleResults is a channel for HandledResult.
type HandleResults chan<- HandledResult

// NewMetaHandler creates a MetaHandle for a given extensions.
func NewMetaHandler(in string) *MetaHandle {
	x := &MetaHandle{ext: in}
	x.Handler()
	return x
}

// MetaHandle is a generic MetaHandler that internally uses
// the globally registered handlers for handling specific file types.
type MetaHandle struct {
	handler Handler
	ext     string
}

func (mh *MetaHandle) Read(f *source.File, s *Site, results HandleResults) {
	if h := mh.Handler(); h != nil {
		results <- h.Read(f, s)
		return
	}

	results <- HandledResult{err: errors.New("No handler found"), file: f}
}

// Convert handles the conversion of files and pages.
func (mh *MetaHandle) Convert(i interface{}, s *Site, results HandleResults) {
	h := mh.Handler()

	if f, ok := i.(*source.File); ok {
		results <- h.FileConvert(f, s)
		return
	}

	if p, ok := i.(*Page); ok {
		if p == nil {
			results <- HandledResult{err: errors.New("file resulted in a nil page")}
			return
		}

		if h == nil {
			results <- HandledResult{err: fmt.Errorf("No handler found for page '%s'. Verify the markup is supported by Hugo.", p.FullFilePath())}
			return
		}

		results <- h.PageConvert(p)
	}
}

// Handler finds the registered handler for the used extensions.
func (mh *MetaHandle) Handler() Handler {
	if mh.handler == nil {
		mh.handler = FindHandler(mh.ext)

		// if no handler found, use default handler
		if mh.handler == nil {
			mh.handler = FindHandler("*")
		}
	}
	return mh.handler
}

// FindHandler finds a Handler in the globally registered handlers.
func FindHandler(ext string) Handler {
	for _, h := range Handlers() {
		if HandlerMatch(h, ext) {
			return h
		}
	}
	return nil
}

// HandlerMatch checks if the given extensions matches.
func HandlerMatch(h Handler, ext string) bool {
	for _, x := range h.Extensions() {
		if ext == x {
			return true
		}
	}
	return false
}

// RegisterHandler adds a handler to the globally registered ones.
func RegisterHandler(h Handler) {
	handlers = append(handlers, h)
}

// Handlers returns the globally registered handlers.
func Handlers() []Handler {
	return handlers
}

@@ -1,157 +0,0 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/source"
)

func init() {
	RegisterHandler(new(markdownHandler))
	RegisterHandler(new(htmlHandler))
	RegisterHandler(new(asciidocHandler))
	RegisterHandler(new(rstHandler))
	RegisterHandler(new(pandocHandler))
	RegisterHandler(new(mmarkHandler))
	RegisterHandler(new(orgHandler))
}

type basicPageHandler Handle

func (b basicPageHandler) Read(f *source.File, s *Site) HandledResult {
	page, err := s.NewPage(f.Path())

	if err != nil {
		return HandledResult{file: f, err: err}
	}

	if _, err := page.ReadFrom(f.Contents); err != nil {
		return HandledResult{file: f, err: err}
	}

	// In a multilanguage setup, we use the first site to
	// do the initial processing.
	// That site may be different than where the page will end up,
	// so we do the assignment here.
	// We should clean up this, but that will have to wait.
	s.assignSiteByLanguage(page)

	return HandledResult{file: f, page: page, err: err}
}

func (b basicPageHandler) FileConvert(*source.File, *Site) HandledResult {
	return HandledResult{}
}

type markdownHandler struct {
	basicPageHandler
}

func (h markdownHandler) Extensions() []string { return []string{"mdown", "markdown", "md"} }
func (h markdownHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

type htmlHandler struct {
	basicPageHandler
}

func (h htmlHandler) Extensions() []string { return []string{"html", "htm"} }

func (h htmlHandler) PageConvert(p *Page) HandledResult {
	if p.rendered {
		panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
	}

	// Work on a copy of the raw content from now on.
	p.createWorkContentCopy()

	if err := p.processShortcodes(); err != nil {
		p.s.Log.ERROR.Println(err)
	}

	return HandledResult{err: nil}
}

type asciidocHandler struct {
	basicPageHandler
}

func (h asciidocHandler) Extensions() []string { return []string{"asciidoc", "adoc", "ad"} }
func (h asciidocHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

type rstHandler struct {
	basicPageHandler
}

func (h rstHandler) Extensions() []string { return []string{"rest", "rst"} }
func (h rstHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

type pandocHandler struct {
	basicPageHandler
}

func (h pandocHandler) Extensions() []string { return []string{"pandoc", "pdc"} }
func (h pandocHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

type mmarkHandler struct {
	basicPageHandler
}

func (h mmarkHandler) Extensions() []string { return []string{"mmark"} }
func (h mmarkHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

type orgHandler struct {
	basicPageHandler
}

func (h orgHandler) Extensions() []string { return []string{"org"} }
func (h orgHandler) PageConvert(p *Page) HandledResult {
	return commonConvert(p)
}

func commonConvert(p *Page) HandledResult {
	if p.rendered {
		panic(fmt.Sprintf("Page %q already rendered, does not need conversion", p.BaseFileName()))
	}

	// Work on a copy of the raw content from now on.
	p.createWorkContentCopy()

	if err := p.processShortcodes(); err != nil {
		p.s.Log.ERROR.Println(err)
	}

	// TODO(bep) these page handlers need to be re-evaluated, as it is hard to
	// process a page in isolation. See the new preRender func.
	if p.s.Cfg.GetBool("enableEmoji") {
		p.workContent = helpers.Emojify(p.workContent)
	}

	p.workContent = p.replaceDivider(p.workContent)
	p.workContent = p.renderContent(p.workContent)

	return HandledResult{err: nil}
}

@@ -1,77 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"path/filepath"
	"testing"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/helpers"
)

func TestDefaultHandler(t *testing.T) {
	t.Parallel()

	var (
		cfg, fs = newTestCfg()
	)

	cfg.Set("verbose", true)
	cfg.Set("uglyURLs", true)

	writeSource(t, fs, filepath.FromSlash("content/sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*")
	writeSource(t, fs, filepath.FromSlash("content/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>")
	writeSource(t, fs, filepath.FromSlash("content/sect/doc3.md"), "# doc3\n*some* content")
	writeSource(t, fs, filepath.FromSlash("content/sect/doc4.md"), "---\ntitle: doc4\n---\n# doc4\n*some content*")
	writeSource(t, fs, filepath.FromSlash("content/sect/doc3/img1.png"), "‰PNG <20><><EFBFBD> IHDR<44><52><EFBFBD><01><><EFBFBD><08><><EFBFBD><EFBFBD>:~›U<E280BA><55><EFBFBD> IDATWcø<0F><01>ZMoñ<6F><C3B1><EFBFBD><EFBFBD>IEND®B`‚")
	writeSource(t, fs, filepath.FromSlash("content/sect/img2.gif"), "GIF89a<01><01>€<EFBFBD><E282AC>ÿÿÿ<C3BF><C3BF><EFBFBD>,<2C><><EFBFBD><EFBFBD><01><01><>D<01>;")
	writeSource(t, fs, filepath.FromSlash("content/sect/img2.spf"), "****FAKE-FILETYPE****")
	writeSource(t, fs, filepath.FromSlash("content/doc7.html"), "<html><body>doc7 content</body></html>")
	writeSource(t, fs, filepath.FromSlash("content/sect/doc8.html"), "---\nmarkup: md\n---\n# title\nsome *content*")

	writeSource(t, fs, filepath.FromSlash("layouts/_default/single.html"), "{{.Content}}")
	writeSource(t, fs, filepath.FromSlash("head"), "<head><script src=\"script.js\"></script></head>")
	writeSource(t, fs, filepath.FromSlash("head_abs"), "<head><script src=\"/script.js\"></script></head")

	buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	tests := []struct {
		doc      string
		expected string
	}{
		{filepath.FromSlash("public/sect/doc1.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>some <em>content</em></p>\n"},
		{filepath.FromSlash("public/sect/doc2.html"), "<!doctype html><html><body>more content</body></html>"},
		{filepath.FromSlash("public/sect/doc3.html"), "\n\n<h1 id=\"doc3\">doc3</h1>\n\n<p><em>some</em> content</p>\n"},
		{filepath.FromSlash("public/sect/doc3/img1.png"), string([]byte("‰PNG <20><><EFBFBD> IHDR<44><52><EFBFBD><01><><EFBFBD><08><><EFBFBD><EFBFBD>:~›U<E280BA><55><EFBFBD> IDATWcø<0F><01>ZMoñ<6F><C3B1><EFBFBD><EFBFBD>IEND®B`‚"))},
		{filepath.FromSlash("public/sect/img2.gif"), string([]byte("GIF89a<01><01>€<EFBFBD><E282AC>ÿÿÿ<C3BF><C3BF><EFBFBD>,<2C><><EFBFBD><EFBFBD><01><01><>D<01>;"))},
		{filepath.FromSlash("public/sect/img2.spf"), string([]byte("****FAKE-FILETYPE****"))},
		{filepath.FromSlash("public/doc7.html"), "<html><body>doc7 content</body></html>"},
		{filepath.FromSlash("public/sect/doc8.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>some <em>content</em></p>\n"},
	}

	for _, test := range tests {
		file, err := fs.Destination.Open(test.doc)
		if err != nil {
			t.Fatalf("Did not find %s in target.", test.doc)
		}

		content := helpers.ReaderToString(file)

		if content != test.expected {
			t.Errorf("%s content expected:\n%q\ngot:\n%q", test.doc, test.expected, content)
		}
	}

}
@@ -15,10 +15,13 @@ package hugolib

import (
	"errors"
	"io"
	"path/filepath"
	"sort"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/resource"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/helpers"

@@ -32,20 +35,40 @@ import (
type HugoSites struct {
	Sites []*Site

	runMode runmode

	multilingual *Multilingual

	// Multihost is set if multilingual and baseURL set on the language level.
	multihost bool

	// If this is running in the dev server.
	running bool

	*deps.Deps

	// Keeps track of bundle directories and symlinks to enable partial rebuilding.
	ContentChanges *contentChangeMap
}

func (h *HugoSites) IsMultihost() bool {
	return h != nil && h.multihost
}

func (h *HugoSites) PrintProcessingStats(w io.Writer) {
	stats := make([]*helpers.ProcessingStats, len(h.Sites))
	for i := 0; i < len(h.Sites); i++ {
		stats[i] = h.Sites[i].PathSpec.ProcessingStats
	}
	helpers.ProcessingStatsTable(w, stats...)
}

func (h *HugoSites) langSite() map[string]*Site {
	m := make(map[string]*Site)
	for _, s := range h.Sites {
		m[s.Language.Lang] = s
	}
	return m
}

// GetContentPage finds a Page with content given the absolute filename.
// Returns nil if none found.
func (h *HugoSites) GetContentPage(filename string) *Page {

@@ -58,12 +81,29 @@ func (h *HugoSites) GetContentPage(filename string) *Page {
	rel := strings.TrimPrefix(filename, contendDir)
	rel = strings.TrimPrefix(rel, helpers.FilePathSeparator)

-	pos := s.rawAllPages.findPagePosByFilePath(rel)
-
-	if pos == -1 {
-		return nil
-	}
-	return s.rawAllPages[pos]
+	for _, s := range h.Sites {
+
+		pos := s.rawAllPages.findPagePosByFilePath(rel)
+
+		if pos == -1 {
+			continue
+		}
+		return s.rawAllPages[pos]
+	}
+
+	// If not found already, this may be bundled in another content file.
+	rel = filepath.Dir(rel)
+	for _, s := range h.Sites {
+
+		pos := s.rawAllPages.findFirstPagePosByFilePathPrefix(rel)
+
+		if pos == -1 {
+			continue
+		}
+		return s.rawAllPages[pos]
+	}
+
+	return nil
+
+}
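The reworked `GetContentPage` tries an exact filename match in every site first, and only then retries with the parent directory to catch files bundled under another page. A toy version of that order of operations (all names here are illustrative, not Hugo's):

```go
package main

import (
	"fmt"
	"path/filepath"
)

// Toy stand-ins: each "site" is just a set of page source paths.
var sites = [][]string{
	{"blog/post-one/index.md"},
	{"blog/post-two/index.md"},
}

func findByPath(rel string) string {
	for _, pages := range sites {
		for _, p := range pages {
			if p == rel {
				return p
			}
		}
	}
	return ""
}

func findByDir(dir string) string {
	for _, pages := range sites {
		for _, p := range pages {
			if filepath.Dir(p) == dir {
				return p
			}
		}
	}
	return ""
}

func main() {
	// An image inside a leaf bundle: no page has this exact path...
	rel := "blog/post-one/sunset.jpg"
	if p := findByPath(rel); p != "" {
		fmt.Println("exact match:", p)
		return
	}
	// ...so fall back to the owning bundle's directory, as GetContentPage does.
	fmt.Println("bundle owner:", findByDir(filepath.Dir(rel)))
}
```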
@ -81,10 +121,20 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
|
|||
return nil, err
|
||||
}
|
||||
|
||||
var contentChangeTracker *contentChangeMap
|
||||
|
||||
// Only needed in server mode.
|
||||
// TODO(bep) clean up the running vs watching terms
|
||||
if cfg.Running {
|
||||
contentChangeTracker = &contentChangeMap{symContent: make(map[string]map[string]bool)}
|
||||
}
|
||||
|
||||
h := &HugoSites{
|
||||
multilingual: langConfig,
|
||||
multihost: cfg.Cfg.GetBool("multihost"),
|
||||
Sites: sites}
|
||||
running: cfg.Running,
|
||||
multilingual: langConfig,
|
||||
multihost: cfg.Cfg.GetBool("multihost"),
|
||||
ContentChanges: contentChangeTracker,
|
||||
Sites: sites}
|
||||
|
||||
for _, s := range sites {
|
||||
s.owner = h
|
||||
|
@ -143,6 +193,10 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
|
|||
d.OutputFormatsConfig = s.outputFormatsConfig
|
||||
s.Deps = d
|
||||
}
|
||||
s.resourceSpec, err = resource.NewSpec(s.Deps.PathSpec, s.mediaTypesConfig)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -258,10 +312,6 @@ func (h *HugoSites) toSiteInfos() []*SiteInfo {
|
|||
|
||||
// BuildCfg holds build options used to, as an example, skip the render step.
|
||||
type BuildCfg struct {
|
||||
// Whether we are in watch (server) mode
|
||||
Watching bool
|
||||
// Print build stats at the end of a build
|
||||
PrintStats bool
|
||||
// Reset site state before build. Use to force full rebuilds.
|
||||
ResetState bool
|
||||
// Re-creates the sites from configuration before a build.
|
||||
|
@ -304,11 +354,12 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
|
|||
|
||||
smLayouts := []string{"sitemapindex.xml", "_default/sitemapindex.xml", "_internal/_default/sitemapindex.xml"}
|
||||
|
||||
return s.renderAndWriteXML("sitemapindex",
|
||||
return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemapindex",
|
||||
sitemapDefault.Filename, h.toSiteInfos(), s.appendThemeTemplates(smLayouts)...)
|
||||
}
|
||||
|
||||
func (h *HugoSites) assignMissingTranslations() error {
|
||||
|
||||
// This looks heavy, but it should be a small number of nodes by now.
|
||||
allPages := h.findAllPagesByKindNotIn(KindPage)
|
||||
for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
|
||||
|
@ -427,73 +478,57 @@ func (h *HugoSites) createMissingPages() error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *Site) assignSiteByLanguage(p *Page) {
|
||||
|
||||
pageLang := p.Lang()
|
||||
|
||||
if pageLang == "" {
|
||||
panic("Page language missing: " + p.Title)
|
||||
func (h *HugoSites) removePageByPathPrefix(path string) {
|
||||
for _, s := range h.Sites {
|
||||
s.removePageByPathPrefix(path)
|
||||
}
|
||||
}
|
||||
|
||||
for _, site := range s.owner.Sites {
|
||||
if strings.HasPrefix(site.Language.Lang, pageLang) {
|
||||
p.s = site
|
||||
p.Site = &site.Info
|
||||
return
|
||||
}
|
||||
func (h *HugoSites) removePageByPath(path string) {
|
||||
for _, s := range h.Sites {
|
||||
s.removePageByPath(path)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func (h *HugoSites) setupTranslations() {
|
||||
|
||||
master := h.Sites[0]
|
||||
|
||||
for _, p := range master.rawAllPages {
|
||||
if p.Lang() == "" {
|
||||
panic("Page language missing: " + p.Title)
|
||||
}
|
||||
|
||||
if p.Kind == kindUnknown {
|
||||
p.Kind = p.s.kindFromSections(p.sections)
|
||||
}
|
||||
|
||||
if !p.s.isEnabled(p.Kind) {
|
||||
continue
|
||||
}
|
||||
|
||||
shouldBuild := p.shouldBuild()
|
||||
|
||||
for i, site := range h.Sites {
|
||||
// The site is assigned by language when read.
|
||||
if site == p.s {
|
||||
site.updateBuildStats(p)
|
||||
if shouldBuild {
|
||||
site.Pages = append(site.Pages, p)
|
||||
}
|
||||
for _, s := range h.Sites {
|
||||
for _, p := range s.rawAllPages {
|
||||
if p.Kind == kindUnknown {
|
||||
p.Kind = p.s.kindFromSections(p.sections)
|
||||
}
|
||||
|
||||
if !shouldBuild {
|
||||
if !p.s.isEnabled(p.Kind) {
|
||||
continue
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
site.AllPages = append(site.AllPages, p)
|
||||
shouldBuild := p.shouldBuild()
|
||||
s.updateBuildStats(p)
|
||||
if shouldBuild {
|
||||
s.Pages = append(s.Pages, p)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
allPages := make(Pages, 0)
|
||||
|
||||
for _, s := range h.Sites {
|
||||
allPages = append(allPages, s.Pages...)
|
||||
}
|
||||
|
||||
allPages.Sort()
|
||||
|
||||
for _, s := range h.Sites {
|
||||
s.AllPages = allPages
|
||||
}
|
||||
|
||||
// Pull over the collections from the master site
|
||||
for i := 1; i < len(h.Sites); i++ {
|
||||
h.Sites[i].AllPages = h.Sites[0].AllPages
|
||||
h.Sites[i].Data = h.Sites[0].Data
|
||||
}
|
||||
|
||||
if len(h.Sites) > 1 {
|
||||
pages := h.Sites[0].AllPages
|
||||
allTranslations := pagesToTranslationsMap(pages)
|
||||
assignTranslationsToPages(allTranslations, pages)
|
||||
allTranslations := pagesToTranslationsMap(allPages)
|
||||
assignTranslationsToPages(allTranslations, allPages)
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -501,6 +536,7 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) {
	pageChan := make(chan *Page)
	wg := &sync.WaitGroup{}

	numWorkers := getGoMaxProcs() * 4

	for i := 0; i < numWorkers; i++ {
@@ -508,77 +544,10 @@ func (s *Site) preparePagesForRender(cfg *BuildCfg) {
		go func(pages <-chan *Page, wg *sync.WaitGroup) {
			defer wg.Done()
			for p := range pages {
				if !p.shouldRenderTo(s.rc.Format) {
					// No need to prepare
					continue
				if err := p.prepareForRender(cfg); err != nil {
					s.Log.ERROR.Printf("Failed to prepare page %q for render: %s", p.BaseFileName(), err)

				}
				var shortcodeUpdate bool
				if p.shortcodeState != nil {
					shortcodeUpdate = p.shortcodeState.updateDelta()
				}

				if !shortcodeUpdate && !cfg.whatChanged.other && p.rendered {
					// No need to process it again.
					continue
				}

				// If we got this far it means that this is either a new Page pointer
				// or a template or similar has changed so we need to do a rerendering
				// of the shortcodes etc.

				// Mark it as rendered
				p.rendered = true

				// If in watch mode or if we have multiple output formats,
				// we need to keep the original so we can
				// potentially repeat this process on rebuild.
				needsACopy := cfg.Watching || len(p.outputFormats) > 1
				var workContentCopy []byte
				if needsACopy {
					workContentCopy = make([]byte, len(p.workContent))
					copy(workContentCopy, p.workContent)
				} else {
					// Just reuse the same slice.
					workContentCopy = p.workContent
				}

				if p.Markup == "markdown" {
					tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
					p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
					workContentCopy = tmpContent
				}

				var err error
				if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil {
					s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
				}

				if p.Markup != "html" {

					// Now we know enough to create a summary of the page and count some words
					summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)

					if err != nil {
						s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
					} else if summaryContent != nil {
						workContentCopy = summaryContent.content
					}

					p.Content = helpers.BytesToHTML(workContentCopy)

					if summaryContent == nil {
						if err := p.setAutoSummary(); err != nil {
							s.Log.ERROR.Printf("Failed to set user auto summary for page %q: %s", p.pathOrTitle(), err)
						}
					}

				} else {
					p.Content = helpers.BytesToHTML(workContentCopy)
				}

				// analyze for raw stats
				p.analyzePage()

			}
		}(pageChan, wg)
	}
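The `needsACopy := cfg.Watching || len(p.outputFormats) > 1` decision above is a small copy-on-write pattern: the raw work content must survive if another render pass may mutate it again. Here is a minimal sketch of that decision in isolation; the function name and parameters are illustrative, not Hugo's API:

```go
package main

import "fmt"

// workContentFor returns a safe-to-mutate slice: a fresh copy when the
// original must be kept for a later render pass (watch mode, or more than
// one output format), otherwise the original slice itself.
func workContentFor(workContent []byte, watching bool, numOutputFormats int) []byte {
	if watching || numOutputFormats > 1 {
		cp := make([]byte, len(workContent))
		copy(cp, workContent)
		return cp // mutations no longer touch the original
	}
	return workContent // single-shot build: reuse the same slice
}

func main() {
	orig := []byte("raw content")
	work := workContentFor(orig, true, 1)
	work[0] = 'R'
	fmt.Println(string(orig), "/", string(work)) // raw content / Raw content
}
```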
@@ -646,3 +615,113 @@ func (h *HugoSites) findAllPagesByKind(kind string) Pages {
func (h *HugoSites) findAllPagesByKindNotIn(kind string) Pages {
	return h.findPagesByKindNotIn(kind, h.Sites[0].AllPages)
}

func (h *HugoSites) findPagesByShortcode(shortcode string) Pages {
	var pages Pages
	for _, s := range h.Sites {
		pages = append(pages, s.findPagesByShortcode(shortcode)...)
	}
	return pages
}

// Used in partial reloading to determine if the change is in a bundle.
type contentChangeMap struct {
	mu       sync.RWMutex
	branches []string
	leafs    []string

	// Hugo supports symlinked content (both directories and files). This
	// can lead to situations where the same file can be referenced from several
	// locations in /content -- which is really cool, but also means we have to
	// go an extra mile to handle changes.
	// This map is only used in watch mode.
	// It maps either a file to other files, or a real dir to the set of content directories where it is in use.
	symContent   map[string]map[string]bool
	symContentMu sync.Mutex
}

func (m *contentChangeMap) add(filename string, tp bundleDirType) {
	m.mu.Lock()
	dir := filepath.Dir(filename)
	switch tp {
	case bundleBranch:
		m.branches = append(m.branches, dir)
	case bundleLeaf:
		m.leafs = append(m.leafs, dir)
	default:
		panic("invalid bundle type")
	}
	m.mu.Unlock()
}

// Track the addition of bundle dirs.
func (m *contentChangeMap) handleBundles(b *bundleDirs) {
	for _, bd := range b.bundles {
		m.add(bd.fi.Filename(), bd.tp)
	}
}

// resolveAndRemove resolves the given filename to the root folder of a bundle, if relevant.
// It also removes the entry from the map. It will be re-added again by the partial
// build if it still is a bundle.
func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bundleDirType) {
	m.mu.RLock()
	defer m.mu.RUnlock()

	dir, name := filepath.Split(filename)
	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
	fileTp, isContent := classifyBundledFile(name)

	// If the file itself is a bundle, no need to look further:
	if fileTp > bundleNot {
		return dir, dir, fileTp
	}

	// This may be a member of a bundle. Start with branch bundles, the most specific.
	if !isContent {
		for i, b := range m.branches {
			if b == dir {
				m.branches = append(m.branches[:i], m.branches[i+1:]...)
				return dir, dir, bundleBranch
			}
		}
	}

	// And finally the leaf bundles, which can contain anything.
	for i, l := range m.leafs {
		if strings.HasPrefix(dir, l) {
			m.leafs = append(m.leafs[:i], m.leafs[i+1:]...)
			return dir, dir, bundleLeaf
		}
	}

	// Not part of any bundle
	return dir, filename, bundleNot
}

func (m *contentChangeMap) addSymbolicLinkMapping(from, to string) {
	m.symContentMu.Lock()
	mm, found := m.symContent[from]
	if !found {
		mm = make(map[string]bool)
		m.symContent[from] = mm
	}
	mm[to] = true
	m.symContentMu.Unlock()
}

func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string {
	mm, found := m.symContent[dir]
	if !found {
		return nil
	}
	dirs := make([]string, len(mm))
	i := 0
	for dir := range mm {
		dirs[i] = dir
		i++
	}

	sort.Strings(dirs)
	return dirs
}
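To make `resolveAndRemove`'s contract concrete: a change to any file under a registered leaf bundle resolves to the bundle's root directory, so the whole bundle is rebuilt as a unit. The sketch below reproduces just that prefix lookup with plain types; `resolveBundleRoot` and its inputs are illustrative stand-ins for `contentChangeMap`'s state, not Hugo's API:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// resolveBundleRoot mimics the leaf-bundle lookup in resolveAndRemove:
// if the changed file's directory lies inside a known leaf bundle, return
// the bundle root so the caller rebuilds the bundle as a whole.
func resolveBundleRoot(leafs []string, filename string) (string, bool) {
	dir := filepath.Dir(filename)
	for _, l := range leafs {
		if strings.HasPrefix(dir, l) {
			return l, true
		}
	}
	return filename, false
}

func main() {
	leafs := []string{filepath.FromSlash("content/post/my-bundle")}
	root, ok := resolveBundleRoot(leafs,
		filepath.FromSlash("content/post/my-bundle/images/a.jpg"))
	fmt.Println(root, ok) // content/post/my-bundle true
}
```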
@@ -15,7 +15,6 @@ package hugolib
import (
	"bytes"
	"time"

	"errors"

@@ -30,7 +29,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
		h.Metrics.Reset()
	}

	t0 := time.Now()
	//t0 := time.Now()

	// Need a pointer as this may be modified.
	conf := &config
@@ -63,10 +62,6 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
		return err
	}

	if config.PrintStats {
		h.Log.FEEDBACK.Printf("total in %v ms\n", int(1000*time.Since(t0).Seconds()))
	}

	if h.Metrics != nil {
		var b bytes.Buffer
		h.Metrics.WriteMetrics(&b)
@@ -101,8 +96,6 @@ func (h *HugoSites) init(config *BuildCfg) error {
		}
	}

	h.runMode.Watching = config.Watching

	return nil
}

@@ -115,12 +108,10 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
		return errors.New("Rebuild does not support 'ResetState'.")
	}

	if !config.Watching {
	if !h.running {
		return errors.New("Rebuild called when not in watch mode")
	}

	h.runMode.Watching = config.Watching

	if config.whatChanged.source {
		// This is for the non-renderable content pages (rarely used, I guess).
		// We could maybe detect if this is really needed, but it should be
@@ -147,7 +138,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {

	if len(events) > 0 {
		// This is a rebuild
		changed, err := firstSite.reProcess(events)
		changed, err := firstSite.processPartial(events)
		config.whatChanged = &changed
		return err
	}
@@ -188,25 +179,19 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
	}

	for _, s := range h.Sites {
		s.siteStats = &siteStats{}
		for _, p := range s.Pages {
			// May have been set in front matter
			if len(p.outputFormats) == 0 {
				p.outputFormats = s.outputFormats[p.Kind]
			}

			cnt := len(p.outputFormats)
			if p.Kind == KindPage {
				s.siteStats.pageCountRegular += cnt
			for _, r := range p.Resources.ByType(pageResourceType) {
				r.(*Page).outputFormats = p.outputFormats
			}
			s.siteStats.pageCount += cnt

			if err := p.initTargetPathDescriptor(); err != nil {
				return err
			}
			if err := p.initURLs(); err != nil {
			if err := p.initPaths(); err != nil {
				return err
			}

		}
		s.assembleMenus()
		s.refreshPageCaches()
@@ -222,7 +207,6 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
}

func (h *HugoSites) render(config *BuildCfg) error {

	for _, s := range h.Sites {
		s.initRenderFormats()
		for i, rf := range s.renderFormats {
@@ -235,10 +219,6 @@ func (h *HugoSites) render(config *BuildCfg) error {
			}
		}
	}

		if !config.SkipRender && config.PrintStats {
			s.Stats()
		}
	}

	if !config.SkipRender {
@@ -16,7 +16,6 @@ import (
	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/source"
	"github.com/spf13/afero"
	"github.com/spf13/viper"
	"github.com/stretchr/testify/require"
@@ -26,6 +25,7 @@ type testSiteConfig struct {
	DefaultContentLanguage         string
	DefaultContentLanguageInSubdir bool
	Fs                             afero.Fs
	Running                        bool
}

func TestMultiSitesMainLangInRoot(t *testing.T) {
@@ -226,7 +226,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
	gp1 := sites.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
	require.NotNil(t, gp1)
	require.Equal(t, "doc1", gp1.Title)
	gp2 := sites.GetContentPage(filepath.FromSlash("content/sect/notfound.md"))
	gp2 := sites.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
	require.Nil(t, gp2)

	enSite := sites.Sites[0]
@@ -238,7 +238,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
	if len(enSite.RegularPages) != 4 {
		t.Fatal("Expected 4 english pages")
	}
	require.Len(t, enSite.Source.Files(), 14, "should have 13 source files")
	require.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")

	doc1en := enSite.RegularPages[0]
@@ -401,12 +400,11 @@ func TestMultiSitesRebuild(t *testing.T) {
	if !isCI() {
		defer leaktest.CheckTimeout(t, 30*time.Second)()
	}
	siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
	siteConfig := testSiteConfig{Running: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: true}
	sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
	fs := sites.Fs
	cfg := BuildCfg{Watching: true}
	th := testHelper{sites.Cfg, fs, t}

	cfg := BuildCfg{}
	err := sites.Build(cfg)

	if err != nil {
@@ -446,8 +444,10 @@ func TestMultiSitesRebuild(t *testing.T) {
		// * Change a template
		// * Change language file
		{
			nil,
			[]fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}},
			func(t *testing.T) {
				fs.Source.Remove("content/sect/doc2.en.md")
			},
			[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc2.en.md"), Op: fsnotify.Remove}},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 3, "1 en removed")

@@ -467,9 +467,9 @@ func TestMultiSitesRebuild(t *testing.T) {
				writeNewContentFile(t, fs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10)
			},
			[]fsnotify.Event{
				{Name: "content/new1.en.md", Op: fsnotify.Create},
				{Name: "content/new2.en.md", Op: fsnotify.Create},
				{Name: "content/new1.fr.md", Op: fsnotify.Create},
				{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create},
				{Name: filepath.FromSlash("content/new2.en.md"), Op: fsnotify.Create},
				{Name: filepath.FromSlash("content/new1.fr.md"), Op: fsnotify.Create},
			},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5)
@@ -490,7 +490,7 @@ func TestMultiSitesRebuild(t *testing.T) {
				doc1 += "CHANGED"
				writeSource(t, fs, p, doc1)
			},
			[]fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}},
			[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5)
				doc1 := readDestination(t, fs, "public/en/sect/doc1-slug/index.html")
@@ -506,8 +506,8 @@ func TestMultiSitesRebuild(t *testing.T) {
				}
			},
			[]fsnotify.Event{
				{Name: "content/new1renamed.en.md", Op: fsnotify.Rename},
				{Name: "content/new1.en.md", Op: fsnotify.Rename},
				{Name: filepath.FromSlash("content/new1renamed.en.md"), Op: fsnotify.Rename},
				{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Rename},
			},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5, "Rename")
@@ -523,7 +523,7 @@ func TestMultiSitesRebuild(t *testing.T) {
				templateContent += "{{ print \"Template Changed\"}}"
				writeSource(t, fs, template, templateContent)
			},
			[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
			[]fsnotify.Event{{Name: filepath.FromSlash("layouts/_default/single.html"), Op: fsnotify.Write}},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5)
				require.Len(t, enSite.AllPages, 30)
@@ -540,7 +540,7 @@ func TestMultiSitesRebuild(t *testing.T) {
				langContent = strings.Replace(langContent, "Bonjour", "Salut", 1)
				writeSource(t, fs, languageFile, langContent)
			},
			[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
			[]fsnotify.Event{{Name: filepath.FromSlash("i18n/fr.yaml"), Op: fsnotify.Write}},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5)
				require.Len(t, enSite.AllPages, 30)
@@ -563,7 +563,7 @@ func TestMultiSitesRebuild(t *testing.T) {
				writeSource(t, fs, "layouts/shortcodes/shortcode.html", "Modified Shortcode: {{ i18n \"hello\" }}")
			},
			[]fsnotify.Event{
				{Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write},
				{Name: filepath.FromSlash("layouts/shortcodes/shortcode.html"), Op: fsnotify.Write},
			},
			func(t *testing.T) {
				require.Len(t, enSite.RegularPages, 5)
@@ -1097,16 +1097,16 @@ hello:
	}

	// Sources
	sources := []source.ByteSource{
		{Name: filepath.FromSlash("root.en.md"), Content: []byte(`---
	sources := [][2]string{
		{filepath.FromSlash("root.en.md"), `---
title: root
weight: 10000
slug: root
publishdate: "2000-01-01"
---
# root
`)},
		{Name: filepath.FromSlash("sect/doc1.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("sect/doc1.en.md"), `---
title: doc1
weight: 1
slug: doc1-slug
@@ -1122,8 +1122,8 @@ publishdate: "2000-01-01"
{{< lingo >}}

NOTE: slug should be used as URL
`)},
		{Name: filepath.FromSlash("sect/doc1.fr.md"), Content: []byte(`---
`},
		{filepath.FromSlash("sect/doc1.fr.md"), `---
title: doc1
weight: 1
plaques:
@@ -1140,8 +1140,8 @@ publishdate: "2000-01-04"

NOTE: should be in the 'en' Page's 'Translations' field.
NOTE: date is after "doc3"
`)},
		{Name: filepath.FromSlash("sect/doc2.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("sect/doc2.en.md"), `---
title: doc2
weight: 2
publishdate: "2000-01-02"
@@ -1149,8 +1149,8 @@ publishdate: "2000-01-02"
# doc2
*some content*
NOTE: without slug, "doc2" should be used, without ".en" as URL
`)},
		{Name: filepath.FromSlash("sect/doc3.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("sect/doc3.en.md"), `---
title: doc3
weight: 3
publishdate: "2000-01-03"
@@ -1163,8 +1163,8 @@ url: /superbob
# doc3
*some content*
NOTE: third 'en' doc, should trigger pagination on home page.
`)},
		{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte(`---
`},
		{filepath.FromSlash("sect/doc4.md"), `---
title: doc4
weight: 4
plaques:
@@ -1175,8 +1175,8 @@ publishdate: "2000-01-05"
*du contenu francophone*
NOTE: should use the defaultContentLanguage and mark this doc as 'fr'.
NOTE: doesn't have any corresponding translation in 'en'
`)},
		{Name: filepath.FromSlash("other/doc5.fr.md"), Content: []byte(`---
`},
		{filepath.FromSlash("other/doc5.fr.md"), `---
title: doc5
weight: 5
publishdate: "2000-01-06"
@@ -1184,45 +1184,45 @@ publishdate: "2000-01-06"
# doc5
*autre contenu francophone*
NOTE: should use the "permalinks" configuration with :filename
`)},
`},
		// Add some for the stats
		{Name: filepath.FromSlash("stats/expired.fr.md"), Content: []byte(`---
		{filepath.FromSlash("stats/expired.fr.md"), `---
title: expired
publishdate: "2000-01-06"
expiryDate: "2001-01-06"
---
# Expired
`)},
		{Name: filepath.FromSlash("stats/future.fr.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/future.fr.md"), `---
title: future
weight: 6
publishdate: "2100-01-06"
---
# Future
`)},
		{Name: filepath.FromSlash("stats/expired.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/expired.en.md"), `---
title: expired
weight: 7
publishdate: "2000-01-06"
expiryDate: "2001-01-06"
---
# Expired
`)},
		{Name: filepath.FromSlash("stats/future.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/future.en.md"), `---
title: future
weight: 6
publishdate: "2100-01-06"
---
# Future
`)},
		{Name: filepath.FromSlash("stats/draft.en.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/draft.en.md"), `---
title: expired
publishdate: "2000-01-06"
draft: true
---
# Draft
`)},
		{Name: filepath.FromSlash("stats/tax.nn.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/tax.nn.md"), `---
title: Tax NN
weight: 8
publishdate: "2000-01-06"
@@ -1231,8 +1231,8 @@ lag:
- Sogndal
---
# Tax NN
`)},
		{Name: filepath.FromSlash("stats/tax.nb.md"), Content: []byte(`---
`},
		{filepath.FromSlash("stats/tax.nb.md"), `---
title: Tax NB
weight: 8
publishdate: "2000-01-06"
@@ -1241,7 +1241,7 @@ lag:
- Sogndal
---
# Tax NB
`)},
`},
	}

	configFile := "multilangconfig." + configSuffix
@@ -1252,10 +1252,8 @@ lag:

	fs := hugofs.NewFrom(mf, cfg)

	// Hugo support using ByteSource's directly (for testing),
	// but to make it more real, we write them to the mem file system.
	for _, s := range sources {
		if err := afero.WriteFile(mf, filepath.Join("content", s.Name), s.Content, 0755); err != nil {
		if err := afero.WriteFile(mf, filepath.Join("content", s[0]), []byte(s[1]), 0755); err != nil {
			t.Fatalf("Failed to write file: %s", err)
		}
	}
@@ -1263,7 +1261,7 @@ lag:
	// Add some data
	writeSource(t, fs, "data/hugo.toml", "slogan = \"Hugo Rocks!\"")

	sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg}) //, Logger: newDebugLogger()})
	sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, Running: siteConfig.Running}) //, Logger: newDebugLogger()})

	if err != nil {
		t.Fatalf("Failed to create sites: %s", err)
@@ -1311,7 +1309,7 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		// Print some debug info
		root := strings.Split(filename, helpers.FilePathSeparator)[0]
		root := "/" //strings.Split(filename, helpers.FilePathSeparator)[0]
		afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
			if info != nil && !info.IsDir() {
				fmt.Println("    ", path)
@@ -47,13 +47,12 @@ languageName = "Nynorsk"

`

	siteConfig := testSiteConfig{Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false}
	siteConfig := testSiteConfig{Running: true, Fs: afero.NewMemMapFs(), DefaultContentLanguage: "fr", DefaultContentLanguageInSubdir: false}
	sites := createMultiTestSites(t, siteConfig, multiSiteTOMLConfigTemplate)
	fs := sites.Fs
	cfg := BuildCfg{Watching: true}
	th := testHelper{sites.Cfg, fs, t}
	assert := require.New(t)

	cfg := BuildCfg{}
	err := sites.Build(cfg)
	assert.NoError(err)
BIN hugolib/hugolib.debug Normal file
Binary file not shown.
@@ -1,654 +0,0 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

// TODO(bep) remove this file when the reworked tests in menu_test.go is done.
// NOTE: Do not add more tests to this file!

import (
	"fmt"
	"strings"
	"testing"

	"github.com/gohugoio/hugo/deps"

	"path/filepath"

	"github.com/BurntSushi/toml"
	"github.com/gohugoio/hugo/source"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

const (
	confMenu1 = `
[[menu.main]]
name = "Go Home"
url = "/"
weight = 1
pre = "<div>"
post = "</div>"
[[menu.main]]
name = "Blog"
url = "/posts"
[[menu.main]]
name = "ext"
url = "http://gohugo.io"
identifier = "ext"
[[menu.main]]
name = "ext2"
url = "http://foo.local/Zoo/foo"
identifier = "ext2"
[[menu.grandparent]]
name = "grandparent"
url = "/grandparent"
identifier = "grandparentId"
[[menu.grandparent]]
name = "parent"
url = "/parent"
identifier = "parentId"
parent = "grandparentId"
[[menu.grandparent]]
name = "Go Home3"
url = "/"
identifier = "grandchildId"
parent = "parentId"
[[menu.tax]]
name = "Tax1"
url = "/two/key/"
identifier="1"
[[menu.tax]]
name = "Tax2"
url = "/two/key/"
identifier="2"
[[menu.tax]]
name = "Tax RSS"
url = "/two/key.xml"
identifier="xml"
[[menu.hash]]
name = "Tax With #"
url = "/resource#anchor"
identifier="hash"
[[menu.unicode]]
name = "Unicode Russian"
identifier = "unicode-russian"
url = "/новости-проекта" # Russian => "news-project"
[[menu.with_title]]
name="entry with title"
title="a menuentry title"
url="/title"
identifier="titled"`
)

var menuPage1 = []byte(`+++
title = "One"
weight = 1
[menu]
	[menu.p_one]
+++
Front Matter with Menu Pages`)

var menuPage2 = []byte(`+++
title = "Two"
weight = 2
[menu]
	[menu.p_one]
	[menu.p_two]
		identifier = "Two"

+++
Front Matter with Menu Pages`)

var menuPage3 = []byte(`+++
title = "Three"
weight = 3
[menu]
	[menu.p_two]
		Name = "Three"
		Parent = "Two"
+++
Front Matter with Menu Pages`)

var menuPage4 = []byte(`+++
title = "Four"
weight = 4
[menu]
	[menu.p_two]
		Name = "Four"
		Parent = "Three"
+++
Front Matter with Menu Pages`)

var menuPageSources = []source.ByteSource{
	{Name: filepath.FromSlash("sect/doc1.md"), Content: menuPage1},
	{Name: filepath.FromSlash("sect/doc2.md"), Content: menuPage2},
	{Name: filepath.FromSlash("sect/doc3.md"), Content: menuPage3},
}

var menuPageSectionsSources = []source.ByteSource{
	{Name: filepath.FromSlash("first/doc1.md"), Content: menuPage1},
	{Name: filepath.FromSlash("first/doc2.md"), Content: menuPage2},
	{Name: filepath.FromSlash("second-section/doc3.md"), Content: menuPage3},
	{Name: filepath.FromSlash("Fish and Chips/doc4.md"), Content: menuPage4},
}

func tstCreateMenuPageWithNameTOML(title, menu, name string) []byte {
	return []byte(fmt.Sprintf(`+++
title = "%s"
weight = 1
[menu]
	[menu.%s]
		name = "%s"
+++
Front Matter with Menu with Name`, title, menu, name))
}

func tstCreateMenuPageWithIdentifierTOML(title, menu, identifier string) []byte {
	return []byte(fmt.Sprintf(`+++
title = "%s"
weight = 1
[menu]
	[menu.%s]
		identifier = "%s"
		name = "somename"
+++
Front Matter with Menu with Identifier`, title, menu, identifier))
}

func tstCreateMenuPageWithNameYAML(title, menu, name string) []byte {
	return []byte(fmt.Sprintf(`---
title: "%s"
weight: 1
menu:
    %s:
      name: "%s"
---
Front Matter with Menu with Name`, title, menu, name))
}

func tstCreateMenuPageWithIdentifierYAML(title, menu, identifier string) []byte {
	return []byte(fmt.Sprintf(`---
title: "%s"
weight: 1
menu:
    %s:
      identifier: "%s"
      name: "somename"
---
Front Matter with Menu with Identifier`, title, menu, identifier))
}

// Issue 817 - identifier should trump everything
func TestPageMenuWithIdentifier(t *testing.T) {
	t.Parallel()
	toml := []source.ByteSource{
		{Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")},
		{Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")},
		{Name: "sect/doc3.md", Content: tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")}, // duplicate
	}

	yaml := []source.ByteSource{
		{Name: "sect/doc1.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i1")},
		{Name: "sect/doc2.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")},
		{Name: "sect/doc3.md", Content: tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")}, // duplicate
	}

	doTestPageMenuWithIdentifier(t, toml)
	doTestPageMenuWithIdentifier(t, yaml)

}

func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSource) {

	s := setupMenuTests(t, menuPageSources)

	assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")

	me1 := findTestMenuEntryByID(s, "m1", "i1")
	me2 := findTestMenuEntryByID(s, "m1", "i2")

	require.NotNil(t, me1)
	require.NotNil(t, me2)

	assert.True(t, strings.Contains(me1.URL, "doc1"), me1.URL)
	assert.True(t, strings.Contains(me2.URL, "doc2") || strings.Contains(me2.URL, "doc3"), me2.URL)

}

// Issue 817 contd - name should be second identifier in
func TestPageMenuWithDuplicateName(t *testing.T) {
	t.Parallel()
	toml := []source.ByteSource{
		{Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n1")},
		{Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")},
		{Name: "sect/doc3.md", Content: tstCreateMenuPageWithNameTOML("t1", "m1", "n2")}, // duplicate
	}

	yaml := []source.ByteSource{
		{Name: "sect/doc1.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n1")},
		{Name: "sect/doc2.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n2")},
		{Name: "sect/doc3.md", Content: tstCreateMenuPageWithNameYAML("t1", "m1", "n2")}, // duplicate
	}

	doTestPageMenuWithDuplicateName(t, toml)
	doTestPageMenuWithDuplicateName(t, yaml)

}

func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.ByteSource) {

	s := setupMenuTests(t, menuPageSources)

	assert.Equal(t, 3, len(s.RegularPages), "Not enough pages")

	me1 := findTestMenuEntryByName(s, "m1", "n1")
	me2 := findTestMenuEntryByName(s, "m1", "n2")

	require.NotNil(t, me1)
	require.NotNil(t, me2)

	assert.True(t, strings.Contains(me1.URL, "doc1"), me1.URL)
	assert.True(t, strings.Contains(me2.URL, "doc2") || strings.Contains(me2.URL, "doc3"), me2.URL)

}

func TestPageMenu(t *testing.T) {
	t.Parallel()
	s := setupMenuTests(t, menuPageSources)

	if len(s.RegularPages) != 3 {
		t.Fatalf("Posts not created, expected 3 got %d", len(s.RegularPages))
	}

	first := s.RegularPages[0]
	second := s.RegularPages[1]
	third := s.RegularPages[2]

	pOne := findTestMenuEntryByName(s, "p_one", "One")
	pTwo := findTestMenuEntryByID(s, "p_two", "Two")

	for i, this := range []struct {
		menu           string
		page           *Page
		menuItem       *MenuEntry
		isMenuCurrent  bool
		hasMenuCurrent bool
	}{
		{"p_one", first, pOne, true, false},
		{"p_one", first, pTwo, false, false},
		{"p_one", second, pTwo, false, false},
		{"p_two", second, pTwo, true, false},
		{"p_two", third, pTwo, false, true},
		{"p_one", third, pTwo, false, false},
	} {

		if i != 4 {
			continue
		}

		isMenuCurrent := this.page.IsMenuCurrent(this.menu, this.menuItem)
		hasMenuCurrent := this.page.HasMenuCurrent(this.menu, this.menuItem)

		if isMenuCurrent != this.isMenuCurrent {
			t.Errorf("[%d] Wrong result from IsMenuCurrent: %v", i, isMenuCurrent)
		}

		if hasMenuCurrent != this.hasMenuCurrent {
			t.Errorf("[%d] Wrong result for menuItem %v for HasMenuCurrent: %v", i, this.menuItem, hasMenuCurrent)
		}

	}

}

func TestMenuURL(t *testing.T) {
	t.Parallel()
	s := setupMenuTests(t, menuPageSources)

	for i, this := range []struct {
		me          *MenuEntry
		expectedURL string
	}{
		// issue #888
		{findTestMenuEntryByID(s, "hash", "hash"), "/Zoo/resource#anchor"},
		// issue #1774
		{findTestMenuEntryByID(s, "main", "ext"), "http://gohugo.io"},
		{findTestMenuEntryByID(s, "main", "ext2"), "http://foo.local/Zoo/foo"},
	} {

		if this.me == nil {
			t.Errorf("[%d] MenuEntry not found", i)
			continue
		}

		if this.me.URL != this.expectedURL {
			t.Errorf("[%d] Got URL %s expected %s", i, this.me.URL, this.expectedURL)
		}

	}

}

// Issue #1934
func TestYAMLMenuWithMultipleEntries(t *testing.T) {
	t.Parallel()
	ps1 := []byte(`---
title: "Yaml 1"
weight: 5
menu: ["p_one", "p_two"]
---
Yaml Front Matter with Menu Pages`)

	ps2 := []byte(`---
title: "Yaml 2"
weight: 5
menu:
    p_three:
    p_four:
---
Yaml Front Matter with Menu Pages`)

	s := setupMenuTests(t, []source.ByteSource{
		{Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1},
		{Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}})

	p1 := s.RegularPages[0]
	assert.Len(t, p1.Menus(), 2, "List YAML")
	p2 := s.RegularPages[1]
	assert.Len(t, p2.Menus(), 2, "Map YAML")

}

// issue #719
func TestMenuWithUnicodeURLs(t *testing.T) {
	t.Parallel()
	for _, canonifyURLs := range []bool{true, false} {
		doTestMenuWithUnicodeURLs(t, canonifyURLs)
	}
}

func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs bool) {

	s := setupMenuTests(t, menuPageSources, "canonifyURLs", canonifyURLs)

	unicodeRussian := findTestMenuEntryByID(s, "unicode", "unicode-russian")

	expected := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0"

	if !canonifyURLs {
		expected = "/Zoo" + expected
	}

	assert.Equal(t, expected, unicodeRussian.URL)
}

func TestMenuWithTitle(t *testing.T) {
	s := setupMenuTests(t, menuPageSources)
	titled := findTestMenuEntryByID(s, "with_title", "titled")
	expected := "a menuentry title"
	assert.Equal(t, expected, titled.Title())
}

// Issue #1114
func TestSectionPagesMenu2(t *testing.T) {
	t.Parallel()
	doTestSectionPagesMenu(true, t)
	doTestSectionPagesMenu(false, t)
}

func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) {

	s := setupMenuTests(t, menuPageSectionsSources,
		"sectionPagesMenu", "spm",
		"canonifyURLs", canonifyURLs,
	)

	sects := s.getPage(KindHome).Sections()

	require.Equal(t, 3, len(sects))

	firstSectionPages := s.getPage(KindSection, "first").Pages
	require.Equal(t, 2, len(firstSectionPages))
	secondSectionPages := s.getPage(KindSection, "second-section").Pages
	require.Equal(t, 1, len(secondSectionPages))
	fishySectionPages := s.getPage(KindSection, "Fish and Chips").Pages
	require.Equal(t, 1, len(fishySectionPages))

	nodeFirst := s.getPage(KindSection, "first")
	require.NotNil(t, nodeFirst)
	nodeSecond := s.getPage(KindSection, "second-section")
	require.NotNil(t, nodeSecond)
	nodeFishy := s.getPage(KindSection, "Fish and Chips")
	require.Equal(t, "Fish and Chips", nodeFishy.sections[0])

	firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
	secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
	fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")

	require.NotNil(t, firstSectionMenuEntry)
	require.NotNil(t, secondSectionMenuEntry)
	require.NotNil(t, nodeFirst)
	require.NotNil(t, nodeSecond)
	require.NotNil(t, fishySectionMenuEntry)
	require.NotNil(t, nodeFishy)

	require.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
	require.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
	require.False(t, nodeFirst.IsMenuCurrent("spm", fishySectionMenuEntry))
	require.True(t, nodeFishy.IsMenuCurrent("spm", fishySectionMenuEntry))
	require.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)

	for _, p := range firstSectionPages {
		require.True(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
		require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
	}

	for _, p := range secondSectionPages {
		require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
		require.True(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
	}

	for _, p := range fishySectionPages {
		require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
		require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
		require.True(t, p.HasMenuCurrent("spm", fishySectionMenuEntry))
	}
}

func TestMenuLimit(t *testing.T) {
	t.Parallel()
	s := setupMenuTests(t, menuPageSources)
	m := *s.Menus["main"]

	// main menu has 4 entries
	firstTwo := m.Limit(2)
	assert.Equal(t, 2, len(firstTwo))
	for i := 0; i < 2; i++ {
		assert.Equal(t, m[i], firstTwo[i])
	}
	assert.Equal(t, m, m.Limit(4))
	assert.Equal(t, m, m.Limit(5))
}

func TestMenuSortByN(t *testing.T) {
	t.Parallel()
	for i, this := range []struct {
		sortFunc   func(p Menu) Menu
		assertFunc func(p Menu) bool
	}{
		{(Menu).Sort, func(p Menu) bool { return p[0].Weight == 1 && p[1].Name == "nx" && p[2].Identifier == "ib" }},
		{(Menu).ByWeight, func(p Menu) bool { return p[0].Weight == 1 && p[1].Name == "nx" && p[2].Identifier == "ib" }},
		{(Menu).ByName, func(p Menu) bool { return p[0].Name == "na" }},
		{(Menu).Reverse, func(p Menu) bool { return p[0].Identifier == "ib" && p[len(p)-1].Identifier == "ia" }},
	} {
		menu := Menu{&MenuEntry{Weight: 3, Name: "nb", Identifier: "ia"},
			&MenuEntry{Weight: 1, Name: "na", Identifier: "ic"},
			&MenuEntry{Weight: 1, Name: "nx", Identifier: "ic"},
			&MenuEntry{Weight: 2, Name: "nb", Identifier: "ix"},
			&MenuEntry{Weight: 2, Name: "nb", Identifier: "ib"}}

		sorted := this.sortFunc(menu)

		if !this.assertFunc(sorted) {
			t.Errorf("[%d] sort error", i)
		}
	}

}

func TestHomeNodeMenu(t *testing.T) {
	t.Parallel()
	s := setupMenuTests(t, menuPageSources,
		"canonifyURLs", true,
		"uglyURLs", false,
	)

	home := s.getPage(KindHome)
	homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}

	for i, this := range []struct {
		menu           string
		menuItem       *MenuEntry
		isMenuCurrent  bool
		hasMenuCurrent bool
	}{
		{"main", homeMenuEntry, true, false},
		{"doesnotexist", homeMenuEntry, false, false},
		{"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
		{"grandparent", findTestMenuEntryByID(s, "grandparent", "grandparentId"), false, true},
		{"grandparent", findTestMenuEntryByID(s, "grandparent", "parentId"), false, true},
		{"grandparent", findTestMenuEntryByID(s, "grandparent", "grandchildId"), true, false},
	} {

		isMenuCurrent := home.IsMenuCurrent(this.menu, this.menuItem)
		hasMenuCurrent := home.HasMenuCurrent(this.menu, this.menuItem)

		if isMenuCurrent != this.isMenuCurrent {
			fmt.Println("isMenuCurrent", isMenuCurrent)
			fmt.Printf("this: %#v\n", this)
			t.Errorf("[%d] Wrong result from IsMenuCurrent: %v for %q", i, isMenuCurrent, this.menuItem)
		}

		if hasMenuCurrent != this.hasMenuCurrent {
			fmt.Println("hasMenuCurrent", hasMenuCurrent)
			fmt.Printf("this: %#v\n", this)
			t.Errorf("[%d] Wrong result for menu %q menuItem %v for HasMenuCurrent: %v", i, this.menu, this.menuItem, hasMenuCurrent)
		}
	}
}

func TestHopefullyUniqueID(t *testing.T) {
	t.Parallel()
	assert.Equal(t, "i", (&MenuEntry{Identifier: "i", URL: "u", Name: "n"}).hopefullyUniqueID())
	assert.Equal(t, "u", (&MenuEntry{Identifier: "", URL: "u", Name: "n"}).hopefullyUniqueID())
	assert.Equal(t, "n", (&MenuEntry{Identifier: "", URL: "", Name: "n"}).hopefullyUniqueID())
}

func TestAddMenuEntryChild(t *testing.T) {
	t.Parallel()
	root := &MenuEntry{Weight: 1}
	root.addChild(&MenuEntry{Weight: 2})
	root.addChild(&MenuEntry{Weight: 1})
	assert.Equal(t, 2, len(root.Children))
	assert.Equal(t, 1, root.Children[0].Weight)
}

var testMenuIdentityMatcher = func(me *MenuEntry, id string) bool { return me.Identifier == id }
var testMenuNameMatcher = func(me *MenuEntry, id string) bool { return me.Name == id }

func findTestMenuEntryByID(s *Site, mn string, id string) *MenuEntry {
	return findTestMenuEntry(s, mn, id, testMenuIdentityMatcher)
}
func findTestMenuEntryByName(s *Site, mn string, id string) *MenuEntry {
	return findTestMenuEntry(s, mn, id, testMenuNameMatcher)
}

func findTestMenuEntry(s *Site, mn string, id string, matcher func(me *MenuEntry, id string) bool) *MenuEntry {
	var found *MenuEntry
	if menu, ok := s.Menus[mn]; ok {
		for _, me := range *menu {

			if matcher(me, id) {
				if found != nil {
					panic(fmt.Sprintf("Duplicate menu entry in menu %s with id/name %s", mn, id))
				}
				found = me
			}

			descendant := findDescendantTestMenuEntry(me, id, matcher)
			if descendant != nil {
				if found != nil {
					panic(fmt.Sprintf("Duplicate menu entry in menu %s with id/name %s", mn, id))
				}
				found = descendant
			}
		}
	}
	return found
}

func findDescendantTestMenuEntry(parent *MenuEntry, id string, matcher func(me *MenuEntry, id string) bool) *MenuEntry {
	var found *MenuEntry
	if parent.HasChildren() {
		for _, child := range parent.Children {

			if matcher(child, id) {
				if found != nil {
					panic(fmt.Sprintf("Duplicate menu entry in menuitem %s with id/name %s", parent.KeyName(), id))
				}
				found = child
			}

			descendant := findDescendantTestMenuEntry(child, id, matcher)
			if descendant != nil {
				if found != nil {
					panic(fmt.Sprintf("Duplicate menu entry in menuitem %s with id/name %s", parent.KeyName(), id))
				}
				found = descendant
			}
		}
	}
	return found
}

func setupMenuTests(t *testing.T, pageSources []source.ByteSource, configKeyValues ...interface{}) *Site {

	var (
		cfg, fs = newTestCfg()
	)

	menus, err := tomlToMap(confMenu1)
	require.NoError(t, err)

	cfg.Set("menu", menus["menu"])
	cfg.Set("baseURL", "http://foo.local/Zoo/")

	for i := 0; i < len(configKeyValues); i += 2 {
		cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
	}

	for _, src := range pageSources {
		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))

	}

	return buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})

}

func tomlToMap(s string) (map[string]interface{}, error) {
	var data = make(map[string]interface{})
	_, err := toml.Decode(s, &data)
	return data, err
}
156 hugolib/page.go
@@ -25,6 +25,8 @@ import (
	"github.com/bep/gitmap"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/resource"

	"github.com/gohugoio/hugo/output"
	"github.com/gohugoio/hugo/parser"
	"github.com/mitchellh/mapstructure"
@@ -80,6 +82,8 @@ const (
	kindSitemap   = "sitemap"
	kindRobotsTXT = "robotsTXT"
	kind404       = "404"

	pageResourceType = "page"
)

type Page struct {
@@ -101,6 +105,12 @@ type Page struct {
	// This collection will be nil for regular pages.
	Pages Pages

	// Since Hugo 0.32, a Page can have resources such as images and CSS associated
	// with itself. The resource will typically be placed relative to the Page,
	// but templates should use the links (Permalink and RelPermalink)
	// provided by the Resource object.
	Resources resource.Resources

	// translations will contain references to this page in other language
	// if available.
	translations Pages
@@ -155,9 +165,6 @@ type Page struct {
	// workContent is a copy of rawContent that may be mutated during site build.
	workContent []byte

	// state telling if this is a "new page" or if we have rendered it previously.
	rendered bool

	// whether the content is in a CJK language.
	isCJKLanguage bool
@@ -218,8 +225,9 @@ type Page struct {
	Sitemap Sitemap

	URLPath
	permalink    string
	relPermalink string
	permalink        string
	relPermalink     string
	relPermalinkBase string // relPermalink without extension

	layoutDescriptor output.LayoutDescriptor
@@ -263,6 +271,10 @@ func (p *Page) PubDate() time.Time {
	return p.Date
}

func (*Page) ResourceType() string {
	return pageResourceType
}

func (p *Page) RSSLink() template.URL {
	f, found := p.outputFormats.GetByName(output.RSSFormat.Name)
	if !found {
@@ -726,22 +738,29 @@ func (p *Page) getRenderingConfig() *helpers.BlackFriday {
}

func (s *Site) newPage(filename string) *Page {
	sp := source.NewSourceSpec(s.Cfg, s.Fs)
	p := &Page{
	fi := newFileInfo(
		s.SourceSpec,
		s.absContentDir(),
		filename,
		nil,
		bundleNot,
	)
	return s.newPageFromFile(fi)
}

func (s *Site) newPageFromFile(fi *fileInfo) *Page {
	return &Page{
		pageInit:    &pageInit{},
		Kind:        kindFromFilename(filename),
		Kind:        kindFromFilename(fi.Path()),
		contentType: "",
		Source:      Source{File: *sp.NewFile(filename)},
		Source:      Source{File: fi},
		Keywords:    []string{}, Sitemap: Sitemap{Priority: -1},
		Params:       make(map[string]interface{}),
		translations: make(Pages, 0),
		sections:     sectionsFromFilename(filename),
		sections:     sectionsFromDir(fi.Dir()),
		Site:         &s.Info,
		s:            s,
	}

	s.Log.DEBUG.Println("Reading from", p.File.Path())
	return p
}

func (p *Page) IsRenderable() bool {
@@ -910,8 +929,8 @@ func (p *Page) LinkTitle() string {
}

func (p *Page) shouldBuild() bool {
	return shouldBuild(p.s.Cfg.GetBool("buildFuture"), p.s.Cfg.GetBool("buildExpired"),
		p.s.Cfg.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
	return shouldBuild(p.s.BuildFuture, p.s.BuildExpired,
		p.s.BuildDrafts, p.Draft, p.PublishDate, p.ExpiryDate)
}

func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
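The diff context cuts off `shouldBuild`'s body after its signature. The sketch below is a hedged reconstruction of the gating it performs, not a verbatim copy of Hugo's function: drafts, future-dated pages and expired pages are skipped unless the matching build flag is set.

```go
package main

import (
	"fmt"
	"time"
)

// shouldBuildSketch reconstructs the build gating under stated assumptions:
// each page state (draft, future, expired) is buildable only when the
// corresponding flag allows it; zero dates are treated as "not set".
func shouldBuildSketch(buildFuture, buildExpired, buildDrafts, draft bool,
	publishDate, expiryDate time.Time) bool {
	if draft && !buildDrafts {
		return false
	}
	now := time.Now()
	if !buildFuture && !publishDate.IsZero() && publishDate.After(now) {
		return false
	}
	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(now) {
		return false
	}
	return true
}

func main() {
	future := time.Now().Add(24 * time.Hour)
	fmt.Println(shouldBuildSketch(false, false, false, false, future, time.Time{})) // false
	fmt.Println(shouldBuildSketch(true, false, false, false, future, time.Time{}))  // true
}
```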
@@ -967,20 +986,91 @@ func (p *Page) RelPermalink() string {
	return p.relPermalink
}

func (p *Page) initURLs() error {
	if len(p.outputFormats) == 0 {
		p.outputFormats = p.s.outputFormats[p.Kind]
func (p *Page) subResourceLinkFactory(base string) string {
	return path.Join(p.relPermalinkBase, base)
}

func (p *Page) prepareForRender(cfg *BuildCfg) error {
	s := p.s

	if !p.shouldRenderTo(s.rc.Format) {
		// No need to prepare
		return nil
	}

	var shortcodeUpdate bool
	if p.shortcodeState != nil {
		shortcodeUpdate = p.shortcodeState.updateDelta()
	}

	if !shortcodeUpdate && !cfg.whatChanged.other {
		// No need to process it again.
		return nil
	}

	// If we got this far it means that this is either a new Page pointer
	// or a template or similar has changed so we need to do a rerendering
	// of the shortcodes etc.

	// If in watch mode or if we have multiple output formats,
	// we need to keep the original so we can
	// potentially repeat this process on rebuild.
	needsACopy := p.s.running() || len(p.outputFormats) > 1
	var workContentCopy []byte
	if needsACopy {
		workContentCopy = make([]byte, len(p.workContent))
		copy(workContentCopy, p.workContent)
	} else {
		// Just reuse the same slice.
		workContentCopy = p.workContent
	}

	if p.Markup == "markdown" {
		tmpContent, tmpTableOfContents := helpers.ExtractTOC(workContentCopy)
		p.TableOfContents = helpers.BytesToHTML(tmpTableOfContents)
		workContentCopy = tmpContent
	}
	rel := p.createRelativePermalink()

	var err error
	p.permalink, err = p.s.permalinkForOutputFormat(rel, p.outputFormats[0])
	if err != nil {
		return err
	if workContentCopy, err = handleShortcodes(p, workContentCopy); err != nil {
		s.Log.ERROR.Printf("Failed to handle shortcodes for page %s: %s", p.BaseFileName(), err)
	}
	rel = p.s.PathSpec.PrependBasePath(rel)
	p.relPermalink = rel
	p.layoutDescriptor = p.createLayoutDescriptor()

	if p.Markup != "html" {

		// Now we know enough to create a summary of the page and count some words
		summaryContent, err := p.setUserDefinedSummaryIfProvided(workContentCopy)

		if err != nil {
			s.Log.ERROR.Printf("Failed to set user defined summary for page %q: %s", p.Path(), err)
		} else if summaryContent != nil {
			workContentCopy = summaryContent.content
		}

		p.Content = helpers.BytesToHTML(workContentCopy)

		if summaryContent == nil {
			if err := p.setAutoSummary(); err != nil {
				s.Log.ERROR.Printf("Failed to set user auto summary for page %q: %s", p.pathOrTitle(), err)
			}
		}

	} else {
		p.Content = helpers.BytesToHTML(workContentCopy)
	}

	// analyze for raw stats
	p.analyzePage()

	// Handle bundled pages.
	for _, r := range p.Resources.ByType(pageResourceType) {
		p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
		bp := r.(*Page)
		if err := bp.prepareForRender(cfg); err != nil {
			s.Log.ERROR.Printf("Failed to prepare bundled page %q for render: %s", bp.BaseFileName(), err)
		}
	}

	return nil
}

@@ -1849,14 +1939,18 @@ func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
	return outfile
}

func sectionsFromFilename(filename string) []string {
	var sections []string
	dir, _ := filepath.Split(filename)
	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
	if dir == "" {
func sectionsFromDir(dirname string) []string {
	sections := strings.Split(dirname, helpers.FilePathSeparator)
	if len(sections) == 1 {
		if sections[0] == "" {
			return nil
		}
		return sections
	}
	sections = strings.Split(dir, helpers.FilePathSeparator)
	if len(sections) > 1 && sections[0] == "" {
		return sections[1:]
	}

	return sections
}
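To see what the new `sectionsFromDir` returns for typical inputs, here is a self-contained copy of it with `"/"` standing in for `helpers.FilePathSeparator`:

```go
package main

import (
	"fmt"
	"strings"
)

// sectionsFromDir (copied from the diff above, separator simplified):
// a page's directory relative to /content becomes its section path.
func sectionsFromDir(dirname string) []string {
	sections := strings.Split(dirname, "/")
	if len(sections) == 1 {
		if sections[0] == "" {
			return nil // page at the content root: no sections
		}
		return sections
	}
	if sections[0] == "" {
		return sections[1:] // strip the empty element from a leading separator
	}
	return sections
}

func main() {
	fmt.Println(sectionsFromDir(""))          // []
	fmt.Println(sectionsFromDir("post"))      // [post]
	fmt.Println(sectionsFromDir("/post/sub")) // [post sub]
}
```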
188 hugolib/page_bundler.go Normal file
@@ -0,0 +1,188 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"math"
	"runtime"
	"strings"

	// Use this until errgroup gets ported to context
	// See https://github.com/golang/go/issues/19781
	"golang.org/x/net/context"
	"golang.org/x/sync/errgroup"
)

type siteContentProcessor struct {
	baseDir string

	site *Site

	handleContent contentHandler

	// The input file bundles.
	fileBundlesChan chan *bundleDir

	// The input file singles.
	fileSinglesChan chan *fileInfo

	// These assets should be just copied to destination.
	fileAssetsChan chan []string

	numWorkers int

	// The output Pages
	pagesChan chan *Page

	// Used for partial rebuilds (aka. live reload)
	// Will signal replacement of pages in the site collection.
	partialBuild bool
}

func newSiteContentProcessor(baseDir string, partialBuild bool, s *Site) *siteContentProcessor {
	numWorkers := 12
	if n := runtime.NumCPU() * 3; n > numWorkers {
		numWorkers = n
	}

	numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.owner.Sites))))

	return &siteContentProcessor{
		partialBuild:    partialBuild,
		baseDir:         baseDir,
		site:            s,
		handleContent:   newHandlerChain(s),
		fileBundlesChan: make(chan *bundleDir, numWorkers),
		fileSinglesChan: make(chan *fileInfo, numWorkers),
		fileAssetsChan:  make(chan []string, numWorkers),
		numWorkers:      numWorkers,
		pagesChan:       make(chan *Page, numWorkers),
	}
}

func (s *siteContentProcessor) closeInput() {
	close(s.fileSinglesChan)
	close(s.fileBundlesChan)
	close(s.fileAssetsChan)
}

func (s *siteContentProcessor) process(ctx context.Context) error {
	g1, ctx := errgroup.WithContext(ctx)
	g2, _ := errgroup.WithContext(ctx)

	// There can be only one of these per site.
	g1.Go(func() error {
		for p := range s.pagesChan {
			if p.s != s.site {
				panic(fmt.Sprintf("invalid page site: %v vs %v", p.s, s))
			}

			if s.partialBuild {
				s.site.replacePage(p)
			} else {
				s.site.addPage(p)
			}
		}
		return nil
	})

	for i := 0; i < s.numWorkers; i++ {
		g2.Go(func() error {
			for {
				select {
				case f, ok := <-s.fileSinglesChan:
					if !ok {
						return nil
					}
					err := s.readAndConvertContentFile(f)
					if err != nil {
						return err
					}
				case <-ctx.Done():
					return ctx.Err()
				}
			}
		})

		g2.Go(func() error {
			for {
				select {
				case filenames, ok := <-s.fileAssetsChan:
					if !ok {
						return nil
					}
					for _, filename := range filenames {
						name := strings.TrimPrefix(filename, s.baseDir)
						f, err := s.site.Fs.Source.Open(filename)
						if err != nil {
							return err
						}

						err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, name, f)
						f.Close()
						if err != nil {
							return err
						}
					}

				case <-ctx.Done():
					return ctx.Err()
				}
			}
		})

		g2.Go(func() error {
			for {
				select {
				case bundle, ok := <-s.fileBundlesChan:
					if !ok {
						return nil
					}
					err := s.readAndConvertContentBundle(bundle)
					if err != nil {
						return err
					}
				case <-ctx.Done():
					return ctx.Err()
				}
			}
		})
	}

	if err := g2.Wait(); err != nil {
		return err
	}

	close(s.pagesChan)

	if err := g1.Wait(); err != nil {
		return err
	}

	s.site.rawAllPages.Sort()

	return nil

}

func (s *siteContentProcessor) readAndConvertContentFile(file *fileInfo) error {
	ctx := &handlerContext{source: file, baseDir: s.baseDir, pages: s.pagesChan}
	return s.handleContent(ctx).err
}

func (s *siteContentProcessor) readAndConvertContentBundle(bundle *bundleDir) error {
	ctx := &handlerContext{bundle: bundle, baseDir: s.baseDir, pages: s.pagesChan}
	return s.handleContent(ctx).err
}
683 hugolib/page_bundler_capture.go Normal file
@@ -0,0 +1,683 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/helpers"

	"golang.org/x/sync/errgroup"

	"github.com/gohugoio/hugo/source"
	"github.com/spf13/afero"
	jww "github.com/spf13/jwalterweatherman"
)

var errSkipCyclicDir = errors.New("skip potential cyclic dir")

type capturer struct {
	// To prevent symbolic link cycles: Visit same folder only once.
	seen   map[string]bool
	seenMu sync.Mutex

	handler captureResultHandler

	sourceSpec *source.SourceSpec
	fs         afero.Fs
	logger     *jww.Notepad

	baseDir string

	// Filenames limits the content to process to a list of filenames/directories.
	// This is used for partial building in server mode.
	filenames []string

	// Used to determine how to handle content changes in server mode.
	contentChanges *contentChangeMap

	// Semaphore used to throttle the concurrent sub directory handling.
	sem chan bool
}

func newCapturer(
	logger *jww.Notepad,
	sourceSpec *source.SourceSpec,
	handler captureResultHandler,
	contentChanges *contentChangeMap,
	baseDir string, filenames ...string) *capturer {

	numWorkers := 4
	if n := runtime.NumCPU(); n > numWorkers {
		numWorkers = n
	}

	c := &capturer{
		sem:            make(chan bool, numWorkers),
		handler:        handler,
		sourceSpec:     sourceSpec,
		logger:         logger,
		contentChanges: contentChanges,
		fs:             sourceSpec.Fs.Source, baseDir: baseDir, seen: make(map[string]bool),
		filenames:      filenames}

	return c
}
|
||||
// Captured files and bundles ready to be processed will be passed on to
|
||||
// these channels.
|
||||
type captureResultHandler interface {
|
||||
handleSingles(fis ...*fileInfo)
|
||||
handleCopyFiles(filenames ...string)
|
||||
captureBundlesHandler
|
||||
}
|
||||
|
||||
type captureBundlesHandler interface {
|
||||
handleBundles(b *bundleDirs)
|
||||
}
|
||||
|
||||
type captureResultHandlerChain struct {
|
||||
handlers []captureBundlesHandler
|
||||
}
|
||||
|
||||
func (c *captureResultHandlerChain) handleSingles(fis ...*fileInfo) {
|
||||
for _, h := range c.handlers {
|
||||
if hh, ok := h.(captureResultHandler); ok {
|
||||
hh.handleSingles(fis...)
|
||||
}
|
||||
}
|
||||
}
|
||||
func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
|
||||
for _, h := range c.handlers {
|
||||
h.handleBundles(b)
|
||||
}
|
||||
}
|
||||
|
||||
func (c *captureResultHandlerChain) handleCopyFiles(filenames ...string) {
|
||||
for _, h := range c.handlers {
|
||||
if hh, ok := h.(captureResultHandler); ok {
|
||||
hh.handleCopyFiles(filenames...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *capturer) capturePartial(filenames ...string) error {
|
||||
handled := make(map[string]bool)
|
||||
|
||||
for _, filename := range filenames {
|
||||
dir, resolvedFilename, tp := c.contentChanges.resolveAndRemove(filename)
|
||||
if handled[resolvedFilename] {
|
||||
continue
|
||||
}
|
||||
|
||||
handled[resolvedFilename] = true
|
||||
|
||||
switch tp {
|
||||
case bundleLeaf:
|
||||
if err := c.handleDir(resolvedFilename); err != nil {
|
||||
return err
|
||||
}
|
||||
case bundleBranch:
|
||||
if err := c.handleBranchDir(resolvedFilename); err != nil {
|
||||
return err
|
||||
}
|
||||
default:
|
||||
fi, _, err := c.getRealFileInfo(resolvedFilename)
|
||||
if os.IsNotExist(err) {
|
||||
// File has been deleted.
|
||||
continue
|
||||
}
|
||||
|
||||
// Just in case the owning dir is a new symlink -- this will
|
||||
// create the proper mapping for it.
|
||||
c.getRealFileInfo(dir)
|
||||
|
||||
f := c.newFileInfo(resolvedFilename, fi, tp)
|
||||
c.copyOrHandleSingle(f)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *capturer) capture() error {
|
||||
if len(c.filenames) > 0 {
|
||||
return c.capturePartial(c.filenames...)
|
||||
}
|
||||
|
||||
err := c.handleDir(c.baseDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *capturer) handleNestedDir(dirname string) error {
|
||||
select {
|
||||
case c.sem <- true:
|
||||
var g errgroup.Group
|
||||
|
||||
g.Go(func() error {
|
||||
defer func() {
|
||||
<-c.sem
|
||||
}()
|
||||
return c.handleDir(dirname)
|
||||
})
|
||||
return g.Wait()
|
||||
default:
|
||||
// For deeply nested file trees, waiting for a semaphore wil deadlock.
|
||||
return c.handleDir(dirname)
|
||||
}
|
||||
}
|
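`handleNestedDir` throttles concurrency with a buffered channel used as a semaphore, but with a twist: if no slot is free it recurses synchronously instead of blocking, because a recursion tree deeper than the semaphore's capacity would otherwise wait on itself forever. A minimal sketch of the same pattern outside Hugo:

```go
package main

import (
	"fmt"
	"sync"
)

type walker struct {
	sem chan struct{}
	wg  sync.WaitGroup
}

func (w *walker) walk(depth int) {
	if depth == 0 {
		return
	}
	select {
	case w.sem <- struct{}{}: // a worker slot is free: recurse concurrently
		w.wg.Add(1)
		go func() {
			defer func() { <-w.sem; w.wg.Done() }()
			w.walk(depth - 1)
		}()
	default: // pool exhausted: recurse on this goroutine to avoid deadlock
		w.walk(depth - 1)
	}
}

func main() {
	w := &walker{sem: make(chan struct{}, 4)}
	w.walk(100) // far deeper than the semaphore capacity, yet never deadlocks
	w.wg.Wait()
	fmt.Println("done")
}
```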
// This handles a bundle branch and its resources only. This is used
// in server mode on changes. If this dir no longer represents a bundle
// branch, the handling is upgraded to the full handleDir method.
func (c *capturer) handleBranchDir(dirname string) error {
	files, err := c.readDir(dirname)
	if err != nil {
		return err
	}

	var dirType bundleDirType

	for _, fi := range files {
		if !fi.IsDir() {
			tp, _ := classifyBundledFile(fi.Name())
			if dirType == bundleNot {
				dirType = tp
			}

			if dirType == bundleLeaf {
				return c.handleDir(dirname)
			}
		}
	}

	if dirType != bundleBranch {
		return c.handleDir(dirname)
	}

	dirs := newBundleDirs(bundleBranch, c)

	for _, fi := range files {

		if fi.IsDir() {
			continue
		}

		tp, isContent := classifyBundledFile(fi.Name())

		f := c.newFileInfo(fi.filename, fi.FileInfo, tp)
		if f.isOwner() {
			dirs.addBundleHeader(f)
		} else if !isContent {
			// This is a partial update -- we only care about the files that
			// are in this bundle.
			dirs.addBundleFiles(f)
		}
	}

	c.handler.handleBundles(dirs)

	return nil
}
func (c *capturer) handleDir(dirname string) error {
	files, err := c.readDir(dirname)
	if err != nil {
		return err
	}

	type dirState int

	const (
		dirStateDefault dirState = iota

		dirStateAssetsOnly
		dirStateSinglesOnly
	)

	var (
		fileBundleTypes = make([]bundleDirType, len(files))

		// Start with the assumption that this dir contains only non-content assets (images etc.).
		// If that is still true after a first look at the list of files, we
		// can just copy the files to the destination. We will still have to look at the
		// sub-folders for potential bundles.
		state = dirStateAssetsOnly

		// Start with the assumption that this dir is not a bundle.
		// A directory is a bundle if it contains an index content file,
		// e.g. index.md (a leaf bundle) or an _index.md (a branch bundle).
		bundleType = bundleNot
	)

	/* First check for any content files.
	- If there are none, then this is an assets-only folder (images etc.)
	  and we can just plainly copy its files to the destination.
	- If this is a section with no images etc. or similar, we can just handle it
	  as if it were a single content file.
	*/
	var hasNonContent, isBranch bool

	for i, fi := range files {
		if !fi.IsDir() {
			tp, isContent := classifyBundledFile(fi.Name())
			fileBundleTypes[i] = tp
			if !isBranch {
				isBranch = tp == bundleBranch
			}

			if isContent {
				// This is not an assets-only folder.
				state = dirStateDefault
			} else {
				hasNonContent = true
			}
		}
	}

	if isBranch && !hasNonContent {
		// This is a section or similar with no need for any bundle handling.
		state = dirStateSinglesOnly
	}

	if state > dirStateDefault {
		return c.handleNonBundle(dirname, files, state == dirStateSinglesOnly)
	}

	var fileInfos = make([]*fileInfo, len(files))

	for i, fi := range files {
		currentType := bundleNot

		if !fi.IsDir() {
			currentType = fileBundleTypes[i]
			if bundleType == bundleNot && currentType != bundleNot {
				bundleType = currentType
			}
		}

		if bundleType == bundleNot && currentType != bundleNot {
			bundleType = currentType
		}

		fileInfos[i] = c.newFileInfo(fi.filename, fi.FileInfo, currentType)
	}

	var todo []*fileInfo

	if bundleType != bundleLeaf {
		for _, fi := range fileInfos {
			if fi.FileInfo().IsDir() {
				// Handle potential nested bundles.
				filename := fi.Filename()
				if err := c.handleNestedDir(filename); err != nil {
					return err
				}
			} else if bundleType == bundleNot || (!fi.isOwner() && fi.isContentFile()) {
				// Not in a bundle.
				c.copyOrHandleSingle(fi)
			} else {
				// This is a section folder or similar with non-content files in it.
				todo = append(todo, fi)
			}
		}
	} else {
		todo = fileInfos
	}

	if len(todo) == 0 {
		return nil
	}

	dirs, err := c.createBundleDirs(todo, bundleType)
	if err != nil {
		return err
	}

	// Send the bundle to the next step in the processor chain.
	c.handler.handleBundles(dirs)

	return nil
}
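`handleDir` pivots entirely on file classification: an `index.*` content file makes the directory a leaf bundle, an `_index.*` file makes it a branch bundle. The real `classifyBundledFile` lives elsewhere in this commit; the following is a hypothetical re-creation of the decision for illustration only, and may differ from Hugo's in detail:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

var contentExts = map[string]bool{"md": true, "html": true, "adoc": true}

// classify mirrors the decision handleDir relies on: index.* marks a leaf
// bundle, _index.* a branch bundle; everything else is a plain file.
func classify(filename string) string {
	ext := strings.TrimPrefix(filepath.Ext(filename), ".")
	if !contentExts[ext] {
		return "asset"
	}
	base := strings.TrimSuffix(filepath.Base(filename), filepath.Ext(filename))
	if i := strings.Index(base, "."); i != -1 {
		base = base[:i] // strip a language code: index.nn.md -> index
	}
	switch base {
	case "index":
		return "leaf bundle root"
	case "_index":
		return "branch bundle root"
	default:
		return "single content file"
	}
}

func main() {
	for _, f := range []string{"index.md", "_index.nn.md", "post.md", "logo.png"} {
		fmt.Printf("%-14s -> %s\n", f, classify(f))
	}
}
```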
func (c *capturer) handleNonBundle(
	dirname string,
	fileInfos []fileInfoName,
	singlesOnly bool) error {

	for _, fi := range fileInfos {
		if fi.IsDir() {
			if err := c.handleNestedDir(fi.filename); err != nil {
				return err
			}
		} else {
			if singlesOnly {
				file := c.newFileInfo(fi.filename, fi, bundleNot)
				c.handler.handleSingles(file)
			} else {
				c.handler.handleCopyFiles(fi.filename)
			}
		}
	}

	return nil
}

func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
	if fi.isContentFile() {
		c.handler.handleSingles(fi)
	} else {
		// These do not currently need any further processing.
		c.handler.handleCopyFiles(fi.Filename())
	}
}

func (c *capturer) createBundleDirs(fileInfos []*fileInfo, bundleType bundleDirType) (*bundleDirs, error) {
	dirs := newBundleDirs(bundleType, c)

	for _, fi := range fileInfos {
		if fi.FileInfo().IsDir() {
			var collector func(fis ...*fileInfo)

			if bundleType == bundleBranch {
				// All files in the current directory are part of this bundle.
				// Trying to include sub folders in these bundles would be
				// ambiguous, so we don't.
				collector = func(fis ...*fileInfo) {
					for _, fi := range fis {
						c.copyOrHandleSingle(fi)
					}
				}
			} else {
				// All nested files and directories are part of this bundle.
				// Note: appending here is safe; the enclosing range iterates
				// over the original slice header, and the second loop below
				// picks up the appended files.
				collector = func(fis ...*fileInfo) {
					fileInfos = append(fileInfos, fis...)
				}
			}
			err := c.collectFiles(fi.Filename(), collector)
			if err != nil {
				return nil, err
			}

		} else if fi.isOwner() {
			// There can be more than one language, so:
			// 1. Content files must be attached to their language's bundle.
			// 2. Other files must be attached to all languages.
			// 3. Every content file needs a bundle header.
			dirs.addBundleHeader(fi)
		}
	}

	for _, fi := range fileInfos {
		if fi.FileInfo().IsDir() || fi.isOwner() {
			continue
		}

		if fi.isContentFile() {
			if bundleType != bundleBranch {
				dirs.addBundleContentFile(fi)
			}
		} else {
			dirs.addBundleFiles(fi)
		}
	}

	return dirs, nil
}
func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInfo)) error {
	filesInDir, err := c.readDir(dirname)
	if err != nil {
		return err
	}

	for _, fi := range filesInDir {
		if fi.IsDir() {
			err := c.collectFiles(fi.filename, handleFiles)
			if err != nil {
				return err
			}
		} else {
			handleFiles(c.newFileInfo(fi.filename, fi.FileInfo, bundleNot))
		}
	}

	return nil
}

func (c *capturer) readDir(dirname string) ([]fileInfoName, error) {
	if c.sourceSpec.IgnoreFile(dirname) {
		return nil, nil
	}

	dir, err := c.fs.Open(dirname)
	if err != nil {
		return nil, err
	}
	defer dir.Close()
	names, err := dir.Readdirnames(-1)
	if err != nil {
		return nil, err
	}

	fis := make([]fileInfoName, 0, len(names))

	for _, name := range names {
		filename := filepath.Join(dirname, name)
		if !c.sourceSpec.IgnoreFile(filename) {
			fi, _, err := c.getRealFileInfo(filename)

			if err != nil {
				// It may have been deleted in the meantime.
				if err == errSkipCyclicDir || os.IsNotExist(err) {
					continue
				}
				return nil, err
			}

			fis = append(fis, fileInfoName{filename: filename, FileInfo: fi})
		}
	}

	return fis, nil
}

func (c *capturer) newFileInfo(filename string, fi os.FileInfo, tp bundleDirType) *fileInfo {
	return newFileInfo(c.sourceSpec, c.baseDir, filename, fi, tp)
}

type singlesHandler func(fis ...*fileInfo)
type bundlesHandler func(b *bundleDirs)

type fileInfoName struct {
	os.FileInfo
	filename string
}

type bundleDirs struct {
	tp bundleDirType
	// Maps languages to bundles.
	bundles map[string]*bundleDir

	// Keeps track of language overrides for non-content files, e.g. logo.en.png.
	langOverrides map[string]bool

	c *capturer
}

func newBundleDirs(tp bundleDirType, c *capturer) *bundleDirs {
	return &bundleDirs{tp: tp, bundles: make(map[string]*bundleDir), langOverrides: make(map[string]bool), c: c}
}

type bundleDir struct {
	tp bundleDirType
	fi *fileInfo

	resources map[string]*fileInfo
}

func (b bundleDir) clone() *bundleDir {
	b.resources = make(map[string]*fileInfo)
	fic := *b.fi
	b.fi = &fic
	return &b
}
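`clone` is declared on a value receiver, so `b` is already a shallow copy by the time the method body runs; the body then only replaces the two fields that must not be shared. A minimal sketch of the idiom:

```go
package main

import "fmt"

type box struct {
	items map[string]int
	owner *string
}

// clone uses a value receiver: b is already a shallow copy, so we only
// need to replace the fields that would otherwise be shared.
func (b box) clone() *box {
	b.items = make(map[string]int) // fresh map, as bundleDir.clone does
	o := *b.owner                  // copy the pointee, like the *b.fi copy
	b.owner = &o
	return &b
}

func main() {
	name := "original"
	a := box{items: map[string]int{"x": 1}, owner: &name}
	c := a.clone()
	c.items["y"] = 2
	*c.owner = "copy"
	fmt.Println(len(a.items), *a.owner) // 1 original: the original is untouched
}
```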
func newBundleDir(fi *fileInfo, bundleType bundleDirType) *bundleDir {
	return &bundleDir{fi: fi, tp: bundleType, resources: make(map[string]*fileInfo)}
}

func (b *bundleDirs) addBundleContentFile(fi *fileInfo) {
	dir, found := b.bundles[fi.Lang()]
	if !found {
		// Every bundled content file needs a bundle header.
		// If one does not exist in its language, we pick the default
		// language version, or a random one if that doesn't exist either.
		tl := b.c.sourceSpec.DefaultContentLanguage
		ldir, found := b.bundles[tl]
		if !found {
			// Just pick one.
			for _, v := range b.bundles {
				ldir = v
				break
			}
		}

		if ldir == nil {
			panic(fmt.Sprintf("bundle not found for file %q", fi.Filename()))
		}

		dir = ldir.clone()
		dir.fi.overriddenLang = fi.Lang()
		b.bundles[fi.Lang()] = dir
	}

	dir.resources[fi.Filename()] = fi
}

func (b *bundleDirs) addBundleFiles(fi *fileInfo) {
	dir := filepath.ToSlash(fi.Dir())
	p := dir + fi.TranslationBaseName() + "." + fi.Ext()
	for lang, bdir := range b.bundles {
		key := lang + p
		// Given mypage.de.md (a German translation) and mypage.md, we pick
		// the most specific one for that language.
		if fi.Lang() == lang || !b.langOverrides[key] {
			bdir.resources[key] = fi
		}
		b.langOverrides[key] = true
	}
}
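`addBundleFiles` offers every non-content file to every language's bundle, but an exact language match (say `logo.nn.png` for the `nn` bundle) always wins over a previously stored untagged fallback. A stripped-down sketch of that override rule, with the bundle machinery reduced to maps:

```go
package main

import "fmt"

type picker struct {
	resources map[string]string // resource key (lang + path) -> source language
	overrides map[string]bool   // keys that have already been claimed once
	langs     []string
}

// add mirrors addBundleFiles: a file is offered to every language's bundle,
// but an exact language match always wins over a stored fallback.
func (p *picker) add(path, fileLang string) {
	for _, lang := range p.langs {
		key := lang + "/" + path
		if fileLang == lang || !p.overrides[key] {
			p.resources[key] = fileLang
		}
		p.overrides[key] = true
	}
}

func main() {
	p := &picker{resources: map[string]string{}, overrides: map[string]bool{}, langs: []string{"en", "nn"}}
	p.add("logo.png", "en") // untagged fallback (default language)
	p.add("logo.png", "nn") // logo.nn.png, normalized to the same base path
	fmt.Println(p.resources["en/logo.png"], p.resources["nn/logo.png"]) // en nn
}
```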
func (b *bundleDirs) addBundleHeader(fi *fileInfo) {
	b.bundles[fi.Lang()] = newBundleDir(fi, b.tp)
}

func (c *capturer) isSeen(dirname string) bool {
	c.seenMu.Lock()
	defer c.seenMu.Unlock()
	seen := c.seen[dirname]
	c.seen[dirname] = true
	if seen {
		c.logger.WARN.Printf("Content dir %q already processed; skipped to avoid infinite recursion.", dirname)
		return true
	}
	return false
}

func (c *capturer) getRealFileInfo(path string) (os.FileInfo, string, error) {
	fileInfo, err := c.lstatIfOs(path)
	realPath := path

	if err != nil {
		return nil, "", err
	}

	if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
		link, err := filepath.EvalSymlinks(path)
		if err != nil {
			return nil, "", fmt.Errorf("Cannot read symbolic link %q, error was: %s", path, err)
		}

		fileInfo, err = c.lstatIfOs(link)
		if err != nil {
			return nil, "", fmt.Errorf("Cannot stat %q, error was: %s", link, err)
		}

		realPath = link

		if realPath != path && fileInfo.IsDir() && c.isSeen(realPath) {
			// Avoid cyclic symlinks.
			// Note that this may rule out some uses that aren't cyclic and are
			// potentially useful, but this implementation is both robust and
			// simple: we stop at the first directory that we have seen before,
			// e.g. /content/blog will only be processed once.
			return nil, realPath, errSkipCyclicDir
		}

		if c.contentChanges != nil {
			// Keep track of symbolic links in watch mode.
			var from, to string
			if fileInfo.IsDir() {
				from = realPath
				to = path

				if !strings.HasSuffix(to, helpers.FilePathSeparator) {
					to = to + helpers.FilePathSeparator
				}
				if !strings.HasSuffix(from, helpers.FilePathSeparator) {
					from = from + helpers.FilePathSeparator
				}

				baseDir := c.baseDir
				if !strings.HasSuffix(baseDir, helpers.FilePathSeparator) {
					baseDir = baseDir + helpers.FilePathSeparator
				}

				if strings.HasPrefix(from, baseDir) {
					// With symbolic links inside /content we need to keep
					// a reference to both. This may be confusing with
					// --navigateToChanged, but the user has chosen this themselves.
					c.contentChanges.addSymbolicLinkMapping(from, from)
				}

			} else {
				from = realPath
				to = path
			}

			c.contentChanges.addSymbolicLinkMapping(from, to)
		}
	}

	return fileInfo, realPath, nil
}

func (c *capturer) lstatIfOs(path string) (os.FileInfo, error) {
	return helpers.LstatIfOs(c.fs, path)
}
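The core of `getRealFileInfo` is symlink resolution plus cycle breaking: `Lstat` the path, resolve links with `filepath.EvalSymlinks`, and refuse any directory target already seen. A self-contained sketch of just that part, against the real OS filesystem rather than Hugo's afero abstraction:

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

var errCycle = errors.New("skip potential cyclic dir")

type resolver struct{ seen map[string]bool }

// resolve follows one level of symlink indirection and refuses to revisit
// a directory it has resolved before, which is enough to break cycles.
func (r *resolver) resolve(path string) (os.FileInfo, string, error) {
	fi, err := os.Lstat(path)
	if err != nil {
		return nil, "", err
	}
	if fi.Mode()&os.ModeSymlink == 0 {
		return fi, path, nil
	}
	real, err := filepath.EvalSymlinks(path)
	if err != nil {
		return nil, "", err
	}
	fi, err = os.Lstat(real)
	if err != nil {
		return nil, "", err
	}
	if fi.IsDir() {
		if r.seen[real] {
			return nil, real, errCycle
		}
		r.seen[real] = true
	}
	return fi, real, nil
}

func main() {
	r := &resolver{seen: map[string]bool{}}
	if _, real, err := r.resolve("."); err == nil {
		fmt.Println("resolved to", real)
	}
}
```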
255 hugolib/page_bundler_capture_test.go Normal file
@@ -0,0 +1,255 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"path"
	"path/filepath"
	"sort"

	jww "github.com/spf13/jwalterweatherman"

	"strings"
	"sync"
	"testing"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/source"
	"github.com/stretchr/testify/require"
)

type storeFilenames struct {
	sync.Mutex
	filenames []string
	copyNames []string
	dirKeys   []string
}

func (s *storeFilenames) handleSingles(fis ...*fileInfo) {
	s.Lock()
	defer s.Unlock()
	for _, fi := range fis {
		s.filenames = append(s.filenames, filepath.ToSlash(fi.Filename()))
	}
}

func (s *storeFilenames) handleBundles(d *bundleDirs) {
	s.Lock()
	defer s.Unlock()
	var keys []string
	for _, b := range d.bundles {
		res := make([]string, len(b.resources))
		i := 0
		for _, r := range b.resources {
			res[i] = path.Join(r.Lang(), filepath.ToSlash(r.Filename()))
			i++
		}
		sort.Strings(res)
		keys = append(keys, path.Join("__bundle", b.fi.Lang(), filepath.ToSlash(b.fi.Filename()), "resources", strings.Join(res, "|")))
	}
	s.dirKeys = append(s.dirKeys, keys...)
}

func (s *storeFilenames) handleCopyFiles(names ...string) {
	s.Lock()
	defer s.Unlock()
	for _, name := range names {
		s.copyNames = append(s.copyNames, filepath.ToSlash(name))
	}
}

func (s *storeFilenames) sortedStr() string {
	s.Lock()
	defer s.Unlock()
	sort.Strings(s.filenames)
	sort.Strings(s.dirKeys)
	sort.Strings(s.copyNames)
	return "\nF:\n" + strings.Join(s.filenames, "\n") + "\nD:\n" + strings.Join(s.dirKeys, "\n") +
		"\nC:\n" + strings.Join(s.copyNames, "\n") + "\n"
}

func TestPageBundlerCaptureSymlinks(t *testing.T) {
	assert := require.New(t)
	cfg, fs, workDir := newTestBundleSymbolicSources(t)
	contentDir := "base"
	sourceSpec := source.NewSourceSpec(cfg, fs)

	fileStore := &storeFilenames{}
	logger := newWarningLogger()
	c := newCapturer(logger, sourceSpec, fileStore, nil, filepath.Join(workDir, contentDir))

	assert.NoError(c.capture())

	// The symlink back to /content is skipped to prevent infinite recursion.
	assert.Equal(uint64(3), logger.LogCountForLevelsGreaterThanorEqualTo(jww.LevelWarn))

	expected := `
F:
/base/a/page_s.md
/base/a/regular.md
/base/symbolic1/s1.md
/base/symbolic1/s2.md
/base/symbolic3/circus/a/page_s.md
/base/symbolic3/circus/a/regular.md
D:
__bundle/en/base/symbolic2/a1/index.md/resources/en/base/symbolic2/a1/logo.png|en/base/symbolic2/a1/page.md
C:
/base/symbolic3/s1.png
/base/symbolic3/s2.png
`
	got := strings.Replace(fileStore.sortedStr(), filepath.ToSlash(workDir), "", -1)
	got = strings.Replace(got, "//", "/", -1)

	if expected != got {
		diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
		t.Log(got)
		t.Fatalf("Failed:\n%s", diff)
	}
}
func TestPageBundlerCapture(t *testing.T) {
	t.Parallel()

	assert := require.New(t)
	cfg, fs := newTestBundleSources(t)

	sourceSpec := source.NewSourceSpec(cfg, fs)

	fileStore := &storeFilenames{}

	c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))

	assert.NoError(c.capture())

	expected := `
F:
/work/base/_1.md
/work/base/a/1.md
/work/base/a/2.md
/work/base/assets/pages/mypage.md
D:
__bundle/en/work/base/_index.md/resources/en/work/base/_1.png
__bundle/en/work/base/a/b/index.md/resources/en/work/base/a/b/ab1.md
__bundle/en/work/base/b/index.md/resources/en/work/base/b/1.md|en/work/base/b/2.md|en/work/base/b/c/logo.png|en/work/base/b/custom-mime.bep
C:
/work/base/assets/pic1.png
/work/base/assets/pic2.png
/work/base/images/hugo-logo.png
`

	got := fileStore.sortedStr()

	if expected != got {
		diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
		t.Log(got)
		t.Fatalf("Failed:\n%s", diff)
	}
}
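These capture tests all follow a golden-string pattern: normalize the observed filenames, join them into one deterministic string, and on mismatch report a token-level diff rather than the full blobs. `helpers.DiffStringSlices` is Hugo's own helper; a rough stand-in for what such a field diff amounts to, for readers without the Hugo tree at hand:

```go
package main

import (
	"fmt"
	"strings"
)

// diffFields is a stand-in for helpers.DiffStringSlices: report tokens
// present in one normalized golden string but not the other.
func diffFields(want, got string) []string {
	seen := map[string]int{}
	for _, f := range strings.Fields(want) {
		seen[f]++
	}
	var diff []string
	for _, f := range strings.Fields(got) {
		if seen[f] == 0 {
			diff = append(diff, "+"+f)
		} else {
			seen[f]--
		}
	}
	for f, n := range seen {
		for ; n > 0; n-- {
			diff = append(diff, "-"+f)
		}
	}
	return diff
}

func main() {
	want := "F: /base/a.md /base/b.md"
	got := "F: /base/a.md /base/c.md"
	fmt.Println(diffFields(want, got)) // [+/base/c.md -/base/b.md]
}
```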
func TestPageBundlerCaptureMultilingual(t *testing.T) {
	t.Parallel()

	assert := require.New(t)
	cfg, fs := newTestBundleSourcesMultilingual(t)
	sourceSpec := source.NewSourceSpec(cfg, fs)
	fileStore := &storeFilenames{}
	c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))

	assert.NoError(c.capture())

	expected := `
F:
/work/base/1s/mypage.md
/work/base/bb/_1.md
/work/base/bb/_1.nn.md
/work/base/bb/en.md
/work/base/bc/page.md
/work/base/bc/page.nn.md
/work/base/be/_index.md
/work/base/be/page.md
/work/base/be/page.nn.md
D:
__bundle/en/work/base/bb/_index.md/resources/en/work/base/bb/a.png|en/work/base/bb/b.png|nn/work/base/bb/c.nn.png
__bundle/en/work/base/bc/_index.md/resources/en/work/base/bc/logo-bc.png
__bundle/en/work/base/bd/index.md/resources/en/work/base/bd/page.md
__bundle/en/work/base/lb/index.md/resources/en/work/base/lb/1.md|en/work/base/lb/2.md|en/work/base/lb/c/d/deep.png|en/work/base/lb/c/logo.png|en/work/base/lb/c/one.png
__bundle/nn/work/base/bb/_index.nn.md/resources/en/work/base/bb/a.png|nn/work/base/bb/b.nn.png|nn/work/base/bb/c.nn.png
__bundle/nn/work/base/bd/index.md/resources/nn/work/base/bd/page.nn.md
__bundle/nn/work/base/lb/index.nn.md/resources/en/work/base/lb/c/d/deep.png|en/work/base/lb/c/one.png|nn/work/base/lb/2.nn.md|nn/work/base/lb/c/logo.nn.png
C:
/work/base/1s/mylogo.png
/work/base/bb/b/d.nn.png
`

	got := fileStore.sortedStr()

	if expected != got {
		diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
		t.Log(got)
		t.Fatalf("Failed:\n%s", diff)
	}
}

type noOpFileStore int

func (noOpFileStore) handleSingles(fis ...*fileInfo)  {}
func (noOpFileStore) handleBundles(b *bundleDirs)     {}
func (noOpFileStore) handleCopyFiles(names ...string) {}

func BenchmarkPageBundlerCapture(b *testing.B) {
	capturers := make([]*capturer, b.N)

	for i := 0; i < b.N; i++ {
		cfg, fs := newTestCfg()
		sourceSpec := source.NewSourceSpec(cfg, fs)

		base := fmt.Sprintf("base%d", i)
		for j := 1; j <= 5; j++ {
			js := fmt.Sprintf("j%d", j)
			writeSource(b, fs, filepath.Join(base, js, "index.md"), "content")
			writeSource(b, fs, filepath.Join(base, js, "logo1.png"), "content")
			writeSource(b, fs, filepath.Join(base, js, "sub", "logo2.png"), "content")
			writeSource(b, fs, filepath.Join(base, js, "section", "_index.md"), "content")
			writeSource(b, fs, filepath.Join(base, js, "section", "logo.png"), "content")
			writeSource(b, fs, filepath.Join(base, js, "section", "sub", "logo.png"), "content")

			for k := 1; k <= 5; k++ {
				ks := fmt.Sprintf("k%d", k)
				writeSource(b, fs, filepath.Join(base, js, ks, "logo1.png"), "content")
				writeSource(b, fs, filepath.Join(base, js, "section", ks, "logo.png"), "content")
			}
		}

		for i := 1; i <= 5; i++ {
			writeSource(b, fs, filepath.Join(base, "assetsonly", fmt.Sprintf("image%d.png", i)), "image")
		}

		for i := 1; i <= 5; i++ {
			writeSource(b, fs, filepath.Join(base, "contentonly", fmt.Sprintf("c%d.md", i)), "content")
		}

		capturers[i] = newCapturer(newErrorLogger(), sourceSpec, new(noOpFileStore), nil, base)
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		err := capturers[i].capture()
		if err != nil {
			b.Fatal(err)
		}
	}
}
346 hugolib/page_bundler_handlers.go Normal file
@@ -0,0 +1,346 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"errors"
	"fmt"
	"sort"

	"strings"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/resource"
)

var (
	// This should be the only list of valid extensions for content files.
	contentFileExtensions = []string{
		"html", "htm",
		"mdown", "markdown", "md",
		"asciidoc", "adoc", "ad",
		"rest", "rst",
		"mmark",
		"org",
		"pandoc", "pdc"}

	contentFileExtensionsSet map[string]bool
)

func init() {
	contentFileExtensionsSet = make(map[string]bool)
	for _, ext := range contentFileExtensions {
		contentFileExtensionsSet[ext] = true
	}
}

func newHandlerChain(s *Site) contentHandler {
	c := &contentHandlers{s: s}

	contentFlow := c.parsePage(c.processFirstMatch(
		// Handles all files with a content file extension. See above.
		c.handlePageContent(),

		// Every HTML file without front matter will be passed on to this handler.
		c.handleHTMLContent(),
	))

	c.rootHandler = c.processFirstMatch(
		contentFlow,

		// Creates a file resource (image, CSS etc.) if there is a parent
		// page set on the current context.
		c.createResource(),

		// Everything that isn't handled above will just be copied
		// to the destination.
		c.copyFile(),
	)

	return c.rootHandler
}
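`newHandlerChain` composes small handlers into a first-match pipeline, a chain-of-responsibility: each handler either claims the context (or fails) and stops the chain, or passes it on to the next. A minimal sketch of the same composition, with Hugo's types reduced to strings:

```go
package main

import "fmt"

type result struct {
	handled bool
	err     error
}

type handler func(name string) result

// firstMatch mirrors processFirstMatch: try handlers in order, stop at the
// first one that either handles the input or fails.
func firstMatch(handlers ...handler) handler {
	return func(name string) result {
		for _, h := range handlers {
			if res := h(name); res.handled || res.err != nil {
				return res
			}
		}
		return result{err: fmt.Errorf("no matching handler for %q", name)}
	}
}

func main() {
	markdown := func(name string) result {
		if name == "post.md" {
			fmt.Println("render", name)
			return result{handled: true}
		}
		return result{}
	}
	copyFile := func(name string) result {
		fmt.Println("copy", name)
		return result{handled: true} // catch-all, like copyFile above
	}
	root := firstMatch(markdown, copyFile)
	root("post.md")  // render post.md
	root("logo.png") // copy logo.png
}
```

Putting the catch-all copy handler last is what makes the chain total: anything no specialized handler claims still ends up in the destination.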
type contentHandlers struct {
	s           *Site
	rootHandler contentHandler
}

func (c *contentHandlers) processFirstMatch(handlers ...contentHandler) func(ctx *handlerContext) handlerResult {
	return func(ctx *handlerContext) handlerResult {
		for _, h := range handlers {
			res := h(ctx)
			if res.handled || res.err != nil {
				return res
			}
		}
		return handlerResult{err: errors.New("no matching handler found")}
	}
}

type handlerContext struct {
	// These are the pages stored in Site.
	pages chan<- *Page

	doNotAddToSiteCollections bool

	currentPage *Page
	parentPage  *Page

	bundle *bundleDir

	// The source baseDir, e.g. "/myproject/content/"
	baseDir string

	source *fileInfo

	// Relative path to the target.
	target string
}

func (c *handlerContext) ext() string {
	if c.currentPage != nil {
		if c.currentPage.Markup != "" {
			return c.currentPage.Markup
		}
		return c.currentPage.Ext()
	}

	if c.bundle != nil {
		return c.bundle.fi.Ext()
	} else {
		return c.source.Ext()
	}
}

func (c *handlerContext) targetPath() string {
	if c.target != "" {
		return c.target
	}

	return strings.TrimPrefix(c.source.Filename(), c.baseDir)
}

func (c *handlerContext) file() *fileInfo {
	if c.bundle != nil {
		return c.bundle.fi
	}

	return c.source
}

// Create a copy with the current context as its parent.
func (c handlerContext) childCtx(fi *fileInfo) *handlerContext {
	if c.currentPage == nil {
		panic("Need a Page to create a child context")
	}

	c.target = strings.TrimPrefix(fi.Path(), c.bundle.fi.Dir())
	c.source = fi

	c.doNotAddToSiteCollections = c.bundle != nil && c.bundle.tp != bundleBranch

	c.bundle = nil

	c.parentPage = c.currentPage
	c.currentPage = nil

	return &c
}

func (c *handlerContext) supports(exts ...string) bool {
	ext := c.ext()
	for _, s := range exts {
		if s == ext {
			return true
		}
	}

	return false
}

func (c *handlerContext) isContentFile() bool {
	return contentFileExtensionsSet[c.ext()]
}

type (
	handlerResult struct {
		err      error
		handled  bool
		resource resource.Resource
	}

	contentHandlerChain func(h contentHandler) contentHandler
	contentHandler      func(ctx *handlerContext) handlerResult
)

var (
	notHandled handlerResult

	noOpContenHandler = func(ctx *handlerContext) handlerResult {
		return handlerResult{handled: true}
	}
)

func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
	return func(ctx *handlerContext) handlerResult {
		if !ctx.isContentFile() {
			return notHandled
		}

		result := handlerResult{handled: true}
		fi := ctx.file()

		f, err := fi.Open()
		if err != nil {
			return handlerResult{err: fmt.Errorf("(%s) failed to open content file: %s", fi.Filename(), err)}
		}
		defer f.Close()

		p := c.s.newPageFromFile(fi)

		_, err = p.ReadFrom(f)
		if err != nil {
			return handlerResult{err: err}
		}

		if !p.shouldBuild() {
			if !ctx.doNotAddToSiteCollections {
				ctx.pages <- p
			}
			return result
		}

		ctx.currentPage = p

		if ctx.bundle != nil {
			// Add the bundled files.
			for _, fi := range ctx.bundle.resources {
				childCtx := ctx.childCtx(fi)
				res := c.rootHandler(childCtx)
				if res.err != nil {
					return res
				}
				if res.resource != nil {
					p.Resources = append(p.Resources, res.resource)
				}
			}

			sort.SliceStable(p.Resources, func(i, j int) bool {
				if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() {
					return true
				}

				p1, ok1 := p.Resources[i].(*Page)
				p2, ok2 := p.Resources[j].(*Page)

				if ok1 != ok2 {
					return ok2
				}

				if ok1 {
					return defaultPageSort(p1, p2)
				}

				return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink()
			})
		}

		return h(ctx)
	}
}
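The comparator at the end of `parsePage` orders a bundle's resources by two keys: resource type first, then page order for pages and permalink for everything else; `sort.SliceStable` keeps the input order for full ties. A minimal standalone version of that multi-key comparator (note it compares the type key symmetrically before falling through, which a strict weak ordering requires):

```go
package main

import (
	"fmt"
	"sort"
)

type res struct {
	typ, link string
	weight    int // stands in for page ordering; only meaningful for pages
}

func main() {
	rs := []res{
		{"image", "/b.png", 0},
		{"page", "/p2", 2},
		{"image", "/a.png", 0},
		{"page", "/p1", 1},
	}
	// Primary key: resource type. Secondary key: page order for pages,
	// permalink for everything else -- the same shape as the sort above.
	sort.SliceStable(rs, func(i, j int) bool {
		if rs[i].typ != rs[j].typ {
			return rs[i].typ < rs[j].typ
		}
		if rs[i].typ == "page" {
			return rs[i].weight < rs[j].weight
		}
		return rs[i].link < rs[j].link
	})
	fmt.Println(rs) // images (by link), then pages (by weight)
}
```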
func (c *contentHandlers) handlePageContent() contentHandler {
	return func(ctx *handlerContext) handlerResult {
		if ctx.supports("html", "htm") {
			return notHandled
		}

		p := ctx.currentPage

		// Work on a copy of the raw content from now on.
		p.createWorkContentCopy()

		if err := p.processShortcodes(); err != nil {
			p.s.Log.ERROR.Println(err)
		}

		if c.s.Cfg.GetBool("enableEmoji") {
			p.workContent = helpers.Emojify(p.workContent)
		}

		p.workContent = p.replaceDivider(p.workContent)
		p.workContent = p.renderContent(p.workContent)

		if !ctx.doNotAddToSiteCollections {
			ctx.pages <- p
		}

		return handlerResult{handled: true, resource: p}
	}
}

func (c *contentHandlers) handleHTMLContent() contentHandler {
	return func(ctx *handlerContext) handlerResult {
		if !ctx.supports("html", "htm") {
			return notHandled
		}

		p := ctx.currentPage

		p.createWorkContentCopy()

		if err := p.processShortcodes(); err != nil {
			p.s.Log.ERROR.Println(err)
		}

		if !ctx.doNotAddToSiteCollections {
			ctx.pages <- p
		}

		return handlerResult{handled: true, resource: p}
	}
}

func (c *contentHandlers) createResource() contentHandler {
	return func(ctx *handlerContext) handlerResult {
		if ctx.parentPage == nil {
			return notHandled
		}

		resource, err := c.s.resourceSpec.NewResourceFromFilename(
			ctx.parentPage.subResourceLinkFactory,
			c.s.absPublishDir(),
			ctx.source.Filename(), ctx.target)

		return handlerResult{err: err, handled: true, resource: resource}
	}
}

func (c *contentHandlers) copyFile() contentHandler {
	return func(ctx *handlerContext) handlerResult {
		f, err := c.s.Fs.Source.Open(ctx.source.Filename())
		if err != nil {
			return handlerResult{err: err}
		}

		target := ctx.targetPath()

		defer f.Close()
		if err := c.s.publish(&c.s.PathSpec.ProcessingStats.Files, target, f); err != nil {
			return handlerResult{err: err}
		}

		return handlerResult{handled: true}
	}
}
379 hugolib/page_bundler_test.go Normal file
@@ -0,0 +1,379 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"io/ioutil"
	"os"
	"runtime"
	"strings"
	"testing"

	"github.com/spf13/afero"

	"github.com/gohugoio/hugo/media"

	"path/filepath"

	"fmt"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/resource"
	"github.com/spf13/viper"

	"github.com/stretchr/testify/require"
)

func TestPageBundlerSite(t *testing.T) {
	t.Parallel()

	for _, ugly := range []bool{false, true} {
		t.Run(fmt.Sprintf("ugly=%t", ugly),
			func(t *testing.T) {

				assert := require.New(t)
				cfg, fs := newTestBundleSources(t)

				cfg.Set("permalinks", map[string]string{
					"a": ":sections/:filename",
					"b": ":year/:slug/",
				})

				cfg.Set("outputFormats", map[string]interface{}{
					"CUSTOMO": map[string]interface{}{
						"mediaType": media.HTMLType,
						"baseName":  "cindex",
						"path":      "cpath",
					},
				})

				cfg.Set("outputs", map[string]interface{}{
					"home":    []string{"HTML", "CUSTOMO"},
					"page":    []string{"HTML", "CUSTOMO"},
					"section": []string{"HTML", "CUSTOMO"},
				})

				cfg.Set("uglyURLs", ugly)

				s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

				th := testHelper{s.Cfg, s.Fs, t}

				// Singles (2), Below home (1), Bundle (1)
				assert.Len(s.RegularPages, 6)

				singlePage := s.getPage(KindPage, "a/1.md")

				assert.NotNil(singlePage)
				assert.Contains(singlePage.Content, "TheContent")

				if ugly {
					assert.Equal("/a/1.html", singlePage.RelPermalink())
					th.assertFileContent(filepath.FromSlash("/work/public/a/1.html"), "TheContent")

				} else {
					assert.Equal("/a/1/", singlePage.RelPermalink())
					th.assertFileContent(filepath.FromSlash("/work/public/a/1/index.html"), "TheContent")
				}

				th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content")

				// This should be just copied to destination.
				th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content")

				leafBundle1 := s.getPage(KindPage, "b/index.md")
				assert.NotNil(leafBundle1)
				leafBundle2 := s.getPage(KindPage, "a/b/index.md")
				assert.NotNil(leafBundle2)

				pageResources := leafBundle1.Resources.ByType(pageResourceType)
				assert.Len(pageResources, 2)
				firstPage := pageResources[0].(*Page)
				secondPage := pageResources[1].(*Page)
				assert.Equal(filepath.FromSlash("b/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle())
				assert.Contains(firstPage.Content, "TheContent")
				assert.Len(leafBundle1.Resources, 4) // 2 pages, 1 image, 1 custom mime type

				imageResources := leafBundle1.Resources.ByType("image")
				assert.Len(imageResources, 1)
				image := imageResources[0]

				altFormat := leafBundle1.OutputFormats().Get("CUSTOMO")
				assert.NotNil(altFormat)

				assert.Equal(filepath.FromSlash("/work/base/b/c/logo.png"), image.(resource.Source).AbsSourceFilename())
				assert.Equal("https://example.com/2017/pageslug/c/logo.png", image.Permalink())
				th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
				th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")

				// Custom media type defined in site config.
				assert.Len(leafBundle1.Resources.ByType("bepsays"), 1)

				if ugly {
					assert.Equal("/2017/pageslug.html", leafBundle1.RelPermalink())
					th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"), "TheContent")
					th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")

					assert.Equal("/a/b.html", leafBundle2.RelPermalink())

				} else {
					assert.Equal("/2017/pageslug/", leafBundle1.RelPermalink())
					th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent")
					th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent")

					assert.Equal("/a/b/", leafBundle2.RelPermalink())
				}
			})
	}
}

func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
	assert := require.New(t)
	cfg, fs, workDir := newTestBundleSymbolicSources(t)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: newWarningLogger()}, BuildCfg{})

	th := testHelper{s.Cfg, s.Fs, t}

	assert.Equal(7, len(s.RegularPages))
	a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md")
	assert.NotNil(a1Bundle)
	assert.Equal(2, len(a1Bundle.Resources))
	assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType)))

	th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent")
	th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent")
	th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic2/a1/index.html"), "TheContent")
}
func newTestBundleSources(t *testing.T) (*viper.Viper, *hugofs.Fs) {
	cfg, fs := newTestCfg()

	workDir := "/work"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", "base")
	cfg.Set("baseURL", "https://example.com")
	cfg.Set("mediaTypes", map[string]interface{}{
		"text/bepsays": map[string]interface{}{
			"suffix": "bep",
		},
	})

	pageContent := `---
title: "Bundle Galore"
slug: pageslug
date: 2017-10-09
---

TheContent.
`

	pageContentNoSlug := `---
title: "Bundle Galore #2"
date: 2017-10-09
---

TheContent.
`

	layout := `{{ .Title }}|{{ .Content }}`

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)

	writeSource(t, fs, filepath.Join(workDir, "base", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "_1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "_1.png"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "images", "hugo-logo.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "2.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "1.md"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "index.md"), pageContentNoSlug)
	writeSource(t, fs, filepath.Join(workDir, "base", "a", "b", "ab1.md"), pageContentNoSlug)

	// Mostly plain static assets in a folder with a page in a sub folder thrown in.
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic1.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pic2.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent)

	// Bundle
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "2.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "custom-mime.bep"), "bepsays")
	writeSource(t, fs, filepath.Join(workDir, "base", "b", "c", "logo.png"), "content")

	return cfg, fs
}

func newTestBundleSourcesMultilingual(t *testing.T) (*viper.Viper, *hugofs.Fs) {
	cfg, fs := newTestCfg()

	workDir := "/work"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", "base")
	cfg.Set("baseURL", "https://example.com")
	cfg.Set("defaultContentLanguage", "en")

	langConfig := map[string]interface{}{
		"en": map[string]interface{}{
			"weight":       1,
			"languageName": "English",
		},
		"nn": map[string]interface{}{
			"weight":       2,
			"languageName": "Nynorsk",
		},
	}

	cfg.Set("languages", langConfig)

	pageContent := `---
slug: pageslug
date: 2017-10-09
---

TheContent.
`

	layout := `{{ .Title }}|{{ .Content }}|Lang: {{ .Site.Language.Lang }}`

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)

	writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mypage.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "1s", "mylogo.png"), "content")

	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "en.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "_1.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "a.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "c.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "bb", "b", "d.nn.png"), "content")

	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "logo-bc.png"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bc", "page.nn.md"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "bd", "page.nn.md"), pageContent)

	writeSource(t, fs, filepath.Join(workDir, "base", "be", "_index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "be", "page.nn.md"), pageContent)

	// Bundle leaf, multilingual
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "index.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "1.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "2.nn.md"), pageContent)
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "logo.nn.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "one.png"), "content")
	writeSource(t, fs, filepath.Join(workDir, "base", "lb", "c", "d", "deep.png"), "content")

	return cfg, fs
}

func newTestBundleSymbolicSources(t *testing.T) (*viper.Viper, *hugofs.Fs, string) {
	assert := require.New(t)
	// We need to use the OS fs for this.
	cfg := viper.New()
	fs := hugofs.NewFrom(hugofs.Os, cfg)
	fs.Destination = &afero.MemMapFs{}
	loadDefaultSettingsFor(cfg)

	workDir, err := ioutil.TempDir("", "hugosym")

	if runtime.GOOS == "darwin" && !strings.HasPrefix(workDir, "/private") {
		// To get the entry folder in line with the rest. This is a little bit
		// mysterious, but so be it.
		workDir = "/private" + workDir
	}

	contentDir := "base"
	cfg.Set("workingDir", workDir)
	cfg.Set("contentDir", contentDir)
	cfg.Set("baseURL", "https://example.com")

	layout := `{{ .Title }}|{{ .Content }}`
	pageContent := `---
slug: %s
date: 2017-10-09
---

TheContent.
`

	fs.Source.MkdirAll(filepath.Join(workDir, "layouts", "_default"), 0777)
	fs.Source.MkdirAll(filepath.Join(workDir, contentDir), 0777)
	fs.Source.MkdirAll(filepath.Join(workDir, contentDir, "a"), 0777)
	for i := 1; i <= 3; i++ {
		fs.Source.MkdirAll(filepath.Join(workDir, fmt.Sprintf("symcontent%d", i)), 0777)
	}
	fs.Source.MkdirAll(filepath.Join(workDir, "symcontent2", "a1"), 0777)

	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
	writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)

	writeSource(t, fs, filepath.Join(workDir, contentDir, "a", "regular.md"), fmt.Sprintf(pageContent, "a1"))

	// Regular files inside symlinked folder.
	writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s1.md"), fmt.Sprintf(pageContent, "s1"))
	writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s2.md"), fmt.Sprintf(pageContent, "s2"))

	// A bundle
	writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "index.md"), fmt.Sprintf(pageContent, ""))
	writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "page.md"), fmt.Sprintf(pageContent, "page"))
	writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "logo.png"), "image")

	// Assets
	writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s1.png"), "image")
	writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s2.png"), "image")

	// Symlinked sections inside content.
	os.Chdir(filepath.Join(workDir, contentDir))
	for i := 1; i <= 3; i++ {
		assert.NoError(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)))
	}

	os.Chdir(filepath.Join(workDir, contentDir, "a"))

	// Create a symlink to one single content file.
	assert.NoError(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"))

	os.Chdir(filepath.FromSlash("../../symcontent3"))

	// Create a circular symlink. Will print some warnings.
	assert.NoError(os.Symlink(filepath.Join("..", contentDir), filepath.FromSlash("circus")))

	os.Chdir(workDir)
	assert.NoError(err)

	return cfg, fs, workDir
}
@@ -151,14 +151,18 @@ func (c *PageCollections) removePageByPathPrefix(path string) {

func (c *PageCollections) removePageByPath(path string) {
	if i := c.rawAllPages.findPagePosByFilePath(path); i >= 0 {
		c.clearResourceCacheForPage(c.rawAllPages[i])
		c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
	}
}

func (c *PageCollections) removePage(page *Page) {
	if i := c.rawAllPages.findPagePos(page); i >= 0 {
		c.clearResourceCacheForPage(c.rawAllPages[i])
		c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
	}
}

func (c *PageCollections) findPagesByShortcode(shortcode string) Pages {

@@ -179,3 +183,12 @@ func (c *PageCollections) replacePage(page *Page) {
	c.removePage(page)
	c.addPage(page)
}

func (c *PageCollections) clearResourceCacheForPage(page *Page) {
	if len(page.Resources) > 0 {
		first := page.Resources[0]
		dir := path.Dir(first.RelPermalink())
		// This is done to keep the memory usage in check when doing live reloads.
		page.s.resourceSpec.DeleteCacheForPage(dir)
	}
}
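`clearResourceCacheForPage` evicts every cached processed resource whose key falls under the bundle's URL prefix; `DeleteCacheByPrefix` itself is defined elsewhere in this commit. A hedged sketch of what prefix-based eviction amounts to, with a hypothetical cache type:

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

type cache struct {
	mu sync.Mutex
	m  map[string][]byte
}

// deleteByPrefix drops every entry under a key prefix, e.g. all processed
// image variants below one bundle's directory.
func (c *cache) deleteByPrefix(prefix string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	for k := range c.m { // deleting while ranging a map is safe in Go
		if strings.HasPrefix(k, prefix) {
			delete(c.m, k)
		}
	}
}

func main() {
	c := &cache{m: map[string][]byte{
		"/blog/post/logo_100x100.png": nil,
		"/blog/post/logo_200x200.png": nil,
		"/blog/other/pic.png":         nil,
	}}
	c.deleteByPrefix("/blog/post/")
	fmt.Println(len(c.m)) // 1
}
```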
@@ -133,7 +133,7 @@ func TestGetPage(t *testing.T) {
		errorMsg := fmt.Sprintf("Test %d", i)
		page := s.getPage(test.kind, test.path...)
		assert.NotNil(page, errorMsg)
		assert.Equal(test.kind, page.Kind)
		assert.Equal(test.kind, page.Kind, errorMsg)
		assert.Equal(test.expectedTitle, page.Title)
	}
@@ -16,9 +16,12 @@ package hugolib
import (
	"fmt"
	"html/template"
	"os"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/resource"

	"github.com/gohugoio/hugo/media"

	"github.com/gohugoio/hugo/helpers"

@@ -34,6 +37,10 @@ type PageOutput struct {
	paginator     *Pager
	paginatorInit sync.Once

	// Page output specific resources
	resources     resource.Resources
	resourcesInit sync.Once

	// Keep this to create URL/path variations, i.e. paginators.
	targetPathDescriptor targetPathDescriptor

@@ -51,10 +58,7 @@ func (p *PageOutput) targetPath(addends ...string) (string, error) {
func newPageOutput(p *Page, createCopy bool, f output.Format) (*PageOutput, error) {
	// TODO(bep) This is only needed for tests and we should get rid of it.
	if p.targetPathDescriptorPrototype == nil {
		if err := p.initTargetPathDescriptor(); err != nil {
			return nil, err
		}
		if err := p.initURLs(); err != nil {
		if err := p.initPaths(); err != nil {
			return nil, err
		}
	}
@@ -241,6 +245,68 @@ func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) {
	return o, nil
}

// deleteResource removes the resource at index i from both this PageOutput and
// its Page. The two resource slices always have the same length, but may
// contain different elements.
func (p *PageOutput) deleteResource(i int) {
	p.resources = append(p.resources[:i], p.resources[i+1:]...)
	p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...)
}

func (p *PageOutput) Resources() resource.Resources {
	p.resourcesInit.Do(func() {
		// If the current output format shares the same path as the main page
		// output, we reuse the resource set. For the "amp" use case, we need
		// to clone them with a new base folder.
		ff := p.outputFormats[0]
		if p.outputFormat.Path == ff.Path {
			p.resources = p.Page.Resources
			return
		}

		// Clone it with new base.
		resources := make(resource.Resources, len(p.Page.Resources))

		for i, r := range p.Page.Resources {
			if c, ok := r.(resource.Cloner); ok {
				// Clone the same resource with a new target.
				resources[i] = c.WithNewBase(p.outputFormat.Path)
			} else {
				resources[i] = r
			}
		}

		p.resources = resources
	})

	return p.resources
}
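As an aside, here is a minimal, self-contained sketch of the Cloner semantics used above. The types are hypothetical stand-ins, not Hugo's: cloning only swaps the target base folder, so the same source file can be published under both the default path and e.g. the "amp" path.

```go
package main

import (
	"fmt"
	"path"
)

// res is a hypothetical stand-in for the commit's genericResource.
type res struct{ base, rel string }

// RelPermalink mirrors how a base folder prefixes the relative URL.
func (r res) RelPermalink() string { return path.Join("/", r.base, r.rel) }

// WithNewBase mirrors resource.Cloner: same source, new target base.
func (r res) WithNewBase(base string) res { return res{base: base, rel: r.rel} }

func main() {
	logo := res{rel: "blog/post/logo.png"}
	fmt.Println(logo.RelPermalink())                    // /blog/post/logo.png
	fmt.Println(logo.WithNewBase("amp").RelPermalink()) // /amp/blog/post/logo.png
}
```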
func (p *PageOutput) renderResources() error {

	for i, r := range p.Resources() {
		src, ok := r.(resource.Source)
		if !ok {
			// Pages get rendered with the owning page.
			continue
		}

		if err := src.Publish(); err != nil {
			if os.IsNotExist(err) {
				// The resource has been deleted from the file system.
				// This should be extremely rare, but can happen on live reload in
				// server mode when the same resource is a member of different
				// page bundles.
				p.deleteResource(i)
			} else {
				p.s.Log.ERROR.Printf("Failed to publish %q for page %q: %s", src.AbsSourceFilename(), p.pathOrTitle(), err)
			}
		} else {
			p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
		}
	}
	return nil
}

// AlternativeOutputFormats is only available on the top level rendering
// entry point, and not inside range loops on the Page collections.
// This method is just here to inform users of that restriction.
@@ -82,7 +82,6 @@ func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor
}

func (p *Page) initTargetPathDescriptor() error {
	d := &targetPathDescriptor{
		PathSpec: p.s.PathSpec,
		Kind:     p.Kind,
@@ -126,6 +125,35 @@ func (p *Page) initTargetPathDescriptor() error {

}

func (p *Page) initURLs() error {
	if len(p.outputFormats) == 0 {
		p.outputFormats = p.s.outputFormats[p.Kind]
	}
	rel := p.createRelativePermalink()

	var err error
	f := p.outputFormats[0]
	p.permalink, err = p.s.permalinkForOutputFormat(rel, f)
	if err != nil {
		return err
	}
	rel = p.s.PathSpec.PrependBasePath(rel)
	p.relPermalink = rel
	p.relPermalinkBase = strings.TrimSuffix(rel, f.MediaType.FullSuffix())
	p.layoutDescriptor = p.createLayoutDescriptor()
	return nil
}

func (p *Page) initPaths() error {
	if err := p.initTargetPathDescriptor(); err != nil {
		return err
	}
	if err := p.initURLs(); err != nil {
		return err
	}
	return nil
}

// createTargetPath creates the target filename for this Page for the given
// output.Format. Some additional URL parts can also be provided, the typical
// use case being pagination.
@@ -156,12 +184,7 @@ func createTargetPath(d targetPathDescriptor) string {

	isUgly := d.UglyURLs && !d.Type.NoUgly

	// If the page output format's base name is the same as the page base name,
	// we treat it as an ugly path, i.e.
	// my-blog-post-1/index.md => my-blog-post-1/index.html
	// (given the default values for that content file, i.e. no slug set etc.).
	// This introduces the behaviour from < Hugo 0.20, see issue #3396.
-	if d.BaseName != "" && d.BaseName == d.Type.BaseName {
+	if d.ExpandedPermalink == "" && d.BaseName != "" && d.BaseName == d.Type.BaseName {
		isUgly = true
	}
@@ -247,6 +270,9 @@ func createTargetPath(d targetPathDescriptor) string {

func (p *Page) createRelativePermalink() string {

	if len(p.outputFormats) == 0 {
+		if p.Kind == kindUnknown {
+			panic(fmt.Sprintf("Page %q has unknown kind", p.Title))
+		}
		panic(fmt.Sprintf("Page %q missing output format(s)", p.Title))
	}
@@ -264,6 +290,7 @@ func (p *Page) createRelativePermalinkForOutputFormat(f output.Format) string {
		p.s.Log.ERROR.Printf("Failed to create permalink for page %q: %s", p.FullFilePath(), err)
		return ""
	}

+	// For /index.json etc. we must use the full path.
	if strings.HasSuffix(f.BaseFilename(), "html") {
		tp = strings.TrimSuffix(tp, f.BaseFilename())
@@ -79,7 +79,6 @@ func TestPageTargetPath(t *testing.T) {
		Type: output.HTMLFormat}, "/a/b/mypage/index.html"},

	{
		// Issue #3396
		"HTML page with index as base", targetPathDescriptor{
			Kind: KindPage,
			Dir:  "/a/b",
23 hugolib/page_resource.go (new file)
@@ -0,0 +1,23 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"github.com/gohugoio/hugo/resource"
)

var (
	_ resource.Resource = (*Page)(nil)
	_ resource.Resource = (*PageOutput)(nil)
)
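(The blank-identifier assignments above are compile-time assertions: the build breaks if *Page or *PageOutput ever stops satisfying resource.Resource.)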
@@ -1458,8 +1458,7 @@ func TestTranslationKey(t *testing.T) {
	assert.Equal("page/k1", s.RegularPages[0].TranslationKey())
	p2 := s.RegularPages[1]

	// This is a single language setup
-	assert.Equal("page/sect/simple.en", p2.TranslationKey())
+	assert.Equal("page/sect/simple", p2.TranslationKey())
}
@@ -1582,6 +1581,7 @@ tags:
*some blog content*`))

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	require.Len(t, s.RegularPages, 4)

	pathFunc := func(s string) string {
@@ -300,7 +300,6 @@ func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) {
		p.paginator = pagers[0]
		p.paginator.source = "paginator"
		p.paginator.options = options
-		p.Site.addToPaginationPageCount(uint64(p.paginator.TotalPages()))
	}

	})
@@ -353,7 +352,6 @@ func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager,
		p.paginator = pagers[0]
		p.paginator.source = seq
		p.paginator.options = options
-		p.Site.addToPaginationPageCount(uint64(p.paginator.TotalPages()))
	}

	})
@@ -417,6 +415,10 @@ func paginatePages(td targetPathDescriptor, seq interface{}, pagerSize int) (pag
}

func toPages(seq interface{}) (Pages, error) {
	if seq == nil {
		return Pages{}, nil
	}

	switch seq.(type) {
	case Pages:
		return seq.(Pages), nil
@@ -17,6 +17,7 @@ import (
	"errors"
	"fmt"
	"path"
+	"path/filepath"
	"regexp"
	"strconv"
	"strings"
@@ -156,9 +157,13 @@ func pageToPermalinkTitle(p *Page, _ string) (string, error) {

// pageToPermalinkFilename returns the URL-safe form of the filename
func pageToPermalinkFilename(p *Page, _ string) (string, error) {
-	//var extension = p.Source.Ext
-	//var name = p.Source.Path()[0 : len(p.Source.Path())-len(extension)]
-	return p.s.PathSpec.URLize(p.Source.TranslationBaseName()), nil
+	name := p.File.TranslationBaseName()
+	if name == "index" {
+		// Page bundles; the directory name is hopefully a better name.
+		_, name = filepath.Split(p.File.Dir())
+	}
+
+	return p.s.PathSpec.URLize(name), nil
}

// if the page has a slug, return the slug, else return the title
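A quick, self-contained sketch of the bundle fallback in pageToPermalinkFilename above. The helper name and inputs are hypothetical; the real code splits p.File.Dir():

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// bundleName returns the bundle directory name for an "index" content file,
// otherwise the file's translation base name unchanged.
func bundleName(dir, base string) string {
	if base == "index" {
		_, base = filepath.Split(strings.TrimSuffix(dir, "/"))
	}
	return base
}

func main() {
	fmt.Println(bundleName("posts/my-first-post/", "index")) // my-first-post
	fmt.Println(bundleName("posts/", "hello-world"))         // hello-world
}
```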
84 hugolib/prune_resources.go (new file)
@@ -0,0 +1,84 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/spf13/afero"
)

// GC requires a build first.
func (h *HugoSites) GC() (int, error) {
	s := h.Sites[0]
	imageCacheDir := s.resourceSpec.AbsGenImagePath
	if len(imageCacheDir) < 10 {
		panic("invalid image cache")
	}

	isInUse := func(filename string) bool {
		key := strings.TrimPrefix(filename, imageCacheDir)
		for _, site := range h.Sites {
			if site.resourceSpec.IsInCache(key) {
				return true
			}
		}

		return false
	}

	counter := 0

	err := afero.Walk(s.Fs.Source, imageCacheDir, func(path string, info os.FileInfo, err error) error {
		if info == nil {
			return nil
		}

		if !strings.HasPrefix(path, imageCacheDir) {
			return fmt.Errorf("Invalid state, walk outside of resource dir: %q", path)
		}

		if info.IsDir() {
			f, err := s.Fs.Source.Open(path)
			if err != nil {
				return nil
			}
			defer f.Close()
			_, err = f.Readdirnames(1)
			if err == io.EOF {
				// Empty dir.
				s.Fs.Source.Remove(path)
			}

			return nil
		}

		inUse := isInUse(path)
		if !inUse {
			err := s.Fs.Source.Remove(path)
			if err != nil && !os.IsNotExist(err) {
				s.Log.ERROR.Printf("Failed to remove %q: %s", path, err)
			} else {
				counter++
			}
		}
		return nil
	})

	return counter, err
}
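Usage, sketched (this fragment is not from the diff; the setup is assumed): GC only makes sense against a completed build, since only cache entries the sites registered during that build count as "in use".

```go
// h is a *HugoSites, e.g. obtained from this package's test helpers
// or NewHugoSites (assumed setup, not shown here).
if err := h.Build(BuildCfg{}); err != nil {
	log.Fatal(err)
}
count, err := h.GC() // removes stale files under resources/_gen/images
if err != nil {
	log.Fatal(err)
}
fmt.Printf("removed %d unused cached images\n", count)
```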
@@ -38,7 +38,7 @@ func TestRSSOutput(t *testing.T) {
	cfg.Set("rssLimit", rssLimit)

	for _, src := range weightedSources {
-		writeSource(t, fs, filepath.Join("content", "sect", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", "sect", src[0]), src[1])
	}

	buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
@@ -32,7 +32,6 @@ import (

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/source"
	"github.com/gohugoio/hugo/tpl"
	"github.com/stretchr/testify/require"
)

@@ -530,10 +529,10 @@ tags:
		"<p><strong>Tags:</strong> 2</p>\n"},
	}

-	sources := make([]source.ByteSource, len(tests))
+	sources := make([][2]string, len(tests))

	for i, test := range tests {
-		sources[i] = source.ByteSource{Name: filepath.FromSlash(test.contentPath), Content: []byte(test.content)}
+		sources[i] = [2]string{filepath.FromSlash(test.contentPath), test.content}
	}

	addTemplates := func(templ tpl.TemplateHandler) error {
763 hugolib/site.go (diff suppressed because it is too large)
@@ -29,11 +29,13 @@ func TestEncodePage(t *testing.T) {
	t.Parallel()
	cfg, fs := newTestCfg()

-	// borrowed from menu_test.go
-	for _, src := range menuPageSources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
-	}
+	writeSource(t, fs, filepath.Join("content", "page.md"), `---
+title: Simple
+---
+Summary text
+
+<!--more-->
+`)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
@@ -98,6 +98,26 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
		continue
	}

	// We only need to re-publish the resources if the output format is different
	// from all of the previous (e.g. the "amp" use case).
	shouldRender := i == 0
	if i > 0 {
		for j := i; j >= 0; j-- {
			if outFormat.Path != page.outputFormats[j].Path {
				shouldRender = true
			} else {
				shouldRender = false
			}
		}
	}

	if shouldRender {
		if err := pageOutput.renderResources(); err != nil {
			s.Log.ERROR.Printf("Failed to render resources for page %q: %s", page, err)
			continue
		}
	}

	var layouts []string

	if page.selfLayout != "" {
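The intent of that loop, per its comment, can be sketched as a pure function (names hypothetical). Note the committed loop walks j from i down to 0 and keeps the result of the last comparison, so this sketch reflects the comment's intent rather than the exact control flow:

```go
package main

import "fmt"

// shouldPublishResources reports whether output format i's publish path
// differs from the paths of all formats rendered before it.
func shouldPublishResources(paths []string, i int) bool {
	if i == 0 {
		return true
	}
	for j := 0; j < i; j++ {
		if paths[i] == paths[j] {
			return false
		}
	}
	return true
}

func main() {
	// html and rss share the root path; amp publishes under /amp.
	paths := []string{"", "amp", ""}
	for i := range paths {
		fmt.Println(i, shouldPublishResources(paths, i)) // 0 true, 1 true, 2 false
	}
}
```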
@@ -125,7 +145,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa

	s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts)

-	if err := s.renderAndWritePage("page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
+	if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil {
		results <- err
	}
@@ -191,6 +211,7 @@ func (s *Site) renderPaginator(p *PageOutput) error {
	}

	if err := s.renderAndWritePage(
+		&s.PathSpec.ProcessingStats.PaginatorPages,
		pagerNode.Title,
		targetPath, pagerNode, layouts...); err != nil {
		return err
@@ -232,7 +253,7 @@ func (s *Site) renderRSS(p *PageOutput) error {
		return err
	}

-	return s.renderAndWriteXML(p.Title,
+	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.Title,
		targetPath, p, layouts...)
}
@@ -271,7 +292,7 @@ func (s *Site) render404() error {
		s.Log.ERROR.Printf("Failed to create target path for page %q: %s", p, err)
	}

-	return s.renderAndWritePage("404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...)
+	return s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "404 page", targetPath, pageOutput, s.appendThemeTemplates(nfLayouts)...)
}

func (s *Site) renderSitemap() error {
@@ -325,7 +346,7 @@ func (s *Site) renderSitemap() error {
	smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
	addLanguagePrefix := n.Site.IsMultiLingual()

-	return s.renderAndWriteXML("sitemap",
+	return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Sitemaps, "sitemap",
		n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, s.appendThemeTemplates(smLayouts)...)
}
@@ -357,7 +378,7 @@ func (s *Site) renderRobotsTXT() error {
		return nil
	}

-	return s.publish("robots.txt", outBuffer)
+	return s.publish(&s.PathSpec.ProcessingStats.Pages, "robots.txt", outBuffer)
}

// renderAliases renders shell pages that simply have a redirect in the header.
@@ -114,6 +114,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
	cfg.Set("paginate", 2)

	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})

	require.Len(t, s.RegularPages, 21)

	tests := []struct {

@@ -264,6 +265,8 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
		}},
	}

+	home := s.getPage(KindHome)
+
	for _, test := range tests {
		sections := strings.Split(test.sections, ",")
		p := s.getPage(KindSection, sections...)

@@ -276,8 +279,6 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
		test.verify(p)
	}

-	home := s.getPage(KindHome)
-
	assert.NotNil(home)

	assert.Len(home.Sections(), 9)
101 hugolib/site_stats_test.go (new file)
@@ -0,0 +1,101 @@
// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"testing"

	"github.com/gohugoio/hugo/helpers"
	"github.com/spf13/afero"

	"github.com/stretchr/testify/require"
)

func TestSiteStats(t *testing.T) {
	t.Parallel()

	assert := require.New(t)

	siteConfig := `
baseURL = "http://example.com/blog"

paginate = 1
defaultContentLanguage = "nn"

[languages]
[languages.nn]
languageName = "Nynorsk"
weight = 1
title = "Hugo på norsk"

[languages.en]
languageName = "English"
weight = 2
title = "Hugo in English"

`

	pageTemplate := `---
title: "T%d"
tags:
%s
categories:
%s
aliases: [Ali%d]
---
# Doc
`

	th, h := newTestSitesFromConfig(t, afero.NewMemMapFs(), siteConfig,
		"layouts/_default/single.html", "Single|{{ .Title }}|{{ .Content }}",
		"layouts/_default/list.html", `List|{{ .Title }}|Pages: {{ .Paginator.TotalPages }}|{{ .Content }}`,
		"layouts/_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
	)
	require.Len(t, h.Sites, 2)

	fs := th.Fs

	for i := 0; i < 2; i++ {
		for j := 0; j < 2; j++ {
			pageID := i + j + 1
			writeSource(t, fs, fmt.Sprintf("content/sect/p%d.md", pageID),
				fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID))
		}
	}

	for i := 0; i < 5; i++ {
		writeSource(t, fs, fmt.Sprintf("content/assets/image%d.png", i+1), "image")
	}

	err := h.Build(BuildCfg{})
	assert.NoError(err)

	stats := []*helpers.ProcessingStats{
		h.Sites[0].PathSpec.ProcessingStats,
		h.Sites[1].PathSpec.ProcessingStats}

	stats[0].Table(ioutil.Discard)
	stats[1].Table(ioutil.Discard)

	var buff bytes.Buffer

	helpers.ProcessingStatsTable(&buff, stats...)

	assert.Contains(buff.String(), "Pages | 19 | 6")
}
@@ -23,7 +23,6 @@ import (
	jww "github.com/spf13/jwalterweatherman"

	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/source"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/hugofs"
@@ -74,11 +73,11 @@ func TestRenderWithInvalidTemplate(t *testing.T) {

func TestDraftAndFutureRender(t *testing.T) {
	t.Parallel()
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*")},
-		{Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*")},
-		{Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*")},
-		{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.md"), "---\ntitle: doc1\ndraft: true\npublishdate: \"2414-05-29\"\n---\n# doc1\n*some content*"},
+		{filepath.FromSlash("sect/doc2.md"), "---\ntitle: doc2\ndraft: true\npublishdate: \"2012-05-29\"\n---\n# doc2\n*some content*"},
+		{filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc3\ndraft: false\npublishdate: \"2414-05-29\"\n---\n# doc3\n*some content*"},
+		{filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\ndraft: false\npublishdate: \"2012-05-29\"\n---\n# doc4\n*some content*"},
	}

	siteSetup := func(t *testing.T, configKeyValues ...interface{}) *Site {

@@ -91,7 +90,7 @@ func TestDraftAndFutureRender(t *testing.T) {
	}

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

@@ -132,9 +131,9 @@

func TestFutureExpirationRender(t *testing.T) {
	t.Parallel()
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*")},
-		{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc3.md"), "---\ntitle: doc1\nexpirydate: \"2400-05-29\"\n---\n# doc1\n*some content*"},
+		{filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc2\nexpirydate: \"2000-05-29\"\n---\n# doc2\n*some content*"},
	}

	siteSetup := func(t *testing.T) *Site {

@@ -142,7 +141,7 @@ func TestFutureExpirationRender(t *testing.T) {
	cfg.Set("baseURL", "http://auth/bub")

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}
@@ -234,29 +233,29 @@ func doTestCrossrefs(t *testing.T, relative, uglyURLs bool) {

	doc3Slashed := filepath.FromSlash("/sect/doc3.md")

-	sources := []source.ByteSource{
+	sources := [][2]string{
		{
-			Name:    filepath.FromSlash("sect/doc1.md"),
-			Content: []byte(fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode)),
+			filepath.FromSlash("sect/doc1.md"),
+			fmt.Sprintf(`Ref 2: {{< %s "sect/doc2.md" >}}`, refShortcode),
		},
		// Issue #1148: Make sure that no P-tags are added around shortcodes.
		{
-			Name: filepath.FromSlash("sect/doc2.md"),
-			Content: []byte(fmt.Sprintf(`**Ref 1:**
+			filepath.FromSlash("sect/doc2.md"),
+			fmt.Sprintf(`**Ref 1:**

{{< %s "sect/doc1.md" >}}

-THE END.`, refShortcode)),
+THE END.`, refShortcode),
		},
		// Issue #1753: Should not add a trailing newline after shortcode.
		{
-			Name:    filepath.FromSlash("sect/doc3.md"),
-			Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode)),
+			filepath.FromSlash("sect/doc3.md"),
+			fmt.Sprintf(`**Ref 1:**{{< %s "sect/doc3.md" >}}.`, refShortcode),
		},
		// Issue #3703
		{
-			Name:    filepath.FromSlash("sect/doc4.md"),
-			Content: []byte(fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed)),
+			filepath.FromSlash("sect/doc4.md"),
+			fmt.Sprintf(`**Ref 1:**{{< %s "%s" >}}.`, refShortcode, doc3Slashed),
		},
	}
@@ -267,7 +266,7 @@ THE END.`, refShortcode)),
	cfg.Set("verbose", true)

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

	s := buildSingleSite(
@@ -323,13 +322,13 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {

	cfg.Set("uglyURLs", uglyURLs)

-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.md"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
-		{Name: filepath.FromSlash("sect/doc2.md"), Content: []byte("---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.md"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
+		{filepath.FromSlash("sect/doc2.md"), "---\nurl: /ugly.html\nmarkup: markdown\n---\n# title\ndoc2 *content*"},
	}

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

	writeSource(t, fs, filepath.Join("layouts", "index.html"), "Home Sweet {{ if.IsHome }}Home{{ end }}.")
@@ -402,7 +401,9 @@ func TestSectionNaming(t *testing.T) {
	for _, canonify := range []bool{true, false} {
		for _, uglify := range []bool{true, false} {
			for _, pluralize := range []bool{true, false} {
-				doTestSectionNaming(t, canonify, uglify, pluralize)
+				t.Run(fmt.Sprintf("canonify=%t,uglify=%t,pluralize=%t", canonify, uglify, pluralize), func(t *testing.T) {
+					doTestSectionNaming(t, canonify, uglify, pluralize)
+				})
			}
		}
	}
@@ -418,12 +419,12 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
		expectedPathSuffix = "/index.html"
	}

-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("doc1")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.html"), "doc1"},
		// Add one more page to sect to make sure sect is picked in mainSections
-		{Name: filepath.FromSlash("sect/sect.html"), Content: []byte("sect")},
-		{Name: filepath.FromSlash("Fish and Chips/doc2.html"), Content: []byte("doc2")},
-		{Name: filepath.FromSlash("ラーメン/doc3.html"), Content: []byte("doc3")},
+		{filepath.FromSlash("sect/sect.html"), "sect"},
+		{filepath.FromSlash("Fish and Chips/doc2.html"), "doc2"},
+		{filepath.FromSlash("ラーメン/doc3.html"), "doc3"},
	}

	cfg, fs := newTestCfg()
@@ -433,8 +434,8 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
	cfg.Set("pluralizeListTitles", pluralize)
	cfg.Set("canonifyURLs", canonify)

-	for _, source := range sources {
-		writeSource(t, fs, filepath.Join("content", source.Name), string(source.Content))
+	for _, src := range sources {
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

	writeSource(t, fs, filepath.Join("layouts", "_default/single.html"), "{{.Content}}")
@@ -472,17 +473,17 @@ func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
}
func TestSkipRender(t *testing.T) {
	t.Parallel()
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
-		{Name: filepath.FromSlash("sect/doc2.html"), Content: []byte("<!doctype html><html><body>more content</body></html>")},
-		{Name: filepath.FromSlash("sect/doc3.md"), Content: []byte("# doc3\n*some* content")},
-		{Name: filepath.FromSlash("sect/doc4.md"), Content: []byte("---\ntitle: doc4\n---\n# doc4\n*some content*")},
-		{Name: filepath.FromSlash("sect/doc5.html"), Content: []byte("<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>")},
-		{Name: filepath.FromSlash("sect/doc6.html"), Content: []byte("<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>")},
-		{Name: filepath.FromSlash("doc7.html"), Content: []byte("<html><body>doc7 content</body></html>")},
-		{Name: filepath.FromSlash("sect/doc8.html"), Content: []byte("---\nmarkup: md\n---\n# title\nsome *content*")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.html"), "---\nmarkup: markdown\n---\n# title\nsome *content*"},
+		{filepath.FromSlash("sect/doc2.html"), "<!doctype html><html><body>more content</body></html>"},
+		{filepath.FromSlash("sect/doc3.md"), "# doc3\n*some* content"},
+		{filepath.FromSlash("sect/doc4.md"), "---\ntitle: doc4\n---\n# doc4\n*some content*"},
+		{filepath.FromSlash("sect/doc5.html"), "<!doctype html><html>{{ template \"head\" }}<body>body5</body></html>"},
+		{filepath.FromSlash("sect/doc6.html"), "<!doctype html><html>{{ template \"head_abs\" }}<body>body5</body></html>"},
+		{filepath.FromSlash("doc7.html"), "<html><body>doc7 content</body></html>"},
+		{filepath.FromSlash("sect/doc8.html"), "---\nmarkup: md\n---\n# title\nsome *content*"},
		// Issue #3021
-		{Name: filepath.FromSlash("doc9.html"), Content: []byte("<html><body>doc9: {{< myshortcode >}}</body></html>")},
+		{filepath.FromSlash("doc9.html"), "<html><body>doc9: {{< myshortcode >}}</body></html>"},
	}

	cfg, fs := newTestCfg()

@@ -493,7 +494,7 @@ func TestSkipRender(t *testing.T) {
	cfg.Set("baseURL", "http://auth/bub")

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

@@ -535,9 +536,9 @@ func TestSkipRender(t *testing.T) {

func TestAbsURLify(t *testing.T) {
	t.Parallel()
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.html"), Content: []byte("<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>")},
-		{Name: filepath.FromSlash("blue/doc2.html"), Content: []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.html"), "<!doctype html><html><head></head><body><a href=\"#frag1\">link</a></body></html>"},
+		{filepath.FromSlash("blue/doc2.html"), "---\nf: t\n---\n<!doctype html><html><body>more content</body></html>"},
	}
	for _, baseURL := range []string{"http://auth/bub", "http://base", "//base"} {
		for _, canonify := range []bool{true, false} {

@@ -549,7 +550,7 @@ func TestAbsURLify(t *testing.T) {
	cfg.Set("baseURL", baseURL)

	for _, src := range sources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

@@ -584,23 +585,23 @@ func TestAbsURLify(t *testing.T) {
	}
}
-var weightedPage1 = []byte(`+++
+var weightedPage1 = `+++
weight = "2"
title = "One"
my_param = "foo"
my_date = 1979-05-27T07:32:00Z
+++
-Front Matter with Ordered Pages`)
+Front Matter with Ordered Pages`

-var weightedPage2 = []byte(`+++
+var weightedPage2 = `+++
weight = "6"
title = "Two"
publishdate = "2012-03-05"
my_param = "foo"
+++
-Front Matter with Ordered Pages 2`)
+Front Matter with Ordered Pages 2`

-var weightedPage3 = []byte(`+++
+var weightedPage3 = `+++
weight = "4"
title = "Three"
date = "2012-04-06"

@@ -609,9 +610,9 @@ my_param = "bar"
only_one = "yes"
my_date = 2010-05-27T07:32:00Z
+++
-Front Matter with Ordered Pages 3`)
+Front Matter with Ordered Pages 3`

-var weightedPage4 = []byte(`+++
+var weightedPage4 = `+++
weight = "4"
title = "Four"
date = "2012-01-01"

@@ -620,13 +621,13 @@ my_param = "baz"
my_date = 2010-05-27T07:32:00Z
categories = [ "hugo" ]
+++
-Front Matter with Ordered Pages 4. This is longer content`)
+Front Matter with Ordered Pages 4. This is longer content`

-var weightedSources = []source.ByteSource{
-	{Name: filepath.FromSlash("sect/doc1.md"), Content: weightedPage1},
-	{Name: filepath.FromSlash("sect/doc2.md"), Content: weightedPage2},
-	{Name: filepath.FromSlash("sect/doc3.md"), Content: weightedPage3},
-	{Name: filepath.FromSlash("sect/doc4.md"), Content: weightedPage4},
+var weightedSources = [][2]string{
+	{filepath.FromSlash("sect/doc1.md"), weightedPage1},
+	{filepath.FromSlash("sect/doc2.md"), weightedPage2},
+	{filepath.FromSlash("sect/doc3.md"), weightedPage3},
+	{filepath.FromSlash("sect/doc4.md"), weightedPage4},
}
func TestOrderedPages(t *testing.T) {

@@ -635,7 +636,7 @@ func TestOrderedPages(t *testing.T) {
	cfg.Set("baseURL", "http://auth/bub")

	for _, src := range weightedSources {
-		writeSource(t, fs, filepath.Join("content", src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join("content", src[0]), src[1])
	}

@@ -678,11 +679,11 @@ func TestOrderedPages(t *testing.T) {
	}
}
-var groupedSources = []source.ByteSource{
-	{Name: filepath.FromSlash("sect1/doc1.md"), Content: weightedPage1},
-	{Name: filepath.FromSlash("sect1/doc2.md"), Content: weightedPage2},
-	{Name: filepath.FromSlash("sect2/doc3.md"), Content: weightedPage3},
-	{Name: filepath.FromSlash("sect3/doc4.md"), Content: weightedPage4},
+var groupedSources = [][2]string{
+	{filepath.FromSlash("sect1/doc1.md"), weightedPage1},
+	{filepath.FromSlash("sect1/doc2.md"), weightedPage2},
+	{filepath.FromSlash("sect2/doc3.md"), weightedPage3},
+	{filepath.FromSlash("sect3/doc4.md"), weightedPage4},
}
func TestGroupedPages(t *testing.T) {

@@ -822,16 +823,16 @@ func TestGroupedPages(t *testing.T) {
	}
}
-var pageWithWeightedTaxonomies1 = []byte(`+++
+var pageWithWeightedTaxonomies1 = `+++
tags = [ "a", "b", "c" ]
tags_weight = 22
categories = ["d"]
title = "foo"
categories_weight = 44
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`

-var pageWithWeightedTaxonomies2 = []byte(`+++
+var pageWithWeightedTaxonomies2 = `+++
tags = "a"
tags_weight = 33
title = "bar"

@@ -840,23 +841,23 @@ categories_weight = 11
alias = "spf13"
date = 1979-05-27T07:32:00Z
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`

-var pageWithWeightedTaxonomies3 = []byte(`+++
+var pageWithWeightedTaxonomies3 = `+++
title = "bza"
categories = [ "e" ]
categories_weight = 11
alias = "spf13"
date = 2010-05-27T07:32:00Z
+++
-Front Matter with weighted tags and categories`)
+Front Matter with weighted tags and categories`
func TestWeightedTaxonomies(t *testing.T) {
	t.Parallel()
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("sect/doc1.md"), Content: pageWithWeightedTaxonomies2},
-		{Name: filepath.FromSlash("sect/doc2.md"), Content: pageWithWeightedTaxonomies1},
-		{Name: filepath.FromSlash("sect/doc3.md"), Content: pageWithWeightedTaxonomies3},
+	sources := [][2]string{
+		{filepath.FromSlash("sect/doc1.md"), pageWithWeightedTaxonomies2},
+		{filepath.FromSlash("sect/doc2.md"), pageWithWeightedTaxonomies1},
+		{filepath.FromSlash("sect/doc3.md"), pageWithWeightedTaxonomies3},
	}
	taxonomies := make(map[string]string)

@@ -884,39 +885,23 @@ func TestWeightedTaxonomies(t *testing.T) {
	}
}
-func findPage(site *Site, f string) *Page {
-	sp := source.NewSourceSpec(site.Cfg, site.Fs)
-	currentPath := sp.NewFile(filepath.FromSlash(f))
-	//t.Logf("looking for currentPath: %s", currentPath.Path())
-
-	for _, page := range site.Pages {
-		//t.Logf("page: %s", page.Source.Path())
-		if page.Source.Path() == currentPath.Path() {
-			return page
-		}
-	}
-	return nil
-}
func setupLinkingMockSite(t *testing.T) *Site {
-	sources := []source.ByteSource{
-		{Name: filepath.FromSlash("level2/unique.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("rootfile.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("root-image.png"), Content: []byte("")},
+	sources := [][2]string{
+		{filepath.FromSlash("level2/unique.md"), ""},
+		{filepath.FromSlash("rootfile.md"), ""},
+		{filepath.FromSlash("root-image.png"), ""},

-		{Name: filepath.FromSlash("level2/2-root.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/common.md"), Content: []byte("")},
+		{filepath.FromSlash("level2/2-root.md"), ""},
+		{filepath.FromSlash("level2/common.md"), ""},

-		{Name: filepath.FromSlash("level2/2-image.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/common.png"), Content: []byte("")},
+		{filepath.FromSlash("level2/2-image.png"), ""},
+		{filepath.FromSlash("level2/common.png"), ""},

-		{Name: filepath.FromSlash("level2/level3/3-root.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/index.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/common.md"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/3-image.png"), Content: []byte("")},
-		{Name: filepath.FromSlash("level2/level3/common.png"), Content: []byte("")},
+		{filepath.FromSlash("level2/level3/start.md"), ""},
+		{filepath.FromSlash("level2/level3/3-root.md"), ""},
+		{filepath.FromSlash("level2/level3/common.md"), ""},
+		{filepath.FromSlash("level2/level3/3-image.png"), ""},
+		{filepath.FromSlash("level2/level3/common.png"), ""},
	}

	cfg, fs := newTestCfg()

@@ -939,7 +924,7 @@ func TestRefLinking(t *testing.T) {
	t.Parallel()
	site := setupLinkingMockSite(t)

-	currentPage := findPage(site, "level2/level3/index.md")
+	currentPage := site.getPage(KindPage, "level2/level3/start.md")
	if currentPage == nil {
		t.Fatalf("failed to find current page in site")
	}

@@ -953,8 +938,6 @@ func TestRefLinking(t *testing.T) {
		{"unique.md", "", true, "/level2/unique/"},
		{"level2/common.md", "", true, "/level2/common/"},
		{"3-root.md", "", true, "/level2/level3/3-root/"},
-		{"level2/level3/index.md", "amp", true, "/amp/level2/level3/"},
-		{"level2/index.md", "amp", false, "http://auth/amp/level2/"},
	} {
		if out, err := site.Info.refLink(test.link, currentPage, test.relative, test.outputFormat); err != nil || out != test.expected {
			t.Errorf("[%d] Expected %s to resolve to (%s), got (%s) - error: %s", i, test.link, test.expected, out, err)
@@ -20,7 +20,6 @@ import (
	"html/template"

	"github.com/gohugoio/hugo/deps"
-	"github.com/gohugoio/hugo/source"
	"github.com/stretchr/testify/require"
)

@@ -33,9 +32,9 @@ slug: slug-doc-2
slug doc 2 content
`

-var urlFakeSource = []source.ByteSource{
-	{Name: filepath.FromSlash("content/blue/doc1.md"), Content: []byte(slugDoc1)},
-	{Name: filepath.FromSlash("content/blue/doc2.md"), Content: []byte(slugDoc2)},
+var urlFakeSource = [][2]string{
+	{filepath.FromSlash("content/blue/doc1.md"), slugDoc1},
+	{filepath.FromSlash("content/blue/doc2.md"), slugDoc2},
}

// Issue #1105
@@ -14,7 +14,6 @@ import (
	"github.com/spf13/afero"

	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/source"
	"github.com/gohugoio/hugo/tpl"
	"github.com/spf13/viper"

@@ -169,6 +168,11 @@ func newDebugLogger() *jww.Notepad {
func newErrorLogger() *jww.Notepad {
	return jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
}

+func newWarningLogger() *jww.Notepad {
+	return jww.NewNotepad(jww.LevelWarn, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime)
+}
+
func createWithTemplateFromNameValues(additionalTemplates ...string) func(templ tpl.TemplateHandler) error {

	return func(templ tpl.TemplateHandler) error {

@@ -203,9 +207,17 @@ func buildSingleSiteExpected(t testing.TB, expectBuildError bool, depsCfg deps.D
	return h.Sites[0]
}

-func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...source.ByteSource) {
+func writeSourcesToSource(t *testing.T, base string, fs *hugofs.Fs, sources ...[2]string) {
	for _, src := range sources {
-		writeSource(t, fs, filepath.Join(base, src.Name), string(src.Content))
+		writeSource(t, fs, filepath.Join(base, src[0]), src[1])
	}
}

+func dumpPages(pages ...*Page) {
+	for i, p := range pages {
+		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n",
+			i+1,
+			p.Kind, p.Title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections()))
+	}
+}
@@ -17,6 +17,8 @@ import (
	"errors"
	"fmt"

	"github.com/gohugoio/hugo/helpers"

	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/source"
	"github.com/nicksnyder/go-i18n/i18n/bundle"

@@ -73,9 +75,8 @@ func (tp *TranslationProvider) Update(d *deps.Deps) error {

	for _, currentSource := range sources {
		for _, r := range currentSource.Files() {
-			err := i18nBundle.ParseTranslationFileBytes(r.LogicalName(), r.Bytes())
-			if err != nil {
-				return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err)
+			if err := addTranslationFile(i18nBundle, r); err != nil {
+				return err
			}
		}
	}

@@ -88,6 +89,19 @@ func (tp *TranslationProvider) Update(d *deps.Deps) error {

}

func addTranslationFile(bundle *bundle.Bundle, r source.ReadableFile) error {
	f, err := r.Open()
	if err != nil {
		return fmt.Errorf("Failed to open translations file %q: %s", r.LogicalName(), err)
	}
	defer f.Close()
	err = bundle.ParseTranslationFileBytes(r.LogicalName(), helpers.ReaderToBytes(f))
	if err != nil {
		return fmt.Errorf("Failed to load translations in file %q: %s", r.LogicalName(), err)
	}
	return nil
}

// Clone sets the language func for the new language.
func (tp *TranslationProvider) Clone(d *deps.Deps) error {
	d.Translate = tp.t.Func(d.Language.Lang)
551 resource/image.go (new file)
@@ -0,0 +1,551 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resource

import (
	"errors"
	"fmt"
	"image/color"
	"io"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/mitchellh/mapstructure"

	"github.com/gohugoio/hugo/helpers"
	"github.com/spf13/afero"

	// Importing image codecs for image.DecodeConfig
	"image"
	_ "image/gif"
	"image/jpeg"
	_ "image/png"

	"github.com/disintegration/imaging"

	// Import webp codec
	"sync"

	_ "golang.org/x/image/webp"
)

var (
	_ Resource = (*Image)(nil)
	_ Source   = (*Image)(nil)
	_ Cloner   = (*Image)(nil)
)

// Imaging contains default image processing configuration. This will be fetched
// from site (or language) config.
type Imaging struct {
	// Default image quality setting (1-100). Only used for JPEG images.
	Quality int

	// Resample filter used. See https://github.com/disintegration/imaging
	ResampleFilter string
}
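For context, a small self-contained sketch of how a config map can be decoded into such a struct with the mapstructure package imported above. The map literal stands in for Hugo's site config; the exact config key names are assumptions:

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Imaging mirrors the struct above.
type Imaging struct {
	Quality        int
	ResampleFilter string
}

func main() {
	// Stand-in for an imaging section of the site config.
	raw := map[string]interface{}{"quality": 90, "resampleFilter": "CatmullRom"}

	var cfg Imaging
	// mapstructure matches keys to field names case-insensitively.
	if err := mapstructure.WeakDecode(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {Quality:90 ResampleFilter:CatmullRom}
}
```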
const (
	defaultJPEGQuality    = 75
	defaultResampleFilter = "box"
)

var imageFormats = map[string]imaging.Format{
	".jpg":  imaging.JPEG,
	".jpeg": imaging.JPEG,
	".png":  imaging.PNG,
	".tif":  imaging.TIFF,
	".tiff": imaging.TIFF,
	".bmp":  imaging.BMP,
	".gif":  imaging.GIF,
}

var anchorPositions = map[string]imaging.Anchor{
	strings.ToLower("Center"):      imaging.Center,
	strings.ToLower("TopLeft"):     imaging.TopLeft,
	strings.ToLower("Top"):         imaging.Top,
	strings.ToLower("TopRight"):    imaging.TopRight,
	strings.ToLower("Left"):        imaging.Left,
	strings.ToLower("Right"):       imaging.Right,
	strings.ToLower("BottomLeft"):  imaging.BottomLeft,
	strings.ToLower("Bottom"):      imaging.Bottom,
	strings.ToLower("BottomRight"): imaging.BottomRight,
}

var imageFilters = map[string]imaging.ResampleFilter{
	strings.ToLower("NearestNeighbor"):   imaging.NearestNeighbor,
	strings.ToLower("Box"):               imaging.Box,
	strings.ToLower("Linear"):            imaging.Linear,
	strings.ToLower("Hermite"):           imaging.Hermite,
	strings.ToLower("MitchellNetravali"): imaging.MitchellNetravali,
	strings.ToLower("CatmullRom"):        imaging.CatmullRom,
	strings.ToLower("BSpline"):           imaging.BSpline,
	strings.ToLower("Gaussian"):          imaging.Gaussian,
	strings.ToLower("Lanczos"):           imaging.Lanczos,
	strings.ToLower("Hann"):              imaging.Hann,
	strings.ToLower("Hamming"):           imaging.Hamming,
	strings.ToLower("Blackman"):          imaging.Blackman,
	strings.ToLower("Bartlett"):          imaging.Bartlett,
	strings.ToLower("Welch"):             imaging.Welch,
	strings.ToLower("Cosine"):            imaging.Cosine,
}

type Image struct {
	config       image.Config
	configInit   sync.Once
	configLoaded bool

	copiedToDestinationInit sync.Once

	imaging *Imaging

	*genericResource
}

func (i *Image) Width() int {
	i.initConfig()
	return i.config.Width
}

func (i *Image) Height() int {
	i.initConfig()
	return i.config.Height
}

// WithNewBase implements the Cloner interface.
func (i *Image) WithNewBase(base string) Resource {
	return &Image{
		imaging:         i.imaging,
		genericResource: i.genericResource.WithNewBase(base).(*genericResource)}
}

// Resize resizes the image to the specified width and height using the specified resampling
// filter and returns the transformed image. If one of width or height is 0, the image aspect
// ratio is preserved.
func (i *Image) Resize(spec string) (*Image, error) {
	return i.doWithImageConfig("resize", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
		return imaging.Resize(src, conf.Width, conf.Height, conf.Filter), nil
	})
}

// Fit scales down the image using the specified resample filter to fit the specified
// maximum width and height.
func (i *Image) Fit(spec string) (*Image, error) {
	return i.doWithImageConfig("fit", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
		return imaging.Fit(src, conf.Width, conf.Height, conf.Filter), nil
	})
}

// Fill scales the image to the smallest possible size that will cover the specified
// dimensions, then crops the resized image to those dimensions using the given anchor
// point. Space delimited config: 200x300 TopLeft
func (i *Image) Fill(spec string) (*Image, error) {
	return i.doWithImageConfig("fill", spec, func(src image.Image, conf imageConfig) (image.Image, error) {
		return imaging.Fill(src, conf.Width, conf.Height, conf.Anchor, conf.Filter), nil
	})
}
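Putting the three methods together — the spec strings below follow the grammar implemented in parseImageConfig further down; img is assumed to be an *Image obtained from a page bundle:

```go
resized, err := img.Resize("600x") // width 600, height scaled to keep the aspect ratio
if err != nil {
	return err
}
fitted, err := img.Fit("400x400 Lanczos") // shrink to fit within 400x400, Lanczos filter
if err != nil {
	return err
}
filled, err := img.Fill("200x300 TopLeft q85") // cover 200x300, crop from top-left, JPEG quality 85
if err != nil {
	return err
}
// Each result is a new, cached *Image with its own RelPermalink(), Width() and Height().
_, _, _ = resized, fitted, filled
```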
// Holds configuration to create a new image from an existing one, resize etc.
type imageConfig struct {
	Action string

	// Quality ranges from 1 to 100 inclusive, higher is better.
	// This is only relevant for JPEG images.
	// Default is 75.
	Quality int

	// Rotate rotates an image by the given angle counter-clockwise.
	// The rotation will be performed first.
	Rotate int

	Width  int
	Height int

	Filter    imaging.ResampleFilter
	FilterStr string

	Anchor    imaging.Anchor
	AnchorStr string
}

func (i *Image) isJPEG() bool {
	name := strings.ToLower(i.rel)
	return strings.HasSuffix(name, ".jpg") || strings.HasSuffix(name, ".jpeg")
}

func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, conf imageConfig) (image.Image, error)) (*Image, error) {
	conf, err := parseImageConfig(spec)
	if err != nil {
		return nil, err
	}
	conf.Action = action

	if conf.Quality <= 0 && i.isJPEG() {
		// We need a quality setting for all JPEGs.
		conf.Quality = i.imaging.Quality
	}

	if conf.FilterStr == "" {
		conf.FilterStr = i.imaging.ResampleFilter
		conf.Filter = imageFilters[conf.FilterStr]
	}

	key := i.relPermalinkForRel(i.filenameFromConfig(conf))

	return i.spec.imageCache.getOrCreate(i.spec, key, func(resourceCacheFilename string) (*Image, error) {
		ci := i.clone()

		ci.setBasePath(conf)

		src, err := i.decodeSource()
		if err != nil {
			return nil, err
		}

		if conf.Rotate != 0 {
			// Rotate it before any scaling to get the dimensions correct.
			src = imaging.Rotate(src, float64(conf.Rotate), color.Transparent)
		}

		converted, err := f(src, conf)
		if err != nil {
			return ci, err
		}

		b := converted.Bounds()
		ci.config = image.Config{Width: b.Max.X, Height: b.Max.Y}
		ci.configLoaded = true

		return ci, i.encodeToDestinations(converted, conf, resourceCacheFilename, ci.RelPermalink())
	})
}
func (i imageConfig) key() string {
	k := strconv.Itoa(i.Width) + "x" + strconv.Itoa(i.Height)
	if i.Action != "" {
		k += "_" + i.Action
	}
	if i.Quality > 0 {
		k += "_q" + strconv.Itoa(i.Quality)
	}
	if i.Rotate != 0 {
		k += "_r" + strconv.Itoa(i.Rotate)
	}
	k += "_" + i.FilterStr + "_" + i.AnchorStr
	return k
}

var defaultImageConfig = imageConfig{
	Action:    "",
	Anchor:    imaging.Center,
	AnchorStr: strings.ToLower("Center"),
}

func newImageConfig(width, height, quality, rotate int, filter, anchor string) imageConfig {
	c := defaultImageConfig

	c.Width = width
	c.Height = height
	c.Quality = quality
	c.Rotate = rotate

	if filter != "" {
		filter = strings.ToLower(filter)
		if v, ok := imageFilters[filter]; ok {
			c.Filter = v
			c.FilterStr = filter
		}
	}

	if anchor != "" {
		anchor = strings.ToLower(anchor)
		if v, ok := anchorPositions[anchor]; ok {
			c.Anchor = v
			c.AnchorStr = anchor
		}
	}

	return c
}
func parseImageConfig(config string) (imageConfig, error) {
	var (
		c   = defaultImageConfig
		err error
	)

	if config == "" {
		return c, errors.New("image config cannot be empty")
	}

	parts := strings.Fields(config)
	for _, part := range parts {
		part = strings.ToLower(part)

		if pos, ok := anchorPositions[part]; ok {
			c.Anchor = pos
			c.AnchorStr = part
		} else if filter, ok := imageFilters[part]; ok {
			c.Filter = filter
			c.FilterStr = part
		} else if part[0] == 'q' {
			c.Quality, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
			if c.Quality < 1 || c.Quality > 100 {
				return c, errors.New("quality ranges from 1 to 100 inclusive")
			}
		} else if part[0] == 'r' {
			c.Rotate, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
		} else if strings.Contains(part, "x") {
			widthHeight := strings.Split(part, "x")
			if len(widthHeight) <= 2 {
				first := widthHeight[0]
				if first != "" {
					c.Width, err = strconv.Atoi(first)
					if err != nil {
						return c, err
					}
				}

				if len(widthHeight) == 2 {
					second := widthHeight[1]
					if second != "" {
						c.Height, err = strconv.Atoi(second)
						if err != nil {
							return c, err
						}
					}
				}
			} else {
				return c, errors.New("invalid image dimensions")
			}
		}
	}

	if c.Width == 0 && c.Height == 0 {
		return c, errors.New("must provide Width or Height")
	}

	return c, nil
}
func (i *Image) initConfig() error {
	var err error
	i.configInit.Do(func() {
		if i.configLoaded {
			return
		}

		var (
			f      afero.File
			config image.Config
		)

		f, err = i.spec.Fs.Source.Open(i.AbsSourceFilename())
		if err != nil {
			return
		}
		defer f.Close()

		config, _, err = image.DecodeConfig(f)
		if err != nil {
			return
		}
		i.config = config
	})

	return err
}
func (i *Image) decodeSource() (image.Image, error) {
|
||||
file, err := i.spec.Fs.Source.Open(i.AbsSourceFilename())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
return imaging.Decode(file)
|
||||
}
func (i *Image) copyToDestination(src string) error {
	var res error

	i.copiedToDestinationInit.Do(func() {
		target := filepath.Join(i.absPublishDir, i.RelPermalink())

		// Fast path:
		// This is a processed version of the original.
		// If it exists on destination with the same filename and file size, it is
		// the same file, so no need to transfer it again.
		if fi, err := i.spec.Fs.Destination.Stat(target); err == nil && fi.Size() == i.osFileInfo.Size() {
			return
		}

		in, err := i.spec.Fs.Source.Open(src)
		if err != nil {
			res = err
			return
		}
		defer in.Close()

		out, err := i.spec.Fs.Destination.Create(target)
		if err != nil {
			res = err
			return
		}
		defer out.Close()

		_, err = io.Copy(out, in)
		if err != nil {
			res = err
			return
		}
	})

	return res
}
func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resourceCacheFilename, filename string) error {
	ext := strings.ToLower(helpers.Ext(filename))

	imgFormat, ok := imageFormats[ext]
	if !ok {
		return imaging.ErrUnsupportedFormat
	}

	target := filepath.Join(i.absPublishDir, filename)

	file1, err := i.spec.Fs.Destination.Create(target)
	if err != nil {
		return err
	}
	defer file1.Close()

	var w io.Writer

	if resourceCacheFilename != "" {
		// Also save it to the image resource cache for later reuse.
		if err = i.spec.Fs.Source.MkdirAll(filepath.Dir(resourceCacheFilename), os.FileMode(0755)); err != nil {
			return err
		}

		file2, err := i.spec.Fs.Source.Create(resourceCacheFilename)
		if err != nil {
			return err
		}

		w = io.MultiWriter(file1, file2)
		defer file2.Close()
	} else {
		w = file1
	}

	switch imgFormat {
	case imaging.JPEG:

		var rgba *image.RGBA
		quality := conf.Quality

		if nrgba, ok := img.(*image.NRGBA); ok {
			if nrgba.Opaque() {
				rgba = &image.RGBA{
					Pix:    nrgba.Pix,
					Stride: nrgba.Stride,
					Rect:   nrgba.Rect,
				}
			}
		}
		if rgba != nil {
			return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
		}
		return jpeg.Encode(w, img, &jpeg.Options{Quality: quality})
	default:
		return imaging.Encode(w, img, imgFormat)
	}
}
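The JPEG branch above reinterprets an opaque `*image.NRGBA` as `*image.RGBA` by sharing the pixel buffer. That is safe because with alpha at 0xff everywhere, premultiplied and non-premultiplied pixel bytes are identical, and it spares the encoder a per-pixel conversion. A minimal standalone demonstration (my illustration, not part of the commit):

```go
package main

import (
	"fmt"
	"image"
	"image/color"
)

func main() {
	n := image.NewNRGBA(image.Rect(0, 0, 1, 1))
	n.Set(0, 0, color.NRGBA{R: 10, G: 20, B: 30, A: 0xff})

	// Share the buffer; no pixel data is copied or converted.
	r := &image.RGBA{Pix: n.Pix, Stride: n.Stride, Rect: n.Rect}
	fmt.Println(r.At(0, 0)) // {10 20 30 255}
}
```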
func (i *Image) clone() *Image {
	g := *i.genericResource

	return &Image{
		imaging:         i.imaging,
		genericResource: &g}
}

func (i *Image) setBasePath(conf imageConfig) {
	i.rel = i.filenameFromConfig(conf)
}

// We need to set this to something static during tests.
var fiModTimeFunc = func(fi os.FileInfo) int64 {
	return fi.ModTime().Unix()
}

func (i *Image) filenameFromConfig(conf imageConfig) string {
	p1, p2 := helpers.FileAndExt(i.rel)
	sizeModeStr := fmt.Sprintf("_S%d_T%d", i.osFileInfo.Size(), fiModTimeFunc(i.osFileInfo))
	// On scaling an already scaled image, we get the file info from the original.
	// Repeating the same info in the filename makes it stuttery for no good reason.
	if strings.Contains(p1, sizeModeStr) {
		sizeModeStr = ""
	}

	const md5Threshold = 100

	key := conf.key()

	// It is useful to have the key in clear text, but when nesting transforms, it
	// can easily be too long to read, and maybe even too long
	// for the different OSes to handle.
	if len(p1)+len(sizeModeStr)+len(p2) > md5Threshold {
		key = helpers.MD5String(p1 + key + p2)
		p1 = p1[:strings.Index(p1, "_S")]
	}

	return fmt.Sprintf("%s%s_%s%s", p1, sizeModeStr, key, p2)
}
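Concretely, re-tracing the two `Sprintf` calls above with the values pinned by `TestImageTransform` further down (file size 90587, mod time fixed at 10111213, and the default q75/box/center settings) yields exactly the name the tests assert:

```go
sizeModeStr := fmt.Sprintf("_S%d_T%d", 90587, 10111213)
name := fmt.Sprintf("%s%s_%s%s", "/a/sunset", sizeModeStr, "300x200_resize_q75_box_center", ".jpg")
fmt.Println(name) // /a/sunset_S90587_T10111213_300x200_resize_q75_box_center.jpg
```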
func decodeImaging(m map[string]interface{}) (Imaging, error) {
	var i Imaging
	if err := mapstructure.WeakDecode(m, &i); err != nil {
		return i, err
	}

	if i.Quality <= 0 || i.Quality > 100 {
		i.Quality = defaultJPEGQuality
	}

	if i.ResampleFilter == "" {
		i.ResampleFilter = defaultResampleFilter
	} else {
		filter := strings.ToLower(i.ResampleFilter)
		_, found := imageFilters[filter]
		if !found {
			return i, fmt.Errorf("%q is not a valid resample filter", filter)
		}
		i.ResampleFilter = filter
	}

	return i, nil
}
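These site-wide defaults are decoded from the `imaging` map in the site configuration (see `NewSpec` below, which reads `s.Cfg.GetStringMap("imaging")`). A small sketch of the decoding behavior, mirroring `TestDecodeImaging` later in this commit and assuming "box" is a registered filter name, as the defaults here suggest:

```go
imaging, err := decodeImaging(map[string]interface{}{
	"quality":        82,
	"resampleFilter": "Box",
})
// err == nil; imaging.Quality == 82, imaging.ResampleFilter == "box".
// An out-of-range quality silently falls back to defaultJPEGQuality,
// while an unknown filter name is reported as an error.
```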
112 resource/image_cache.go Normal file
@@ -0,0 +1,112 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resource

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/gohugoio/hugo/helpers"
)

type imageCache struct {
	absPublishDir string
	absCacheDir   string
	pathSpec      *helpers.PathSpec
	mu            sync.RWMutex
	store         map[string]*Image
}

func (c *imageCache) isInCache(key string) bool {
	c.mu.RLock()
	_, found := c.store[key]
	c.mu.RUnlock()
	return found
}

func (c *imageCache) deleteByPrefix(prefix string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	for k := range c.store {
		if strings.HasPrefix(k, prefix) {
			delete(c.store, k)
		}
	}
}

func (c *imageCache) getOrCreate(
	spec *Spec, key string, create func(resourceCacheFilename string) (*Image, error)) (*Image, error) {
	// First check the in-memory store, then the disk.
	c.mu.RLock()
	img, found := c.store[key]
	c.mu.RUnlock()

	if found {
		return img, nil
	}

	// Now look in the file cache.
	cacheFilename := filepath.Join(c.absCacheDir, key)

	// Note that this counter does not mean the image was actually processed
	// just now (a variant may be fetched from the file cache); it counts the
	// processed image variations for this site.
	c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)

	r, err := spec.NewResourceFromFilename(nil, c.absPublishDir, cacheFilename, key)
	notFound := err != nil && os.IsNotExist(err)
	if err != nil && !os.IsNotExist(err) {
		return nil, err
	}

	if notFound {
		img, err = create(cacheFilename)
		if err != nil {
			return nil, err
		}
	} else {
		img = r.(*Image)
	}

	c.mu.Lock()
	if img2, found := c.store[key]; found {
		c.mu.Unlock()
		return img2, nil
	}

	c.store[key] = img

	c.mu.Unlock()

	if notFound {
		// File already written to destination
		return img, nil
	}

	return img, img.copyToDestination(cacheFilename)
}
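`getOrCreate` is a double-checked cache: it reads under `RLock`, runs the potentially slow `create` outside any lock, then re-checks under the write lock so concurrent creators converge on a single canonical `*Image`. The same idiom in a minimal, generic form (my sketch under that reading, not the commit's code; assumes `sync` is imported):

```go
func getOrCreateCached(mu *sync.RWMutex, store map[string]string, key string, create func() string) string {
	mu.RLock()
	v, found := store[key]
	mu.RUnlock()
	if found {
		return v
	}

	v = create() // possibly slow; may race with another goroutine

	mu.Lock()
	defer mu.Unlock()
	if winner, ok := store[key]; ok {
		return winner // lost the race; keep the first stored value
	}
	store[key] = v
	return v
}
```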
func newImageCache(ps *helpers.PathSpec, absCacheDir, absPublishDir string) *imageCache {
	return &imageCache{pathSpec: ps, store: make(map[string]*Image), absCacheDir: absCacheDir, absPublishDir: absPublishDir}
}

func timeTrack(start time.Time, name string) {
	elapsed := time.Since(start)
	fmt.Printf("%s took %s\n", name, elapsed)
}
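`timeTrack` has no callers in this file yet; it is the classic defer-based timing helper, typically used like this (hypothetical example):

```go
func process() {
	defer timeTrack(time.Now(), "process")
	// ... work to be timed; the elapsed time prints when process returns.
}
```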
134 resource/image_test.go Normal file
@@ -0,0 +1,134 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resource

import (
	"fmt"
	"os"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestParseImageConfig(t *testing.T) {
	for i, this := range []struct {
		in     string
		expect interface{}
	}{
		{"300x400", newImageConfig(300, 400, 0, 0, "", "")},
		{"100x200 bottomRight", newImageConfig(100, 200, 0, 0, "", "BottomRight")},
		{"10x20 topleft Lanczos", newImageConfig(10, 20, 0, 0, "Lanczos", "topleft")},
		{"linear left 10x r180", newImageConfig(10, 0, 0, 180, "linear", "left")},
		{"x20 riGht Cosine q95", newImageConfig(0, 20, 95, 0, "cosine", "right")},

		{"", false},
		{"foo", false},
	} {
		result, err := parseImageConfig(this.in)
		if b, ok := this.expect.(bool); ok && !b {
			if err == nil {
				t.Errorf("[%d] parseImageConfig didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Fatalf("[%d] err: %s", i, err)
			}
			if fmt.Sprint(result) != fmt.Sprint(this.expect) {
				t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, this.expect)
			}
		}
	}
}

func TestImageTransform(t *testing.T) {
	fiModTimeFunc = func(fi os.FileInfo) int64 {
		return int64(10111213)
	}

	assert := require.New(t)

	image := fetchSunset(assert)

	assert.Equal("/a/sunset.jpg", image.RelPermalink())
	assert.Equal("image", image.ResourceType())

	resized, err := image.Resize("300x200")
	assert.NoError(err)
	assert.True(image != resized)
	assert.True(image.genericResource != resized.genericResource)

	resized0x, err := image.Resize("x200")
	assert.NoError(err)
	assert.Equal(320, resized0x.Width())
	assert.Equal(200, resized0x.Height())
	assertFileCache(assert, image.spec.Fs, resized0x.RelPermalink(), 320, 200)

	resizedx0, err := image.Resize("200x")
	assert.NoError(err)
	assert.Equal(200, resizedx0.Width())
	assert.Equal(125, resizedx0.Height())
	assertFileCache(assert, image.spec.Fs, resizedx0.RelPermalink(), 200, 125)

	resizedAndRotated, err := image.Resize("x200 r90")
	assert.NoError(err)
	assert.Equal(125, resizedAndRotated.Width())
	assert.Equal(200, resizedAndRotated.Height())
	assertFileCache(assert, image.spec.Fs, resizedAndRotated.RelPermalink(), 125, 200)

	assert.Equal("/a/sunset_S90587_T10111213_300x200_resize_q75_box_center.jpg", resized.RelPermalink())
	assert.Equal(300, resized.Width())
	assert.Equal(200, resized.Height())

	fitted, err := resized.Fit("50x50")
	assert.NoError(err)
	assert.Equal("/a/sunset_S90587_T10111213_300x200_resize_q75_box_center_50x50_fit_q75_box_center.jpg", fitted.RelPermalink())
	assert.Equal(50, fitted.Width())
	assert.Equal(31, fitted.Height())

	// Check the MD5 key threshold
	fittedAgain, _ := fitted.Fit("10x20")
	fittedAgain, err = fittedAgain.Fit("10x20")
	assert.NoError(err)
	assert.Equal("/a/sunset_f1fb715a17c42d5d4602a1870424d590.jpg", fittedAgain.RelPermalink())
	assert.Equal(10, fittedAgain.Width())
	assert.Equal(6, fittedAgain.Height())

	filled, err := image.Fill("200x100 bottomLeft")
	assert.NoError(err)
	assert.Equal("/a/sunset_S90587_T10111213_200x100_fill_q75_box_bottomleft.jpg", filled.RelPermalink())
	assert.Equal(200, filled.Width())
	assert.Equal(100, filled.Height())
	assertFileCache(assert, image.spec.Fs, filled.RelPermalink(), 200, 100)

	// Check cache
	filledAgain, err := image.Fill("200x100 bottomLeft")
	assert.NoError(err)
	assert.True(filled == filledAgain)
	assertFileCache(assert, image.spec.Fs, filledAgain.RelPermalink(), 200, 100)
}

func TestDecodeImaging(t *testing.T) {
	assert := require.New(t)
	m := map[string]interface{}{
		"quality":        42,
		"resampleFilter": "NearestNeighbor",
	}

	imaging, err := decodeImaging(m)

	assert.NoError(err)
	assert.Equal(42, imaging.Quality)
	assert.Equal("nearestneighbor", imaging.ResampleFilter)
}
275 resource/resource.go Normal file
@@ -0,0 +1,275 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resource

import (
	"fmt"
	"mime"
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/gohugoio/hugo/media"
	"github.com/gohugoio/hugo/source"

	"github.com/gohugoio/hugo/helpers"
)

var (
	_ Resource = (*genericResource)(nil)
	_ Source   = (*genericResource)(nil)
	_ Cloner   = (*genericResource)(nil)
)

const DefaultResourceType = "unknown"

type Source interface {
	AbsSourceFilename() string
	Publish() error
}

type Cloner interface {
	WithNewBase(base string) Resource
}

// Resource represents a linkable resource, i.e. a content page, image etc.
type Resource interface {
	Permalink() string
	RelPermalink() string
	ResourceType() string
}

// Resources represents a slice of resources, which can be a mix of different types.
// I.e. both pages and images etc.
type Resources []Resource

func (r Resources) ByType(tp string) []Resource {
	var filtered []Resource

	for _, resource := range r {
		if resource.ResourceType() == tp {
			filtered = append(filtered, resource)
		}
	}
	return filtered
}

// GetByPrefix gets the first resource matching the given filename prefix, e.g.
// "logo" will match logo.png. It returns nil if none found.
// In potentially ambiguous situations, combine it with ByType.
func (r Resources) GetByPrefix(prefix string) Resource {
	for _, resource := range r {
		_, name := filepath.Split(resource.RelPermalink())
		if strings.HasPrefix(name, prefix) {
			return resource
		}
	}
	return nil
}
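A short usage sketch (the `resources` value is hypothetical; the expected results mirror `TestResourcesGetByPrefix` below, and `fmt` is assumed imported):

```go
images := resources.ByType("image")   // every resource whose type is "image"
logo := resources.GetByPrefix("logo") // first resource whose filename starts with "logo", or nil
if logo != nil {
	fmt.Println(logo.RelPermalink()) // e.g. "logo1.png" in the tests below
}
```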
type Spec struct {
	*helpers.PathSpec
	mimeTypes media.Types

	// Holds default filter settings etc.
	imaging *Imaging

	imageCache *imageCache

	AbsGenImagePath string
}

func NewSpec(s *helpers.PathSpec, mimeTypes media.Types) (*Spec, error) {

	imaging, err := decodeImaging(s.Cfg.GetStringMap("imaging"))
	if err != nil {
		return nil, err
	}
	s.GetLayoutDirPath()

	genImagePath := s.AbsPathify(filepath.Join(s.Cfg.GetString("resourceDir"), "_gen", "images"))

	return &Spec{AbsGenImagePath: genImagePath, PathSpec: s, imaging: &imaging, mimeTypes: mimeTypes, imageCache: newImageCache(
		s,
		// We're going to write a cache pruning routine later, so make it extremely
		// unlikely that users shoot themselves in the foot by pointing this at
		// data they care about. This should be set in stone once released.
		genImagePath,
		s.AbsPathify(s.Cfg.GetString("publishDir")))}, nil
}

func (r *Spec) NewResourceFromFile(
	linker func(base string) string,
	absPublishDir string,
	file source.File, relTargetFilename string) (Resource, error) {

	return r.newResource(linker, absPublishDir, file.Filename(), file.FileInfo(), relTargetFilename)
}

func (r *Spec) NewResourceFromFilename(
	linker func(base string) string,
	absPublishDir,
	absSourceFilename, relTargetFilename string) (Resource, error) {

	fi, err := r.Fs.Source.Stat(absSourceFilename)
	if err != nil {
		return nil, err
	}
	return r.newResource(linker, absPublishDir, absSourceFilename, fi, relTargetFilename)
}

func (r *Spec) newResource(
	linker func(base string) string,
	absPublishDir,
	absSourceFilename string, fi os.FileInfo, relTargetFilename string) (Resource, error) {

	var mimeType string
	ext := filepath.Ext(relTargetFilename)
	m, found := r.mimeTypes.GetBySuffix(strings.TrimPrefix(ext, "."))
	if found {
		mimeType = m.SubType
	} else {
		mimeType = mime.TypeByExtension(ext)
		if mimeType == "" {
			mimeType = DefaultResourceType
		} else {
			mimeType = mimeType[:strings.Index(mimeType, "/")]
		}
	}

	gr := r.newGenericResource(linker, fi, absPublishDir, absSourceFilename, filepath.ToSlash(relTargetFilename), mimeType)

	if mimeType == "image" {
		return &Image{
			imaging:         r.imaging,
			genericResource: gr}, nil
	}
	return gr, nil
}

func (r *Spec) IsInCache(key string) bool {
	// This is used for cache pruning. We currently only have images, but we could
	// imagine expanding on this.
	return r.imageCache.isInCache(key)
}

func (r *Spec) DeleteCacheByPrefix(prefix string) {
	r.imageCache.deleteByPrefix(prefix)
}

func (r *Spec) CacheStats() string {
	r.imageCache.mu.RLock()
	defer r.imageCache.mu.RUnlock()

	s := fmt.Sprintf("Cache entries: %d", len(r.imageCache.store))

	count := 0
	for k := range r.imageCache.store {
		if count > 5 {
			break
		}
		s += "\n" + k
		count++
	}

	return s
}
// genericResource represents a generic linkable resource.
type genericResource struct {
	// The relative path to this resource.
	rel string

	// Base is set when the output format's path has an offset, e.g. for AMP.
	base string

	// Absolute filename to the source, including any content folder path.
	absSourceFilename string
	absPublishDir     string
	resourceType      string
	osFileInfo        os.FileInfo

	spec *Spec
	link func(rel string) string
}

func (l *genericResource) Permalink() string {
	return l.spec.PermalinkForBaseURL(l.RelPermalink(), l.spec.BaseURL.String())
}

func (l *genericResource) RelPermalink() string {
	return l.relPermalinkForRel(l.rel)
}

// Implement the Cloner interface.
func (l genericResource) WithNewBase(base string) Resource {
	l.base = base
	return &l
}

func (l *genericResource) relPermalinkForRel(rel string) string {
	if l.link != nil {
		rel = l.link(rel)
	}

	if l.base != "" {
		rel = path.Join(l.base, rel)
		if rel[0] != '/' {
			rel = "/" + rel
		}
	}

	return l.spec.PathSpec.URLizeFilename(rel)
}

func (l *genericResource) ResourceType() string {
	return l.resourceType
}

func (l *genericResource) AbsSourceFilename() string {
	return l.absSourceFilename
}

func (l *genericResource) Publish() error {
	f, err := l.spec.Fs.Source.Open(l.AbsSourceFilename())
	if err != nil {
		return err
	}
	defer f.Close()

	target := filepath.Join(l.absPublishDir, l.RelPermalink())

	return helpers.WriteToDisk(target, f, l.spec.Fs.Destination)
}

func (r *Spec) newGenericResource(
	linker func(base string) string,
	osFileInfo os.FileInfo,
	absPublishDir,
	absSourceFilename,
	baseFilename,
	resourceType string) *genericResource {

	return &genericResource{
		link:              linker,
		osFileInfo:        osFileInfo,
		absPublishDir:     absPublishDir,
		absSourceFilename: absSourceFilename,
		rel:               baseFilename,
		resourceType:      resourceType,
		spec:              r,
	}
}
108 resource/resource_test.go Normal file
@@ -0,0 +1,108 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package resource

import (
	"path"
	"path/filepath"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestGenericResource(t *testing.T) {
	assert := require.New(t)
	spec := newTestResourceSpec(assert)

	r := spec.newGenericResource(nil, nil, "/public", "/a/foo.css", "foo.css", "css")

	assert.Equal("https://example.com/foo.css", r.Permalink())
	assert.Equal("foo.css", r.RelPermalink())
	assert.Equal("css", r.ResourceType())
}

func TestGenericResourceWithLinkFactory(t *testing.T) {
	assert := require.New(t)
	spec := newTestResourceSpec(assert)

	factory := func(s string) string {
		return path.Join("/foo", s)
	}
	r := spec.newGenericResource(factory, nil, "/public", "/a/foo.css", "foo.css", "css")

	assert.Equal("https://example.com/foo/foo.css", r.Permalink())
	assert.Equal("/foo/foo.css", r.RelPermalink())
	assert.Equal("css", r.ResourceType())
}

func TestNewResourceFromFilename(t *testing.T) {
	assert := require.New(t)
	spec := newTestResourceSpec(assert)

	writeSource(t, spec.Fs, "/project/a/b/logo.png", "image")
	writeSource(t, spec.Fs, "/root/a/b/data.json", "json")

	r, err := spec.NewResourceFromFilename(nil, "/public",
		filepath.FromSlash("/project/a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))

	assert.NoError(err)
	assert.NotNil(r)
	assert.Equal("image", r.ResourceType())
	assert.Equal("a/b/logo.png", r.RelPermalink())
	assert.Equal("https://example.com/a/b/logo.png", r.Permalink())

	r, err = spec.NewResourceFromFilename(nil, "/public", "/root/a/b/data.json", "a/b/data.json")

	assert.NoError(err)
	assert.NotNil(r)
	assert.Equal("json", r.ResourceType())

	cloned := r.(Cloner).WithNewBase("aceof")
	assert.Equal(r.ResourceType(), cloned.ResourceType())
	assert.Equal("/aceof/a/b/data.json", cloned.RelPermalink())
}

func TestResourcesByType(t *testing.T) {
	assert := require.New(t)
	spec := newTestResourceSpec(assert)
	resources := Resources{
		spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"),
		spec.newGenericResource(nil, nil, "/public", "/a/logo.png", "logo.css", "image"),
		spec.newGenericResource(nil, nil, "/public", "/a/foo2.css", "foo2.css", "css"),
		spec.newGenericResource(nil, nil, "/public", "/a/foo3.css", "foo3.css", "css")}

	assert.Len(resources.ByType("css"), 3)
	assert.Len(resources.ByType("image"), 1)
}

func TestResourcesGetByPrefix(t *testing.T) {
	assert := require.New(t)
	spec := newTestResourceSpec(assert)
	resources := Resources{
		spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"),
		spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image"),
		spec.newGenericResource(nil, nil, "/public", "/b/logo2.png", "logo2.png", "image"),
		spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css"),
		spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css")}

	assert.Nil(resources.GetByPrefix("asdf"))
	assert.Equal("logo1.png", resources.GetByPrefix("logo").RelPermalink())
	assert.Equal("foo2.css", resources.GetByPrefix("foo2").RelPermalink())
	assert.Equal("foo1.css", resources.GetByPrefix("foo1").RelPermalink())
	assert.Nil(resources.GetByPrefix("asdfasdf"))
}
BIN resource/testdata/sunset.jpg vendored Normal file
Binary file not shown. After Width: | Height: | Size: 88 KiB
78 resource/testhelpers_test.go Normal file
@@ -0,0 +1,78 @@
package resource

import (
	"path/filepath"
	"testing"

	"image"
	"io"
	"os"
	"path"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/gohugoio/hugo/media"
	"github.com/spf13/afero"
	"github.com/spf13/viper"
	"github.com/stretchr/testify/require"
)

func newTestResourceSpec(assert *require.Assertions) *Spec {
	cfg := viper.New()
	cfg.Set("baseURL", "https://example.com/")
	cfg.Set("resourceDir", "/res")
	fs := hugofs.NewMem(cfg)

	s, err := helpers.NewPathSpec(fs, cfg)
	assert.NoError(err)

	spec, err := NewSpec(s, media.DefaultTypes)
	assert.NoError(err)
	return spec
}

func fetchSunset(assert *require.Assertions) *Image {
	src, err := os.Open("testdata/sunset.jpg")
	assert.NoError(err)

	spec := newTestResourceSpec(assert)

	out, err := spec.Fs.Source.Create("/b/sunset.jpg")
	assert.NoError(err)
	_, err = io.Copy(out, src)
	out.Close()
	src.Close()
	assert.NoError(err)

	factory := func(s string) string {
		return path.Join("/a", s)
	}

	r, err := spec.NewResourceFromFilename(factory, "/public", "/b/sunset.jpg", "sunset.jpg")
	assert.NoError(err)
	assert.IsType(&Image{}, r)
	return r.(*Image)
}

func assertFileCache(assert *require.Assertions, fs *hugofs.Fs, filename string, width, height int) {
	f, err := fs.Source.Open(filepath.Join("/res/_gen/images", filename))
	assert.NoError(err)
	defer f.Close()

	config, _, err := image.DecodeConfig(f)
	assert.NoError(err)

	assert.Equal(width, config.Width)
	assert.Equal(height, config.Height)
}

func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {
	writeToFs(t, fs.Source, filename, content)
}

func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
	if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
		t.Fatalf("Failed to write file: %s", err)
	}
}
@@ -14,6 +14,7 @@
package source

import (
	"path/filepath"
	"testing"

	"github.com/gohugoio/hugo/hugofs"

@@ -41,21 +42,21 @@ func TestIgnoreDotFilesAndDirectories(t *testing.T) {
		{"foobar/bar~foo.md", false, nil},
		{"foobar/foo.md", true, []string{"\\.md$", "\\.boo$"}},
		{"foobar/foo.html", false, []string{"\\.md$", "\\.boo$"}},
		{"foobar/foo.md", true, []string{"^foo"}},
		{"foobar/foo.md", false, []string{"*", "\\.md$", "\\.boo$"}},
		{"foobar/foo.md", true, []string{"foo.md$"}},
		{"foobar/foo.md", true, []string{"*", "\\.md$", "\\.boo$"}},
		{"foobar/.#content.md", true, []string{"/\\.#"}},
		{".#foobar.md", true, []string{"^\\.#"}},
	}

	for _, test := range tests {
	for i, test := range tests {

		v := viper.New()
		v.Set("ignoreFiles", test.ignoreFilesRegexpes)

		s := NewSourceSpec(v, hugofs.NewMem(v))

		if ignored := s.isNonProcessablePath(test.path); test.ignore != ignored {
			t.Errorf("File not ignored. Expected: %t, got: %t", test.ignore, ignored)
		if ignored := s.IgnoreFile(filepath.FromSlash(test.path)); test.ignore != ignored {
			t.Errorf("[%d] File not ignored", i)
		}
	}
}
@@ -38,7 +38,7 @@ type Dirs struct {
	staticDirs    []string
	AbsStaticDirs []string

	publishDir string
	Language   *helpers.Language
}

// NewDirs creates a new dirs with the given configuration and filesystem.

@@ -48,7 +48,12 @@ func NewDirs(fs *hugofs.Fs, cfg config.Provider, logger *jww.Notepad) (*Dirs, er
		return nil, err
	}

	d := &Dirs{pathSpec: ps, logger: logger}
	var l *helpers.Language
	if language, ok := cfg.(*helpers.Language); ok {
		l = language
	}

	d := &Dirs{Language: l, pathSpec: ps, logger: logger}

	return d, d.init(cfg)

@@ -96,8 +101,6 @@ func (d *Dirs) init(cfg config.Provider) error {
		d.AbsStaticDirs[i] = d.pathSpec.AbsPathify(di) + helpers.FilePathSeparator
	}

	d.publishDir = d.pathSpec.AbsPathify(cfg.GetString("publishDir")) + helpers.FilePathSeparator

	return nil
}
172 source/file.go
@@ -1,172 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"io"
	"path/filepath"
	"strings"

	"github.com/gohugoio/hugo/hugofs"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/helpers"
)

// SourceSpec abstracts language-specific file creation.
type SourceSpec struct {
	Cfg config.Provider
	Fs  *hugofs.Fs

	languages              map[string]interface{}
	defaultContentLanguage string
}

// NewSourceSpec initializes SourceSpec using languages from a given configuration.
func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) SourceSpec {
	defaultLang := cfg.GetString("defaultContentLanguage")
	languages := cfg.GetStringMap("languages")
	return SourceSpec{Cfg: cfg, Fs: fs, languages: languages, defaultContentLanguage: defaultLang}
}

// File represents a source content file.
// All paths are relative from the source directory base
type File struct {
	relpath     string // Original relative path, e.g. section/foo.txt
	logicalName string // foo.txt
	baseName    string // `post` for `post.md`, also `post.en` for `post.en.md`
	Contents    io.Reader
	section     string // The first directory
	dir         string // The relative directory Path (minus file name)
	ext         string // Just the ext (eg txt)
	uniqueID    string // MD5 of the file's path

	translationBaseName string // `post` for `post.es.md` (if `Multilingual` is enabled.)
	lang                string // The language code if `Multilingual` is enabled
}

// UniqueID is the MD5 hash of the file's path and is for most practical applications,
// Hugo content files being one of them, considered to be unique.
func (f *File) UniqueID() string {
	return f.uniqueID
}

// String returns the file's content as a string.
func (f *File) String() string {
	return helpers.ReaderToString(f.Contents)
}

// Bytes returns the file's content as a byte slice.
func (f *File) Bytes() []byte {
	return helpers.ReaderToBytes(f.Contents)
}

// BaseFileName is a filename without extension.
func (f *File) BaseFileName() string {
	return f.baseName
}

// TranslationBaseName is a filename with no extension,
// not even the optional language extension part.
func (f *File) TranslationBaseName() string {
	return f.translationBaseName
}

// Lang for this page, if `Multilingual` is enabled on your site.
func (f *File) Lang() string {
	return f.lang
}

// Section is first directory below the content root.
func (f *File) Section() string {
	return f.section
}

// LogicalName is filename and extension of the file.
func (f *File) LogicalName() string {
	return f.logicalName
}

// SetDir sets the relative directory where this file lives.
// TODO(bep) Get rid of this.
func (f *File) SetDir(dir string) {
	f.dir = dir
}

// Dir gets the name of the directory that contains this file.
// The directory is relative to the content root.
func (f *File) Dir() string {
	return f.dir
}

// Extension gets the file extension, i.e "myblogpost.md" will return "md".
func (f *File) Extension() string {
	return f.ext
}

// Ext is an alias for Extension.
func (f *File) Ext() string {
	return f.Extension()
}

// Path gets the relative path including file name and extension.
// The directory is relative to the content root.
func (f *File) Path() string {
	return f.relpath
}

// NewFileWithContents creates a new File pointer with the given relative path and
// content. The language defaults to "en".
func (sp SourceSpec) NewFileWithContents(relpath string, content io.Reader) *File {
	file := sp.NewFile(relpath)
	file.Contents = content
	file.lang = "en"
	return file
}

// NewFile creates a new File pointer with the given relative path.
func (sp SourceSpec) NewFile(relpath string) *File {
	f := &File{
		relpath: relpath,
	}

	f.dir, f.logicalName = filepath.Split(f.relpath)
	f.ext = strings.TrimPrefix(filepath.Ext(f.LogicalName()), ".")
	f.baseName = helpers.Filename(f.LogicalName())

	lang := strings.TrimPrefix(filepath.Ext(f.baseName), ".")
	if _, ok := sp.languages[lang]; lang == "" || !ok {
		f.lang = sp.defaultContentLanguage
		f.translationBaseName = f.baseName
	} else {
		f.lang = lang
		f.translationBaseName = helpers.Filename(f.baseName)
	}

	f.section = helpers.GuessSection(f.Dir())
	f.uniqueID = helpers.Md5String(filepath.ToSlash(f.relpath))

	return f
}

// NewFileFromAbs creates a new File pointer with the given full file path and
// content.
func (sp SourceSpec) NewFileFromAbs(base, fullpath string, content io.Reader) (f *File, err error) {
	var name string
	if name, err = helpers.GetRelativePath(fullpath, base); err != nil {
		return nil, err
	}

	return sp.NewFileWithContents(name, content), nil
}
213 source/fileInfo.go Normal file
@@ -0,0 +1,213 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"io"
	"os"
	"path/filepath"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/helpers"
)

// FileInfo implements the File interface.
var (
	_ File         = (*FileInfo)(nil)
	_ ReadableFile = (*FileInfo)(nil)
)

type File interface {

	// Filename gets the full path and filename to the file.
	Filename() string

	// Path gets the relative path including file name and extension.
	// The directory is relative to the content root.
	Path() string

	// Dir gets the name of the directory that contains this file.
	// The directory is relative to the content root.
	Dir() string

	// Extension gets the file extension, i.e "myblogpost.md" will return "md".
	Extension() string
	// Ext is an alias for Extension.
	Ext() string // Hmm... Deprecate Extension

	// Lang for this page, if `Multilingual` is enabled on your site.
	Lang() string

	// LogicalName is filename and extension of the file.
	LogicalName() string

	// Section is first directory below the content root.
	Section() string

	// BaseFileName is a filename without extension.
	BaseFileName() string

	// TranslationBaseName is a filename with no extension,
	// not even the optional language extension part.
	TranslationBaseName() string

	// UniqueID is the MD5 hash of the file's path and is for most practical applications,
	// Hugo content files being one of them, considered to be unique.
	UniqueID() string

	FileInfo() os.FileInfo

	String() string

	// Deprecated
	Bytes() []byte
}

// A ReadableFile is a File that is readable.
type ReadableFile interface {
	File
	Open() (io.ReadCloser, error)
}

type FileInfo struct {

	// Absolute filename to the file on disk.
	filename string
	fi       os.FileInfo

	// Derived from filename
	ext  string // Extension without any "."
	lang string

	name string

	dir                 string
	relDir              string
	relPath             string
	baseName            string
	translationBaseName string
	section             string

	uniqueID string

	sp *SourceSpec

	lazyInit sync.Once
}

func (fi *FileInfo) Filename() string            { return fi.filename }
func (fi *FileInfo) Path() string                { return fi.relPath }
func (fi *FileInfo) Dir() string                 { return fi.relDir }
func (fi *FileInfo) Extension() string           { return fi.Ext() }
func (fi *FileInfo) Ext() string                 { return fi.ext }
func (fi *FileInfo) Lang() string                { return fi.lang }
func (fi *FileInfo) LogicalName() string         { return fi.name }
func (fi *FileInfo) BaseFileName() string        { return fi.baseName }
func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName }

func (fi *FileInfo) Section() string {
	fi.init()
	return fi.section
}

func (fi *FileInfo) UniqueID() string {
	fi.init()
	return fi.uniqueID
}

func (fi *FileInfo) FileInfo() os.FileInfo {
	return fi.fi
}

func (fi *FileInfo) Bytes() []byte {
	// Remove in Hugo 0.34
	helpers.Deprecated("File", "Bytes", "", false)
	return []byte("")
}

func (fi *FileInfo) String() string { return fi.BaseFileName() }

// We create a lot of these FileInfo objects, but parts of them are used only
// in some cases and are slightly expensive to construct.
func (fi *FileInfo) init() {
	fi.lazyInit.Do(func() {
		parts := strings.Split(fi.relDir, helpers.FilePathSeparator)
		var section string
		if len(parts) == 1 {
			section = parts[0]
		} else if len(parts) > 1 {
			if parts[0] == "" {
				section = parts[1]
			} else {
				section = parts[0]
			}
		}

		fi.section = section

		fi.uniqueID = helpers.MD5String(filepath.ToSlash(fi.relPath))
	})
}
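The section rule above can be re-traced with plain strings ("/" standing in for `helpers.FilePathSeparator`): the first non-empty path element of the relative directory wins. This standalone sketch is illustrative only, not part of the commit:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, relDir := range []string{"blog", "blog/2017", "/blog", ""} {
		parts := strings.Split(relDir, "/")
		var section string
		if len(parts) == 1 {
			section = parts[0]
		} else if len(parts) > 1 {
			if parts[0] == "" {
				section = parts[1]
			} else {
				section = parts[0]
			}
		}
		fmt.Printf("%q -> %q\n", relDir, section)
	}
	// "blog" -> "blog", "blog/2017" -> "blog", "/blog" -> "blog", "" -> ""
}
```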
func (sp *SourceSpec) NewFileInfo(baseDir, filename string, fi os.FileInfo) *FileInfo {
	dir, name := filepath.Split(filename)

	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
	baseDir = strings.TrimSuffix(baseDir, helpers.FilePathSeparator)

	relDir := ""
	if dir != baseDir {
		relDir = strings.TrimPrefix(dir, baseDir)
	}

	relDir = strings.TrimPrefix(relDir, helpers.FilePathSeparator)

	relPath := filepath.Join(relDir, name)

	ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), "."))
	baseName := helpers.Filename(name)

	lang := strings.TrimPrefix(filepath.Ext(baseName), ".")
	var translationBaseName string

	if _, ok := sp.Languages[lang]; lang == "" || !ok {
		lang = sp.DefaultContentLanguage
		translationBaseName = baseName
	} else {
		translationBaseName = helpers.Filename(baseName)
	}

	f := &FileInfo{
		sp:                  sp,
		filename:            filename,
		fi:                  fi,
		lang:                lang,
		ext:                 ext,
		dir:                 dir,
		relDir:              relDir,
		relPath:             relPath,
		name:                name,
		baseName:            baseName,
		translationBaseName: translationBaseName,
	}

	return f
}

// Open implements ReadableFile.
func (fi *FileInfo) Open() (io.ReadCloser, error) {
	return fi.sp.Fs.Source.Open(fi.Filename())
}
@@ -1,4 +1,4 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

@@ -13,13 +13,10 @@
package source

// ByteSource represents a source's name and content.
// It's currently only used for testing purposes.
type ByteSource struct {
	Name    string
	Content []byte
}
import (
	"testing"
)

func TestFileInfo(t *testing.T) {

func (b *ByteSource) String() string {
	return b.Name + " " + string(b.Content)
}
@@ -1,62 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"path/filepath"
	"strings"
	"testing"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/viper"

	"github.com/stretchr/testify/assert"
)

func TestFileUniqueID(t *testing.T) {
	ss := newTestSourceSpec()

	f1 := File{uniqueID: "123"}
	f2 := ss.NewFile("a")

	assert.Equal(t, "123", f1.UniqueID())
	assert.Equal(t, "0cc175b9c0f1b6a831c399e269772661", f2.UniqueID())

	f3 := ss.NewFile(filepath.FromSlash("test1/index.md"))
	f4 := ss.NewFile(filepath.FromSlash("test2/index.md"))

	assert.NotEqual(t, f3.UniqueID(), f4.UniqueID())

	f5l := ss.NewFile("test3/index.md")
	f5w := ss.NewFile(filepath.FromSlash("test3/index.md"))

	assert.Equal(t, f5l.UniqueID(), f5w.UniqueID())
}

func TestFileString(t *testing.T) {
	ss := newTestSourceSpec()
	assert.Equal(t, "abc", ss.NewFileWithContents("a", strings.NewReader("abc")).String())
	assert.Equal(t, "", ss.NewFile("a").String())
}

func TestFileBytes(t *testing.T) {
	ss := newTestSourceSpec()
	assert.Equal(t, []byte("abc"), ss.NewFileWithContents("a", strings.NewReader("abc")).Bytes())
	assert.Equal(t, []byte(""), ss.NewFile("a").Bytes())
}

func newTestSourceSpec() SourceSpec {
	v := viper.New()
	return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v}
}
@@ -14,73 +14,52 @@
package source

import (
	"io"
	"os"
	"path/filepath"
	"regexp"
	"runtime"
	"strings"
	"sync"

	"github.com/gohugoio/hugo/helpers"
	"github.com/spf13/cast"
	jww "github.com/spf13/jwalterweatherman"
	"golang.org/x/text/unicode/norm"
)

type Input interface {
	Files() []*File
}

type Filesystem struct {
	files      []*File
	Base       string
	AvoidPaths []string
	files     []ReadableFile
	filesInit sync.Once

	Base string

	SourceSpec
}

func (sp SourceSpec) NewFilesystem(base string, avoidPaths ...string) *Filesystem {
	return &Filesystem{SourceSpec: sp, Base: base, AvoidPaths: avoidPaths}
type Input interface {
	Files() []ReadableFile
}

func (f *Filesystem) FilesByExts(exts ...string) []*File {
	var newFiles []*File

	if len(exts) == 0 {
		return f.Files()
	}

	for _, x := range f.Files() {
		for _, e := range exts {
			if x.Ext() == strings.TrimPrefix(e, ".") {
				newFiles = append(newFiles, x)
			}
		}
	}
	return newFiles
func (sp SourceSpec) NewFilesystem(base string) *Filesystem {
	return &Filesystem{SourceSpec: sp, Base: base}
}

func (f *Filesystem) Files() []*File {
	if len(f.files) < 1 {
func (f *Filesystem) Files() []ReadableFile {
	f.filesInit.Do(func() {
		f.captureFiles()
	}
	})
	return f.files
}

// add populates a file in the Filesystem.files
func (f *Filesystem) add(name string, reader io.Reader) (err error) {
	var file *File
func (f *Filesystem) add(name string, fi os.FileInfo) (err error) {
	var file ReadableFile

	if runtime.GOOS == "darwin" {
		// When a file system is HFS+, its filepath is in NFD form.
		name = norm.NFC.String(name)
	}

	file, err = f.SourceSpec.NewFileFromAbs(f.Base, name, reader)
	file = f.SourceSpec.NewFileInfo(f.Base, name, fi)
	f.files = append(f.files, file)

	if err == nil {
		f.files = append(f.files, file)
	}
	return err
}

@@ -90,16 +69,12 @@ func (f *Filesystem) captureFiles() {
		return nil
	}

	b, err := f.ShouldRead(filePath, fi)
	b, err := f.shouldRead(filePath, fi)
	if err != nil {
		return err
	}
	if b {
		rd, err := NewLazyFileReader(f.Fs.Source, filePath)
		if err != nil {
			return err
		}
		f.add(filePath, rd)
		f.add(filePath, fi)
	}
	return err
}

@@ -118,11 +93,11 @@ func (f *Filesystem) captureFiles() {

}

func (f *Filesystem) ShouldRead(filePath string, fi os.FileInfo) (bool, error) {
func (f *Filesystem) shouldRead(filename string, fi os.FileInfo) (bool, error) {
	if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
		link, err := filepath.EvalSymlinks(filePath)
		link, err := filepath.EvalSymlinks(filename)
		if err != nil {
			jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filePath, err)
			jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filename, err)
			return false, nil
		}
		linkfi, err := f.Fs.Source.Stat(link)

@@ -130,52 +105,25 @@ func (f *Filesystem) ShouldRead(filePath string, fi os.FileInfo) (bool, error) {
		jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
		return false, nil
	}

	if !linkfi.Mode().IsRegular() {
		jww.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", filePath)
		jww.ERROR.Printf("Symbolic links for directories not supported, skipping '%s'", filename)
	}
	return false, nil
}

	ignore := f.SourceSpec.IgnoreFile(filename)

	if fi.IsDir() {
		if f.avoid(filePath) || f.isNonProcessablePath(filePath) {
		if ignore {
			return false, filepath.SkipDir
		}
		return false, nil
	}

	if f.isNonProcessablePath(filePath) {
	if ignore {
		return false, nil
	}

	return true, nil
}

func (f *Filesystem) avoid(filePath string) bool {
	for _, avoid := range f.AvoidPaths {
		if avoid == filePath {
			return true
		}
	}
	return false
}

func (sp SourceSpec) isNonProcessablePath(filePath string) bool {
	base := filepath.Base(filePath)
	if strings.HasPrefix(base, ".") ||
		strings.HasPrefix(base, "#") ||
		strings.HasSuffix(base, "~") {
		return true
	}
	ignoreFiles := cast.ToStringSlice(sp.Cfg.Get("ignoreFiles"))
	if len(ignoreFiles) > 0 {
		for _, ignorePattern := range ignoreFiles {
			match, err := regexp.MatchString(ignorePattern, filePath)
			if err != nil {
				helpers.DistinctErrorLog.Printf("Invalid regexp '%s' in ignoreFiles: %s", ignorePattern, err)
				return false
			} else if match {
				return true
			}
		}
	}
	return false
}
@@ -14,11 +14,13 @@
package source

import (
	"bytes"
	"path/filepath"
	"os"
	"runtime"
	"strings"
	"testing"

	"github.com/gohugoio/hugo/hugofs"

	"github.com/spf13/viper"
)

func TestEmptySourceFilesystem(t *testing.T) {

@@ -37,54 +39,6 @@ type TestPath struct {
	dir string
}

func TestAddFile(t *testing.T) {
	ss := newTestSourceSpec()
	tests := platformPaths
	for _, test := range tests {
		base := platformBase
		srcDefault := ss.NewFilesystem("")
		srcWithBase := ss.NewFilesystem(base)

		for _, src := range []*Filesystem{srcDefault, srcWithBase} {

			p := test.filename
			if !filepath.IsAbs(test.filename) {
				p = filepath.Join(src.Base, test.filename)
			}

			if err := src.add(p, bytes.NewReader([]byte(test.content))); err != nil {
				if err.Error() == "source: missing base directory" {
					continue
				}
				t.Fatalf("%s add returned an error: %s", p, err)
			}

			if len(src.Files()) != 1 {
				t.Fatalf("%s Files() should return 1 file", p)
			}

			f := src.Files()[0]
			if f.LogicalName() != test.logical {
				t.Errorf("Filename (Base: %q) expected: %q, got: %q", src.Base, test.logical, f.LogicalName())
			}

			b := new(bytes.Buffer)
			b.ReadFrom(f.Contents)
			if b.String() != test.content {
				t.Errorf("File (Base: %q) contents should be %q, got: %q", src.Base, test.content, b.String())
			}

			if f.Section() != test.section {
				t.Errorf("File section (Base: %q) expected: %q, got: %q", src.Base, test.section, f.Section())
			}

			if f.Dir() != test.dir {
				t.Errorf("Dir path (Base: %q) expected: %q, got: %q", src.Base, test.dir, f.Dir())
			}
		}
	}
}

func TestUnicodeNorm(t *testing.T) {
	if runtime.GOOS != "darwin" {
		// Normalization code is only for Mac OS, since it is not necessary for other OSes.

@@ -100,10 +54,11 @@ func TestUnicodeNorm(t *testing.T) {
	}

	ss := newTestSourceSpec()
	var fi os.FileInfo

	for _, path := range paths {
		src := ss.NewFilesystem("")
		_ = src.add(path.NFD, strings.NewReader(""))
		src := ss.NewFilesystem("base")
		_ = src.add(path.NFD, fi)
		f := src.Files()[0]
		if f.BaseFileName() != path.NFC {
			t.Fatalf("file name in NFD form should be normalized (%s)", path.NFC)

@@ -111,3 +66,8 @@ func TestUnicodeNorm(t *testing.T) {
	}

}

func newTestSourceSpec() SourceSpec {
	v := viper.New()
	return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v}
}
@ -1,170 +0,0 @@
|
|||
// Copyright 2015 The Hugo Authors. All rights reserved.
// Portions Copyright 2009 The Go Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"bytes"
	"errors"
	"fmt"
	"io"

	"github.com/spf13/afero"
)

// LazyFileReader is an io.Reader implementation that postpones reading the
// file contents until they are actually needed. It keeps the filename and
// caches the contents once the file has been read.
type LazyFileReader struct {
	fs       afero.Fs
	filename string
	contents *bytes.Reader
	pos      int64
}

// NewLazyFileReader creates and initializes a new LazyFileReader for filename.
// It checks whether the file can be opened. If that fails, it returns nil and
// an error.
func NewLazyFileReader(fs afero.Fs, filename string) (*LazyFileReader, error) {
	f, err := fs.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	return &LazyFileReader{fs: fs, filename: filename, contents: nil, pos: 0}, nil
}

// Filename returns the file name that the LazyFileReader keeps.
func (l *LazyFileReader) Filename() string {
	return l.filename
}

// Read reads up to len(p) bytes from the LazyFileReader's file and copies them
// into p. It returns the number of bytes read and any error encountered. Once
// the file has been read, subsequent calls serve the contents from the cache
// instead of re-reading the file.
func (l *LazyFileReader) Read(p []byte) (n int, err error) {
	if l.contents == nil {
		b, err := afero.ReadFile(l.fs, l.filename)
		if err != nil {
			return 0, fmt.Errorf("failed to read content from %s: %s", l.filename, err.Error())
		}
		l.contents = bytes.NewReader(b)
	}
	if _, err = l.contents.Seek(l.pos, 0); err != nil {
		return 0, errors.New("failed to set read position: " + err.Error())
	}
	n, err = l.contents.Read(p)
	l.pos += int64(n)
	return n, err
}

// Seek implements the io.Seeker interface. Once the reader's contents have
// been consumed by Read, WriteTo etc., this function must be used to rewind
// it before it can be read again.
func (l *LazyFileReader) Seek(offset int64, whence int) (pos int64, err error) {
	if l.contents == nil {
		switch whence {
		case 0:
			pos = offset
		case 1:
			pos = l.pos + offset
		case 2:
			fi, err := l.fs.Stat(l.filename)
			if err != nil {
				return 0, fmt.Errorf("failed to get %q info: %s", l.filename, err.Error())
			}
			pos = fi.Size() + offset
		default:
			return 0, errors.New("invalid whence")
		}
		if pos < 0 {
			return 0, errors.New("negative position")
		}
	} else {
		pos, err = l.contents.Seek(offset, whence)
		if err != nil {
			return 0, err
		}
	}
	l.pos = pos
	return pos, nil
}

// WriteTo writes data to w until all of the LazyFileReader's file contents
// have been drained or an error occurs. If the file has already been read,
// it writes the cached contents to w instead of re-reading the file, but this
// method itself does not populate the cache.
func (l *LazyFileReader) WriteTo(w io.Writer) (n int64, err error) {
	if l.contents != nil {
		if _, err = l.contents.Seek(l.pos, 0); err != nil {
			return 0, errors.New("failed to set read position: " + err.Error())
		}
		n, err = l.contents.WriteTo(w)
		l.pos += n
		return n, err
	}
	f, err := l.fs.Open(l.filename)
	if err != nil {
		return 0, fmt.Errorf("failed to open %s to read content: %s", l.filename, err.Error())
	}
	defer f.Close()

	fi, err := f.Stat()
	if err != nil {
		return 0, fmt.Errorf("failed to get %q info: %s", l.filename, err.Error())
	}

	if l.pos >= fi.Size() {
		return 0, nil
	}

	return l.copyBuffer(w, f, nil)
}

// copyBuffer is the actual implementation of Copy and CopyBuffer.
// If buf is nil, one is allocated.
//
// Most of this function is copied from the Go stdlib 'io/io.go'.
func (l *LazyFileReader) copyBuffer(dst io.Writer, src io.Reader, buf []byte) (written int64, err error) {
	if buf == nil {
		buf = make([]byte, 32*1024)
	}
	for {
		nr, er := src.Read(buf)
		if nr > 0 {
			nw, ew := dst.Write(buf[0:nr])
			if nw > 0 {
				l.pos += int64(nw)
				written += int64(nw)
			}
			if ew != nil {
				err = ew
				break
			}
			if nr != nw {
				err = io.ErrShortWrite
				break
			}
		}
		if er == io.EOF {
			break
		}
		if er != nil {
			err = er
			break
		}
	}
	return written, err
}
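With the move to reader-based sources, the commit deletes LazyFileReader outright. For reference, the type was used roughly like this (a sketch against the API above; the path is illustrative):

```go
fs := afero.NewOsFs()

// Construction only verifies that the file can be opened; no bytes are read yet.
rd, err := NewLazyFileReader(fs, "config.toml")
if err != nil {
	log.Fatal(err)
}

// WriteTo streams straight from disk and does not populate the cache.
if _, err := rd.WriteTo(os.Stdout); err != nil {
	log.Fatal(err)
}

// Read does cache, so after a rewind the same bytes come from memory.
if _, err := rd.Seek(0, 0); err != nil {
	log.Fatal(err)
}
buf := make([]byte, 1024)
n, _ := rd.Read(buf)
fmt.Printf("read %d bytes\n", n)
```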

source/lazy_file_reader_test.go (deleted file, 236 lines)

@@ -1,236 +0,0 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"bytes"
	"io"
	"os"
	"testing"

	"github.com/spf13/afero"
)

func TestNewLazyFileReader(t *testing.T) {
	fs := afero.NewOsFs()
	filename := "itdoesnotexistfile"
	_, err := NewLazyFileReader(fs, filename)
	if err == nil {
		t.Errorf("NewLazyFileReader %s: error expected but no error is returned", filename)
	}

	filename = "lazy_file_reader_test.go"
	_, err = NewLazyFileReader(fs, filename)
	if err != nil {
		t.Errorf("NewLazyFileReader %s: %v", filename, err)
	}
}

func TestFilename(t *testing.T) {
	fs := afero.NewOsFs()
	filename := "lazy_file_reader_test.go"
	rd, err := NewLazyFileReader(fs, filename)
	if err != nil {
		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
	}
	if rd.Filename() != filename {
		t.Errorf("Filename: expected filename %q, got %q", filename, rd.Filename())
	}
}

func TestRead(t *testing.T) {
	fs := afero.NewOsFs()
	filename := "lazy_file_reader_test.go"
	fi, err := fs.Stat(filename)
	if err != nil {
		t.Fatalf("os.Stat: %v", err)
	}

	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		t.Fatalf("afero.ReadFile: %v", err)
	}

	rd, err := NewLazyFileReader(fs, filename)
	if err != nil {
		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
	}

	tst := func(testcase string) {
		p := make([]byte, fi.Size())
		n, err := rd.Read(p)
		if err != nil {
			t.Fatalf("Read %s case: %v", testcase, err)
		}
		if int64(n) != fi.Size() {
			t.Errorf("Read %s case: read bytes length expected %d, got %d", testcase, fi.Size(), n)
		}
		if !bytes.Equal(b, p) {
			t.Errorf("Read %s case: read bytes are different from expected", testcase)
		}
	}
	tst("No cache")
	_, err = rd.Seek(0, 0)
	if err != nil {
		t.Fatalf("Seek: %v", err)
	}
	tst("Cache")
}

func TestSeek(t *testing.T) {
	type testcase struct {
		seek     int
		offset   int64
		length   int
		moveto   int64
		expected []byte
	}
	fs := afero.NewOsFs()
	filename := "lazy_file_reader_test.go"
	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		t.Fatalf("afero.ReadFile: %v", err)
	}

	// no cache case
	for i, this := range []testcase{
		{seek: os.SEEK_SET, offset: 0, length: 10, moveto: 0, expected: b[:10]},
		{seek: os.SEEK_SET, offset: 5, length: 10, moveto: 5, expected: b[5:15]},
		{seek: os.SEEK_CUR, offset: 5, length: 10, moveto: 5, expected: b[5:15]}, // current pos = 0
		{seek: os.SEEK_END, offset: -1, length: 1, moveto: int64(len(b) - 1), expected: b[len(b)-1:]},
		{seek: 3, expected: nil},
		{seek: os.SEEK_SET, offset: -1, expected: nil},
	} {
		rd, err := NewLazyFileReader(fs, filename)
		if err != nil {
			t.Errorf("[%d] NewLazyFileReader %s: %v", i, filename, err)
			continue
		}

		pos, err := rd.Seek(this.offset, this.seek)
		if this.expected == nil {
			if err == nil {
				t.Errorf("[%d] Seek didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Errorf("[%d] Seek failed unexpectedly: %v", i, err)
				continue
			}
			if pos != this.moveto {
				t.Errorf("[%d] Seek failed to move the pointer: got %d, expected: %d", i, pos, this.moveto)
			}

			buf := make([]byte, this.length)
			n, err := rd.Read(buf)
			if err != nil {
				t.Errorf("[%d] Read failed unexpectedly: %v", i, err)
			}
			if !bytes.Equal(this.expected, buf[:n]) {
				t.Errorf("[%d] Seek and Read got %q but expected %q", i, buf[:n], this.expected)
			}
		}
	}

	// cache case
	rd, err := NewLazyFileReader(fs, filename)
	if err != nil {
		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
	}
	dummy := make([]byte, len(b))
	_, err = rd.Read(dummy)
	if err != nil {
		t.Fatalf("Read failed unexpectedly: %v", err)
	}

	for i, this := range []testcase{
		{seek: os.SEEK_SET, offset: 0, length: 10, moveto: 0, expected: b[:10]},
		{seek: os.SEEK_SET, offset: 5, length: 10, moveto: 5, expected: b[5:15]},
		{seek: os.SEEK_CUR, offset: 1, length: 10, moveto: 16, expected: b[16:26]}, // current pos = 15
		{seek: os.SEEK_END, offset: -1, length: 1, moveto: int64(len(b) - 1), expected: b[len(b)-1:]},
		{seek: 3, expected: nil},
		{seek: os.SEEK_SET, offset: -1, expected: nil},
	} {
		pos, err := rd.Seek(this.offset, this.seek)
		if this.expected == nil {
			if err == nil {
				t.Errorf("[%d] Seek didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Errorf("[%d] Seek failed unexpectedly: %v", i, err)
				continue
			}
			if pos != this.moveto {
				t.Errorf("[%d] Seek failed to move the pointer: got %d, expected: %d", i, pos, this.moveto)
			}

			buf := make([]byte, this.length)
			n, err := rd.Read(buf)
			if err != nil {
				t.Errorf("[%d] Read failed unexpectedly: %v", i, err)
			}
			if !bytes.Equal(this.expected, buf[:n]) {
				t.Errorf("[%d] Seek and Read got %q but expected %q", i, buf[:n], this.expected)
			}
		}
	}
}

func TestWriteTo(t *testing.T) {
	fs := afero.NewOsFs()
	filename := "lazy_file_reader_test.go"
	fi, err := fs.Stat(filename)
	if err != nil {
		t.Fatalf("os.Stat: %v", err)
	}

	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		t.Fatalf("afero.ReadFile: %v", err)
	}

	rd, err := NewLazyFileReader(fs, filename)
	if err != nil {
		t.Fatalf("NewLazyFileReader %s: %v", filename, err)
	}

	tst := func(testcase string, expectedSize int64, checkEqual bool) {
		buf := bytes.NewBuffer(make([]byte, 0, bytes.MinRead))
		n, err := rd.WriteTo(buf)
		if err != nil {
			t.Fatalf("WriteTo %s case: %v", testcase, err)
		}
		if n != expectedSize {
			t.Errorf("WriteTo %s case: written bytes length expected %d, got %d", testcase, expectedSize, n)
		}
		if checkEqual && !bytes.Equal(b, buf.Bytes()) {
			t.Errorf("WriteTo %s case: written bytes are different from expected", testcase)
		}
	}
	tst("No cache", fi.Size(), true)
	tst("No cache 2nd", 0, false)

	p := make([]byte, fi.Size())
	_, err = rd.Read(p)
	if err != nil && err != io.EOF {
		t.Fatalf("Read: %v", err)
	}
	_, err = rd.Seek(0, 0)
	if err != nil {
		t.Fatalf("Seek: %v", err)
	}

	tst("Cache", fi.Size(), true)
}

source/sourceSpec.go (new file, 117 lines)

@@ -0,0 +1,117 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package source

import (
	"os"
	"path/filepath"
	"regexp"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/cast"
)

// SourceSpec abstracts language-specific file creation.
// TODO(bep) rename to Spec
type SourceSpec struct {
	Cfg config.Provider
	Fs  *hugofs.Fs

	// This is set if the ignoreFiles config is set.
	ignoreFilesRe []*regexp.Regexp

	Languages              map[string]interface{}
	DefaultContentLanguage string
}

// NewSourceSpec initializes SourceSpec using languages from a given configuration.
func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) *SourceSpec {
	defaultLang := cfg.GetString("defaultContentLanguage")
	languages := cfg.GetStringMap("languages")

	if len(languages) == 0 {
		l := helpers.NewDefaultLanguage(cfg)
		languages[l.Lang] = l
		defaultLang = l.Lang
	}

	ignoreFiles := cast.ToStringSlice(cfg.Get("ignoreFiles"))
	var regexps []*regexp.Regexp
	if len(ignoreFiles) > 0 {
		for _, ignorePattern := range ignoreFiles {
			re, err := regexp.Compile(ignorePattern)
			if err != nil {
				helpers.DistinctErrorLog.Printf("Invalid regexp %q in ignoreFiles: %s", ignorePattern, err)
			} else {
				regexps = append(regexps, re)
			}
		}
	}

	return &SourceSpec{ignoreFilesRe: regexps, Cfg: cfg, Fs: fs, Languages: languages, DefaultContentLanguage: defaultLang}
}

// IgnoreFile reports whether filename should be ignored, either because of
// its base name (dotfiles, Emacs autosave '#' prefixes and '~' backup
// suffixes) or because it matches one of the configured ignoreFiles patterns.
func (s *SourceSpec) IgnoreFile(filename string) bool {
	base := filepath.Base(filename)

	if len(base) > 0 {
		first := base[0]
		last := base[len(base)-1]
		if first == '.' ||
			first == '#' ||
			last == '~' {
			return true
		}
	}

	if len(s.ignoreFilesRe) == 0 {
		return false
	}

	for _, re := range s.ignoreFilesRe {
		if re.MatchString(filename) {
			return true
		}
	}

	return false
}

// IsRegularSourceFile reports whether filename is a regular file (following
// symlinks) that can be used as a source file.
func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) {
	fi, err := helpers.LstatIfOs(s.Fs.Source, filename)
	if err != nil {
		return false, err
	}

	if fi.IsDir() {
		return false, nil
	}

	if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
		link, err := filepath.EvalSymlinks(filename)
		if err != nil {
			return false, err
		}
		fi, err = helpers.LstatIfOs(s.Fs.Source, link)
		if err != nil {
			return false, err
		}

		if fi.IsDir() {
			return false, nil
		}
	}

	return true, nil
}
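Putting the new spec together: below is a minimal sketch of configuring and querying a `SourceSpec`, using the same viper/hugofs wiring as the tests above. The ignore patterns and paths are made-up examples:

```go
v := viper.New()
v.Set("defaultContentLanguage", "en")
v.Set("ignoreFiles", []string{`\.foo$`, `\.boo$`})

ss := NewSourceSpec(v, hugofs.NewMem(v))

fmt.Println(ss.IgnoreFile("content/post/.hidden.md")) // true: dotfile
fmt.Println(ss.IgnoreFile("content/post/page.foo"))   // true: matches \.foo$
fmt.Println(ss.IgnoreFile("content/post/page.md"))    // false

// IsRegularSourceFile also stats the path (resolving symlinks),
// so directories and unresolvable links report false or an error.
ok, err := ss.IsRegularSourceFile("content/post/page.md")
fmt.Println(ok, err)
```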