Add Hugo Modules

This commit implements Hugo Modules.

This is a broad subject, but some highlights include:

* A new `module` configuration section where you can import almost anything. You can configure both your own file mounts and the file mounts of the modules you import. This is the new, recommended way of configuring what you earlier put in `configDir`, `staticDir`, etc. It also lets you mount folders from non-Hugo projects, e.g. the `SCSS` folder in the Bootstrap GitHub project (see the configuration sketch below the list).
* A module consists of a set of mounts to the seven standard component types in Hugo: `static`, `content`, `layouts`, `data`, `assets`, `i18n`, and `archetypes`. Yes, Theme Components can now include content, which should be very useful, especially in bigger multilingual projects.
* Modules not present in your local file cache are downloaded automatically, and even "hot replaced" while the server is running.
* Hugo Modules supports, and encourages, semver-versioned modules, and uses the Minimal Version Selection algorithm to resolve versions.
* A new set of CLI commands is provided to manage all of this: `hugo mod init`, `hugo mod get`, `hugo mod graph`, `hugo mod tidy`, and `hugo mod vendor`.

All of the above is backed by Go Modules.
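
To make the configuration section concrete, here is a minimal sketch of decoding a `module` section with one import and one file mount via the new `modules.DecodeConfig`. The exact TOML keys and the Bootstrap import are illustrative assumptions, not copied from this diff:

```go
package main

import (
	"fmt"
	"log"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/modules"
)

func main() {
	// A hypothetical site configuration that imports Bootstrap and mounts
	// its scss folder into Hugo's assets component.
	configStr := `
[module]
[[module.imports]]
path = "github.com/twbs/bootstrap"
[[module.imports.mounts]]
source = "scss"
target = "assets/scss"
`
	cfg, err := config.FromConfigString(configStr, "toml")
	if err != nil {
		log.Fatal(err)
	}

	// DecodeConfig only decodes the module configuration; fetching and
	// mounting is done by the modules.Client during a build.
	modConfig, err := modules.DecodeConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", modConfig)
}
```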

Fixes #5973
Fixes #5996
Fixes #6010
Fixes #5911
Fixes #5940
Fixes #6074
Fixes #6082
Fixes #6092
Bjørn Erik Pedersen 2019-05-03 09:16:58 +02:00
parent 47953148b6
commit 9f5a92078a
158 changed files with 9895 additions and 5433 deletions


@ -1,2 +1 @@
gobench -package=./hugolib -bench="BenchmarkSiteBuilding/YAML,num_langs=3,num_pages=5000,tags_per_page=5,shortcodes,render" -count=3 > 1.bench gobench -package=./hugolib -bench="BenchmarkSiteNew/Deep_content_tree"
benchcmp -best 0.bench 1.bench


@ -44,6 +44,9 @@ type Cache struct {
// 0 is effectively turning this cache off. // 0 is effectively turning this cache off.
maxAge time.Duration maxAge time.Duration
// When set, we just remove this entire root directory on expiration.
pruneAllRootDir string
nlocker *lockTracker nlocker *lockTracker
} }
@ -77,11 +80,12 @@ type ItemInfo struct {
} }
// NewCache creates a new file cache with the given filesystem and max age. // NewCache creates a new file cache with the given filesystem and max age.
func NewCache(fs afero.Fs, maxAge time.Duration) *Cache { func NewCache(fs afero.Fs, maxAge time.Duration, pruneAllRootDir string) *Cache {
return &Cache{ return &Cache{
Fs: fs, Fs: fs,
nlocker: &lockTracker{Locker: locker.NewLocker(), seen: make(map[string]struct{})}, nlocker: &lockTracker{Locker: locker.NewLocker(), seen: make(map[string]struct{})},
maxAge: maxAge, maxAge: maxAge,
pruneAllRootDir: pruneAllRootDir,
} }
} }
@ -307,9 +311,15 @@ func (f Caches) Get(name string) *Cache {
// NewCaches creates a new set of file caches from the given // NewCaches creates a new set of file caches from the given
// configuration. // configuration.
func NewCaches(p *helpers.PathSpec) (Caches, error) { func NewCaches(p *helpers.PathSpec) (Caches, error) {
dcfg, err := decodeConfig(p) var dcfg Configs
if err != nil { if c, ok := p.Cfg.Get("filecacheConfigs").(Configs); ok {
return nil, err dcfg = c
} else {
var err error
dcfg, err = DecodeConfig(p.Fs.Source, p.Cfg)
if err != nil {
return nil, err
}
} }
fs := p.Fs.Source fs := p.Fs.Source
@ -319,30 +329,25 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) {
var cfs afero.Fs var cfs afero.Fs
if v.isResourceDir { if v.isResourceDir {
cfs = p.BaseFs.Resources.Fs cfs = p.BaseFs.ResourcesCache
} else { } else {
cfs = fs cfs = fs
} }
var baseDir string baseDir := v.Dir
if !strings.HasPrefix(v.Dir, "_gen") {
// We do cache eviction (file removes) and since the user can set if err := cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
// his/hers own cache directory, we really want to make sure
// we do not delete any files that do not belong to this cache.
// We do add the cache name as the root, but this is an extra safe
// guard. We skip the files inside /resources/_gen/ because
// that would be breaking.
baseDir = filepath.Join(v.Dir, filecacheRootDirname, k)
} else {
baseDir = filepath.Join(v.Dir, k)
}
if err = cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
return nil, err return nil, err
} }
bfs := afero.NewBasePathFs(cfs, baseDir) bfs := afero.NewBasePathFs(cfs, baseDir)
m[k] = NewCache(bfs, v.MaxAge) var pruneAllRootDir string
if k == cacheKeyModules {
pruneAllRootDir = "pkg"
}
m[k] = NewCache(bfs, v.MaxAge, pruneAllRootDir)
} }
return m, nil return m, nil


@ -19,6 +19,8 @@ import (
"strings" "strings"
"time" "time"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/mitchellh/mapstructure" "github.com/mitchellh/mapstructure"
@ -32,7 +34,7 @@ const (
resourcesGenDir = ":resourceDir/_gen" resourcesGenDir = ":resourceDir/_gen"
) )
var defaultCacheConfig = cacheConfig{ var defaultCacheConfig = Config{
MaxAge: -1, // Never expire MaxAge: -1, // Never expire
Dir: ":cacheDir/:project", Dir: ":cacheDir/:project",
} }
@ -42,9 +44,20 @@ const (
cacheKeyGetCSV = "getcsv" cacheKeyGetCSV = "getcsv"
cacheKeyImages = "images" cacheKeyImages = "images"
cacheKeyAssets = "assets" cacheKeyAssets = "assets"
cacheKeyModules = "modules"
) )
var defaultCacheConfigs = map[string]cacheConfig{ type Configs map[string]Config
func (c Configs) CacheDirModules() string {
return c[cacheKeyModules].Dir
}
var defaultCacheConfigs = Configs{
cacheKeyModules: {
MaxAge: -1,
Dir: ":cacheDir/modules",
},
cacheKeyGetJSON: defaultCacheConfig, cacheKeyGetJSON: defaultCacheConfig,
cacheKeyGetCSV: defaultCacheConfig, cacheKeyGetCSV: defaultCacheConfig,
cacheKeyImages: { cacheKeyImages: {
@ -57,9 +70,7 @@ var defaultCacheConfigs = map[string]cacheConfig{
}, },
} }
type cachesConfig map[string]cacheConfig type Config struct {
type cacheConfig struct {
// Max age of cache entries in this cache. Any items older than this will // Max age of cache entries in this cache. Any items older than this will
// be removed and not returned from the cache. // be removed and not returned from the cache.
// a negative value means forever, 0 means cache is disabled. // a negative value means forever, 0 means cache is disabled.
@ -88,13 +99,18 @@ func (f Caches) ImageCache() *Cache {
return f[cacheKeyImages] return f[cacheKeyImages]
} }
// ModulesCache gets the file cache for Hugo Modules.
func (f Caches) ModulesCache() *Cache {
return f[cacheKeyModules]
}
// AssetsCache gets the file cache for assets (processed resources, SCSS etc.). // AssetsCache gets the file cache for assets (processed resources, SCSS etc.).
func (f Caches) AssetsCache() *Cache { func (f Caches) AssetsCache() *Cache {
return f[cacheKeyAssets] return f[cacheKeyAssets]
} }
func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) { func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
c := make(cachesConfig) c := make(Configs)
valid := make(map[string]bool) valid := make(map[string]bool)
// Add defaults // Add defaults
for k, v := range defaultCacheConfigs { for k, v := range defaultCacheConfigs {
@ -102,11 +118,9 @@ func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
valid[k] = true valid[k] = true
} }
cfg := p.Cfg
m := cfg.GetStringMap(cachesConfigKey) m := cfg.GetStringMap(cachesConfigKey)
_, isOsFs := p.Fs.Source.(*afero.OsFs) _, isOsFs := fs.(*afero.OsFs)
for k, v := range m { for k, v := range m {
cc := defaultCacheConfig cc := defaultCacheConfig
@ -148,7 +162,7 @@ func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
for i, part := range parts { for i, part := range parts {
if strings.HasPrefix(part, ":") { if strings.HasPrefix(part, ":") {
resolved, isResource, err := resolveDirPlaceholder(p, part) resolved, isResource, err := resolveDirPlaceholder(fs, cfg, part)
if err != nil { if err != nil {
return c, err return c, err
} }
@ -176,6 +190,18 @@ func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
} }
} }
if !strings.HasPrefix(v.Dir, "_gen") {
// We do cache eviction (file removes) and since the user can set
// his/hers own cache directory, we really want to make sure
// we do not delete any files that do not belong to this cache.
// We do add the cache name as the root, but this is an extra safe
// guard. We skip the files inside /resources/_gen/ because
// that would be breaking.
v.Dir = filepath.Join(v.Dir, filecacheRootDirname, k)
} else {
v.Dir = filepath.Join(v.Dir, k)
}
if disabled { if disabled {
v.MaxAge = 0 v.MaxAge = 0
} }
@ -187,15 +213,17 @@ func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
} }
// Resolves :resourceDir => /myproject/resources etc., :cacheDir => ... // Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
func resolveDirPlaceholder(p *helpers.PathSpec, placeholder string) (cacheDir string, isResource bool, err error) { func resolveDirPlaceholder(fs afero.Fs, cfg config.Provider, placeholder string) (cacheDir string, isResource bool, err error) {
workingDir := cfg.GetString("workingDir")
switch strings.ToLower(placeholder) { switch strings.ToLower(placeholder) {
case ":resourcedir": case ":resourcedir":
return "", true, nil return "", true, nil
case ":cachedir": case ":cachedir":
d, err := helpers.GetCacheDir(p.Fs.Source, p.Cfg) d, err := helpers.GetCacheDir(fs, cfg)
return d, false, err return d, false, err
case ":project": case ":project":
return filepath.Base(p.WorkingDir), false, nil return filepath.Base(workingDir), false, nil
} }
return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder) return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)


@ -20,10 +20,9 @@ import (
"testing" "testing"
"time" "time"
"github.com/gohugoio/hugo/helpers" "github.com/spf13/afero"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -57,22 +56,19 @@ dir = "/path/to/c3"
cfg, err := config.FromConfigString(configStr, "toml") cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err) assert.NoError(err)
fs := hugofs.NewMem(cfg) fs := afero.NewMemMapFs()
p, err := helpers.NewPathSpec(fs, cfg) decoded, err := DecodeConfig(fs, cfg)
assert.NoError(err) assert.NoError(err)
decoded, err := decodeConfig(p) assert.Equal(5, len(decoded))
assert.NoError(err)
assert.Equal(4, len(decoded))
c2 := decoded["getcsv"] c2 := decoded["getcsv"]
assert.Equal("11h0m0s", c2.MaxAge.String()) assert.Equal("11h0m0s", c2.MaxAge.String())
assert.Equal(filepath.FromSlash("/path/to/c2"), c2.Dir) assert.Equal(filepath.FromSlash("/path/to/c2/filecache/getcsv"), c2.Dir)
c3 := decoded["images"] c3 := decoded["images"]
assert.Equal(time.Duration(-1), c3.MaxAge) assert.Equal(time.Duration(-1), c3.MaxAge)
assert.Equal(filepath.FromSlash("/path/to/c3"), c3.Dir) assert.Equal(filepath.FromSlash("/path/to/c3/filecache/images"), c3.Dir)
} }
@ -105,14 +101,11 @@ dir = "/path/to/c3"
cfg, err := config.FromConfigString(configStr, "toml") cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err) assert.NoError(err)
fs := hugofs.NewMem(cfg) fs := afero.NewMemMapFs()
p, err := helpers.NewPathSpec(fs, cfg) decoded, err := DecodeConfig(fs, cfg)
assert.NoError(err) assert.NoError(err)
decoded, err := decodeConfig(p) assert.Equal(5, len(decoded))
assert.NoError(err)
assert.Equal(4, len(decoded))
for _, v := range decoded { for _, v := range decoded {
assert.Equal(time.Duration(0), v.MaxAge) assert.Equal(time.Duration(0), v.MaxAge)
@ -133,24 +126,22 @@ func TestDecodeConfigDefault(t *testing.T) {
cfg.Set("cacheDir", "/cache/thecache") cfg.Set("cacheDir", "/cache/thecache")
} }
fs := hugofs.NewMem(cfg) fs := afero.NewMemMapFs()
p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p) decoded, err := DecodeConfig(fs, cfg)
assert.NoError(err) assert.NoError(err)
assert.Equal(4, len(decoded)) assert.Equal(5, len(decoded))
imgConfig := decoded[cacheKeyImages] imgConfig := decoded[cacheKeyImages]
jsonConfig := decoded[cacheKeyGetJSON] jsonConfig := decoded[cacheKeyGetJSON]
if runtime.GOOS == "windows" { if runtime.GOOS == "windows" {
assert.Equal("_gen", imgConfig.Dir) assert.Equal(filepath.FromSlash("_gen/images"), imgConfig.Dir)
} else { } else {
assert.Equal("_gen", imgConfig.Dir) assert.Equal("_gen/images", imgConfig.Dir)
assert.Equal("/cache/thecache/hugoproject", jsonConfig.Dir) assert.Equal("/cache/thecache/hugoproject/filecache/getjson", jsonConfig.Dir)
} }
assert.True(imgConfig.isResourceDir) assert.True(imgConfig.isResourceDir)
@ -183,11 +174,9 @@ dir = "/"
cfg, err := config.FromConfigString(configStr, "toml") cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err) assert.NoError(err)
fs := hugofs.NewMem(cfg) fs := afero.NewMemMapFs()
p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
_, err = decodeConfig(p) _, err = DecodeConfig(fs, cfg)
assert.Error(err) assert.Error(err)
} }


@ -28,53 +28,100 @@ import (
func (c Caches) Prune() (int, error) { func (c Caches) Prune() (int, error) {
counter := 0 counter := 0
for k, cache := range c { for k, cache := range c {
err := afero.Walk(cache.Fs, "", func(name string, info os.FileInfo, err error) error {
if info == nil {
return nil
}
name = cleanID(name) count, err := cache.Prune(false)
if info.IsDir() {
f, err := cache.Fs.Open(name)
if err != nil {
// This cache dir may not exist.
return nil
}
defer f.Close()
_, err = f.Readdirnames(1)
if err == io.EOF {
// Empty dir.
return cache.Fs.Remove(name)
}
return nil
}
shouldRemove := cache.isExpired(info.ModTime())
if !shouldRemove && len(cache.nlocker.seen) > 0 {
// Remove it if it's not been touched/used in the last build.
_, seen := cache.nlocker.seen[name]
shouldRemove = !seen
}
if shouldRemove {
err := cache.Fs.Remove(name)
if err == nil {
counter++
}
return err
}
return nil
})
if err != nil { if err != nil {
return counter, errors.Wrapf(err, "failed to prune cache %q", k) return counter, errors.Wrapf(err, "failed to prune cache %q", k)
} }
counter += count
} }
return counter, nil return counter, nil
} }
// Prune removes expired and unused items from this cache.
// If force is set, everything will be removed not considering expiry time.
func (c *Cache) Prune(force bool) (int, error) {
if c.pruneAllRootDir != "" {
return c.pruneRootDir(force)
}
counter := 0
err := afero.Walk(c.Fs, "", func(name string, info os.FileInfo, err error) error {
if info == nil {
return nil
}
name = cleanID(name)
if info.IsDir() {
f, err := c.Fs.Open(name)
if err != nil {
// This cache dir may not exist.
return nil
}
defer f.Close()
_, err = f.Readdirnames(1)
if err == io.EOF {
// Empty dir.
return c.Fs.Remove(name)
}
return nil
}
shouldRemove := force || c.isExpired(info.ModTime())
if !shouldRemove && len(c.nlocker.seen) > 0 {
// Remove it if it's not been touched/used in the last build.
_, seen := c.nlocker.seen[name]
shouldRemove = !seen
}
if shouldRemove {
err := c.Fs.Remove(name)
if err == nil {
counter++
}
return err
}
return nil
})
return counter, err
}
func (c *Cache) pruneRootDir(force bool) (int, error) {
info, err := c.Fs.Stat(c.pruneAllRootDir)
if err != nil {
if os.IsNotExist(err) {
return 0, nil
}
return 0, err
}
if !force && !c.isExpired(info.ModTime()) {
return 0, nil
}
counter := 0
// Module cache has 0555 directories; make them writable in order to remove content.
afero.Walk(c.Fs, c.pruneAllRootDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return nil
}
if info.IsDir() {
counter++
c.Fs.Chmod(path, 0777)
}
return nil
})
return 1, c.Fs.RemoveAll(c.pruneAllRootDir)
}
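
For orientation, a hedged sketch of how the pruning API above could be driven from calling code; the forced variant mirrors what the new `hugo mod clean` command does via `ModulesCache().Prune(true)`. The wrapper function and its setup are assumptions for illustration:

```go
package example

import (
	"fmt"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/helpers"
)

// pruneFileCaches is a sketch, assuming an already-initialized *helpers.PathSpec.
func pruneFileCaches(p *helpers.PathSpec) error {
	caches, err := filecache.NewCaches(p)
	if err != nil {
		return err
	}

	// Remove expired and unused entries from every cache. The "modules"
	// cache is created with pruneAllRootDir = "pkg", so it removes its
	// entire root directory once it has expired.
	count, err := caches.Prune()
	if err != nil {
		return err
	}
	fmt.Println("pruned", count, "cache items")

	// Force-clean the module cache regardless of expiry.
	_, err = caches.ModulesCache().Prune(true)
	return err
}
```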


@ -18,9 +18,7 @@ import (
"testing" "testing"
"time" "time"
"github.com/gohugoio/hugo/config" "github.com/spf13/afero"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -54,14 +52,9 @@ maxAge = "200ms"
dir = ":resourceDir/_gen" dir = ":resourceDir/_gen"
` `
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} { for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
msg := fmt.Sprintf("cache: %s", name) msg := fmt.Sprintf("cache: %s", name)
fs := hugofs.NewMem(cfg) p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
caches, err := NewCaches(p) caches, err := NewCaches(p)
assert.NoError(err) assert.NoError(err)
cache := caches[name] cache := caches[name]


@ -25,6 +25,9 @@ import (
"testing" "testing"
"time" "time"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/modules"
"github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -83,12 +86,7 @@ dir = ":cacheDir/c"
configStr = replacer.Replace(configStr) configStr = replacer.Replace(configStr)
configStr = strings.Replace(configStr, "\\", winPathSep, -1) configStr = strings.Replace(configStr, "\\", winPathSep, -1)
cfg, err := config.FromConfigString(configStr, "toml") p := newPathsSpec(t, osfs, configStr)
assert.NoError(err)
fs := hugofs.NewFrom(osfs, cfg)
p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
caches, err := NewCaches(p) caches, err := NewCaches(p)
assert.NoError(err) assert.NoError(err)
@ -207,11 +205,7 @@ dir = "/cache/c"
` `
cfg, err := config.FromConfigString(configStr, "toml") p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
assert.NoError(err)
fs := hugofs.NewMem(cfg)
p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
caches, err := NewCaches(p) caches, err := NewCaches(p)
assert.NoError(err) assert.NoError(err)
@ -255,3 +249,51 @@ func TestCleanID(t *testing.T) {
assert.Equal(filepath.FromSlash("a/b/c.txt"), cleanID(filepath.FromSlash("/a/b//c.txt"))) assert.Equal(filepath.FromSlash("a/b/c.txt"), cleanID(filepath.FromSlash("/a/b//c.txt")))
assert.Equal(filepath.FromSlash("a/b/c.txt"), cleanID(filepath.FromSlash("a/b//c.txt"))) assert.Equal(filepath.FromSlash("a/b/c.txt"), cleanID(filepath.FromSlash("a/b//c.txt")))
} }
func initConfig(fs afero.Fs, cfg config.Provider) error {
if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
return err
}
modConfig, err := modules.DecodeConfig(cfg)
if err != nil {
return err
}
workingDir := cfg.GetString("workingDir")
themesDir := cfg.GetString("themesDir")
if !filepath.IsAbs(themesDir) {
themesDir = filepath.Join(workingDir, themesDir)
}
modulesClient := modules.NewClient(modules.ClientConfig{
Fs: fs,
WorkingDir: workingDir,
ThemesDir: themesDir,
ModuleConfig: modConfig,
IgnoreVendor: true,
})
moduleConfig, err := modulesClient.Collect()
if err != nil {
return err
}
if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[len(moduleConfig.ActiveModules)-1]); err != nil {
return err
}
cfg.Set("allModules", moduleConfig.ActiveModules)
return nil
}
func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec {
assert := require.New(t)
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
initConfig(fs, cfg)
p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg)
assert.NoError(err)
return p
}


@ -16,6 +16,11 @@ package commands
import ( import (
"bytes" "bytes"
"errors" "errors"
"sync"
"golang.org/x/sync/semaphore"
"github.com/gohugoio/hugo/modules"
"io/ioutil" "io/ioutil"
@ -27,8 +32,6 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings"
"sync"
"time" "time"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
@ -88,6 +91,8 @@ type commandeer struct {
configured bool configured bool
paused bool paused bool
fullRebuildSem *semaphore.Weighted
// Any error from the last build. // Any error from the last build.
buildErr error buildErr error
} }
@ -153,6 +158,7 @@ func newCommandeer(mustHaveConfigFile, running bool, h *hugoBuilderCommon, f fla
doWithCommandeer: doWithCommandeer, doWithCommandeer: doWithCommandeer,
visitedURLs: types.NewEvictingStringQueue(10), visitedURLs: types.NewEvictingStringQueue(10),
debounce: rebuildDebouncer, debounce: rebuildDebouncer,
fullRebuildSem: semaphore.NewWeighted(1),
// This will be replaced later, but we need something to log to before the configuration is read. // This will be replaced later, but we need something to log to before the configuration is read.
logger: loggers.NewLogger(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, running), logger: loggers.NewLogger(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, running),
} }
@ -282,6 +288,7 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
WorkingDir: dir, WorkingDir: dir,
Filename: c.h.cfgFile, Filename: c.h.cfgFile,
AbsConfigDir: c.h.getConfigDir(dir), AbsConfigDir: c.h.getConfigDir(dir),
Environ: os.Environ(),
Environment: environment}, Environment: environment},
doWithCommandeer, doWithCommandeer,
doWithConfig) doWithConfig)
@ -290,7 +297,7 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
if mustHaveConfigFile { if mustHaveConfigFile {
return err return err
} }
if err != hugolib.ErrNoConfigFile { if err != hugolib.ErrNoConfigFile && !modules.IsNotExist(err) {
return err return err
} }
@ -388,21 +395,6 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
cfg.Logger.INFO.Println("Using config file:", config.ConfigFileUsed()) cfg.Logger.INFO.Println("Using config file:", config.ConfigFileUsed())
themeDir := c.hugo.PathSpec.GetFirstThemeDir()
if themeDir != "" {
if _, err := sourceFs.Stat(themeDir); os.IsNotExist(err) {
return newSystemError("Unable to find theme Directory:", themeDir)
}
}
dir, themeVersionMismatch, minVersion := c.isThemeVsHugoVersionMismatch(sourceFs)
if themeVersionMismatch {
name := filepath.Base(dir)
cfg.Logger.ERROR.Printf("%s theme does not support Hugo version %s. Minimum version required is %s\n",
strings.ToUpper(name), hugo.CurrentVersion.ReleaseVersion(), minVersion)
}
return nil return nil
} }


@ -54,6 +54,7 @@ func (b *commandsBuilder) addAll() *commandsBuilder {
newImportCmd(), newImportCmd(),
newGenCmd(), newGenCmd(),
createReleaser(), createReleaser(),
b.newModCmd(),
) )
return b return b
@ -243,20 +244,26 @@ func (cc *hugoBuilderCommon) getEnvironment(isServer bool) string {
return hugo.EnvironmentProduction return hugo.EnvironmentProduction
} }
func (cc *hugoBuilderCommon) handleCommonBuilderFlags(cmd *cobra.Command) {
cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
cmd.PersistentFlags().StringVarP(&cc.environment, "environment", "e", "", "build environment")
cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory")
cmd.PersistentFlags().BoolP("ignoreVendor", "", false, "ignores any _vendor directory")
}
func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) { func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
cc.handleCommonBuilderFlags(cmd)
cmd.Flags().Bool("cleanDestinationDir", false, "remove files from destination not found in static directories") cmd.Flags().Bool("cleanDestinationDir", false, "remove files from destination not found in static directories")
cmd.Flags().BoolP("buildDrafts", "D", false, "include content marked as draft") cmd.Flags().BoolP("buildDrafts", "D", false, "include content marked as draft")
cmd.Flags().BoolP("buildFuture", "F", false, "include content with publishdate in the future") cmd.Flags().BoolP("buildFuture", "F", false, "include content with publishdate in the future")
cmd.Flags().BoolP("buildExpired", "E", false, "include expired content") cmd.Flags().BoolP("buildExpired", "E", false, "include expired content")
cmd.Flags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
cmd.Flags().StringVarP(&cc.environment, "environment", "e", "", "build environment")
cmd.Flags().StringP("contentDir", "c", "", "filesystem path to content directory") cmd.Flags().StringP("contentDir", "c", "", "filesystem path to content directory")
cmd.Flags().StringP("layoutDir", "l", "", "filesystem path to layout directory") cmd.Flags().StringP("layoutDir", "l", "", "filesystem path to layout directory")
cmd.Flags().StringP("cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/") cmd.Flags().StringP("cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/")
cmd.Flags().BoolP("ignoreCache", "", false, "ignores the cache directory") cmd.Flags().BoolP("ignoreCache", "", false, "ignores the cache directory")
cmd.Flags().StringP("destination", "d", "", "filesystem path to write files to") cmd.Flags().StringP("destination", "d", "", "filesystem path to write files to")
cmd.Flags().StringSliceP("theme", "t", []string{}, "themes to use (located in /themes/THEMENAME/)") cmd.Flags().StringSliceP("theme", "t", []string{}, "themes to use (located in /themes/THEMENAME/)")
cmd.Flags().StringP("themesDir", "", "", "filesystem path to themes directory")
cmd.Flags().StringVarP(&cc.baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. http://spf13.com/") cmd.Flags().StringVarP(&cc.baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. http://spf13.com/")
cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages") cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build") cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")


@ -30,7 +30,6 @@ import (
"github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/parser/pageparser" "github.com/gohugoio/hugo/parser/pageparser"
src "github.com/gohugoio/hugo/source"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
@ -152,8 +151,8 @@ func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, target
site.Log.INFO.Println("Attempting to convert", p.File().Filename()) site.Log.INFO.Println("Attempting to convert", p.File().Filename())
f, _ := p.File().(src.ReadableFile) f := p.File()
file, err := f.Open() file, err := f.FileInfo().Meta().Open()
if err != nil { if err != nil {
site.Log.ERROR.Println(errMsg) site.Log.ERROR.Println(errMsg)
file.Close() file.Close()


@ -16,19 +16,18 @@
package commands package commands
import ( import (
"context"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os/signal" "os/signal"
"runtime/pprof" "runtime/pprof"
"runtime/trace" "runtime/trace"
"sort"
"sync/atomic" "sync/atomic"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/common/hugo"
"github.com/pkg/errors" "github.com/pkg/errors"
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
@ -49,7 +48,6 @@ import (
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/parser/metadecoders"
flag "github.com/spf13/pflag" flag "github.com/spf13/pflag"
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
@ -196,6 +194,7 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
"forceSyncStatic", "forceSyncStatic",
"noTimes", "noTimes",
"noChmod", "noChmod",
"ignoreVendor",
"templateMetrics", "templateMetrics",
"templateMetricsHints", "templateMetricsHints",
@ -291,6 +290,7 @@ func ifTerminal(s string) string {
} }
func (c *commandeer) fullBuild() error { func (c *commandeer) fullBuild() error {
var ( var (
g errgroup.Group g errgroup.Group
langCount map[string]uint64 langCount map[string]uint64
@ -309,13 +309,9 @@ func (c *commandeer) fullBuild() error {
cnt, err := c.copyStatic() cnt, err := c.copyStatic()
if err != nil { if err != nil {
if !os.IsNotExist(err) { return errors.Wrap(err, "Error copying static files")
return errors.Wrap(err, "Error copying static files")
}
c.logger.INFO.Println("No Static directory found")
} }
langCount = cnt langCount = cnt
langCount = cnt
return nil return nil
} }
buildSitesFunc := func() error { buildSitesFunc := func() error {
@ -503,7 +499,11 @@ func (c *commandeer) build() error {
if err != nil { if err != nil {
return err return err
} }
c.logger.FEEDBACK.Println("Watching for changes in", c.hugo.PathSpec.AbsPathify(c.Cfg.GetString("contentDir")))
baseWatchDir := c.Cfg.GetString("workingDir")
rootWatchDirs := getRootWatchDirsStr(baseWatchDir, watchDirs)
c.logger.FEEDBACK.Printf("Watching for changes in %s%s{%s}\n", baseWatchDir, helpers.FilePathSeparator, rootWatchDirs)
c.logger.FEEDBACK.Println("Press Ctrl+C to stop") c.logger.FEEDBACK.Println("Press Ctrl+C to stop")
watcher, err := c.newWatcher(watchDirs...) watcher, err := c.newWatcher(watchDirs...)
checkErr(c.Logger, err) checkErr(c.Logger, err)
@ -547,7 +547,11 @@ func (c *commandeer) serverBuild() error {
} }
func (c *commandeer) copyStatic() (map[string]uint64, error) { func (c *commandeer) copyStatic() (map[string]uint64, error) {
return c.doWithPublishDirs(c.copyStaticTo) m, err := c.doWithPublishDirs(c.copyStaticTo)
if err == nil || os.IsNotExist(err) {
return m, nil
}
return m, err
} }
func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesystem) (uint64, error)) (map[string]uint64, error) { func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesystem) (uint64, error)) (map[string]uint64, error) {
@ -566,6 +570,7 @@ func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesy
if err != nil { if err != nil {
return langCount, err return langCount, err
} }
if lang == "" { if lang == "" {
// Not multihost // Not multihost
for _, l := range c.languages { for _, l := range c.languages {
@ -594,6 +599,16 @@ func (fs *countingStatFs) Stat(name string) (os.FileInfo, error) {
return f, err return f, err
} }
func chmodFilter(dst, src os.FileInfo) bool {
// Hugo publishes data from multiple sources, potentially
// with overlapping directory structures. We cannot sync permissions
// for directories as that would mean that we might end up with write-protected
// directories inside /public.
// One example of this would be syncing from the Go Module cache,
// which has 0555 directories.
return src.IsDir()
}
func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) { func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
publishDir := c.hugo.PathSpec.PublishDir publishDir := c.hugo.PathSpec.PublishDir
// If root, remove the second '/' // If root, remove the second '/'
@ -610,6 +625,7 @@ func (c *commandeer) copyStaticTo(sourceFs *filesystems.SourceFilesystem) (uint6
syncer := fsync.NewSyncer() syncer := fsync.NewSyncer()
syncer.NoTimes = c.Cfg.GetBool("noTimes") syncer.NoTimes = c.Cfg.GetBool("noTimes")
syncer.NoChmod = c.Cfg.GetBool("noChmod") syncer.NoChmod = c.Cfg.GetBool("noChmod")
syncer.ChmodFilter = chmodFilter
syncer.SrcFs = fs syncer.SrcFs = fs
syncer.DestFs = c.Fs.Destination syncer.DestFs = c.Fs.Destination
// Now that we are using a unionFs for the static directories // Now that we are using a unionFs for the static directories
@ -652,120 +668,39 @@ func (c *commandeer) timeTrack(start time.Time, name string) {
// getDirList provides NewWatcher() with a list of directories to watch for changes. // getDirList provides NewWatcher() with a list of directories to watch for changes.
func (c *commandeer) getDirList() ([]string, error) { func (c *commandeer) getDirList() ([]string, error) {
var a []string var dirnames []string
// To handle nested symlinked content dirs walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
var seen = make(map[string]bool) if err != nil {
var nested []string c.logger.ERROR.Println("walker: ", err)
newWalker := func(allowSymbolicDirs bool) func(path string, fi os.FileInfo, err error) error {
return func(path string, fi os.FileInfo, err error) error {
if err != nil {
if os.IsNotExist(err) {
return nil
}
c.logger.ERROR.Println("Walker: ", err)
return nil
}
// Skip .git directories.
// Related to https://github.com/gohugoio/hugo/issues/3468.
if fi.Name() == ".git" {
return nil
}
if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
link, err := filepath.EvalSymlinks(path)
if err != nil {
c.logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
return nil
}
linkfi, err := helpers.LstatIfPossible(c.Fs.Source, link)
if err != nil {
c.logger.ERROR.Printf("Cannot stat %q: %s", link, err)
return nil
}
if !allowSymbolicDirs && !linkfi.Mode().IsRegular() {
c.logger.ERROR.Printf("Symbolic links for directories not supported, skipping %q", path)
return nil
}
if allowSymbolicDirs && linkfi.IsDir() {
// afero.Walk will not walk symbolic links, so wee need to do it.
if !seen[path] {
seen[path] = true
nested = append(nested, path)
}
return nil
}
fi = linkfi
}
if fi.IsDir() {
if fi.Name() == ".git" ||
fi.Name() == "node_modules" || fi.Name() == "bower_components" {
return filepath.SkipDir
}
a = append(a, path)
}
return nil return nil
} }
if fi.IsDir() {
if fi.Name() == ".git" ||
fi.Name() == "node_modules" || fi.Name() == "bower_components" {
return filepath.SkipDir
}
dirnames = append(dirnames, fi.Meta().Filename())
}
return nil
} }
symLinkWalker := newWalker(true) watchDirs := c.hugo.PathSpec.BaseFs.WatchDirs()
regularWalker := newWalker(false) for _, watchDir := range watchDirs {
// SymbolicWalk will log anny ERRORs w := hugofs.NewWalkway(hugofs.WalkwayConfig{Logger: c.logger, Info: watchDir, WalkFn: walkFn})
// Also note that the Dirnames fetched below will contain any relevant theme if err := w.Walk(); err != nil {
// directories. c.logger.ERROR.Println("walker: ", err)
for _, contentDir := range c.hugo.PathSpec.BaseFs.Content.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, contentDir, symLinkWalker)
}
for _, staticDir := range c.hugo.PathSpec.BaseFs.Data.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
}
for _, staticDir := range c.hugo.PathSpec.BaseFs.I18n.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
}
for _, staticDir := range c.hugo.PathSpec.BaseFs.Layouts.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
}
for _, staticFilesystem := range c.hugo.PathSpec.BaseFs.Static {
for _, staticDir := range staticFilesystem.Dirnames {
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
} }
} }
for _, assetDir := range c.hugo.PathSpec.BaseFs.Assets.Dirnames { dirnames = helpers.UniqueStringsSorted(dirnames)
_ = helpers.SymbolicWalk(c.Fs.Source, assetDir, regularWalker)
}
if len(nested) > 0 { return dirnames, nil
for {
toWalk := nested
nested = nested[:0]
for _, d := range toWalk {
_ = helpers.SymbolicWalk(c.Fs.Source, d, symLinkWalker)
}
if len(nested) == 0 {
break
}
}
}
a = helpers.UniqueStrings(a)
sort.Strings(a)
return a, nil
} }
func (c *commandeer) buildSites() (err error) { func (c *commandeer) buildSites() (err error) {
@ -812,26 +747,60 @@ func (c *commandeer) partialReRender(urls ...string) error {
return c.hugo.Build(hugolib.BuildCfg{RecentlyVisited: visited, PartialReRender: true}) return c.hugo.Build(hugolib.BuildCfg{RecentlyVisited: visited, PartialReRender: true})
} }
func (c *commandeer) fullRebuild() { func (c *commandeer) fullRebuild(changeType string) {
c.commandeerHugoState = &commandeerHugoState{} if changeType == configChangeGoMod {
err := c.loadConfig(true, true) // go.mod may be changed during the build itself, and
if err != nil { // we really want to prevent superfluous builds.
// Set the processing on pause until the state is recovered. if !c.fullRebuildSem.TryAcquire(1) {
c.paused = true return
c.handleBuildErr(err, "Failed to reload config")
} else {
c.paused = false
}
if !c.paused {
err := c.buildSites()
if err != nil {
c.logger.ERROR.Println(err)
} else if !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload") {
livereload.ForceRefresh()
} }
c.fullRebuildSem.Release(1)
} }
c.fullRebuildSem.Acquire(context.Background(), 1)
go func() {
defer c.fullRebuildSem.Release(1)
c.printChangeDetected(changeType)
defer func() {
// Allow any file system events to arrive back.
// This will block any rebuild on config changes for the
// duration of the sleep.
time.Sleep(2 * time.Second)
}()
defer c.timeTrack(time.Now(), "Total")
c.commandeerHugoState = &commandeerHugoState{}
err := c.loadConfig(true, true)
if err != nil {
// Set the processing on pause until the state is recovered.
c.paused = true
c.handleBuildErr(err, "Failed to reload config")
} else {
c.paused = false
}
if !c.paused {
_, err := c.copyStatic()
if err != nil {
c.logger.ERROR.Println(err)
return
}
err = c.buildSites()
if err != nil {
c.logger.ERROR.Println(err)
} else if !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload") {
livereload.ForceRefresh()
}
}
}()
} }
// newWatcher creates a new watcher to watch filesystem events. // newWatcher creates a new watcher to watch filesystem events.
@ -886,26 +855,53 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
return watcher, nil return watcher, nil
} }
func (c *commandeer) printChangeDetected(typ string) {
msg := "\nChange"
if typ != "" {
msg += " of " + typ
}
msg += " detected, rebuilding site."
c.logger.FEEDBACK.Println(msg)
const layout = "2006-01-02 15:04:05.000 -0700"
c.logger.FEEDBACK.Println(time.Now().Format(layout))
}
const (
configChangeConfig = "config file"
configChangeGoMod = "go.mod file"
)
func (c *commandeer) handleEvents(watcher *watcher.Batcher, func (c *commandeer) handleEvents(watcher *watcher.Batcher,
staticSyncer *staticSyncer, staticSyncer *staticSyncer,
evs []fsnotify.Event, evs []fsnotify.Event,
configSet map[string]bool) { configSet map[string]bool) {
var isHandled bool
for _, ev := range evs { for _, ev := range evs {
isConfig := configSet[ev.Name] isConfig := configSet[ev.Name]
configChangeType := configChangeConfig
if isConfig {
if strings.Contains(ev.Name, "go.mod") {
configChangeType = configChangeGoMod
}
}
if !isConfig { if !isConfig {
// It may be one of the /config folders // It may be one of the /config folders
dirname := filepath.Dir(ev.Name) dirname := filepath.Dir(ev.Name)
if dirname != "." && configSet[dirname] { if dirname != "." && configSet[dirname] {
isConfig = true isConfig = true
} }
} }
if isConfig { if isConfig {
isHandled = true
if ev.Op&fsnotify.Chmod == fsnotify.Chmod { if ev.Op&fsnotify.Chmod == fsnotify.Chmod {
continue continue
} }
if ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename { if ev.Op&fsnotify.Remove == fsnotify.Remove || ev.Op&fsnotify.Rename == fsnotify.Rename {
for _, configFile := range c.configFiles { for _, configFile := range c.configFiles {
counter := 0 counter := 0
@ -917,13 +913,20 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
time.Sleep(100 * time.Millisecond) time.Sleep(100 * time.Millisecond)
} }
} }
// A write event will follow.
continue
} }
// Config file(s) changed. Need full rebuild. // Config file(s) changed. Need full rebuild.
c.fullRebuild() c.fullRebuild(configChangeType)
break
} }
} }
if isHandled {
return
}
if c.paused { if c.paused {
// Wait for the server to get into a consistent state before // Wait for the server to get into a consistent state before
// we continue with processing. // we continue with processing.
@ -933,7 +936,9 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
if len(evs) > 50 { if len(evs) > 50 {
// This is probably a mass edit of the content dir. // This is probably a mass edit of the content dir.
// Schedule a full rebuild for when it slows down. // Schedule a full rebuild for when it slows down.
c.debounce(c.fullRebuild) c.debounce(func() {
c.fullRebuild("")
})
return return
} }
@ -1015,7 +1020,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
continue continue
} }
walkAdder := func(path string, f os.FileInfo, err error) error { walkAdder := func(path string, f hugofs.FileMetaInfo, err error) error {
if f.IsDir() { if f.IsDir() {
c.logger.FEEDBACK.Println("adding created directory to watchlist", path) c.logger.FEEDBACK.Println("adding created directory to watchlist", path)
if err := watcher.Add(path); err != nil { if err := watcher.Add(path); err != nil {
@ -1046,9 +1051,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
} }
if len(staticEvents) > 0 { if len(staticEvents) > 0 {
c.logger.FEEDBACK.Println("\nStatic file changes detected") c.printChangeDetected("Static files")
const layout = "2006-01-02 15:04:05.000 -0700"
c.logger.FEEDBACK.Println(time.Now().Format(layout))
if c.Cfg.GetBool("forceSyncStatic") { if c.Cfg.GetBool("forceSyncStatic") {
c.logger.FEEDBACK.Printf("Syncing all static files\n") c.logger.FEEDBACK.Printf("Syncing all static files\n")
@ -1087,10 +1090,7 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
doLiveReload := !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload") doLiveReload := !c.h.buildWatch && !c.Cfg.GetBool("disableLiveReload")
onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents) onePageName := pickOneWriteOrCreatePath(partitionedEvents.ContentEvents)
c.logger.FEEDBACK.Println("\nChange detected, rebuilding site") c.printChangeDetected("")
const layout = "2006-01-02 15:04:05.000 -0700"
c.logger.FEEDBACK.Println(time.Now().Format(layout))
c.changeDetector.PrepareNew() c.changeDetector.PrepareNew()
if err := c.rebuildSites(dynamicEvents); err != nil { if err := c.rebuildSites(dynamicEvents); err != nil {
c.handleBuildErr(err, "Rebuild failed") c.handleBuildErr(err, "Rebuild failed")
@ -1167,41 +1167,3 @@ func pickOneWriteOrCreatePath(events []fsnotify.Event) string {
return name return name
} }
// isThemeVsHugoVersionMismatch returns whether the current Hugo version is
// less than any of the themes' min_version.
func (c *commandeer) isThemeVsHugoVersionMismatch(fs afero.Fs) (dir string, mismatch bool, requiredMinVersion string) {
if !c.hugo.PathSpec.ThemeSet() {
return
}
for _, absThemeDir := range c.hugo.BaseFs.AbsThemeDirs {
path := filepath.Join(absThemeDir, "theme.toml")
exists, err := helpers.Exists(path, fs)
if err != nil || !exists {
continue
}
b, err := afero.ReadFile(fs, path)
if err != nil {
continue
}
tomlMeta, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.TOML)
if err != nil {
continue
}
if minVersion, ok := tomlMeta["min_version"]; ok {
if hugo.CompareVersion(minVersion) > 0 {
return absThemeDir, true, fmt.Sprint(minVersion)
}
}
}
return
}


@ -17,7 +17,6 @@ import (
"bytes" "bytes"
"errors" "errors"
"fmt" "fmt"
"io"
"io/ioutil" "io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
@ -27,6 +26,8 @@ import (
"time" "time"
"unicode" "unicode"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/parser/metadecoders" "github.com/gohugoio/hugo/parser/metadecoders"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -113,7 +114,7 @@ func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
jww.FEEDBACK.Println("Importing...") jww.FEEDBACK.Println("Importing...")
fileCount := 0 fileCount := 0
callback := func(path string, fi os.FileInfo, err error) error { callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
if err != nil { if err != nil {
return err return err
} }
@ -302,66 +303,10 @@ func (i *importCmd) createConfigFromJekyll(fs afero.Fs, inpath string, kind meta
return helpers.WriteToDisk(filepath.Join(inpath, "config."+string(kind)), &buf, fs) return helpers.WriteToDisk(filepath.Join(inpath, "config."+string(kind)), &buf, fs)
} }
func copyFile(source string, dest string) error {
sf, err := os.Open(source)
if err != nil {
return err
}
defer sf.Close()
df, err := os.Create(dest)
if err != nil {
return err
}
defer df.Close()
_, err = io.Copy(df, sf)
if err == nil {
si, err := os.Stat(source)
if err != nil {
err = os.Chmod(dest, si.Mode())
if err != nil {
return err
}
}
}
return nil
}
func copyDir(source string, dest string) error {
fi, err := os.Stat(source)
if err != nil {
return err
}
if !fi.IsDir() {
return errors.New(source + " is not a directory")
}
err = os.MkdirAll(dest, fi.Mode())
if err != nil {
return err
}
entries, _ := ioutil.ReadDir(source)
for _, entry := range entries {
sfp := filepath.Join(source, entry.Name())
dfp := filepath.Join(dest, entry.Name())
if entry.IsDir() {
err = copyDir(sfp, dfp)
if err != nil {
jww.ERROR.Println(err)
}
} else {
err = copyFile(sfp, dfp)
if err != nil {
jww.ERROR.Println(err)
}
}
}
return nil
}
func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) { func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
fi, err := os.Stat(jekyllRoot) fs := hugofs.Os
fi, err := fs.Stat(jekyllRoot)
if err != nil { if err != nil {
return err return err
} }
@ -383,7 +328,7 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
if entry.IsDir() { if entry.IsDir() {
if entry.Name()[0] != '_' && entry.Name()[0] != '.' { if entry.Name()[0] != '_' && entry.Name()[0] != '.' {
if _, ok := jekyllPostDirs[entry.Name()]; !ok { if _, ok := jekyllPostDirs[entry.Name()]; !ok {
err = copyDir(sfp, dfp) err = hugio.CopyDir(fs, sfp, dfp, nil)
if err != nil { if err != nil {
jww.ERROR.Println(err) jww.ERROR.Println(err)
} }
@ -402,7 +347,7 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
} }
if !isExcept && entry.Name()[0] != '.' && entry.Name()[0] != '_' { if !isExcept && entry.Name()[0] != '.' && entry.Name()[0] != '_' {
err = copyFile(sfp, dfp) err = hugio.CopyFile(fs, sfp, dfp)
if err != nil { if err != nil {
jww.ERROR.Println(err) jww.ERROR.Println(err)
} }


@ -62,6 +62,7 @@ func TestListAll(t *testing.T) {
}, header) }, header)
record, err := r.Read() record, err := r.Read()
assert.NoError(err)
assert.Equal([]string{ assert.Equal([]string{
filepath.Join("content", "p1.md"), "", "P1", filepath.Join("content", "p1.md"), "", "P1",
"0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z",

commands/mod.go (new file, 189 lines)

@ -0,0 +1,189 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package commands
import (
"os"
"strings"
"github.com/gohugoio/hugo/modules"
"github.com/spf13/cobra"
)
var _ cmder = (*modCmd)(nil)
type modCmd struct {
*baseBuilderCmd
}
func (b *commandsBuilder) newModCmd() *modCmd {
c := &modCmd{}
const commonUsage = `
Note that Hugo will always start out by resolving the components defined in the site
configuration, provided by a _vendor directory (if no --ignoreVendor flag provided),
Go Modules, or a folder inside the themes directory, in that order.
See https://gohugo.io/hugo-modules/ for more information.
`
cmd := &cobra.Command{
Use: "mod",
Short: "Various Hugo Modules helpers.",
Long: `Various helpers to manage the modules in your project's dependency graph.
Most operations here require a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
` + commonUsage,
RunE: nil,
}
cmd.AddCommand(
&cobra.Command{
Use: "get",
DisableFlagParsing: true,
Short: "Resolves dependencies in your current Hugo Project.",
Long: `
Resolves dependencies in your current Hugo Project.
Some examples:
Install the latest version possible for a given module:
hugo mod get github.com/gohugoio/testshortcodes
Install a specific version:
hugo mod get github.com/gohugoio/testshortcodes@v0.3.0
Install the latest versions of all module dependencies:
hugo mod get -u
Run "go help get" for more information. All flags available for "go get" is also relevant here.
` + commonUsage,
RunE: func(cmd *cobra.Command, args []string) error {
return c.withModsClient(false, func(c *modules.Client) error {
// We currently just pass on the flags we get to Go and
// need to do the flag handling manually.
if len(args) == 1 && strings.Contains(args[0], "-h") {
return cmd.Help()
}
return c.Get(args...)
})
},
},
&cobra.Command{
Use: "graph",
Short: "Print a module dependency graph.",
Long: `Print a module dependency graph with information about module status (disabled, vendored).
Note that for vendored modules, that is the version listed and not the one from go.mod.
`,
RunE: func(cmd *cobra.Command, args []string) error {
return c.withModsClient(true, func(c *modules.Client) error {
return c.Graph(os.Stdout)
})
},
},
&cobra.Command{
Use: "init",
Short: "Initialize this project as a Hugo Module.",
Long: `Initialize this project as a Hugo Module.
It will try to guess the module path, but you may help by passing it as an argument, e.g:
hugo mod init github.com/gohugoio/testshortcodes
Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
inside a subfolder on GitHub, as one example.
`,
RunE: func(cmd *cobra.Command, args []string) error {
var path string
if len(args) >= 1 {
path = args[0]
}
return c.withModsClient(false, func(c *modules.Client) error {
return c.Init(path)
})
},
},
&cobra.Command{
Use: "vendor",
Short: "Vendor all module dependencies into the _vendor directory.",
Long: `Vendor all module dependencies into the _vendor directory.
If a module is vendored, that is where Hugo will look for its dependencies.
`,
RunE: func(cmd *cobra.Command, args []string) error {
return c.withModsClient(true, func(c *modules.Client) error {
return c.Vendor()
})
},
},
&cobra.Command{
Use: "tidy",
Short: "Remove unused entries in go.mod and go.sum.",
RunE: func(cmd *cobra.Command, args []string) error {
return c.withModsClient(true, func(c *modules.Client) error {
return c.Tidy()
})
},
},
&cobra.Command{
Use: "clean",
Short: "Delete the entire Hugo Module cache.",
Long: `Delete the entire Hugo Module cache.
Note that after you run this command, all of your dependencies will be re-downloaded next time you run "hugo".
Also note that if you configure a positive maxAge for the "modules" file cache, it will also be cleaned as part of "hugo --gc".
`,
RunE: func(cmd *cobra.Command, args []string) error {
com, err := c.initConfig(true)
if err != nil {
return err
}
_, err = com.hugo.FileCaches.ModulesCache().Prune(true)
return err
},
},
)
c.baseBuilderCmd = b.newBuilderCmd(cmd)
return c
}
func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {
com, err := c.initConfig(failOnMissingConfig)
if err != nil {
return err
}
return f(com.hugo.ModulesClient)
}
func (c *modCmd) initConfig(failOnNoConfig bool) (*commandeer, error) {
com, err := initializeConfig(failOnNoConfig, false, &c.hugoBuilderCommon, c, nil)
if err != nil {
return nil, err
}
return com, nil
}


@ -53,8 +53,6 @@ Ensure you run this within the root directory of your site.`,
cc := &newCmd{baseBuilderCmd: b.newBuilderCmd(cmd)} cc := &newCmd{baseBuilderCmd: b.newBuilderCmd(cmd)}
cmd.Flags().StringVarP(&cc.contentType, "kind", "k", "", "content type to create") cmd.Flags().StringVarP(&cc.contentType, "kind", "k", "", "content type to create")
cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
cmd.Flags().StringVar(&cc.contentEditor, "editor", "", "edit new content with this editor, if provided") cmd.Flags().StringVar(&cc.contentEditor, "editor", "", "edit new content with this editor, if provided")
cmd.AddCommand(newNewSiteCmd().getCommand()) cmd.AddCommand(newNewSiteCmd().getCommand())
@ -120,8 +118,8 @@ func newContentPathSection(h *hugolib.HugoSites, path string) (string, string) {
createpath := filepath.FromSlash(path) createpath := filepath.FromSlash(path)
if h != nil { if h != nil {
for _, s := range h.Sites { for _, dir := range h.BaseFs.Content.Dirs {
createpath = strings.TrimPrefix(createpath, s.PathSpec.ContentDir) createpath = strings.TrimPrefix(createpath, dir.Meta().Filename())
} }
} }


@ -256,15 +256,11 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
return err return err
} }
baseWatchDir := c.Cfg.GetString("workingDir") watchGroups := helpers.ExtractAndGroupRootPaths(watchDirs)
relWatchDirs := make([]string, len(watchDirs))
for i, dir := range watchDirs { for _, group := range watchGroups {
relWatchDirs[i], _ = helpers.GetRelativePath(dir, baseWatchDir) jww.FEEDBACK.Printf("Watching for changes in %s\n", group)
} }
rootWatchDirs := strings.Join(helpers.UniqueStrings(helpers.ExtractRootPaths(relWatchDirs)), ",")
jww.FEEDBACK.Printf("Watching for changes in %s%s{%s}\n", baseWatchDir, helpers.FilePathSeparator, rootWatchDirs)
watcher, err := c.newWatcher(watchDirs...) watcher, err := c.newWatcher(watchDirs...)
if err != nil { if err != nil {
@ -279,6 +275,15 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
} }
func getRootWatchDirsStr(baseDir string, watchDirs []string) string {
relWatchDirs := make([]string, len(watchDirs))
for i, dir := range watchDirs {
relWatchDirs[i], _ = helpers.GetRelativePath(dir, baseDir)
}
return strings.Join(helpers.UniqueStringsSorted(helpers.ExtractRootPaths(relWatchDirs)), ",")
}
type fileServer struct { type fileServer struct {
baseURLs []string baseURLs []string
roots []string roots []string


@ -53,6 +53,7 @@ func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
syncer := fsync.NewSyncer() syncer := fsync.NewSyncer()
syncer.NoTimes = c.Cfg.GetBool("noTimes") syncer.NoTimes = c.Cfg.GetBool("noTimes")
syncer.NoChmod = c.Cfg.GetBool("noChmod") syncer.NoChmod = c.Cfg.GetBool("noChmod")
syncer.ChmodFilter = chmodFilter
syncer.SrcFs = sourceFs.Fs syncer.SrcFs = sourceFs.Fs
syncer.DestFs = c.Fs.Destination syncer.DestFs = c.Fs.Destination


@ -19,6 +19,7 @@ import (
"fmt" "fmt"
"io" "io"
"os" "os"
"runtime/debug"
_errors "github.com/pkg/errors" _errors "github.com/pkg/errors"
) )
@ -46,6 +47,16 @@ func FprintStackTrace(w io.Writer, err error) {
} }
} }
// Recover is a helper function that can be used to capture panics.
// Put this at the top of a method/function that crashes in a template:
// defer herrors.Recover()
func Recover() {
if r := recover(); r != nil {
fmt.Println("stacktrace from panic: \n" + string(debug.Stack()))
}
}
// ErrFeatureNotAvailable denotes that a feature is unavailable. // ErrFeatureNotAvailable denotes that a feature is unavailable.
// //
// We will, at least to begin with, make some Hugo features (SCSS with libsass) optional, // We will, at least to begin with, make some Hugo features (SCSS with libsass) optional,

common/hugio/copy.go (new file, 90 lines)

@ -0,0 +1,90 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugio
import (
"io"
"io/ioutil"
"os"
"path/filepath"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
// CopyFile copies a file.
func CopyFile(fs afero.Fs, from, to string) error {
sf, err := os.Open(from)
if err != nil {
return err
}
defer sf.Close()
df, err := os.Create(to)
if err != nil {
return err
}
defer df.Close()
_, err = io.Copy(df, sf)
if err != nil {
return err
}
si, err := os.Stat(from)
if err == nil {
// Best effort: carry the source file's permissions over to the copy.
err = os.Chmod(to, si.Mode())
if err != nil {
return err
}
}
return nil
}
// CopyDir copies a directory. The optional shouldCopy filter is applied to directories only; files inside copied directories are always copied.
func CopyDir(fs afero.Fs, from, to string, shouldCopy func(filename string) bool) error {
fi, err := os.Stat(from)
if err != nil {
return err
}
if !fi.IsDir() {
return errors.Errorf("%q is not a directory", from)
}
err = fs.MkdirAll(to, 0777) // before umask
if err != nil {
return err
}
entries, err := ioutil.ReadDir(from)
if err != nil {
return err
}
for _, entry := range entries {
fromFilename := filepath.Join(from, entry.Name())
toFilename := filepath.Join(to, entry.Name())
if entry.IsDir() {
if shouldCopy != nil && !shouldCopy(fromFilename) {
continue
}
if err := CopyDir(fs, fromFilename, toFilename, shouldCopy); err != nil {
return err
}
} else {
if err := CopyFile(fs, fromFilename, toFilename); err != nil {
return err
}
}
}
return nil
}
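
A hedged usage sketch for the helper above. The directory names and the `node_modules` filter are made up for illustration, and since the source side is read via the `os` package, an OS-backed filesystem is assumed:

```go
package main

import (
	"log"
	"path/filepath"

	"github.com/gohugoio/hugo/common/hugio"
	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewOsFs()

	// Copy a theme tree into a backup folder, skipping any "node_modules"
	// subdirectory. Note that shouldCopy is only consulted for directories.
	err := hugio.CopyDir(fs, "themes/mytheme", "backup/mytheme", func(filename string) bool {
		return filepath.Base(filename) != "node_modules"
	})
	if err != nil {
		log.Fatal(err)
	}
}
```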

View file

@ -15,4 +15,4 @@
package hugo package hugo
var isExtended = true var IsExtended = true

View file

@ -15,4 +15,4 @@
package hugo package hugo
var isExtended = false var IsExtended = false

View file

@ -15,6 +15,7 @@ package hugo
import ( import (
"fmt" "fmt"
"strconv"
"runtime" "runtime"
"strings" "strings"
@ -133,7 +134,7 @@ func BuildVersionString() string {
if commitHash != "" { if commitHash != "" {
version += "-" + strings.ToUpper(commitHash) version += "-" + strings.ToUpper(commitHash)
} }
if isExtended { if IsExtended {
version += "/extended" version += "/extended"
} }
@ -235,3 +236,16 @@ func compareFloatVersions(version float32, v float32) int {
} }
return 1 return 1
} }
// GoMinorVersion returns the minor version of the Go compiler that was used to build this binary (e.g. 12 for go1.12.5).
func GoMinorVersion() int {
return goMinorVersion(runtime.Version())
}
func goMinorVersion(version string) int {
if strings.HasPrefix(version, "devel") {
return 9999 // magic
}
i, _ := strconv.Atoi(strings.Split(version, ".")[1])
return i
}
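
A small, hypothetical illustration of how the exported helper could be used. The import path and the surrounding check are assumptions, not part of this commit:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hugo"
)

func main() {
	// GoMinorVersion reports the minor Go version this binary was built with,
	// e.g. 12 for go1.12.5 (and 9999 for a devel toolchain).
	if hugo.GoMinorVersion() < 11 {
		fmt.Println("built with a pre-modules Go toolchain")
		return
	}
	fmt.Println("built with Go >= 1.11")
}
```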

View file

@ -77,3 +77,9 @@ func TestParseHugoVersion(t *testing.T) {
require.Equal(t, "0.25-DEV", MustParseVersion("0.25-DEV").String()) require.Equal(t, "0.25-DEV", MustParseVersion("0.25-DEV").String())
} }
func TestGoMinorVersion(t *testing.T) {
assert := require.New(t)
assert.Equal(12, goMinorVersion("go1.12.5"))
assert.True(GoMinorVersion() >= 11)
}

View file

@ -40,6 +40,7 @@ func init() {
type Logger struct { type Logger struct {
*jww.Notepad *jww.Notepad
ErrorCounter *jww.Counter ErrorCounter *jww.Counter
WarnCounter *jww.Counter
// This is only set in server mode. // This is only set in server mode.
errors *bytes.Buffer errors *bytes.Buffer
@ -143,9 +144,10 @@ func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {
func newLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle io.Writer, saveErrors bool) *Logger { func newLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle io.Writer, saveErrors bool) *Logger {
errorCounter := &jww.Counter{} errorCounter := &jww.Counter{}
warnCounter := &jww.Counter{}
outHandle, logHandle = getLogWriters(outHandle, logHandle) outHandle, logHandle = getLogWriters(outHandle, logHandle)
listeners := []jww.LogListener{jww.LogCounter(errorCounter, jww.LevelError)} listeners := []jww.LogListener{jww.LogCounter(errorCounter, jww.LevelError), jww.LogCounter(warnCounter, jww.LevelWarn)}
var errorBuff *bytes.Buffer var errorBuff *bytes.Buffer
if saveErrors { if saveErrors {
errorBuff = new(bytes.Buffer) errorBuff = new(bytes.Buffer)
@ -164,6 +166,7 @@ func newLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, logHandle
return &Logger{ return &Logger{
Notepad: jww.NewNotepad(stdoutThreshold, logThreshold, outHandle, logHandle, "", log.Ldate|log.Ltime, listeners...), Notepad: jww.NewNotepad(stdoutThreshold, logThreshold, outHandle, logHandle, "", log.Ldate|log.Ltime, listeners...),
ErrorCounter: errorCounter, ErrorCounter: errorCounter,
WarnCounter: warnCounter,
errors: errorBuff, errors: errorBuff,
} }
} }
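
A hedged sketch of what the new `WarnCounter` enables next to the existing `ErrorCounter`. The `common/loggers` import path, the wrapping function, and the decision to treat warnings as failures are illustrative assumptions:

```go
package build

import (
	"fmt"

	"github.com/gohugoio/hugo/common/loggers"
)

// checkBuildLog is a hypothetical post-build check, e.g. for a CI setup
// that wants warning-free builds.
func checkBuildLog(logger *loggers.Logger) error {
	if c := logger.ErrorCounter.Count(); c > 0 {
		return fmt.Errorf("build failed with %d error(s)", c)
	}
	if c := logger.WarnCounter.Count(); c > 0 {
		return fmt.Errorf("build produced %d warning(s)", c)
	}
	return nil
}
```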

96
common/maps/params.go Normal file
View file

@ -0,0 +1,96 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package maps
import (
"strings"
"github.com/pkg/errors"
"github.com/spf13/cast"
)
// GetNestedParam gets the first match of the keyStr in the candidates given.
// It will first try the exact match and then try to find it as a nested map value,
// using the given separator, e.g. "mymap.name".
// It assumes that all the maps given have lower cased keys.
func GetNestedParam(keyStr, separator string, candidates ...map[string]interface{}) (interface{}, error) {
keyStr = strings.ToLower(keyStr)
lookupFn := func(key string) interface{} {
for _, m := range candidates {
if v, ok := m[key]; ok {
return v
}
}
return nil
}
v, _, _, err := GetNestedParamFn(keyStr, separator, lookupFn)
return v, err
}
// GetNestedParamFn is the lookup-function variant of GetNestedParam. It returns the value found, the key it was found under and, for nested matches, the map owning that key.
func GetNestedParamFn(keyStr, separator string, lookupFn func(key string) interface{}) (interface{}, string, map[string]interface{}, error) {
result, _ := traverseDirectParams(keyStr, lookupFn)
if result != nil {
return result, keyStr, nil, nil
}
keySegments := strings.Split(keyStr, separator)
if len(keySegments) == 1 {
return nil, keyStr, nil, nil
}
return traverseNestedParams(keySegments, lookupFn)
}
func traverseDirectParams(keyStr string, lookupFn func(key string) interface{}) (interface{}, error) {
return lookupFn(keyStr), nil
}
func traverseNestedParams(keySegments []string, lookupFn func(key string) interface{}) (interface{}, string, map[string]interface{}, error) {
firstKey, rest := keySegments[0], keySegments[1:]
result := lookupFn(firstKey)
if result == nil || len(rest) == 0 {
return result, firstKey, nil, nil
}
switch m := result.(type) {
case map[string]interface{}:
v, key, owner := traverseParams(rest, m)
return v, key, owner, nil
default:
return nil, "", nil, errors.Errorf("unsupported Params type: %T", result)
}
}
func traverseParams(keys []string, m map[string]interface{}) (interface{}, string, map[string]interface{}) {
// Shift first element off.
firstKey, rest := keys[0], keys[1:]
result := m[firstKey]
// No point in continuing here.
if result == nil {
return result, "", nil
}
if len(rest) == 0 {
// That was the last key.
return result, firstKey, m
}
// That was not the last key.
return traverseParams(rest, cast.ToStringMap(result))
}
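
A hedged example of the lookup-function variant above (the simpler `GetNestedParam` helper has tests below). The params map, the separator, and the caller are assumptions made for illustration:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	// Keys are expected to be lower case.
	siteParams := map[string]interface{}{
		"author": map[string]interface{}{"name": "Jane"},
	}

	// Try "author.name" as a direct key first, then walk it as a nested map.
	v, key, owner, err := maps.GetNestedParamFn("author.name", ".", func(key string) interface{} {
		return siteParams[key]
	})
	if err != nil {
		panic(err)
	}

	fmt.Println(v)     // Jane
	fmt.Println(key)   // name
	fmt.Println(owner) // map[name:Jane]
}
```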

View file

@ -0,0 +1,45 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package maps
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestGetNestedParam(t *testing.T) {
m := map[string]interface{}{
"first": 1,
"with_underscore": 2,
"nested": map[string]interface{}{
"color": "blue",
},
}
assert := require.New(t)
must := func(keyStr, separator string, candidates ...map[string]interface{}) interface{} {
v, err := GetNestedParam(keyStr, separator, candidates...)
assert.NoError(err)
return v
}
assert.Equal(1, must("first", "_", m))
assert.Equal(1, must("First", "_", m))
assert.Equal(2, must("with_underscore", "_", m))
assert.Equal("blue", must("nested_color", "_", m))
}

View file

@ -120,8 +120,6 @@ func RenameKeys(m map[string]interface{}) {
func newViper() *viper.Viper { func newViper() *viper.Viper {
v := viper.New() v := viper.New()
v.AutomaticEnv()
v.SetEnvPrefix("hugo")
return v return v
} }

View file

@ -35,10 +35,14 @@ type Provider interface {
// we do not attempt to split it into fields. // we do not attempt to split it into fields.
func GetStringSlicePreserveString(cfg Provider, key string) []string { func GetStringSlicePreserveString(cfg Provider, key string) []string {
sd := cfg.Get(key) sd := cfg.Get(key)
if sds, ok := sd.(string); ok { return toStringSlicePreserveString(sd)
}
func toStringSlicePreserveString(v interface{}) []string {
if sds, ok := v.(string); ok {
return []string{sds} return []string{sds}
} }
return cast.ToStringSlice(sd) return cast.ToStringSlice(v)
} }
// SetBaseTestDefaults provides some common config defaults used in tests. // SetBaseTestDefaults provides some common config defaults used in tests.
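
A brief, hypothetical illustration of the behaviour the refactored helper preserves. The `disableKinds` key and the use of a bare viper instance as the `Provider` are assumptions for the sketch:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/config"
	"github.com/spf13/viper"
)

func main() {
	v := viper.New()

	// A single string is preserved as-is (not split into fields)...
	v.Set("disableKinds", "taxonomyTerm")
	fmt.Println(config.GetStringSlicePreserveString(v, "disableKinds")) // [taxonomyTerm]

	// ...while an actual slice is passed through as a string slice.
	v.Set("disableKinds", []string{"taxonomyTerm", "RSS"})
	fmt.Println(config.GetStringSlicePreserveString(v, "disableKinds")) // [taxonomyTerm RSS]
}
```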

View file

@ -17,6 +17,7 @@ import (
"os" "os"
"runtime" "runtime"
"strconv" "strconv"
"strings"
) )
// GetNumWorkerMultiplier returns the base value used to calculate the number // GetNumWorkerMultiplier returns the base value used to calculate the number
@ -31,3 +32,26 @@ func GetNumWorkerMultiplier() int {
} }
return runtime.NumCPU() return runtime.NumCPU()
} }
// SetEnvVars sets vars of the form key=value in the oldVars slice.
func SetEnvVars(oldVars *[]string, keyValues ...string) {
for i := 0; i < len(keyValues); i += 2 {
setEnvVar(oldVars, keyValues[i], keyValues[i+1])
}
}
func SplitEnvVar(v string) (string, string) {
// Split on the first "=" only so that values containing "=" are preserved.
parts := strings.SplitN(v, "=", 2)
return parts[0], parts[1]
}
func setEnvVar(vars *[]string, key, value string) {
for i := range *vars {
if strings.HasPrefix((*vars)[i], key+"=") {
(*vars)[i] = key + "=" + value
return
}
}
// New var.
*vars = append(*vars, key+"="+value)
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -11,29 +11,22 @@
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package hugofs package config
import ( import (
"os" "testing"
"github.com/spf13/afero" "github.com/stretchr/testify/require"
) )
var ( func TestSetEnvVars(t *testing.T) {
_ afero.Fs = (*noLstatFs)(nil) t.Parallel()
) assert := require.New(t)
vars := []string{"FOO=bar", "HUGO=cool", "BAR=foo"}
SetEnvVars(&vars, "HUGO", "rocking!", "NEW", "bar")
assert.Equal([]string{"FOO=bar", "HUGO=rocking!", "BAR=foo", "NEW=bar"}, vars)
type noLstatFs struct { key, val := SplitEnvVar("HUGO=rocks")
afero.Fs assert.Equal("HUGO", key)
} assert.Equal("rocks", val)
// NewNoLstatFs creates a new filesystem with no Lstat support.
func NewNoLstatFs(fs afero.Fs) afero.Fs {
return &noLstatFs{Fs: fs}
}
// LstatIfPossible always delegates to Stat.
func (fs *noLstatFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, err := fs.Stat(name)
return fi, false, err
} }

View file

@ -25,6 +25,8 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
@ -50,7 +52,10 @@ func NewContent(
if isDir { if isDir {
langFs := hugofs.NewLanguageFs(s.Language().Lang, sites.LanguageSet(), archetypeFs) langFs, err := hugofs.NewLanguageFs(sites.LanguageSet(), archetypeFs)
if err != nil {
return err
}
cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename) cm, err := mapArcheTypeDir(ps, langFs, archetypeFilename)
if err != nil { if err != nil {
@ -64,7 +69,7 @@ func NewContent(
} }
name := filepath.Base(targetPath) name := filepath.Base(targetPath)
return newContentFromDir(archetypeFilename, sites, archetypeFs, sourceFs, cm, name, contentPath) return newContentFromDir(archetypeFilename, sites, sourceFs, cm, name, contentPath)
} }
// Building the sites can be expensive, so only do it if really needed. // Building the sites can be expensive, so only do it if really needed.
@ -111,9 +116,9 @@ func NewContent(
return nil return nil
} }
func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.Site { func targetSite(sites *hugolib.HugoSites, fi hugofs.FileMetaInfo) *hugolib.Site {
for _, s := range sites.Sites { for _, s := range sites.Sites {
if fi.Lang() == s.Language().Lang { if fi.Meta().Lang() == s.Language().Lang {
return s return s
} }
} }
@ -123,13 +128,14 @@ func targetSite(sites *hugolib.HugoSites, fi *hugofs.LanguageFileInfo) *hugolib.
func newContentFromDir( func newContentFromDir(
archetypeDir string, archetypeDir string,
sites *hugolib.HugoSites, sites *hugolib.HugoSites,
sourceFs, targetFs afero.Fs, targetFs afero.Fs,
cm archetypeMap, name, targetPath string) error { cm archetypeMap, name, targetPath string) error {
for _, f := range cm.otherFiles { for _, f := range cm.otherFiles {
filename := f.Filename() meta := f.Meta()
filename := meta.Path()
// Just copy the file to destination. // Just copy the file to destination.
in, err := sourceFs.Open(filename) in, err := meta.Open()
if err != nil { if err != nil {
return errors.Wrap(err, "failed to open non-content file") return errors.Wrap(err, "failed to open non-content file")
} }
@ -156,7 +162,7 @@ func newContentFromDir(
} }
for _, f := range cm.contentFiles { for _, f := range cm.contentFiles {
filename := f.Filename() filename := f.Meta().Path()
s := targetSite(sites, f) s := targetSite(sites, f)
targetFilename := filepath.Join(targetPath, strings.TrimPrefix(filename, archetypeDir)) targetFilename := filepath.Join(targetPath, strings.TrimPrefix(filename, archetypeDir))
@ -177,9 +183,9 @@ func newContentFromDir(
type archetypeMap struct { type archetypeMap struct {
// These needs to be parsed and executed as Go templates. // These needs to be parsed and executed as Go templates.
contentFiles []*hugofs.LanguageFileInfo contentFiles []hugofs.FileMetaInfo
// These are just copied to destination. // These are just copied to destination.
otherFiles []*hugofs.LanguageFileInfo otherFiles []hugofs.FileMetaInfo
// If the templates needs a fully built site. This can potentially be // If the templates needs a fully built site. This can potentially be
// expensive, so only do when needed. // expensive, so only do when needed.
siteUsed bool siteUsed bool
@ -192,7 +198,7 @@ func mapArcheTypeDir(
var m archetypeMap var m archetypeMap
walkFn := func(filename string, fi os.FileInfo, err error) error { walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
if err != nil { if err != nil {
return err return err
@ -202,12 +208,12 @@ func mapArcheTypeDir(
return nil return nil
} }
fil := fi.(*hugofs.LanguageFileInfo) fil := fi.(hugofs.FileMetaInfo)
if hugolib.IsContentFile(filename) { if files.IsContentFile(path) {
m.contentFiles = append(m.contentFiles, fil) m.contentFiles = append(m.contentFiles, fil)
if !m.siteUsed { if !m.siteUsed {
m.siteUsed, err = usesSiteVar(fs, filename) m.siteUsed, err = usesSiteVar(fs, path)
if err != nil { if err != nil {
return err return err
} }
@ -220,7 +226,15 @@ func mapArcheTypeDir(
return nil return nil
} }
if err := helpers.SymbolicWalk(fs, archetypeDir, walkFn); err != nil { walkCfg := hugofs.WalkwayConfig{
WalkFn: walkFn,
Fs: fs,
Root: archetypeDir,
}
w := hugofs.NewWalkway(walkCfg)
if err := w.Walk(); err != nil {
return m, errors.Wrapf(err, "failed to walk archetype dir %q", archetypeDir) return m, errors.Wrapf(err, "failed to walk archetype dir %q", archetypeDir)
} }
@ -254,20 +268,32 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
} }
} }
for _, ss := range sites.Sites { var dirLang string
contentDir := ss.PathSpec.ContentDir
for _, dir := range sites.BaseFs.Content.Dirs {
meta := dir.Meta()
contentDir := meta.Filename()
if !strings.HasSuffix(contentDir, helpers.FilePathSeparator) { if !strings.HasSuffix(contentDir, helpers.FilePathSeparator) {
contentDir += helpers.FilePathSeparator contentDir += helpers.FilePathSeparator
} }
if strings.HasPrefix(targetPath, contentDir) { if strings.HasPrefix(targetPath, contentDir) {
siteContentDir = ss.PathSpec.ContentDir siteContentDir = contentDir
if s == nil { dirLang = meta.Lang()
s = ss
}
break break
} }
} }
if s == nil && dirLang != "" {
for _, ss := range sites.Sites {
if ss.Lang() == dirLang {
s = ss
break
}
}
}
if s == nil { if s == nil {
s = first s = first
} }
@ -280,12 +306,22 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
} }
} }
if siteContentDir == "" {
}
if siteContentDir != "" { if siteContentDir != "" {
pp := filepath.Join(siteContentDir, strings.TrimPrefix(targetPath, siteContentDir)) pp := filepath.Join(siteContentDir, strings.TrimPrefix(targetPath, siteContentDir))
return s.PathSpec.AbsPathify(pp), s return s.PathSpec.AbsPathify(pp), s
} else { } else {
return s.PathSpec.AbsPathify(filepath.Join(first.PathSpec.ContentDir, targetPath)), s var contentDir string
for _, dir := range sites.BaseFs.Content.Dirs {
contentDir = dir.Meta().Filename()
if dir.Meta().Lang() == s.Lang() {
break
}
}
return s.PathSpec.AbsPathify(filepath.Join(contentDir, targetPath)), s
} }
} }

View file

@ -90,7 +90,10 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archety
err error err error
) )
f := s.SourceSpec.NewFileInfo("", targetPath, false, nil) f, err := s.SourceSpec.NewFileInfoFrom(targetPath, targetPath)
if err != nil {
return nil, err
}
if name == "" { if name == "" {
name = f.TranslationBaseName() name = f.TranslationBaseName()

View file

@ -35,7 +35,6 @@ import (
) )
func TestNewContent(t *testing.T) { func TestNewContent(t *testing.T) {
assert := require.New(t)
cases := []struct { cases := []struct {
kind string kind string
@ -50,12 +49,12 @@ func TestNewContent(t *testing.T) {
{"product", "product/sample-4.md", []string{`title = "SAMPLE-4"`}}, // empty archetype front matter {"product", "product/sample-4.md", []string{`title = "SAMPLE-4"`}}, // empty archetype front matter
{"lang", "post/lang-1.md", []string{`Site Lang: en|Name: Lang 1|i18n: Hugo Rocks!`}}, {"lang", "post/lang-1.md", []string{`Site Lang: en|Name: Lang 1|i18n: Hugo Rocks!`}},
{"lang", "post/lang-2.en.md", []string{`Site Lang: en|Name: Lang 2|i18n: Hugo Rocks!`}}, {"lang", "post/lang-2.en.md", []string{`Site Lang: en|Name: Lang 2|i18n: Hugo Rocks!`}},
{"lang", "post/lang-3.nn.md", []string{`Site Lang: nn|Name: Lang 3|i18n: Hugo Rokkar!`}}, {"lang", "content/post/lang-3.nn.md", []string{`Site Lang: nn|Name: Lang 3|i18n: Hugo Rokkar!`}},
{"lang", "content_nn/post/lang-4.md", []string{`Site Lang: nn|Name: Lang 4|i18n: Hugo Rokkar!`}}, {"lang", "content_nn/post/lang-4.md", []string{`Site Lang: nn|Name: Lang 4|i18n: Hugo Rokkar!`}},
{"lang", "content_nn/post/lang-5.en.md", []string{`Site Lang: en|Name: Lang 5|i18n: Hugo Rocks!`}}, {"lang", "content_nn/post/lang-5.en.md", []string{`Site Lang: en|Name: Lang 5|i18n: Hugo Rocks!`}},
{"lang", "post/my-bundle/index.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}}, {"lang", "post/my-bundle/index.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}},
{"lang", "post/my-bundle/index.en.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}}, {"lang", "post/my-bundle/index.en.md", []string{`Site Lang: en|Name: My Bundle|i18n: Hugo Rocks!`}},
{"lang", "post/my-bundle/index.nn.md", []string{`Site Lang: nn|Name: My Bundle|i18n: Hugo Rokkar!`}}, {"lang", "content/post/my-bundle/index.nn.md", []string{`Site Lang: nn|Name: My Bundle|i18n: Hugo Rokkar!`}},
{"shortcodes", "shortcodes/go.md", []string{ {"shortcodes", "shortcodes/go.md", []string{
`title = "GO"`, `title = "GO"`,
"{{< myshortcode >}}", "{{< myshortcode >}}",
@ -64,37 +63,43 @@ func TestNewContent(t *testing.T) {
} }
for i, c := range cases { for i, c := range cases {
cfg, fs := newTestCfg(assert) c := c
assert.NoError(initFs(fs)) t.Run(fmt.Sprintf("%s-%d", c.kind, i), func(t *testing.T) {
h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs}) t.Parallel()
assert.NoError(err) assert := require.New(t)
mm := afero.NewMemMapFs()
assert.NoError(initFs(mm))
cfg, fs := newTestCfg(assert, mm)
h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
assert.NoError(err)
assert.NoError(create.NewContent(h, c.kind, c.path)) assert.NoError(create.NewContent(h, c.kind, c.path))
fname := filepath.FromSlash(c.path) fname := filepath.FromSlash(c.path)
if !strings.HasPrefix(fname, "content") { if !strings.HasPrefix(fname, "content") {
fname = filepath.Join("content", fname) fname = filepath.Join("content", fname)
}
content := readFileFromFs(t, fs.Source, fname)
for _, v := range c.expected {
found := strings.Contains(content, v)
if !found {
t.Fatalf("[%d] %q missing from output:\n%q", i, v, content)
} }
} content := readFileFromFs(t, fs.Source, fname)
for _, v := range c.expected {
found := strings.Contains(content, v)
if !found {
t.Fatalf("[%d] %q missing from output:\n%q", i, v, content)
}
}
})
} }
} }
func TestNewContentFromDir(t *testing.T) { func TestNewContentFromDir(t *testing.T) {
mm := afero.NewMemMapFs()
assert := require.New(t) assert := require.New(t)
cfg, fs := newTestCfg(assert)
assert.NoError(initFs(fs))
archetypeDir := filepath.Join("archetypes", "my-bundle") archetypeDir := filepath.Join("archetypes", "my-bundle")
assert.NoError(fs.Source.Mkdir(archetypeDir, 0755)) assert.NoError(mm.MkdirAll(archetypeDir, 0755))
archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle") archetypeThemeDir := filepath.Join("themes", "mytheme", "archetypes", "my-theme-bundle")
assert.NoError(fs.Source.Mkdir(archetypeThemeDir, 0755)) assert.NoError(mm.MkdirAll(archetypeThemeDir, 0755))
contentFile := ` contentFile := `
File: %s File: %s
@ -103,15 +108,18 @@ Name: {{ replace .Name "-" " " | title }}
i18n: {{ T "hugo" }} i18n: {{ T "hugo" }}
` `
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.nn.md"), []byte(fmt.Sprintf(contentFile, "index.nn.md")), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeDir, "pages", "bio.md"), []byte(fmt.Sprintf(contentFile, "bio.md")), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeDir, "resources", "hugo2.xml"), []byte(`hugo2: {{ printf "no template handling in here" }}`), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0755))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0755)) assert.NoError(afero.WriteFile(mm, filepath.Join(archetypeThemeDir, "resources", "hugo1.json"), []byte(`hugo1: {{ printf "no template handling in here" }}`), 0755))
assert.NoError(initFs(mm))
cfg, fs := newTestCfg(assert, mm)
h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs}) h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
assert.NoError(err) assert.NoError(err)
@ -135,7 +143,7 @@ i18n: {{ T "hugo" }}
} }
func initFs(fs *hugofs.Fs) error { func initFs(fs afero.Fs) error {
perm := os.FileMode(0755) perm := os.FileMode(0755)
var err error var err error
@ -146,8 +154,8 @@ func initFs(fs *hugofs.Fs) error {
filepath.Join("themes", "sample", "archetypes"), filepath.Join("themes", "sample", "archetypes"),
} }
for _, dir := range dirs { for _, dir := range dirs {
err = fs.Source.Mkdir(dir, perm) err = fs.Mkdir(dir, perm)
if err != nil { if err != nil && !os.IsExist(err) {
return err return err
} }
} }
@ -198,7 +206,7 @@ Some text.
`, `,
}, },
} { } {
f, err := fs.Source.Create(v.path) f, err := fs.Create(v.path)
if err != nil { if err != nil {
return err return err
} }
@ -221,6 +229,7 @@ func assertContains(assert *require.Assertions, v interface{}, matches ...string
// TODO(bep) extract common testing package with this and some others // TODO(bep) extract common testing package with this and some others
func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string { func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string {
t.Helper()
filename = filepath.FromSlash(filename) filename = filepath.FromSlash(filename)
b, err := afero.ReadFile(fs, filename) b, err := afero.ReadFile(fs, filename)
if err != nil { if err != nil {
@ -238,12 +247,11 @@ func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string {
return string(b) return string(b)
} }
func newTestCfg(assert *require.Assertions) (*viper.Viper, *hugofs.Fs) { func newTestCfg(assert *require.Assertions, mm afero.Fs) (*viper.Viper, *hugofs.Fs) {
cfg := ` cfg := `
theme = "mytheme" theme = "mytheme"
[languages] [languages]
[languages.en] [languages.en]
weight = 1 weight = 1
@ -254,8 +262,13 @@ languageName = "Nynorsk"
contentDir = "content_nn" contentDir = "content_nn"
` `
if mm == nil {
mm = afero.NewMemMapFs()
}
mm := afero.NewMemMapFs() mm.MkdirAll(filepath.FromSlash("content_nn"), 0777)
mm.MkdirAll(filepath.FromSlash("themes/mytheme"), 0777)
assert.NoError(afero.WriteFile(mm, filepath.Join("i18n", "en.toml"), []byte(`[hugo] assert.NoError(afero.WriteFile(mm, filepath.Join("i18n", "en.toml"), []byte(`[hugo]
other = "Hugo Rocks!"`), 0755)) other = "Hugo Rocks!"`), 0755))

6
deps/deps.go vendored
View file

@ -159,11 +159,11 @@ func (d *Deps) TemplateHandler() tpl.TemplateHandler {
func (d *Deps) LoadResources() error { func (d *Deps) LoadResources() error {
// Note that the translations need to be loaded before the templates. // Note that the translations need to be loaded before the templates.
if err := d.translationProvider.Update(d); err != nil { if err := d.translationProvider.Update(d); err != nil {
return err return errors.Wrap(err, "loading translations")
} }
if err := d.templateProvider.Update(d); err != nil { if err := d.templateProvider.Update(d); err != nil {
return err return errors.Wrap(err, "loading templates")
} }
return nil return nil
@ -210,7 +210,7 @@ func New(cfg DepsCfg) (*Deps, error) {
ps, err := helpers.NewPathSpec(fs, cfg.Language) ps, err := helpers.NewPathSpec(fs, cfg.Language)
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "create PathSpec")
} }
fileCaches, err := filecache.NewCaches(ps) fileCaches, err := filecache.NewCaches(ps)

14
go.mod
View file

@ -12,41 +12,43 @@ require (
github.com/bep/debounce v1.2.0 github.com/bep/debounce v1.2.0
github.com/bep/gitmap v1.1.0 github.com/bep/gitmap v1.1.0
github.com/bep/go-tocss v0.6.0 github.com/bep/go-tocss v0.6.0
github.com/cpuguy83/go-md2man v1.0.8 // indirect
github.com/disintegration/imaging v1.6.0 github.com/disintegration/imaging v1.6.0
github.com/dustin/go-humanize v1.0.0 github.com/dustin/go-humanize v1.0.0
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385
github.com/fortytw2/leaktest v1.3.0 github.com/fortytw2/leaktest v1.3.0
github.com/fsnotify/fsnotify v1.4.7 github.com/fsnotify/fsnotify v1.4.7
github.com/go-errors/errors v1.0.1
github.com/gobwas/glob v0.2.3 github.com/gobwas/glob v0.2.3
github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95
github.com/google/go-cmp v0.3.0 github.com/google/go-cmp v0.3.0
github.com/gorilla/websocket v1.4.0 github.com/gorilla/websocket v1.4.0
github.com/hashicorp/go-immutable-radix v1.0.0 github.com/hashicorp/go-immutable-radix v1.0.0
github.com/hashicorp/go-uuid v1.0.1 // indirect github.com/hashicorp/go-uuid v1.0.1 // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/jdkato/prose v1.1.0 github.com/jdkato/prose v1.1.0
github.com/kyokomi/emoji v1.5.1 github.com/kyokomi/emoji v1.5.1
github.com/magefile/mage v1.4.0 github.com/magefile/mage v1.4.0
github.com/magiconair/properties v1.8.1 // indirect github.com/magiconair/properties v1.8.1 // indirect
github.com/markbates/inflect v1.0.0 github.com/markbates/inflect v1.0.0
github.com/mattn/go-isatty v0.0.8 github.com/mattn/go-isatty v0.0.8
github.com/mattn/go-runewidth v0.0.3 // indirect
github.com/miekg/mmark v1.3.6 github.com/miekg/mmark v1.3.6
github.com/mitchellh/hashstructure v1.0.0 github.com/mitchellh/hashstructure v1.0.0
github.com/mitchellh/mapstructure v1.1.2 github.com/mitchellh/mapstructure v1.1.2
github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12 github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12
github.com/ncw/rclone v1.48.0
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
github.com/nicksnyder/go-i18n v1.10.0 github.com/nicksnyder/go-i18n v1.10.0
github.com/niklasfasching/go-org v0.1.1 github.com/niklasfasching/go-org v0.1.1
github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84 github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
github.com/pelletier/go-toml v1.4.0 // indirect github.com/pelletier/go-toml v1.4.0 // indirect
github.com/pkg/errors v0.8.1 github.com/pkg/errors v0.8.1
github.com/rogpeppe/go-internal v1.3.0
github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691 github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691
github.com/sanity-io/litter v1.1.0 github.com/sanity-io/litter v1.1.0
github.com/spf13/afero v1.2.2 github.com/spf13/afero v1.2.2
github.com/spf13/cast v1.3.0 github.com/spf13/cast v1.3.0
github.com/spf13/cobra v0.0.3 github.com/spf13/cobra v0.0.4-0.20190321000552-67fc4837d267
github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 github.com/spf13/fsync v0.9.0
github.com/spf13/jwalterweatherman v1.1.0 github.com/spf13/jwalterweatherman v1.1.0
github.com/spf13/pflag v1.0.3 github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.4.0 github.com/spf13/viper v1.4.0
@ -67,3 +69,5 @@ require (
) )
replace github.com/markbates/inflect => github.com/markbates/inflect v0.0.0-20171215194931-a12c3aec81a6 replace github.com/markbates/inflect => github.com/markbates/inflect v0.0.0-20171215194931-a12c3aec81a6
go 1.13

92
go.sum
View file

@ -1,3 +1,4 @@
bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw= cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
@ -19,6 +20,7 @@ github.com/Azure/azure-sdk-for-go v27.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9mo
github.com/Azure/azure-service-bus-go v0.4.1/go.mod h1:d9ho9e/06euiTwGpKxmlbpPhFUsfCsq6a4tZ68r51qI= github.com/Azure/azure-service-bus-go v0.4.1/go.mod h1:d9ho9e/06euiTwGpKxmlbpPhFUsfCsq6a4tZ68r51qI=
github.com/Azure/azure-storage-blob-go v0.6.0 h1:SEATKb3LIHcaSIX+E6/K4kJpwfuozFEsmt5rS56N6CE= github.com/Azure/azure-storage-blob-go v0.6.0 h1:SEATKb3LIHcaSIX+E6/K4kJpwfuozFEsmt5rS56N6CE=
github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y= github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y=
github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
github.com/Azure/go-autorest v11.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest v11.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v11.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest v11.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
@ -35,6 +37,11 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
github.com/Unknwon/goconfig v0.0.0-20181105214110-56bd8ab18619 h1:6X8iB881g299aNEv6KXrcjL31iLOH7yA6NXoQX+MbDg=
github.com/Unknwon/goconfig v0.0.0-20181105214110-56bd8ab18619/go.mod h1:wngxua9XCNjvHjDiTiV26DaKDT+0c63QR6H5hjVUUxw=
github.com/a8m/tree v0.0.0-20181222104329-6a0b80129de4/go.mod h1:FSdwKX97koS5efgm8WevNf7XS3PqtyFkKDDXrz778cg=
github.com/abbot/go-http-auth v0.4.0 h1:QjmvZ5gSC7jm3Zg54DqWE/T5m1t2AfDu6QlXJT0EVT0=
github.com/abbot/go-http-auth v0.4.0/go.mod h1:Cz6ARTIzApMJDzh5bRMSUou6UMSp0IEXg9km/ci7TJM=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI= github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
github.com/alecthomas/chroma v0.6.4 h1:Gn37/7W4a1qkmKLzfUpDy2rt3jt4X8CWycb4Gm7L360= github.com/alecthomas/chroma v0.6.4 h1:Gn37/7W4a1qkmKLzfUpDy2rt3jt4X8CWycb4Gm7L360=
@ -48,10 +55,12 @@ github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 h1:GDQdwm/gAcJcLAK
github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ= github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/anacrolix/dms v0.0.0-20180117034613-8af4925bffb5/go.mod h1:DGqLjaZ3ziKKNRt+U5Q9PLWJ52Q/4rxfaaH/b3QYKaE=
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.18.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.18.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.16/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.19.16/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.40 h1:omRrS4bCM/IbzU6UEb8Ojg1PvlElZzYZkOh8vWWgFMc= github.com/aws/aws-sdk-go v1.19.40 h1:omRrS4bCM/IbzU6UEb8Ojg1PvlElZzYZkOh8vWWgFMc=
github.com/aws/aws-sdk-go v1.19.40/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.19.40/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
@ -63,19 +72,24 @@ github.com/bep/gitmap v1.1.0 h1:vOMIdVB+2gd1VbfJPNJdLqAmn0af6NK98t4fK/GoCdA=
github.com/bep/gitmap v1.1.0/go.mod h1:g9VRETxFUXNWzMiuxOwcudo6DfZkW9jOsOW0Ft4kYaY= github.com/bep/gitmap v1.1.0/go.mod h1:g9VRETxFUXNWzMiuxOwcudo6DfZkW9jOsOW0Ft4kYaY=
github.com/bep/go-tocss v0.6.0 h1:lJf+nIjsQDpifUr+NgHi9QMBnrr9cFvMvEBT+uV9Q9E= github.com/bep/go-tocss v0.6.0 h1:lJf+nIjsQDpifUr+NgHi9QMBnrr9cFvMvEBT+uV9Q9E=
github.com/bep/go-tocss v0.6.0/go.mod h1:d9d3crzlTl+PUZLFzBUjfFCpp68K+ku10mzTlnqU/+A= github.com/bep/go-tocss v0.6.0/go.mod h1:d9d3crzlTl+PUZLFzBUjfFCpp68K+ku10mzTlnqU/+A=
github.com/billziss-gh/cgofuse v1.1.0/go.mod h1:LJjoaUojlVjgo5GQoEJTcJNqZJeRU0nCR84CyxKt2YM=
github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764= github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U= github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/coreos/bbolt v1.3.2 h1:wZwiHHUieZCquLkDL0B8UhzreNWsPHooDAG3q34zk0s=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.8 h1:DwoNytLphI8hzS2Af4D0dfaEaiSq2bN05mEm4R6vf8M= github.com/cpuguy83/go-md2man v1.0.8 h1:DwoNytLphI8hzS2Af4D0dfaEaiSq2bN05mEm4R6vf8M=
github.com/cpuguy83/go-md2man v1.0.8/go.mod h1:N6JayAiVKtlHSnuTCeuLSQVs75hb8q+dYQLjr7cDsKY= github.com/cpuguy83/go-md2man v1.0.8/go.mod h1:N6JayAiVKtlHSnuTCeuLSQVs75hb8q+dYQLjr7cDsKY=
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@ -86,8 +100,10 @@ github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
github.com/disintegration/imaging v1.6.0 h1:nVPXRUUQ36Z7MNf0O77UzgnOb1mkMMor7lmJMJXc/mA= github.com/disintegration/imaging v1.6.0 h1:nVPXRUUQ36Z7MNf0O77UzgnOb1mkMMor7lmJMJXc/mA=
github.com/disintegration/imaging v1.6.0/go.mod h1:xuIt+sRxDFrHS0drzXUlCJthkJ8k7lkkUojDSR247MQ= github.com/disintegration/imaging v1.6.0/go.mod h1:xuIt+sRxDFrHS0drzXUlCJthkJ8k7lkkUojDSR247MQ=
github.com/djherbis/times v1.2.0/go.mod h1:CGMZlo255K5r4Yw0b9RRfFQpM2y7uOmxg4jm9HsaVf8=
github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg= github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dropbox/dropbox-sdk-go-unofficial v5.4.0+incompatible/go.mod h1:lr+LhMM3F6Y3lW1T9j2U5l7QeuWm87N9+PPXo3yH4qY=
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo= github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
@ -95,6 +111,7 @@ github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1
github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o=
github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM=
github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
github.com/fortytw2/leaktest v1.2.0 h1:cj6GCiwJDH7l3tMHLjZDo0QqPtrXJiWSI9JgpeQKw+Q= github.com/fortytw2/leaktest v1.2.0 h1:cj6GCiwJDH7l3tMHLjZDo0QqPtrXJiWSI9JgpeQKw+Q=
github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
@ -102,6 +119,7 @@ github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHqu
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@ -110,9 +128,14 @@ github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goftp/file-driver v0.0.0-20180502053751-5d604a0fc0c9/go.mod h1:GpOj6zuVBG3Inr9qjEnuVTgBlk2lZ1S9DcoFiXWyKss=
github.com/goftp/server v0.0.0-20190304020633-eabccc535b5a/go.mod h1:k/SS6VWkxY7dHPhoMQ8IdRu8L4lQtmGbhyXGg+vCnXE=
github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95 h1:sgew0XCnZwnzpWxTt3V8LLiCO7OQi3C6dycaE67wfkU=
github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95/go.mod h1:bOlVlCa1/RajcHpXkrUXPSHB/Re1UnlXxD1Qp8SKOd8=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
@ -130,6 +153,7 @@ github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no= github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE= github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=
@ -143,6 +167,8 @@ github.com/googleapis/gax-go v2.0.2+incompatible h1:silFMLAnr330+NRuag/VjIGF7TLp
github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
github.com/googleapis/gax-go/v2 v2.0.4 h1:hU4mGcQI4DaAYW+IbTun+2qEZVFxK0ySjQLTbS0VQKc= github.com/googleapis/gax-go/v2 v2.0.4 h1:hU4mGcQI4DaAYW+IbTun+2qEZVFxK0ySjQLTbS0VQKc=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190411002643-bd77b112433e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q= github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
@ -168,16 +194,22 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/jdkato/prose v1.1.0 h1:LpvmDGwbKGTgdCH3a8VJL56sr7p/wOFPw/R4lM4PfFg= github.com/jdkato/prose v1.1.0 h1:LpvmDGwbKGTgdCH3a8VJL56sr7p/wOFPw/R4lM4PfFg=
github.com/jdkato/prose v1.1.0/go.mod h1:jkF0lkxaX5PFSlk9l4Gh9Y+T57TqUZziWT7uZbW5ADg= github.com/jdkato/prose v1.1.0/go.mod h1:jkF0lkxaX5PFSlk9l4Gh9Y+T57TqUZziWT7uZbW5ADg=
github.com/jlaffaye/ftp v0.0.0-20190519203911-8f5b34ce006f/go.mod h1:lli8NYPQOFy3O++YmYbqVgOcQ1JPCwdOy+5zSjKJ9qY=
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/koofr/go-httpclient v0.0.0-20180104120329-03786175608a/go.mod h1:3xszwh+rNrYk1r9SStc4iJ326gne1OaBcrdB1ACsbzI=
github.com/koofr/go-koofrclient v0.0.0-20190131164641-7f327592caff/go.mod h1:MRAz4Gsxd+OzrZ0owwrUHc0zLESL+1Y5syqK/sJxK2A=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@ -187,6 +219,7 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kyokomi/emoji v1.5.1 h1:qp9dub1mW7C4MlvoRENH6EAENb9skEFOvIEbp1Waj38= github.com/kyokomi/emoji v1.5.1 h1:qp9dub1mW7C4MlvoRENH6EAENb9skEFOvIEbp1Waj38=
github.com/kyokomi/emoji v1.5.1/go.mod h1:mZ6aGCD7yk8j6QY6KICwnZ2pxoszVseX1DNoGtU2tBA= github.com/kyokomi/emoji v1.5.1/go.mod h1:mZ6aGCD7yk8j6QY6KICwnZ2pxoszVseX1DNoGtU2tBA=
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
github.com/magefile/mage v1.4.0 h1:RI7B1CgnPAuu2O9lWszwya61RLmfL0KCdo+QyyI/Bhk= github.com/magefile/mage v1.4.0 h1:RI7B1CgnPAuu2O9lWszwya61RLmfL0KCdo+QyyI/Bhk=
github.com/magefile/mage v1.4.0/go.mod h1:IUDi13rsHje59lecXokTfGX0QIzO45uVPlXnJYsXepA= github.com/magefile/mage v1.4.0/go.mod h1:IUDi13rsHje59lecXokTfGX0QIzO45uVPlXnJYsXepA=
github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY= github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
@ -199,13 +232,18 @@ github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs= github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-runewidth v0.0.3 h1:a+kO+98RDGEfo6asOGMmpodZq4FNtnGP54yps8BzLR4= github.com/mattn/go-runewidth v0.0.3 h1:a+kO+98RDGEfo6asOGMmpodZq4FNtnGP54yps8BzLR4=
github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/miekg/mmark v1.3.6 h1:t47x5vThdwgLJzofNsbsAl7gmIiJ7kbDQN5BxwBmwvY= github.com/miekg/mmark v1.3.6 h1:t47x5vThdwgLJzofNsbsAl7gmIiJ7kbDQN5BxwBmwvY=
github.com/miekg/mmark v1.3.6/go.mod h1:w7r9mkTvpS55jlfyn22qJ618itLryxXBhA7Jp3FIlkw= github.com/miekg/mmark v1.3.6/go.mod h1:w7r9mkTvpS55jlfyn22qJ618itLryxXBhA7Jp3FIlkw=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/hashstructure v1.0.0 h1:ZkRJX1CyOoTkar7p/mLS5TZU4nJ1Rn/F8u9dGS02Q3Y= github.com/mitchellh/hashstructure v1.0.0 h1:ZkRJX1CyOoTkar7p/mLS5TZU4nJ1Rn/F8u9dGS02Q3Y=
github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ= github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ=
@ -214,29 +252,43 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12 h1:l0X/8IDy2UoK+oXcQFMRSIOcyuYb5iEPytPGplnM41Y= github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12 h1:l0X/8IDy2UoK+oXcQFMRSIOcyuYb5iEPytPGplnM41Y=
github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12/go.mod h1:i2fCI/UorTfgEpPPLWiFBv4pye+YAG78RwcQLUkocpI= github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12/go.mod h1:i2fCI/UorTfgEpPPLWiFBv4pye+YAG78RwcQLUkocpI=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/ncw/go-acd v0.0.0-20171120105400-887eb06ab6a2/go.mod h1:MLIrzg7gp/kzVBxRE1olT7CWYMCklcUWU+ekoxOD9x0=
github.com/ncw/rclone v1.48.0 h1:Rc7A4YEQDeMPgnc1IzA6PsJ4YikyP+zS68rgGMYKJ7o=
github.com/ncw/rclone v1.48.0/go.mod h1:CXDUKN1OQ3Y2ya1Ma6jTZ7m9ZarGzF3ZTHsdPLHWWzY=
github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8= github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
github.com/nicksnyder/go-i18n v1.10.0 h1:5AzlPKvXBH4qBzmZ09Ua9Gipyruv6uApMcrNZdo96+Q= github.com/nicksnyder/go-i18n v1.10.0 h1:5AzlPKvXBH4qBzmZ09Ua9Gipyruv6uApMcrNZdo96+Q=
github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q= github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
github.com/niklasfasching/go-org v0.1.1 h1:yoU+dQLuc8P1Y0nxY3DV9SoBQTXSy9CogEvhOXSq9/Q= github.com/niklasfasching/go-org v0.1.1 h1:yoU+dQLuc8P1Y0nxY3DV9SoBQTXSy9CogEvhOXSq9/Q=
github.com/niklasfasching/go-org v0.1.1/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU= github.com/niklasfasching/go-org v0.1.1/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
github.com/nsf/termbox-go v0.0.0-20190325093121-288510b9734e/go.mod h1:IuKpRQcYE1Tfu+oAQqaLisqDeXgjyyltCfsaoYN18NQ=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/okzk/sdnotify v0.0.0-20180710141335-d9becc38acbd/go.mod h1:4soZNh0zW0LtYGdQ416i0jO0EIqMGcbtaspRS4BDvRQ=
github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84 h1:fiKJgB4JDUd43CApkmCeTSQlWjtTtABrU2qsgbuP0BI= github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84 h1:fiKJgB4JDUd43CApkmCeTSQlWjtTtABrU2qsgbuP0BI=
github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc= github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg= github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo= github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
github.com/pengsrc/go-shared v0.2.0/go.mod h1:jVblp62SafmidSkvWrXyxAme3gaTfEtWwRPGz5cpvHg=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw= github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.10.1-0.20190523025818-e98a7bef6829/go.mod h1:NxmoDg/QLVWluQDUYG7XBZTLUpKeFa8e3aMf1BfjyHk=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@ -253,14 +305,27 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rfjakob/eme v0.0.0-20171028163933-2222dbd4ba46 h1:w2CpS5muK+jyydnmlkqpAhzKmHmMBzBkfYUDjQNS1Dk=
github.com/rfjakob/eme v0.0.0-20171028163933-2222dbd4ba46/go.mod h1:U2bmx0hDj8EyDdcxmD5t3XHDnBFnyNNc22n1R4008eM=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691 h1:auJkuUc4uOuZNoH9jGLvqVaDLiuCOh/LY+Qw5NBFo4I= github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691 h1:auJkuUc4uOuZNoH9jGLvqVaDLiuCOh/LY+Qw5NBFo4I=
github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday v1.5.3-0.20190124082335-a477dd164691/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/sanity-io/litter v1.1.0 h1:BllcKWa3VbZmOZbDCoszYLk7zCsKHz5Beossi8SUcTc= github.com/sanity-io/litter v1.1.0 h1:BllcKWa3VbZmOZbDCoszYLk7zCsKHz5Beossi8SUcTc=
github.com/sanity-io/litter v1.1.0/go.mod h1:CJ0VCw2q4qKU7LaQr3n7UOSHzgEMgcGco7N/SkZQPjw= github.com/sanity-io/litter v1.1.0/go.mod h1:CJ0VCw2q4qKU7LaQr3n7UOSHzgEMgcGco7N/SkZQPjw=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sevlyar/go-daemon v0.1.4/go.mod h1:6dJpPatBT9eUwM5VCw9Bt6CdX9Tk6UWvhW3MebLDRKE=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/skratchdot/open-golang v0.0.0-20190402232053-79abb63cd66e/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/assertions v0.0.0-20190401211740-f487f9de1cd3/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
@ -271,14 +336,19 @@ github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8= github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spf13/cobra v0.0.4-0.20190321000552-67fc4837d267 h1:I9j1PLS64+NgCtkgbomGInboj1NFH1KF1tkVKlt3yF4=
github.com/spf13/cobra v0.0.4-0.20190321000552-67fc4837d267/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 h1:pQHm7pxjSgC54M1rtLSLmju25phy6RgYf3p4O6XanYE= github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 h1:pQHm7pxjSgC54M1rtLSLmju25phy6RgYf3p4O6XanYE=
github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05/go.mod h1:jdsEoy1w+v0NpuwXZEaRAH6ADTDmzfRnE2eVwshwFrM= github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05/go.mod h1:jdsEoy1w+v0NpuwXZEaRAH6ADTDmzfRnE2eVwshwFrM=
github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY=
github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0=
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk= github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU= github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
@ -288,6 +358,7 @@ github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/t3rm1n4l/go-mega v0.0.0-20190430100803-72151b53bb44/go.mod h1:XWL4vDyd3JKmJx+hZWUVgCNmmhZ2dTBcaNDcxH465s0=
github.com/tdewolff/minify/v2 v2.3.7 h1:nhk7MKYRdTDwTxqEQZKLDkLe04tDHht8mBI+VJrsYvk= github.com/tdewolff/minify/v2 v2.3.7 h1:nhk7MKYRdTDwTxqEQZKLDkLe04tDHht8mBI+VJrsYvk=
github.com/tdewolff/minify/v2 v2.3.7/go.mod h1:DD1stRlSx6JsHfl1+E/HVMQeXiec9rD1UQ0epklIZLc= github.com/tdewolff/minify/v2 v2.3.7/go.mod h1:DD1stRlSx6JsHfl1+E/HVMQeXiec9rD1UQ0epklIZLc=
github.com/tdewolff/parse/v2 v2.3.5 h1:/uS8JfhwVJsNkEh769GM5ENv6L9LOh2Z9uW3tCdlhs0= github.com/tdewolff/parse/v2 v2.3.5 h1:/uS8JfhwVJsNkEh769GM5ENv6L9LOh2Z9uW3tCdlhs0=
@ -300,14 +371,17 @@ github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex
github.com/uber/jaeger-client-go v2.15.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk= github.com/uber/jaeger-client-go v2.15.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v1.5.0/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U= github.com/uber/jaeger-lib v1.5.0/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701 h1:9vG9vvVNVupO4Y7uwFkRgIMNe9rdaJMCINDe8vhAhLo= github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701 h1:9vG9vvVNVupO4Y7uwFkRgIMNe9rdaJMCINDe8vhAhLo=
github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs= github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I= github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y= github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA= github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA=
github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0= github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0=
github.com/yunify/qingstor-sdk-go v2.2.15+incompatible/go.mod h1:w6wqLDQ5bBTzxGJ55581UrSwLrsTAsdo9N6yX/8d9RY=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.mongodb.org/mongo-driver v1.0.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM= go.mongodb.org/mongo-driver v1.0.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0= go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
@ -325,8 +399,13 @@ gocloud.dev v0.15.0 h1:Tl8dkOHWVZiYBYPxG2ouhpfmluoQGt3mY323DaAHaC8=
gocloud.dev v0.15.0/go.mod h1:ShXCyJaGrJu9y/7a6+DSCyBb9MFGZ1P5wwPa0Wu6w34= gocloud.dev v0.15.0/go.mod h1:ShXCyJaGrJu9y/7a6+DSCyBb9MFGZ1P5wwPa0Wu6w34=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181001203147-e3636079e1a4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181001203147-e3636079e1a4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190131182504-b8fe1690c613/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190422183909-d864b10871cd/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190422183909-d864b10871cd/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
@ -355,6 +434,8 @@ golang.org/x/net v0.0.0-20190424112056-4829fb13d2c6/go.mod h1:t9HGtf8HONx5eT2rtn
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco= golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco=
golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190606173856-1492cefac77f h1:IWHgpgFqnL5AhBUBZSgBdjl2vkQUEzcY+JNKWfcgAU0=
golang.org/x/net v0.0.0-20190606173856-1492cefac77f/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421 h1:Wo7BWFiOk0QRFMLYMqJGFMd9CgUAcGx7V+qEg/h5IBI= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421 h1:Wo7BWFiOk0QRFMLYMqJGFMd9CgUAcGx7V+qEg/h5IBI=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -379,11 +460,16 @@ golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1 h1:R4dVlxdmKenVdMRS/tTspEpSTRWINYrHD8ySIU9yCIU=
golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7 h1:LepdCS8Gf/MVejFIt8lsiexZATdoGVyp5bcyS+rYoUI= golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7 h1:LepdCS8Gf/MVejFIt8lsiexZATdoGVyp5bcyS+rYoUI=
golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
@ -394,6 +480,7 @@ golang.org/x/text v0.3.1-0.20181227161524-e6919f6577db/go.mod h1:bEr9sfX3Q8Zfm5f
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZenJ2O330aBsf7JfSUXmQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -402,7 +489,9 @@ golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGm
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190606174628-0139d5756a7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -413,6 +502,7 @@ google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEt
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508= google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.0 h1:Tfd7cKwKbFRsI8RMAD3oqqw7JPFRrvFlOsfbgVkjOOw= google.golang.org/appengine v1.6.0 h1:Tfd7cKwKbFRsI8RMAD3oqqw7JPFRrvFlOsfbgVkjOOw=
google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
@ -427,6 +517,7 @@ google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3
google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8= google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0 h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0= google.golang.org/grpc v1.21.0 h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@ -435,6 +526,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=

View file

@ -511,12 +511,6 @@ func TotalWords(s string) int {
return n return n
} }
// Old implementation only kept for benchmark comparison.
// TODO(bep) remove
func totalWordsOld(s string) int {
return len(strings.Fields(s))
}
// TruncateWordsByRune truncates words by runes. // TruncateWordsByRune truncates words by runes.
func (c *ContentSpec) TruncateWordsByRune(in []string) (string, bool) { func (c *ContentSpec) TruncateWordsByRune(in []string) (string, bool) {
words := make([]string, len(in)) words := make([]string, len(in))

View file

@ -506,13 +506,3 @@ func BenchmarkTotalWords(b *testing.B) {
} }
} }
} }
func BenchmarkTotalWordsOld(b *testing.B) {
b.ResetTimer()
for i := 0; i < b.N; i++ {
wordCount := totalWordsOld(totalWordsBenchmarkString)
if wordCount != 400 {
b.Fatal("Wordcount error")
}
}
}

View file

@ -36,8 +36,7 @@ func init() {
} }
} }
sort.Strings(aliases) aliases = UniqueStringsSorted(aliases)
aliases = UniqueStrings(aliases)
lexerEntry := struct { lexerEntry := struct {
Name string Name string

View file

@ -22,15 +22,16 @@ import (
"net" "net"
"os" "os"
"path/filepath" "path/filepath"
"sort"
"strings" "strings"
"sync" "sync"
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/common/hugo"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/jdkato/prose/transform" "github.com/jdkato/prose/transform"
@ -106,7 +107,7 @@ func FirstUpper(s string) string {
// UniqueStrings returns a new slice with any duplicates removed. // UniqueStrings returns a new slice with any duplicates removed.
func UniqueStrings(s []string) []string { func UniqueStrings(s []string) []string {
var unique []string unique := make([]string, 0, len(s))
set := map[string]interface{}{} set := map[string]interface{}{}
for _, val := range s { for _, val := range s {
if _, ok := set[val]; !ok { if _, ok := set[val]; !ok {
@ -117,6 +118,40 @@ func UniqueStrings(s []string) []string {
return unique return unique
} }
// UniqueStringsReuse returns a slice with any duplicates removed.
// It will modify the input slice.
func UniqueStringsReuse(s []string) []string {
set := map[string]interface{}{}
result := s[:0]
for _, val := range s {
if _, ok := set[val]; !ok {
result = append(result, val)
set[val] = val
}
}
return result
}
// UniqueStringsSorted returns a sorted slice with any duplicates removed.
// It will modify the input slice.
func UniqueStringsSorted(s []string) []string {
if len(s) == 0 {
return nil
}
ss := sort.StringSlice(s)
ss.Sort()
i := 0
for j := 1; j < len(s); j++ {
if !ss.Less(i, j) {
continue
}
i++
s[i] = s[j]
}
return s[:i+1]
}
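For illustration only (a sketch added here, not part of the commit), the two new helpers differ in ordering and in how they reuse the input slice; the example values below are made up:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/helpers"
)

func main() {
	// UniqueStringsReuse keeps first-seen order and writes into the input's backing array.
	fmt.Println(helpers.UniqueStringsReuse([]string{"b", "a", "b", "c"})) // [b a c]

	// UniqueStringsSorted sorts first, then removes duplicates in place.
	fmt.Println(helpers.UniqueStringsSorted([]string{"b", "a", "b", "c"})) // [a b c]
}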
// ReaderToBytes takes an io.Reader argument, reads from it // ReaderToBytes takes an io.Reader argument, reads from it
// and returns bytes. // and returns bytes.
func ReaderToBytes(lines io.Reader) []byte { func ReaderToBytes(lines io.Reader) []byte {
@ -459,17 +494,15 @@ func PrintFs(fs afero.Fs, path string, w io.Writer) {
if fs == nil { if fs == nil {
return return
} }
afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error { afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
if info != nil && !info.IsDir() { var filename string
s := path var meta interface{}
if lang, ok := info.(hugofs.LanguageAnnouncer); ok { if fim, ok := info.(hugofs.FileMetaInfo); ok {
s = s + "\tLANG: " + lang.Lang() filename = fim.Meta().Filename()
} meta = fim.Meta()
if fp, ok := info.(hugofs.FilePather); ok {
s = s + "\tRF: " + fp.Filename() + "\tBP: " + fp.BaseDir()
}
fmt.Fprintln(w, " ", s)
} }
fmt.Fprintf(w, " %q %q\t\t%v\n", path, filename, meta)
return nil return nil
}) })
} }
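As a hedged usage sketch (not from the commit), the rewritten PrintFs stays a debugging aid that dumps a filesystem tree; the root path "content" here is only an example:

package main

import (
	"os"

	"github.com/gohugoio/hugo/helpers"
	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewOsFs()
	// Prints each walked path, plus the real filename and meta whenever the
	// walked FileInfo is a hugofs.FileMetaInfo.
	helpers.PrintFs(fs, "content", os.Stdout)
}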

View file

@ -234,6 +234,24 @@ func TestUniqueStrings(t *testing.T) {
} }
} }
func TestUniqueStringsReuse(t *testing.T) {
in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
output := UniqueStringsReuse(in)
expected := []string{"a", "b", "c", "", "d"}
if !reflect.DeepEqual(output, expected) {
t.Errorf("Expected %#v, got %#v\n", expected, output)
}
}
func TestUniqueStringsSorted(t *testing.T) {
assert := require.New(t)
in := []string{"a", "a", "b", "c", "b", "", "a", "", "d"}
output := UniqueStringsSorted(in)
expected := []string{"", "a", "b", "c", "d"}
assert.Equal(expected, output)
assert.Nil(UniqueStringsSorted(nil))
}
func TestFindAvailablePort(t *testing.T) { func TestFindAvailablePort(t *testing.T) {
addr, err := FindAvailablePort() addr, err := FindAvailablePort()
assert.Nil(t, err) assert.Nil(t, err)
@ -328,3 +346,55 @@ func BenchmarkMD5FromFileFast(b *testing.B) {
} }
} }
func BenchmarkUniqueStrings(b *testing.B) {
input := []string{"a", "b", "d", "e", "d", "h", "a", "i"}
b.Run("Safe", func(b *testing.B) {
for i := 0; i < b.N; i++ {
result := UniqueStrings(input)
if len(result) != 6 {
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
}
}
})
b.Run("Reuse slice", func(b *testing.B) {
b.StopTimer()
inputs := make([][]string, b.N)
for i := 0; i < b.N; i++ {
inputc := make([]string, len(input))
copy(inputc, input)
inputs[i] = inputc
}
b.StartTimer()
for i := 0; i < b.N; i++ {
inputc := inputs[i]
result := UniqueStringsReuse(inputc)
if len(result) != 6 {
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
}
}
})
b.Run("Reuse slice sorted", func(b *testing.B) {
b.StopTimer()
inputs := make([][]string, b.N)
for i := 0; i < b.N; i++ {
inputc := make([]string, len(input))
copy(inputc, input)
inputs[i] = inputc
}
b.StartTimer()
for i := 0; i < b.N; i++ {
inputc := inputs[i]
result := UniqueStringsSorted(inputc)
if len(result) != 6 {
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
}
}
})
}

View file

@ -26,6 +26,8 @@ import (
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/common/hugio" "github.com/gohugoio/hugo/common/hugio"
_errors "github.com/pkg/errors" _errors "github.com/pkg/errors"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -172,32 +174,6 @@ func ReplaceExtension(path string, newExt string) string {
return f + "." + newExt return f + "." + newExt
} }
// GetFirstThemeDir gets the root directory of the first theme, if there is one.
// If there is no theme, returns the empty string.
func (p *PathSpec) GetFirstThemeDir() string {
if p.ThemeSet() {
return p.AbsPathify(filepath.Join(p.ThemesDir, p.Themes()[0]))
}
return ""
}
// GetThemesDir gets the absolute root theme dir path.
func (p *PathSpec) GetThemesDir() string {
if p.ThemeSet() {
return p.AbsPathify(p.ThemesDir)
}
return ""
}
// GetRelativeThemeDir gets the relative root directory of the current theme, if there is one.
// If there is no theme, returns the empty string.
func (p *PathSpec) GetRelativeThemeDir() string {
if p.ThemeSet() {
return strings.TrimPrefix(filepath.Join(p.ThemesDir, p.Themes()[0]), FilePathSeparator)
}
return ""
}
func makePathRelative(inPath string, possibleDirectories ...string) (string, error) { func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
for _, currentPath := range possibleDirectories { for _, currentPath := range possibleDirectories {
@ -379,6 +355,107 @@ func prettifyPath(in string, b filepathPathBridge) string {
return b.Join(b.Dir(in), name, "index"+ext) return b.Join(b.Dir(in), name, "index"+ext)
} }
type NamedSlice struct {
Name string
Slice []string
}
func (n NamedSlice) String() string {
if len(n.Slice) == 0 {
return n.Name
}
return fmt.Sprintf("%s%s{%s}", n.Name, FilePathSeparator, strings.Join(n.Slice, ","))
}
func ExtractAndGroupRootPaths(paths []string) []NamedSlice {
if len(paths) == 0 {
return nil
}
pathsCopy := make([]string, len(paths))
hadSlashPrefix := strings.HasPrefix(paths[0], FilePathSeparator)
for i, p := range paths {
pathsCopy[i] = strings.Trim(filepath.ToSlash(p), "/")
}
sort.Strings(pathsCopy)
pathsParts := make([][]string, len(pathsCopy))
for i, p := range pathsCopy {
pathsParts[i] = strings.Split(p, "/")
}
var groups [][]string
for i, p1 := range pathsParts {
c1 := -1
for j, p2 := range pathsParts {
if i == j {
continue
}
c2 := -1
for i, v := range p1 {
if i >= len(p2) {
break
}
if v != p2[i] {
break
}
c2 = i
}
if c1 == -1 || (c2 != -1 && c2 < c1) {
c1 = c2
}
}
if c1 != -1 {
groups = append(groups, p1[:c1+1])
} else {
groups = append(groups, p1)
}
}
groupsStr := make([]string, len(groups))
for i, g := range groups {
groupsStr[i] = strings.Join(g, "/")
}
groupsStr = UniqueStringsSorted(groupsStr)
var result []NamedSlice
for _, g := range groupsStr {
name := filepath.FromSlash(g)
if hadSlashPrefix {
name = FilePathSeparator + name
}
ns := NamedSlice{Name: name}
for _, p := range pathsCopy {
if !strings.HasPrefix(p, g) {
continue
}
p = strings.TrimPrefix(p, g)
if p != "" {
ns.Slice = append(ns.Slice, p)
}
}
ns.Slice = UniqueStrings(ExtractRootPaths(ns.Slice))
result = append(result, ns)
}
return result
}
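To make the grouping concrete, here is a small sketch mirroring the expectation in the new TestExtractAndGroupRootPaths further down; each NamedSlice renders as root/{sub1,sub2}:

package main

import (
	"fmt"
	"path/filepath"

	"github.com/gohugoio/hugo/helpers"
)

func main() {
	paths := []string{
		filepath.FromSlash("/a/b/c/d"),
		filepath.FromSlash("/a/b/c/e"),
		filepath.FromSlash("/a/b/e/f"),
		filepath.FromSlash("/a/b"),
		filepath.FromSlash("/a/b/c/b/g"),
		filepath.FromSlash("/c/d/e"),
	}
	groups := helpers.ExtractAndGroupRootPaths(paths)
	// With Unix-style separators this prints: [/a/b/{c,e} /c/d/e]
	fmt.Println(groups)
}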
// ExtractRootPaths extracts the root paths from the supplied list of paths. // ExtractRootPaths extracts the root paths from the supplied list of paths.
// The resulting root path will not contain any file separators, but there // The resulting root path will not contain any file separators, but there
// may be duplicates. // may be duplicates.
@ -425,98 +502,21 @@ func FindCWD() (string, error) {
return path, nil return path, nil
} }
// SymbolicWalk is like filepath.Walk, but it supports the root being a // SymbolicWalk is like filepath.Walk, but it follows symbolic links.
// symbolic link. It will still not follow symbolic links deeper down in func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error {
// the file structure. if _, isOs := fs.(*afero.OsFs); isOs {
func SymbolicWalk(fs afero.Fs, root string, walker filepath.WalkFunc) error { // Mainly to track symlinks.
fs = hugofs.NewBaseFileDecorator(fs)
// Sanity check
if root != "" && len(root) < 4 {
return errors.New("path is too short")
} }
// Handle the root first w := hugofs.NewWalkway(hugofs.WalkwayConfig{
fileInfo, realPath, err := getRealFileInfo(fs, root) Fs: fs,
Root: root,
WalkFn: walker,
})
if err != nil { return w.Walk()
return walker(root, nil, err)
}
if !fileInfo.IsDir() {
return fmt.Errorf("cannot walk regular file %s", root)
}
if err := walker(realPath, fileInfo, err); err != nil && err != filepath.SkipDir {
return err
}
// Some of Hugo's filesystems represents an ordered root folder, i.e. project first, then theme folders.
// Make sure that order is preserved. afero.Walk will sort the directories down in the file tree,
// but we don't care about that.
rootContent, err := readDir(fs, root, false)
if err != nil {
return walker(root, nil, err)
}
for _, fi := range rootContent {
if err := afero.Walk(fs, filepath.Join(root, fi.Name()), walker); err != nil {
return err
}
}
return nil
}
func readDir(fs afero.Fs, dirname string, doSort bool) ([]os.FileInfo, error) {
f, err := fs.Open(dirname)
if err != nil {
return nil, err
}
list, err := f.Readdir(-1)
f.Close()
if err != nil {
return nil, err
}
if doSort {
sort.Slice(list, func(i, j int) bool { return list[i].Name() < list[j].Name() })
}
return list, nil
}
func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
fileInfo, err := LstatIfPossible(fs, path)
realPath := path
if err != nil {
return nil, "", err
}
if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
link, err := filepath.EvalSymlinks(path)
if err != nil {
return nil, "", _errors.Wrapf(err, "Cannot read symbolic link %q", path)
}
fileInfo, err = LstatIfPossible(fs, link)
if err != nil {
return nil, "", _errors.Wrapf(err, "Cannot stat %q", link)
}
realPath = link
}
return fileInfo, realPath, nil
}
// GetRealPath returns the real file path for the given path, whether it is a
// symlink or not.
func GetRealPath(fs afero.Fs, path string) (string, error) {
_, realPath, err := getRealFileInfo(fs, path)
if err != nil {
return "", err
}
return realPath, nil
} }
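A hedged sketch of calling the rewritten SymbolicWalk (an added example, not the commit's own code); the walker signature assumes hugofs.WalkFunc takes (path, FileMetaInfo, err), as suggested by the new walkway code, and "/some/root" is a placeholder path:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewOsFs()
	// Assumed shape: hugofs.WalkFunc is func(path string, info hugofs.FileMetaInfo, err error) error.
	walker := func(path string, info hugofs.FileMetaInfo, err error) error {
		if err != nil {
			return err
		}
		fmt.Println(path)
		return nil
	}
	if err := helpers.SymbolicWalk(fs, "/some/root", walker); err != nil {
		fmt.Println("walk failed:", err)
	}
}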
// LstatIfPossible can be used to call Lstat if possible, else Stat. // LstatIfPossible can be used to call Lstat if possible, else Stat.

View file

@ -29,8 +29,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/assert"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/spf13/viper" "github.com/spf13/viper"
@ -73,18 +71,9 @@ func TestMakePath(t *testing.T) {
} }
func TestMakePathSanitized(t *testing.T) { func TestMakePathSanitized(t *testing.T) {
v := viper.New() v := newTestCfg()
v.Set("contentDir", "content")
v.Set("dataDir", "data")
v.Set("i18nDir", "i18n")
v.Set("layoutDir", "layouts")
v.Set("assetDir", "assets")
v.Set("resourceDir", "resources")
v.Set("publishDir", "public")
v.Set("archetypeDir", "archetypes")
l := langs.NewDefaultLanguage(v) p, _ := NewPathSpec(hugofs.NewMem(v), v)
p, _ := NewPathSpec(hugofs.NewMem(v), l)
tests := []struct { tests := []struct {
input string input string
@ -166,33 +155,6 @@ func TestGetRelativePath(t *testing.T) {
} }
} }
func TestGetRealPath(t *testing.T) {
if runtime.GOOS == "windows" && os.Getenv("CI") == "" {
t.Skip("Skip TestGetRealPath as os.Symlink needs administrator rights on Windows")
}
d1, _ := ioutil.TempDir("", "d1")
defer os.Remove(d1)
fs := afero.NewOsFs()
rp1, err := GetRealPath(fs, d1)
require.NoError(t, err)
assert.Equal(t, d1, rp1)
sym := filepath.Join(os.TempDir(), "d1sym")
err = os.Symlink(d1, sym)
require.NoError(t, err)
defer os.Remove(sym)
rp2, err := GetRealPath(fs, sym)
require.NoError(t, err)
// On OS X, the temp folder is itself a symbolic link (to /private...)
// This has to do for now.
assert.True(t, strings.HasSuffix(rp2, d1))
}
func TestMakePathRelative(t *testing.T) { func TestMakePathRelative(t *testing.T) {
type test struct { type test struct {
inPath, path1, path2, output string inPath, path1, path2, output string
@ -659,6 +621,29 @@ func TestPrettifyPath(t *testing.T) {
} }
func TestExtractAndGroupRootPaths(t *testing.T) {
in := []string{
filepath.FromSlash("/a/b/c/d"),
filepath.FromSlash("/a/b/c/e"),
filepath.FromSlash("/a/b/e/f"),
filepath.FromSlash("/a/b"),
filepath.FromSlash("/a/b/c/b/g"),
filepath.FromSlash("/c/d/e"),
}
inCopy := make([]string, len(in))
copy(inCopy, in)
result := ExtractAndGroupRootPaths(in)
assert := require.New(t)
assert.Equal(filepath.FromSlash("[/a/b/{c,e} /c/d/e]"), fmt.Sprint(result))
// Make sure the original is preserved
assert.Equal(inCopy, in)
}
func TestExtractRootPaths(t *testing.T) { func TestExtractRootPaths(t *testing.T) {
tests := []struct { tests := []struct {
input []string input []string

View file

@ -14,6 +14,7 @@
package helpers package helpers
import ( import (
"path/filepath"
"testing" "testing"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
@ -36,8 +37,12 @@ func TestNewPathSpecFromConfig(t *testing.T) {
v.Set("workingDir", "thework") v.Set("workingDir", "thework")
v.Set("staticDir", "thestatic") v.Set("staticDir", "thestatic")
v.Set("theme", "thetheme") v.Set("theme", "thetheme")
langs.LoadLanguageSettings(v, nil)
p, err := NewPathSpec(hugofs.NewMem(v), l) fs := hugofs.NewMem(v)
fs.Source.MkdirAll(filepath.FromSlash("thework/thethemes/thetheme"), 0777)
p, err := NewPathSpec(fs, l)
require.NoError(t, err) require.NoError(t, err)
require.True(t, p.CanonifyURLs) require.True(t, p.CanonifyURLs)
@ -50,5 +55,5 @@ func TestNewPathSpecFromConfig(t *testing.T) {
require.Equal(t, "http://base.com", p.BaseURL.String()) require.Equal(t, "http://base.com", p.BaseURL.String())
require.Equal(t, "thethemes", p.ThemesDir) require.Equal(t, "thethemes", p.ThemesDir)
require.Equal(t, "thework", p.WorkingDir) require.Equal(t, "thework", p.WorkingDir)
require.Equal(t, []string{"thetheme"}, p.Themes())
} }

View file

@ -5,6 +5,7 @@ import (
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/modules"
) )
func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *PathSpec { func newTestPathSpec(fs *hugofs.Fs, v *viper.Viper) *PathSpec {
@ -42,6 +43,14 @@ func newTestCfg() *viper.Viper {
v.Set("resourceDir", "resources") v.Set("resourceDir", "resources")
v.Set("publishDir", "public") v.Set("publishDir", "public")
v.Set("archetypeDir", "archetypes") v.Set("archetypeDir", "archetypes")
langs.LoadLanguageSettings(v, nil)
mod, err := modules.CreateProjectModule(v)
if err != nil {
panic(err)
}
v.Set("allModules", modules.Modules{mod})
return v return v
} }

39
htesting/test_helpers.go Normal file
View file

@ -0,0 +1,39 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package htesting
import (
"runtime"
"strings"
"github.com/spf13/afero"
)
// CreateTempDir creates a temp dir in the given filesystem and
// returns the directory name and a func that removes it when done.
func CreateTempDir(fs afero.Fs, prefix string) (string, func(), error) {
tempDir, err := afero.TempDir(fs, "", prefix)
if err != nil {
return "", nil, err
}
_, isOsFs := fs.(*afero.OsFs)
if isOsFs && runtime.GOOS == "darwin" && !strings.HasPrefix(tempDir, "/private") {
// To get the entry folder in line with the rest. This is a little bit
// mysterious, but so be it.
tempDir = "/private" + tempDir
}
return tempDir, func() { fs.RemoveAll(tempDir) }, nil
}
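A usage sketch for the new test helper (an added example; the "hugo-example" prefix is arbitrary):

package main

import (
	"fmt"
	"log"

	"github.com/gohugoio/hugo/htesting"
	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewOsFs()
	dir, clean, err := htesting.CreateTempDir(fs, "hugo-example")
	if err != nil {
		log.Fatal(err)
	}
	// clean removes the temp directory when we are done.
	defer clean()

	fmt.Println("working in", dir)
}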

View file

@ -1,100 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package htesting
import (
"html/template"
"time"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/resources/page"
"github.com/spf13/viper"
)
type testSite struct {
h hugo.Info
l *langs.Language
}
func (t testSite) Hugo() hugo.Info {
return t.h
}
func (t testSite) ServerPort() int {
return 1313
}
func (testSite) LastChange() (t time.Time) {
return
}
func (t testSite) Title() string {
return "foo"
}
func (t testSite) Sites() page.Sites {
return nil
}
func (t testSite) IsServer() bool {
return false
}
func (t testSite) Language() *langs.Language {
return t.l
}
func (t testSite) Pages() page.Pages {
return nil
}
func (t testSite) RegularPages() page.Pages {
return nil
}
func (t testSite) Menus() navigation.Menus {
return nil
}
func (t testSite) Taxonomies() interface{} {
return nil
}
func (t testSite) BaseURL() template.URL {
return ""
}
func (t testSite) Params() map[string]interface{} {
return nil
}
func (t testSite) Data() map[string]interface{} {
return nil
}
// NewTestHugoSite creates a new minimal test site.
func NewTestHugoSite() page.Site {
return testSite{
h: hugo.NewInfo(hugo.EnvironmentProduction),
l: langs.NewLanguage("en", newTestConfig()),
}
}
func newTestConfig() *viper.Viper {
v := viper.New()
v.Set("contentDir", "content")
return v
}

View file

@ -1,91 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"os"
"github.com/spf13/afero"
)
// RealFilenameInfo is a thin wrapper around os.FileInfo adding the real filename.
type RealFilenameInfo interface {
os.FileInfo
// This is the real filename to the file in the underlying filesystem.
RealFilename() string
}
type realFilenameInfo struct {
os.FileInfo
realFilename string
}
func (f *realFilenameInfo) RealFilename() string {
return f.realFilename
}
// NewBasePathRealFilenameFs returns a new BasePathRealFilenameFs instance
// using base.
func NewBasePathRealFilenameFs(base *afero.BasePathFs) *BasePathRealFilenameFs {
return &BasePathRealFilenameFs{BasePathFs: base}
}
// BasePathRealFilenameFs is a thin wrapper around afero.BasePathFs that
// provides the real filename in Stat and LstatIfPossible.
type BasePathRealFilenameFs struct {
*afero.BasePathFs
}
// Stat returns the os.FileInfo structure describing a given file. If there is
// an error, it will be of type *os.PathError.
func (b *BasePathRealFilenameFs) Stat(name string) (os.FileInfo, error) {
fi, err := b.BasePathFs.Stat(name)
if err != nil {
return nil, err
}
if _, ok := fi.(RealFilenameInfo); ok {
return fi, nil
}
filename, err := b.RealPath(name)
if err != nil {
return nil, &os.PathError{Op: "stat", Path: name, Err: err}
}
return &realFilenameInfo{FileInfo: fi, realFilename: filename}, nil
}
// LstatIfPossible returns the os.FileInfo structure describing a given file.
// It attempts to use Lstat if supported or defers to the os. In addition to
// the FileInfo, a boolean is returned telling whether Lstat was called.
func (b *BasePathRealFilenameFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, ok, err := b.BasePathFs.LstatIfPossible(name)
if err != nil {
return nil, false, err
}
if _, ok := fi.(RealFilenameInfo); ok {
return fi, ok, nil
}
filename, err := b.RealPath(name)
if err != nil {
return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err}
}
return &realFilenameInfo{FileInfo: fi, realFilename: filename}, ok, nil
}

205
hugofs/decorators.go Normal file
View file

@ -0,0 +1,205 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"os"
"path/filepath"
"strings"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
func decorateDirs(fs afero.Fs, meta FileMeta) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
if !fi.IsDir() {
// Leave regular files as they are.
return fi, nil
}
return decorateFileInfo(fi, fs, nil, "", "", meta), nil
}
ffs.decorate = decorator
return ffs
}
func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
path := createPath(name)
return decorateFileInfo(fi, fs, nil, "", path, nil), nil
}
ffs.decorate = decorator
return ffs
}
// DecorateBasePathFs adds Path info to files and directories in the
// provided BasePathFs, using its base path as the root.
func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs {
basePath, _ := base.RealPath("")
if !strings.HasSuffix(basePath, filepathSeparator) {
basePath += filepathSeparator
}
ffs := &baseFileDecoratorFs{Fs: base}
decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
path := strings.TrimPrefix(name, basePath)
return decorateFileInfo(fi, base, nil, "", path, nil), nil
}
ffs.decorate = decorator
return ffs
}
// NewBaseFileDecorator decorates the given Fs to provide the real filename
// and an Opener func.
func NewBaseFileDecorator(fs afero.Fs) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {
// Store away the original in case it's a symlink.
meta := FileMeta{metaKeyName: fi.Name()}
isSymlink := isSymlink(fi)
if isSymlink {
meta[metaKeyOriginalFilename] = filename
link, err := filepath.EvalSymlinks(filename)
if err != nil {
return nil, err
}
fi, err = fs.Stat(link)
if err != nil {
return nil, err
}
filename = link
meta[metaKeyIsSymlink] = true
}
opener := func() (afero.File, error) {
return ffs.open(filename)
}
return decorateFileInfo(fi, ffs, opener, filename, "", meta), nil
}
ffs.decorate = decorator
return ffs
}
type baseFileDecoratorFs struct {
afero.Fs
decorate func(fi os.FileInfo, filename string) (os.FileInfo, error)
}
func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) {
fi, err := fs.Fs.Stat(name)
if err != nil {
return nil, err
}
return fs.decorate(fi, name)
}
func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
var (
fi os.FileInfo
err error
ok bool
)
if lstater, isLstater := fs.Fs.(afero.Lstater); isLstater {
fi, ok, err = lstater.LstatIfPossible(name)
} else {
fi, err = fs.Fs.Stat(name)
}
if err != nil {
return nil, false, err
}
fi, err = fs.decorate(fi, name)
return fi, ok, err
}
func (fs *baseFileDecoratorFs) Open(name string) (afero.File, error) {
return fs.open(name)
}
func (fs *baseFileDecoratorFs) open(name string) (afero.File, error) {
f, err := fs.Fs.Open(name)
if err != nil {
return nil, err
}
return &baseFileDecoratorFile{File: f, fs: fs}, nil
}
type baseFileDecoratorFile struct {
afero.File
fs *baseFileDecoratorFs
}
func (l *baseFileDecoratorFile) Readdir(c int) (ofi []os.FileInfo, err error) {
dirnames, err := l.File.Readdirnames(c)
if err != nil {
return nil, err
}
fisp := make([]os.FileInfo, 0, len(dirnames))
for _, dirname := range dirnames {
filename := dirname
if l.Name() != "" && l.Name() != filepathSeparator {
filename = filepath.Join(l.Name(), dirname)
}
// We need to resolve any symlink info.
fi, _, err := lstatIfPossible(l.fs.Fs, filename)
if err != nil {
if os.IsNotExist(err) {
continue
}
return nil, err
}
fi, err = l.fs.decorate(fi, filename)
if err != nil {
return nil, errors.Wrap(err, "decorate")
}
fisp = append(fisp, fi)
}
return fisp, err
}
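
A minimal usage sketch of the decorator above, assuming an in-memory afero filesystem and an illustrative path (neither is part of this commit): Stat on the wrapped fs yields a FileMetaInfo whose meta carries the filename and an opener.

package main

import (
	"fmt"
	"log"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	// Wrap a filesystem so every FileInfo it returns is a FileMetaInfo.
	fs := hugofs.NewBaseFileDecorator(afero.NewMemMapFs())
	afero.WriteFile(fs, "content/post.md", []byte("hello"), 0644)

	fi, err := fs.Stat("content/post.md")
	if err != nil {
		log.Fatal(err)
	}

	meta := fi.(hugofs.FileMetaInfo).Meta()
	fmt.Println(meta.Filename(), meta.IsSymlink()) // content/post.md false

	// The meta also carries an opener for the real file.
	f, _ := meta.Open()
	defer f.Close()
	b, _ := afero.ReadAll(f)
	fmt.Println(string(b)) // hello
}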

297
hugofs/fileinfo.go Normal file
View file

@ -0,0 +1,297 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package hugofs provides the file systems used by Hugo.
package hugofs
import (
"os"
"path/filepath"
"runtime"
"strings"
"time"
"github.com/gohugoio/hugo/hugofs/files"
"golang.org/x/text/unicode/norm"
"github.com/pkg/errors"
"github.com/spf13/cast"
"github.com/gohugoio/hugo/common/hreflect"
"github.com/spf13/afero"
)
const (
metaKeyFilename = "filename"
metaKeyOriginalFilename = "originalFilename"
metaKeyName = "name"
metaKeyPath = "path"
metaKeyPathWalk = "pathWalk"
metaKeyLang = "lang"
metaKeyWeight = "weight"
metaKeyOrdinal = "ordinal"
metaKeyFs = "fs"
metaKeyOpener = "opener"
metaKeyIsOrdered = "isOrdered"
metaKeyIsSymlink = "isSymlink"
metaKeySkipDir = "skipDir"
metaKeyClassifier = "classifier"
metaKeyTranslationBaseName = "translationBaseName"
metaKeyTranslationBaseNameWithExt = "translationBaseNameWithExt"
metaKeyTranslations = "translations"
metaKeyDecoraterPath = "decoratorPath"
)
type FileMeta map[string]interface{}
func (f FileMeta) GetInt(key string) int {
return cast.ToInt(f[key])
}
func (f FileMeta) GetString(key string) string {
return cast.ToString(f[key])
}
func (f FileMeta) GetBool(key string) bool {
return cast.ToBool(f[key])
}
func (f FileMeta) Filename() string {
return f.stringV(metaKeyFilename)
}
func (f FileMeta) OriginalFilename() string {
return f.stringV(metaKeyOriginalFilename)
}
func (f FileMeta) SkipDir() bool {
return f.GetBool(metaKeySkipDir)
}
func (f FileMeta) TranslationBaseName() string {
return f.stringV(metaKeyTranslationBaseName)
}
func (f FileMeta) TranslationBaseNameWithExt() string {
return f.stringV(metaKeyTranslationBaseNameWithExt)
}
func (f FileMeta) Translations() []string {
return cast.ToStringSlice(f[metaKeyTranslations])
}
func (f FileMeta) Name() string {
return f.stringV(metaKeyName)
}
func (f FileMeta) Classifier() string {
c := f.stringV(metaKeyClassifier)
if c != "" {
return c
}
return files.ContentClassFile // For sorting
}
func (f FileMeta) Lang() string {
return f.stringV(metaKeyLang)
}
func (f FileMeta) Path() string {
return f.stringV(metaKeyPath)
}
func (f FileMeta) Weight() int {
return f.GetInt(metaKeyWeight)
}
func (f FileMeta) Ordinal() int {
return f.GetInt(metaKeyOrdinal)
}
func (f FileMeta) IsOrdered() bool {
return f.GetBool(metaKeyIsOrdered)
}
// IsSymlink returns whether this comes from a symlinked file or directory.
func (f FileMeta) IsSymlink() bool {
return f.GetBool(metaKeyIsSymlink)
}
func (f FileMeta) Watch() bool {
if v, found := f["watch"]; found {
return v.(bool)
}
return false
}
func (f FileMeta) Fs() afero.Fs {
if v, found := f[metaKeyFs]; found {
return v.(afero.Fs)
}
return nil
}
func (f FileMeta) GetOpener() func() (afero.File, error) {
o, found := f[metaKeyOpener]
if !found {
return nil
}
return o.(func() (afero.File, error))
}
func (f FileMeta) Open() (afero.File, error) {
v, found := f[metaKeyOpener]
if !found {
return nil, errors.New("file opener not found")
}
return v.(func() (afero.File, error))()
}
func (f FileMeta) stringV(key string) string {
if v, found := f[key]; found {
return v.(string)
}
return ""
}
func (f FileMeta) setIfNotZero(key string, val interface{}) {
if !hreflect.IsTruthful(val) {
return
}
f[key] = val
}
type FileMetaInfo interface {
os.FileInfo
Meta() FileMeta
}
type fileInfoMeta struct {
os.FileInfo
m FileMeta
}
func (fi *fileInfoMeta) Meta() FileMeta {
return fi.m
}
func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo {
if fim, ok := fi.(FileMetaInfo); ok {
mergeFileMeta(fim.Meta(), m)
}
return &fileInfoMeta{FileInfo: fi, m: m}
}
// Merge metadata, last entry wins.
func mergeFileMeta(from, to FileMeta) {
if from == nil {
return
}
for k, v := range from {
if _, found := to[k]; !found {
to[k] = v
}
}
}
type dirNameOnlyFileInfo struct {
name string
}
func (fi *dirNameOnlyFileInfo) Name() string {
return fi.name
}
func (fi *dirNameOnlyFileInfo) Size() int64 {
panic("not implemented")
}
func (fi *dirNameOnlyFileInfo) Mode() os.FileMode {
return os.ModeDir
}
func (fi *dirNameOnlyFileInfo) ModTime() time.Time {
return time.Time{}
}
func (fi *dirNameOnlyFileInfo) IsDir() bool {
return true
}
func (fi *dirNameOnlyFileInfo) Sys() interface{} {
return nil
}
func newDirNameOnlyFileInfo(name string, isOrdered bool, fileOpener func() (afero.File, error)) FileMetaInfo {
name = normalizeFilename(name)
_, base := filepath.Split(name)
return NewFileMetaInfo(&dirNameOnlyFileInfo{name: base}, FileMeta{
metaKeyFilename: name,
metaKeyIsOrdered: isOrdered,
metaKeyOpener: fileOpener})
}
func decorateFileInfo(
fi os.FileInfo,
fs afero.Fs, opener func() (afero.File, error),
filename, filepath string, inMeta FileMeta) FileMetaInfo {
var meta FileMeta
var fim FileMetaInfo
filepath = strings.TrimPrefix(filepath, filepathSeparator)
var ok bool
if fim, ok = fi.(FileMetaInfo); ok {
meta = fim.Meta()
} else {
meta = make(FileMeta)
fim = NewFileMetaInfo(fi, meta)
}
meta.setIfNotZero(metaKeyOpener, opener)
meta.setIfNotZero(metaKeyFs, fs)
meta.setIfNotZero(metaKeyPath, normalizeFilename(filepath))
meta.setIfNotZero(metaKeyFilename, normalizeFilename(filename))
mergeFileMeta(inMeta, meta)
return fim
}
func isSymlink(fi os.FileInfo) bool {
return fi != nil && fi.Mode()&os.ModeSymlink == os.ModeSymlink
}
func fileInfosToFileMetaInfos(fis []os.FileInfo) []FileMetaInfo {
fims := make([]FileMetaInfo, len(fis))
for i, v := range fis {
fims[i] = v.(FileMetaInfo)
}
return fims
}
func normalizeFilename(filename string) string {
if filename == "" {
return ""
}
if runtime.GOOS == "darwin" {
// When a file system is HFS+, its filepath is in NFD form.
return norm.NFC.String(filename)
}
return filename
}
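
A short sketch of FileMeta in use (the in-memory file and values are illustrative): the string keys match the metaKey* constants above, and the typed accessors read them back.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	mm := afero.NewMemMapFs()
	afero.WriteFile(mm, "post.sv.md", []byte("hej"), 0644)
	fi, _ := mm.Stat("post.sv.md")

	// Attach language metadata to a plain os.FileInfo.
	fim := hugofs.NewFileMetaInfo(fi, hugofs.FileMeta{
		"lang":   "sv",
		"weight": 2,
	})

	fmt.Println(fim.Name())          // post.sv.md
	fmt.Println(fim.Meta().Lang())   // sv
	fmt.Println(fim.Meta().Weight()) // 2
}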

121
hugofs/files/classifier.go Normal file
View file

@ -0,0 +1,121 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package files
import (
"os"
"path/filepath"
"sort"
"strings"
)
var (
// This should be the only list of valid extensions for content files.
contentFileExtensions = []string{
"html", "htm",
"mdown", "markdown", "md",
"asciidoc", "adoc", "ad",
"rest", "rst",
"mmark",
"org",
"pandoc", "pdc"}
contentFileExtensionsSet map[string]bool
)
func init() {
contentFileExtensionsSet = make(map[string]bool)
for _, ext := range contentFileExtensions {
contentFileExtensionsSet[ext] = true
}
}
func IsContentFile(filename string) bool {
return contentFileExtensionsSet[strings.TrimPrefix(filepath.Ext(filename), ".")]
}
func IsContentExt(ext string) bool {
return contentFileExtensionsSet[ext]
}
const (
ContentClassLeaf = "leaf"
ContentClassBranch = "branch"
ContentClassFile = "zfile" // Sort below
ContentClassContent = "zcontent"
)
func ClassifyContentFile(filename string) string {
if !IsContentFile(filename) {
return ContentClassFile
}
if strings.HasPrefix(filename, "_index.") {
return ContentClassBranch
}
if strings.HasPrefix(filename, "index.") {
return ContentClassLeaf
}
return ContentClassContent
}
const (
ComponentFolderArchetypes = "archetypes"
ComponentFolderStatic = "static"
ComponentFolderLayouts = "layouts"
ComponentFolderContent = "content"
ComponentFolderData = "data"
ComponentFolderAssets = "assets"
ComponentFolderI18n = "i18n"
FolderResources = "resources"
)
var (
ComponentFolders = []string{
ComponentFolderArchetypes,
ComponentFolderStatic,
ComponentFolderLayouts,
ComponentFolderContent,
ComponentFolderData,
ComponentFolderAssets,
ComponentFolderI18n,
}
componentFoldersSet = make(map[string]bool)
)
func init() {
sort.Strings(ComponentFolders)
for _, f := range ComponentFolders {
componentFoldersSet[f] = true
}
}
// ResolveComponentFolder returns "content" from "content/blog/foo.md" etc.
func ResolveComponentFolder(filename string) string {
filename = strings.TrimPrefix(filename, string(os.PathSeparator))
for _, cf := range ComponentFolders {
if strings.HasPrefix(filename, cf) {
return cf
}
}
return ""
}
func IsComponentFolder(name string) bool {
return componentFoldersSet[name]
}
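
For quick reference, a sketch of what the classifier returns (filenames are illustrative; expected output noted in the comments):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/hugofs/files"
)

func main() {
	fmt.Println(files.ClassifyContentFile("_index.md")) // branch
	fmt.Println(files.ClassifyContentFile("index.md"))  // leaf
	fmt.Println(files.ClassifyContentFile("post.md"))   // zcontent
	fmt.Println(files.ClassifyContentFile("logo.png"))  // zfile

	fmt.Println(files.ResolveComponentFolder("content/blog/foo.md"))  // content
	fmt.Println(files.IsContentExt("md"), files.IsContentExt("json")) // true false
}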

View file

@ -0,0 +1,49 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package files
import (
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
func TestIsContentFile(t *testing.T) {
assert := require.New(t)
assert.True(IsContentFile(filepath.FromSlash("my/file.md")))
assert.True(IsContentFile(filepath.FromSlash("my/file.ad")))
assert.False(IsContentFile(filepath.FromSlash("textfile.txt")))
assert.True(IsContentExt("md"))
assert.False(IsContentExt("json"))
}
func TestComponentFolders(t *testing.T) {
assert := require.New(t)
// It's important that these are absolutely right and not changed.
assert.Equal(len(ComponentFolders), len(componentFoldersSet))
assert.True(IsComponentFolder("archetypes"))
assert.True(IsComponentFolder("layouts"))
assert.True(IsComponentFolder("data"))
assert.True(IsComponentFolder("i18n"))
assert.True(IsComponentFolder("assets"))
assert.False(IsComponentFolder("resources"))
assert.True(IsComponentFolder("static"))
assert.True(IsComponentFolder("content"))
assert.False(IsComponentFolder("foo"))
assert.False(IsComponentFolder(""))
}

341
hugofs/filter_fs.go Normal file
View file

@ -0,0 +1,341 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"io"
"os"
"path/filepath"
"sort"
"strings"
"syscall"
"time"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/spf13/afero"
)
var (
_ afero.Fs = (*FilterFs)(nil)
_ afero.Lstater = (*FilterFs)(nil)
_ afero.File = (*filterDir)(nil)
)
func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis {
if fi.IsDir() {
filename := filepath.Join(name, fi.Name())
fis[i] = decorateFileInfo(fi, fs, fs.getOpener(filename), "", "", nil)
continue
}
meta := fi.(FileMetaInfo).Meta()
lang := meta.Lang()
fileLang, translationBaseName, translationBaseNameWithExt := langInfoFrom(langs, fi.Name())
weight := 0
if fileLang != "" {
weight = 1
if fileLang == lang {
// Give priority to myfile.sv.txt inside the sv filesystem.
weight++
}
lang = fileLang
}
fim := NewFileMetaInfo(fi, FileMeta{
metaKeyLang: lang,
metaKeyWeight: weight,
metaKeyOrdinal: langs[lang],
metaKeyTranslationBaseName: translationBaseName,
metaKeyTranslationBaseNameWithExt: translationBaseNameWithExt,
metaKeyClassifier: files.ClassifyContentFile(fi.Name()),
})
fis[i] = fim
}
}
all := func(fis []os.FileInfo) {
// Maps translation base name to a list of language codes.
translations := make(map[string][]string)
trackTranslation := func(meta FileMeta) {
name := meta.TranslationBaseNameWithExt()
translations[name] = append(translations[name], meta.Lang())
}
for _, fi := range fis {
if fi.IsDir() {
continue
}
meta := fi.(FileMetaInfo).Meta()
trackTranslation(meta)
}
for _, fi := range fis {
fim := fi.(FileMetaInfo)
langs := translations[fim.Meta().TranslationBaseNameWithExt()]
if len(langs) > 0 {
fim.Meta()["translations"] = sortAndremoveStringDuplicates(langs)
}
}
}
return &FilterFs{
fs: fs,
applyPerSource: applyMeta,
applyAll: all,
}, nil
}
func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis {
if fi.IsDir() {
fis[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename()), "", "", nil)
}
}
}
ffs := &FilterFs{
fs: fs,
applyPerSource: applyMeta,
}
return ffs, nil
}
// FilterFs is an ordered composite filesystem.
type FilterFs struct {
fs afero.Fs
applyPerSource func(fs *FilterFs, name string, fis []os.FileInfo)
applyAll func(fis []os.FileInfo)
}
func (fs *FilterFs) Chmod(n string, m os.FileMode) error {
return syscall.EPERM
}
func (fs *FilterFs) Chtimes(n string, a, m time.Time) error {
return syscall.EPERM
}
func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, b, err := lstatIfPossible(fs.fs, name)
if err != nil {
return nil, false, err
}
if fi.IsDir() {
return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil
}
fs.applyFilters(name, -1, fi)
return fi, b, nil
}
func (fs *FilterFs) Mkdir(n string, p os.FileMode) error {
return syscall.EPERM
}
func (fs *FilterFs) MkdirAll(n string, p os.FileMode) error {
return syscall.EPERM
}
func (fs *FilterFs) Name() string {
return "WeightedFileSystem"
}
func (fs *FilterFs) Open(name string) (afero.File, error) {
f, err := fs.fs.Open(name)
if err != nil {
return nil, err
}
return &filterDir{
File: f,
ffs: fs,
}, nil
}
func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
panic("not implemented")
}
func (fs *FilterFs) ReadDir(name string) ([]os.FileInfo, error) {
panic("not implemented")
}
func (fs *FilterFs) Remove(n string) error {
return syscall.EPERM
}
func (fs *FilterFs) RemoveAll(p string) error {
return syscall.EPERM
}
func (fs *FilterFs) Rename(o, n string) error {
return syscall.EPERM
}
func (fs *FilterFs) Stat(name string) (os.FileInfo, error) {
fi, _, err := fs.LstatIfPossible(name)
return fi, err
}
func (fs *FilterFs) Create(n string) (afero.File, error) {
return nil, syscall.EPERM
}
func (fs *FilterFs) getOpener(name string) func() (afero.File, error) {
return func() (afero.File, error) {
return fs.Open(name)
}
}
func (fs *FilterFs) applyFilters(name string, count int, fis ...os.FileInfo) ([]os.FileInfo, error) {
if fs.applyPerSource != nil {
fs.applyPerSource(fs, name, fis)
}
seen := make(map[string]bool)
var duplicates []int
for i, dir := range fis {
if !dir.IsDir() {
continue
}
if seen[dir.Name()] {
duplicates = append(duplicates, i)
} else {
seen[dir.Name()] = true
}
}
// Remove duplicate directories, keep first.
if len(duplicates) > 0 {
for i := len(duplicates) - 1; i >= 0; i-- {
idx := duplicates[i]
fis = append(fis[:idx], fis[idx+1:]...)
}
}
if fs.applyAll != nil {
fs.applyAll(fis)
}
if count > 0 && len(fis) >= count {
return fis[:count], nil
}
return fis, nil
}
type filterDir struct {
afero.File
ffs *FilterFs
}
func (f *filterDir) Readdir(count int) ([]os.FileInfo, error) {
fis, err := f.File.Readdir(-1)
if err != nil {
return nil, err
}
return f.ffs.applyFilters(f.Name(), count, fis...)
}
func (f *filterDir) Readdirnames(count int) ([]string, error) {
dirsi, err := f.Readdir(count)
if err != nil {
return nil, err
}
dirs := make([]string, len(dirsi))
for i, d := range dirsi {
dirs[i] = d.Name()
}
return dirs, nil
}
// Try to extract the language from the given filename.
// Any valid language identifier in the name will win over the
// language set on the file system, e.g. "mypost.en.md".
func langInfoFrom(languages map[string]int, name string) (string, string, string) {
var lang string
baseName := filepath.Base(name)
ext := filepath.Ext(baseName)
translationBaseName := baseName
if ext != "" {
translationBaseName = strings.TrimSuffix(translationBaseName, ext)
}
fileLangExt := filepath.Ext(translationBaseName)
fileLang := strings.TrimPrefix(fileLangExt, ".")
if _, found := languages[fileLang]; found {
lang = fileLang
translationBaseName = strings.TrimSuffix(translationBaseName, fileLangExt)
}
translationBaseNameWithExt := translationBaseName
if ext != "" {
translationBaseNameWithExt += ext
}
return lang, translationBaseName, translationBaseNameWithExt
}
func printFs(fs afero.Fs, path string, w io.Writer) {
if fs == nil {
return
}
afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
fmt.Println("p:::", path)
return nil
})
}
func sortAndremoveStringDuplicates(s []string) []string {
ss := sort.StringSlice(s)
ss.Sort()
i := 0
for j := 1; j < len(s); j++ {
if !ss.Less(i, j) {
continue
}
i++
s[i] = s[j]
}
return s[:i+1]
}

48
hugofs/filter_fs_test.go Normal file
View file

@ -0,0 +1,48 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
)
func TestLangInfoFrom(t *testing.T) {
langs := map[string]int{
"sv": 10,
"en": 20,
}
assert := require.New(t)
tests := []struct {
input string
expected []string
}{
{"page.sv.md", []string{"sv", "page", "page.md"}},
{"page.en.md", []string{"en", "page", "page.md"}},
{"page.no.md", []string{"", "page.no", "page.no.md"}},
{filepath.FromSlash("tc-lib-color/class-Com.Tecnick.Color.Css"), []string{"", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}},
{filepath.FromSlash("class-Com.Tecnick.Color.sv.Css"), []string{"sv", "class-Com.Tecnick.Color", "class-Com.Tecnick.Color.Css"}},
}
for _, test := range tests {
v1, v2, v3 := langInfoFrom(langs, test.input)
assert.Equal(test.expected, []string{v1, v2, v3})
}
}

View file

@ -21,8 +21,10 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
// Os points to an Os Afero file system. var (
var Os = &afero.OsFs{} // Os points to the (real) Os filesystem.
Os = &afero.OsFs{}
)
// Fs abstracts the file system to separate source and destination file systems // Fs abstracts the file system to separate source and destination file systems
// and allows both to be mocked for testing. // and allows both to be mocked for testing.

View file

@ -14,6 +14,9 @@
package hugofs package hugofs
import ( import (
"os"
"path"
"github.com/spf13/afero" "github.com/spf13/afero"
) )
@ -30,8 +33,8 @@ type languageCompositeFs struct {
// This is a hybrid filesystem. To get a specific file in Open, Stat etc., use the full filename // This is a hybrid filesystem. To get a specific file in Open, Stat etc., use the full filename
// to the target filesystem. This information is available in Readdir, Stat etc. via the // to the target filesystem. This information is available in Readdir, Stat etc. via the
// special LanguageFileInfo FileInfo implementation. // special LanguageFileInfo FileInfo implementation.
func NewLanguageCompositeFs(base afero.Fs, overlay *LanguageFs) afero.Fs { func NewLanguageCompositeFs(base, overlay afero.Fs) afero.Fs {
return afero.NewReadOnlyFs(&languageCompositeFs{afero.NewCopyOnWriteFs(base, overlay).(*afero.CopyOnWriteFs)}) return &languageCompositeFs{afero.NewCopyOnWriteFs(base, overlay).(*afero.CopyOnWriteFs)}
} }
// Open takes the full path to the file in the target filesystem. If it is a directory, it gets merged // Open takes the full path to the file in the target filesystem. If it is a directory, it gets merged
@ -49,3 +52,36 @@ func (fs *languageCompositeFs) Open(name string) (afero.File, error) {
} }
return f, nil return f, nil
} }
// LanguageDirsMerger implements the afero.DirsMerger interface, which is used
// to merge two directories.
var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
m := make(map[string]FileMetaInfo)
getKey := func(fim FileMetaInfo) string {
return path.Join(fim.Meta().Lang(), fim.Name())
}
for _, fi := range lofi {
fim := fi.(FileMetaInfo)
m[getKey(fim)] = fim
}
for _, fi := range bofi {
fim := fi.(FileMetaInfo)
key := getKey(fim)
_, found := m[key]
if !found {
m[key] = fim
}
}
merged := make([]os.FileInfo, len(m))
i := 0
for _, v := range m {
merged[i] = v
i++
}
return merged, nil
}
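
A small sketch of how the merger behaves (the in-memory file and metadata are illustrative): entries are keyed on lang plus name, so the same filename survives once per language while duplicates within a language collapse.

package main

import (
	"fmt"
	"os"

	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	mm := afero.NewMemMapFs()
	afero.WriteFile(mm, "f1.txt", []byte("x"), 0644)
	fi, _ := mm.Stat("f1.txt")

	sv := hugofs.NewFileMetaInfo(fi, hugofs.FileMeta{"lang": "sv"})
	en := hugofs.NewFileMetaInfo(fi, hugofs.FileMeta{"lang": "en"})

	// "sv/f1.txt" and "en/f1.txt" are distinct keys; the second "en" entry is dropped.
	merged, _ := hugofs.LanguageDirsMerger([]os.FileInfo{sv}, []os.FileInfo{en, en})
	fmt.Println(len(merged)) // 2
}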

View file

@ -1,107 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"path/filepath"
"strings"
"testing"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func TestCompositeLanguagFsTest(t *testing.T) {
assert := require.New(t)
languages := map[string]bool{
"sv": true,
"en": true,
"nn": true,
}
msv := afero.NewMemMapFs()
baseSv := "/content/sv"
lfssv := NewLanguageFs("sv", languages, afero.NewBasePathFs(msv, baseSv))
mnn := afero.NewMemMapFs()
baseNn := "/content/nn"
lfsnn := NewLanguageFs("nn", languages, afero.NewBasePathFs(mnn, baseNn))
men := afero.NewMemMapFs()
baseEn := "/content/en"
lfsen := NewLanguageFs("en", languages, afero.NewBasePathFs(men, baseEn))
// The order will be sv, en, nn
composite := NewLanguageCompositeFs(lfsnn, lfsen)
composite = NewLanguageCompositeFs(composite, lfssv)
afero.WriteFile(msv, filepath.Join(baseSv, "f1.txt"), []byte("some sv"), 0755)
afero.WriteFile(mnn, filepath.Join(baseNn, "f1.txt"), []byte("some nn"), 0755)
afero.WriteFile(men, filepath.Join(baseEn, "f1.txt"), []byte("some en"), 0755)
// Swedish is the top layer.
assertLangFile(t, composite, "f1.txt", "sv")
afero.WriteFile(msv, filepath.Join(baseSv, "f2.en.txt"), []byte("some sv"), 0755)
afero.WriteFile(mnn, filepath.Join(baseNn, "f2.en.txt"), []byte("some nn"), 0755)
afero.WriteFile(men, filepath.Join(baseEn, "f2.en.txt"), []byte("some en"), 0755)
// English is in the middle, but the most specific language match wins.
//assertLangFile(t, composite, "f2.en.txt", "en")
// Fetch some specific language versions
assertLangFile(t, composite, filepath.Join(baseNn, "f2.en.txt"), "nn")
assertLangFile(t, composite, filepath.Join(baseEn, "f2.en.txt"), "en")
assertLangFile(t, composite, filepath.Join(baseSv, "f2.en.txt"), "sv")
// Read the root
f, err := composite.Open("/")
assert.NoError(err)
defer f.Close()
files, err := f.Readdir(-1)
assert.NoError(err)
assert.Equal(4, len(files))
expected := map[string]bool{
filepath.FromSlash("/content/en/f1.txt"): true,
filepath.FromSlash("/content/nn/f1.txt"): true,
filepath.FromSlash("/content/sv/f1.txt"): true,
filepath.FromSlash("/content/en/f2.en.txt"): true,
}
got := make(map[string]bool)
for _, fi := range files {
fil, ok := fi.(*LanguageFileInfo)
assert.True(ok)
got[fil.Filename()] = true
}
assert.Equal(expected, got)
}
func assertLangFile(t testing.TB, fs afero.Fs, filename, match string) {
f, err := fs.Open(filename)
if err != nil {
t.Fatal(err)
}
defer f.Close()
b, err := afero.ReadAll(f)
if err != nil {
t.Fatal(err)
}
s := string(b)
if !strings.Contains(s, match) {
t.Fatalf("got %q expected it to contain %q", s, match)
}
}

View file

@ -1,346 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/spf13/afero"
)
const hugoFsMarker = "__hugofs"
var (
_ LanguageAnnouncer = (*LanguageFileInfo)(nil)
_ FilePather = (*LanguageFileInfo)(nil)
_ afero.Lstater = (*LanguageFs)(nil)
)
// LanguageAnnouncer is aware of its language.
type LanguageAnnouncer interface {
Lang() string
TranslationBaseName() string
}
// FilePather is aware of its file's location.
type FilePather interface {
// Filename gets the full path and filename to the file.
Filename() string
// Path gets the content relative path including file name and extension.
// The directory is relative to the content root where "content" is a broad term.
Path() string
// RealName is FileInfo.Name in its original form.
RealName() string
BaseDir() string
}
// LanguageDirsMerger implements the afero.DirsMerger interface, which is used
// to merge two directories.
var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
m := make(map[string]*LanguageFileInfo)
for _, fi := range lofi {
fil, ok := fi.(*LanguageFileInfo)
if !ok {
return nil, fmt.Errorf("received %T, expected *LanguageFileInfo", fi)
}
m[fil.virtualName] = fil
}
for _, fi := range bofi {
fil, ok := fi.(*LanguageFileInfo)
if !ok {
return nil, fmt.Errorf("received %T, expected *LanguageFileInfo", fi)
}
existing, found := m[fil.virtualName]
if !found || existing.weight < fil.weight {
m[fil.virtualName] = fil
}
}
merged := make([]os.FileInfo, len(m))
i := 0
for _, v := range m {
merged[i] = v
i++
}
return merged, nil
}
// LanguageFileInfo is a super-set of os.FileInfo with additional information
// about the file in relation to its Hugo language.
type LanguageFileInfo struct {
os.FileInfo
lang string
baseDir string
realFilename string
relFilename string
name string
realName string
virtualName string
translationBaseName string
// We add some weight to the files in their own language's content directory.
weight int
}
// Filename returns a file's real filename including the base (ie.
// "/my/base/sect/page.md").
func (fi *LanguageFileInfo) Filename() string {
return fi.realFilename
}
// Path returns a file's filename relative to the base (ie. "sect/page.md").
func (fi *LanguageFileInfo) Path() string {
return fi.relFilename
}
// RealName returns a file's real base name (ie. "page.md").
func (fi *LanguageFileInfo) RealName() string {
return fi.realName
}
// BaseDir returns a file's base directory (ie. "/my/base").
func (fi *LanguageFileInfo) BaseDir() string {
return fi.baseDir
}
// Lang returns a file's language (ie. "sv").
func (fi *LanguageFileInfo) Lang() string {
return fi.lang
}
// TranslationBaseName returns the base filename without any extension or language
// identifiers (ie. "page").
func (fi *LanguageFileInfo) TranslationBaseName() string {
return fi.translationBaseName
}
// Name is the name of the file within this filesystem without any path info.
// It will be marked with language information so we can identify it as ours
// (ie. "__hugofs_sv_page.md").
func (fi *LanguageFileInfo) Name() string {
return fi.name
}
type languageFile struct {
afero.File
fs *LanguageFs
}
// Readdir creates FileInfo entries by calling Lstat if possible.
func (l *languageFile) Readdir(c int) (ofi []os.FileInfo, err error) {
names, err := l.File.Readdirnames(c)
if err != nil {
return nil, err
}
fis := make([]os.FileInfo, len(names))
for i, name := range names {
fi, _, err := l.fs.LstatIfPossible(filepath.Join(l.Name(), name))
if err != nil {
return nil, err
}
fis[i] = fi
}
return fis, err
}
// LanguageFs represents a language filesystem.
type LanguageFs struct {
// This Fs is usually created with a BasePathFs
basePath string
lang string
nameMarker string
languages map[string]bool
afero.Fs
}
// NewLanguageFs creates a new language filesystem.
func NewLanguageFs(lang string, languages map[string]bool, fs afero.Fs) *LanguageFs {
if lang == "" {
panic("no lang set for the language fs")
}
var basePath string
if bfs, ok := fs.(*afero.BasePathFs); ok {
basePath, _ = bfs.RealPath("")
}
marker := hugoFsMarker + "_" + lang + "_"
return &LanguageFs{lang: lang, languages: languages, basePath: basePath, Fs: fs, nameMarker: marker}
}
// Lang returns a language filesystem's language (ie. "sv").
func (fs *LanguageFs) Lang() string {
return fs.lang
}
// Stat returns the os.FileInfo of a given file.
func (fs *LanguageFs) Stat(name string) (os.FileInfo, error) {
name, err := fs.realName(name)
if err != nil {
return nil, err
}
fi, err := fs.Fs.Stat(name)
if err != nil {
return nil, err
}
return fs.newLanguageFileInfo(name, fi)
}
// Open opens the named file for reading.
func (fs *LanguageFs) Open(name string) (afero.File, error) {
name, err := fs.realName(name)
if err != nil {
return nil, err
}
f, err := fs.Fs.Open(name)
if err != nil {
return nil, err
}
return &languageFile{File: f, fs: fs}, nil
}
// LstatIfPossible returns the os.FileInfo structure describing a given file.
// It attempts to use Lstat if supported or defers to the os. In addition to
// the FileInfo, a boolean is returned telling whether Lstat was called.
func (fs *LanguageFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
name, err := fs.realName(name)
if err != nil {
return nil, false, err
}
var fi os.FileInfo
var b bool
if lif, ok := fs.Fs.(afero.Lstater); ok {
fi, b, err = lif.LstatIfPossible(name)
} else {
fi, err = fs.Fs.Stat(name)
}
if err != nil {
return nil, b, err
}
lfi, err := fs.newLanguageFileInfo(name, fi)
return lfi, b, err
}
func (fs *LanguageFs) realPath(name string) (string, error) {
if baseFs, ok := fs.Fs.(*afero.BasePathFs); ok {
return baseFs.RealPath(name)
}
return name, nil
}
func (fs *LanguageFs) realName(name string) (string, error) {
if strings.Contains(name, hugoFsMarker) {
if !strings.Contains(name, fs.nameMarker) {
return "", os.ErrNotExist
}
return strings.Replace(name, fs.nameMarker, "", 1), nil
}
if fs.basePath == "" {
return name, nil
}
return strings.TrimPrefix(name, fs.basePath), nil
}
func (fs *LanguageFs) newLanguageFileInfo(filename string, fi os.FileInfo) (*LanguageFileInfo, error) {
filename = filepath.Clean(filename)
_, name := filepath.Split(filename)
realName := name
virtualName := name
realPath, err := fs.realPath(filename)
if err != nil {
return nil, err
}
lang := fs.Lang()
baseNameNoExt := ""
if !fi.IsDir() {
// Try to extract the language from the file name.
// Any valid language identificator in the name will win over the
// language set on the file system, e.g. "mypost.en.md".
baseName := filepath.Base(name)
ext := filepath.Ext(baseName)
baseNameNoExt = baseName
if ext != "" {
baseNameNoExt = strings.TrimSuffix(baseNameNoExt, ext)
}
fileLangExt := filepath.Ext(baseNameNoExt)
fileLang := strings.TrimPrefix(fileLangExt, ".")
if fs.languages[fileLang] {
lang = fileLang
baseNameNoExt = strings.TrimSuffix(baseNameNoExt, fileLangExt)
}
// This connects the filename to the filesystem, not the language.
virtualName = baseNameNoExt + "." + lang + ext
name = fs.nameMarker + name
}
weight := 1
// If this file's language belongs in this directory, add some weight to it
// to make it more important.
if lang == fs.Lang() {
weight = 2
}
if fi.IsDir() {
// For directories we always want to start from the union view.
realPath = strings.TrimPrefix(realPath, fs.basePath)
}
return &LanguageFileInfo{
lang: lang,
weight: weight,
realFilename: realPath,
realName: realName,
relFilename: strings.TrimPrefix(strings.TrimPrefix(realPath, fs.basePath), string(os.PathSeparator)),
name: name,
virtualName: virtualName,
translationBaseName: baseNameNoExt,
baseDir: fs.basePath,
FileInfo: fi}, nil
}

View file

@ -1,100 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"path/filepath"
"testing"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func TestLanguagFs(t *testing.T) {
languages := map[string]bool{
"sv": true,
}
base := filepath.FromSlash("/my/base")
assert := require.New(t)
m := afero.NewMemMapFs()
bfs := afero.NewBasePathFs(m, base)
lfs := NewLanguageFs("sv", languages, bfs)
assert.NotNil(lfs)
assert.Equal("sv", lfs.Lang())
err := afero.WriteFile(lfs, filepath.FromSlash("sect/page.md"), []byte("abc"), 0777)
assert.NoError(err)
fi, err := lfs.Stat(filepath.FromSlash("sect/page.md"))
assert.NoError(err)
assert.Equal("__hugofs_sv_page.md", fi.Name())
languager, ok := fi.(LanguageAnnouncer)
assert.True(ok)
assert.Equal("sv", languager.Lang())
lfi, ok := fi.(*LanguageFileInfo)
assert.True(ok)
assert.Equal(filepath.FromSlash("/my/base/sect/page.md"), lfi.Filename())
assert.Equal(filepath.FromSlash("sect/page.md"), lfi.Path())
assert.Equal("page.sv.md", lfi.virtualName)
assert.Equal("__hugofs_sv_page.md", lfi.Name())
assert.Equal("page.md", lfi.RealName())
assert.Equal(filepath.FromSlash("/my/base"), lfi.BaseDir())
assert.Equal("sv", lfi.Lang())
assert.Equal("page", lfi.TranslationBaseName())
}
// Issue 4559
func TestFilenamesHandling(t *testing.T) {
languages := map[string]bool{
"sv": true,
}
base := filepath.FromSlash("/my/base")
assert := require.New(t)
m := afero.NewMemMapFs()
bfs := afero.NewBasePathFs(m, base)
lfs := NewLanguageFs("sv", languages, bfs)
assert.NotNil(lfs)
assert.Equal("sv", lfs.Lang())
for _, test := range []struct {
filename string
check func(fi *LanguageFileInfo)
}{
{"tc-lib-color/class-Com.Tecnick.Color.Css", func(fi *LanguageFileInfo) {
assert.Equal("class-Com.Tecnick.Color", fi.TranslationBaseName())
assert.Equal(filepath.FromSlash("/my/base"), fi.BaseDir())
assert.Equal(filepath.FromSlash("tc-lib-color/class-Com.Tecnick.Color.Css"), fi.Path())
assert.Equal("class-Com.Tecnick.Color.Css", fi.RealName())
assert.Equal(filepath.FromSlash("/my/base/tc-lib-color/class-Com.Tecnick.Color.Css"), fi.Filename())
}},
{"tc-lib-color/class-Com.Tecnick.Color.sv.Css", func(fi *LanguageFileInfo) {
assert.Equal("class-Com.Tecnick.Color", fi.TranslationBaseName())
assert.Equal("class-Com.Tecnick.Color.sv.Css", fi.RealName())
assert.Equal(filepath.FromSlash("/my/base/tc-lib-color/class-Com.Tecnick.Color.sv.Css"), fi.Filename())
}},
} {
err := afero.WriteFile(lfs, filepath.FromSlash(test.filename), []byte("abc"), 0777)
assert.NoError(err)
fi, err := lfs.Stat(filepath.FromSlash(test.filename))
assert.NoError(err)
lfi, ok := fi.(*LanguageFileInfo)
assert.True(ok)
assert.Equal("sv", lfi.Lang())
test.check(lfi)
}
}

85
hugofs/nosymlink_fs.go Normal file
View file

@ -0,0 +1,85 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"errors"
"os"
"github.com/spf13/afero"
)
var (
ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
)
func NewNoSymlinkFs(fs afero.Fs) afero.Fs {
return &noSymlinkFs{Fs: fs}
}
// noSymlinkFs is a filesystem that prevents symlinking.
type noSymlinkFs struct {
afero.Fs
}
func (fs *noSymlinkFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
return fs.stat(name)
}
func (fs *noSymlinkFs) Stat(name string) (os.FileInfo, error) {
fi, _, err := fs.stat(name)
return fi, err
}
func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) {
var (
fi os.FileInfo
wasLstat bool
err error
)
if lstater, ok := fs.Fs.(afero.Lstater); ok {
fi, wasLstat, err = lstater.LstatIfPossible(name)
} else {
fi, err = fs.Fs.Stat(name)
}
var metaIsSymlink bool
if fim, ok := fi.(FileMetaInfo); ok {
metaIsSymlink = fim.Meta().IsSymlink()
}
if metaIsSymlink || isSymlink(fi) {
return nil, wasLstat, ErrPermissionSymlink
}
return fi, wasLstat, err
}
func (fs *noSymlinkFs) Open(name string) (afero.File, error) {
if _, _, err := fs.stat(name); err != nil {
return nil, err
}
return fs.Fs.Open(name)
}
func (fs *noSymlinkFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
if _, _, err := fs.stat(name); err != nil {
return nil, err
}
return fs.Fs.OpenFile(name, flag, perm)
}

97
hugofs/nosymlink_test.go Normal file
View file

@ -0,0 +1,97 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"os"
"path/filepath"
"testing"
"github.com/gohugoio/hugo/htesting"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func TestNoSymlinkFs(t *testing.T) {
if skipSymlink() {
t.Skip("Skip; os.Symlink needs administrator rights on Windows")
}
assert := require.New(t)
workDir, clean, err := htesting.CreateTempDir(Os, "hugo-nosymlink")
assert.NoError(err)
defer clean()
wd, _ := os.Getwd()
defer func() {
os.Chdir(wd)
}()
blogDir := filepath.Join(workDir, "blog")
blogFile := filepath.Join(blogDir, "a.txt")
assert.NoError(os.MkdirAll(blogDir, 0777))
afero.WriteFile(Os, filepath.Join(blogFile), []byte("content"), 0777)
os.Chdir(workDir)
assert.NoError(os.Symlink("blog", "symlinkdedir"))
os.Chdir(blogDir)
assert.NoError(os.Symlink("a.txt", "symlinkdedfile.txt"))
fs := NewNoSymlinkFs(Os)
ls := fs.(afero.Lstater)
symlinkedDir := filepath.Join(workDir, "symlinkdedir")
symlinkedFile := filepath.Join(blogDir, "symlinkdedfile.txt")
// Check Stat and Lstat
for _, stat := range []func(name string) (os.FileInfo, error){
func(name string) (os.FileInfo, error) {
return fs.Stat(name)
},
func(name string) (os.FileInfo, error) {
fi, _, err := ls.LstatIfPossible(name)
return fi, err
},
} {
_, err = stat(symlinkedDir)
assert.Equal(ErrPermissionSymlink, err)
_, err = stat(symlinkedFile)
assert.Equal(ErrPermissionSymlink, err)
fi, err := stat(filepath.Join(workDir, "blog"))
assert.NoError(err)
assert.NotNil(fi)
fi, err = stat(blogFile)
assert.NoError(err)
assert.NotNil(fi)
}
// Check Open
_, err = fs.Open(symlinkedDir)
assert.Equal(ErrPermissionSymlink, err)
_, err = fs.OpenFile(symlinkedDir, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
assert.Equal(ErrPermissionSymlink, err)
_, err = fs.OpenFile(symlinkedFile, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
assert.Equal(ErrPermissionSymlink, err)
_, err = fs.Open(symlinkedFile)
assert.Equal(ErrPermissionSymlink, err)
f, err := fs.Open(blogDir)
assert.NoError(err)
f.Close()
f, err = fs.Open(blogFile)
assert.NoError(err)
f.Close()
// os.OpenFile(logFile, os.O_RDWR|os.O_APPEND|os.O_CREATE, 0666)
}

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -14,10 +14,14 @@
package hugofs package hugofs
import ( import (
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
"time"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/pkg/errors"
radix "github.com/hashicorp/go-immutable-radix" radix "github.com/hashicorp/go-immutable-radix"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -25,90 +29,235 @@ import (
var filepathSeparator = string(filepath.Separator) var filepathSeparator = string(filepath.Separator)
// NewRootMappingFs creates a new RootMappingFs on top of the provided filesystem,
// with a set of root mappings and some optional metadata about the roots.
// Note that From represents a virtual root that maps to the actual filename in To.
func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
rootMapToReal := radix.New().Txn()
for _, rm := range rms {
(&rm).clean()
fromBase := files.ResolveComponentFolder(rm.From)
if fromBase == "" {
panic("unrecognised component folder in" + rm.From)
}
if len(rm.To) < 2 {
panic(fmt.Sprintf("invalid root mapping; from/to: %s/%s", rm.From, rm.To))
}
_, err := fs.Stat(rm.To)
if err != nil {
if os.IsNotExist(err) {
continue
}
return nil, err
}
// Extract "blog" from "content/blog"
rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator)
key := []byte(rm.rootKey())
var mappings []RootMapping
v, found := rootMapToReal.Get(key)
if found {
// There may be more than one language pointing to the same root.
mappings = v.([]RootMapping)
}
mappings = append(mappings, rm)
rootMapToReal.Insert(key, mappings)
}
rfs := &RootMappingFs{Fs: fs,
virtualRoots: rms,
rootMapToReal: rootMapToReal.Commit().Root()}
return rfs, nil
}
// NewRootMappingFsFromFromTo is a convenience variant of NewRootMappingFs taking
// From and To as string pairs.
func NewRootMappingFsFromFromTo(fs afero.Fs, fromTo ...string) (*RootMappingFs, error) {
rms := make([]RootMapping, len(fromTo)/2)
for i, j := 0, 0; j < len(fromTo); i, j = i+1, j+2 {
rms[i] = RootMapping{
From: fromTo[j],
To: fromTo[j+1],
}
}
return NewRootMappingFs(fs, rms...)
}
type RootMapping struct {
From string
To string
path string // The virtual mount point, e.g. "blog".
Meta FileMeta // File metadata (lang etc.)
}
func (rm *RootMapping) clean() {
rm.From = filepath.Clean(rm.From)
rm.To = filepath.Clean(rm.To)
}
func (r RootMapping) filename(name string) string {
return filepath.Join(r.To, strings.TrimPrefix(name, r.From))
}
func (r RootMapping) rootKey() string {
return r.From
}
// A RootMappingFs maps several roots into one. Note that the root of this filesystem // A RootMappingFs maps several roots into one. Note that the root of this filesystem
// is directories only, and they will be returned in Readdir and Readdirnames // is directories only, and they will be returned in Readdir and Readdirnames
// in the order given. // in the order given.
type RootMappingFs struct { type RootMappingFs struct {
afero.Fs afero.Fs
rootMapToReal *radix.Node rootMapToReal *radix.Node
virtualRoots []string virtualRoots []RootMapping
filter func(r RootMapping) bool
} }
type rootMappingFile struct { func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) {
afero.File roots := fs.getRootsWithPrefix(base)
fs *RootMappingFs
name string
}
type rootMappingFileInfo struct { if roots == nil {
name string return nil, nil
}
func (fi *rootMappingFileInfo) Name() string {
return fi.name
}
func (fi *rootMappingFileInfo) Size() int64 {
panic("not implemented")
}
func (fi *rootMappingFileInfo) Mode() os.FileMode {
return os.ModeDir
}
func (fi *rootMappingFileInfo) ModTime() time.Time {
panic("not implemented")
}
func (fi *rootMappingFileInfo) IsDir() bool {
return true
}
func (fi *rootMappingFileInfo) Sys() interface{} {
return nil
}
func newRootMappingDirFileInfo(name string) *rootMappingFileInfo {
return &rootMappingFileInfo{name: name}
}
// NewRootMappingFs creates a new RootMappingFs on top of the provided with
// a list of from, to string pairs of root mappings.
// Note that 'from' represents a virtual root that maps to the actual filename in 'to'.
func NewRootMappingFs(fs afero.Fs, fromTo ...string) (*RootMappingFs, error) {
rootMapToReal := radix.New().Txn()
var virtualRoots []string
for i := 0; i < len(fromTo); i += 2 {
vr := filepath.Clean(fromTo[i])
rr := filepath.Clean(fromTo[i+1])
// We need to preserve the original order for Readdir
virtualRoots = append(virtualRoots, vr)
rootMapToReal.Insert([]byte(vr), rr)
} }
return &RootMappingFs{Fs: fs, fss := make([]FileMetaInfo, len(roots))
virtualRoots: virtualRoots, for i, r := range roots {
rootMapToReal: rootMapToReal.Commit().Root()}, nil bfs := afero.NewBasePathFs(fs.Fs, r.To)
bfs = decoratePath(bfs, func(name string) string {
p := strings.TrimPrefix(name, r.To)
if r.path != "" {
				// Make sure it's mounted to any sub path, e.g. blog
p = filepath.Join(r.path, p)
}
p = strings.TrimLeft(p, filepathSeparator)
return p
})
fs := decorateDirs(bfs, r.Meta)
fi, err := fs.Stat("")
if err != nil {
return nil, errors.Wrap(err, "RootMappingFs.Dirs")
}
fss[i] = fi.(FileMetaInfo)
}
return fss, nil
}
// LstatIfPossible returns the os.FileInfo structure describing a given file.
func (fs *RootMappingFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fis, b, err := fs.doLstat(name, false)
if err != nil {
return nil, b, err
}
return fis[0], b, nil
}
func (fs *RootMappingFs) virtualDirOpener(name string, isRoot bool) func() (afero.File, error) {
return func() (afero.File, error) { return &rootMappingFile{name: name, isRoot: isRoot, fs: fs}, nil }
}
func (fs *RootMappingFs) doLstat(name string, allowMultiple bool) ([]FileMetaInfo, bool, error) {
if fs.isRoot(name) {
return []FileMetaInfo{newDirNameOnlyFileInfo(name, true, fs.virtualDirOpener(name, true))}, false, nil
}
roots := fs.getRoots(name)
if len(roots) == 0 {
roots := fs.getRootsWithPrefix(name)
if len(roots) != 0 {
// We have root mappings below name, let's make it look like
// a directory.
return []FileMetaInfo{newDirNameOnlyFileInfo(name, true, fs.virtualDirOpener(name, false))}, false, nil
}
return nil, false, os.ErrNotExist
}
var (
fis []FileMetaInfo
b bool
fi os.FileInfo
root RootMapping
err error
)
for _, root = range roots {
fi, b, err = fs.statRoot(root, name)
if err != nil {
if os.IsNotExist(err) {
continue
}
return nil, false, err
}
fim := fi.(FileMetaInfo)
fis = append(fis, fim)
}
if len(fis) == 0 {
return nil, false, os.ErrNotExist
}
if allowMultiple || len(fis) == 1 {
return fis, b, nil
}
// Open it in this composite filesystem.
opener := func() (afero.File, error) {
return fs.Open(name)
}
return []FileMetaInfo{decorateFileInfo(fi, fs, opener, "", "", root.Meta)}, b, nil
}
// Open opens the named file for reading.
func (fs *RootMappingFs) Open(name string) (afero.File, error) {
if fs.isRoot(name) {
return &rootMappingFile{name: name, fs: fs, isRoot: true}, nil
}
fis, _, err := fs.doLstat(name, true)
if err != nil {
return nil, err
}
if len(fis) == 1 {
fi := fis[0]
meta := fi.(FileMetaInfo).Meta()
f, err := meta.Open()
if err != nil {
return nil, err
}
return &rootMappingFile{File: f, fs: fs, name: name, meta: meta}, nil
}
return fs.newUnionFile(fis...)
} }
// Stat returns the os.FileInfo structure describing a given file. If there is // Stat returns the os.FileInfo structure describing a given file. If there is
// an error, it will be of type *os.PathError. // an error, it will be of type *os.PathError.
func (fs *RootMappingFs) Stat(name string) (os.FileInfo, error) { func (fs *RootMappingFs) Stat(name string) (os.FileInfo, error) {
if fs.isRoot(name) { fi, _, err := fs.LstatIfPossible(name)
return newRootMappingDirFileInfo(name), nil return fi, err
}
realName := fs.realName(name)
fi, err := fs.Fs.Stat(realName) }
if rfi, ok := fi.(RealFilenameInfo); ok {
return rfi, err
}
return &realFilenameInfo{FileInfo: fi, realFilename: realName}, err
// Filter creates a copy of this filesystem with the applied filter.
func (fs RootMappingFs) Filter(f func(m RootMapping) bool) *RootMappingFs {
fs.filter = f
return &fs
} }
func (fs *RootMappingFs) isRoot(name string) bool { func (fs *RootMappingFs) isRoot(name string) bool {
@ -116,60 +265,193 @@ func (fs *RootMappingFs) isRoot(name string) bool {
} }
// Open opens the named file for reading. func (fs *RootMappingFs) getRoots(name string) []RootMapping {
func (fs *RootMappingFs) Open(name string) (afero.File, error) { nameb := []byte(filepath.Clean(name))
if fs.isRoot(name) { _, v, found := fs.rootMapToReal.LongestPrefix(nameb)
return &rootMappingFile{name: name, fs: fs}, nil if !found {
return nil
} }
realName := fs.realName(name)
f, err := fs.Fs.Open(realName) rm := v.([]RootMapping)
if fs.filter != nil {
var filtered []RootMapping
for _, m := range rm {
if fs.filter(m) {
filtered = append(filtered, m)
}
}
return filtered
}
return rm
}
func (fs *RootMappingFs) getRootsWithPrefix(prefix string) []RootMapping {
if fs.isRoot(prefix) {
return fs.virtualRoots
}
prefixb := []byte(filepath.Clean(prefix))
var roots []RootMapping
fs.rootMapToReal.WalkPrefix(prefixb, func(b []byte, v interface{}) bool {
roots = append(roots, v.([]RootMapping)...)
return false
})
return roots
}
func (fs *RootMappingFs) newUnionFile(fis ...FileMetaInfo) (afero.File, error) {
meta := fis[0].Meta()
f, err := meta.Open()
if err != nil { if err != nil {
return nil, err return nil, err
} }
return &rootMappingFile{File: f, name: name, fs: fs}, nil rf := &rootMappingFile{File: f, fs: fs, name: meta.Name(), meta: meta}
if len(fis) == 1 {
return rf, err
}
next, err := fs.newUnionFile(fis[1:]...)
if err != nil {
return nil, err
}
uf := &afero.UnionFile{Base: rf, Layer: next}
uf.Merger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
// Ignore duplicate directory entries
seen := make(map[string]bool)
var result []os.FileInfo
for _, fis := range [][]os.FileInfo{bofi, lofi} {
for _, fi := range fis {
if fi.IsDir() && seen[fi.Name()] {
continue
}
if fi.IsDir() {
seen[fi.Name()] = true
}
result = append(result, fi)
}
}
return result, nil
}
return uf, nil
}
func (fs *RootMappingFs) statRoot(root RootMapping, name string) (os.FileInfo, bool, error) {
filename := root.filename(name)

var b bool
var fi os.FileInfo
var err error

if ls, ok := fs.Fs.(afero.Lstater); ok {
fi, b, err = ls.LstatIfPossible(filename)
if err != nil {
return nil, b, err
}
} else {
fi, err = fs.Fs.Stat(filename)
if err != nil {
return nil, b, err
}
}

// Opens the real directory/file.
opener := func() (afero.File, error) {
return fs.Fs.Open(filename)
}

if fi.IsDir() {
_, name = filepath.Split(name)
fi = newDirNameOnlyFileInfo(name, false, opener)
}

return decorateFileInfo(fi, fs.Fs, opener, "", "", root.Meta), b, nil
}
type rootMappingFile struct {
afero.File
fs *RootMappingFs
name string
meta FileMeta
isRoot bool
}

func (f *rootMappingFile) Close() error {
if f.File == nil {
return nil
}
return f.File.Close()
}

func (f *rootMappingFile) Name() string {
return f.name
}
func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
if f.File == nil {
dirsn := make([]os.FileInfo, 0)
roots := f.fs.getRootsWithPrefix(f.name)
seen := make(map[string]bool)

j := 0
for _, rm := range roots {
if count != -1 && j >= count {
break
}

opener := func() (afero.File, error) {
return f.fs.Open(rm.From)
}

name := rm.From
if !f.isRoot {
_, name = filepath.Split(rm.From)
}

if seen[name] {
continue
}
seen[name] = true
j++

fi := newDirNameOnlyFileInfo(name, false, opener)
if rm.Meta != nil {
mergeFileMeta(rm.Meta, fi.Meta())
}

dirsn = append(dirsn, fi)
}
return dirsn, nil
}

if f.File == nil {
panic(fmt.Sprintf("no File for %q", f.name))
}

fis, err := f.File.Readdir(count)
if err != nil {
return nil, err
}

for i, fi := range fis {
fis[i] = decorateFileInfo(fi, f.fs, nil, "", "", f.meta)
}
return fis, nil
}
func (f *rootMappingFile) Readdirnames(count int) ([]string, error) {
@ -183,14 +465,3 @@ func (f *rootMappingFile) Readdirnames(count int) ([]string, error) {
}
return dirss, nil
}
func (f *rootMappingFile) Name() string {
return f.name
}
func (f *rootMappingFile) Close() error {
if f.File == nil {
return nil
}
return f.File.Close()
}

View file

@ -1,4 +1,4 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@ -19,24 +19,115 @@ import (
"path/filepath"
"testing"

"github.com/spf13/viper"

"github.com/gohugoio/hugo/htesting"

"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)

func TestLanguageRootMapping(t *testing.T) {
assert := require.New(t)
v := viper.New()
v.Set("contentDir", "content")
fs := NewBaseFileDecorator(afero.NewMemMapFs())
assert.NoError(afero.WriteFile(fs, filepath.Join("content/sv/svdir", "main.txt"), []byte("main sv"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "sv-f.txt"), []byte("some sv blog content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", "en-f.txt"), []byte("some en blog content in a"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/myotherenblogcontent", "en-f2.txt"), []byte("some en content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/mysvdocs", "sv-docs.txt"), []byte("some sv docs content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/b/myenblogcontent", "en-b-f.txt"), []byte("some en content"), 0755))
rfs, err := NewRootMappingFs(fs,
RootMapping{
From: "content/blog", // Virtual path, first element is one of content, static, layouts etc.
To: "themes/a/mysvblogcontent", // Real path
Meta: FileMeta{"lang": "sv"},
},
RootMapping{
From: "content/blog",
To: "themes/a/myenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{
From: "content/blog",
To: "content/sv",
Meta: FileMeta{"lang": "sv"},
},
RootMapping{
From: "content/blog",
To: "themes/a/myotherenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{
From: "content/docs",
To: "themes/a/mysvdocs",
Meta: FileMeta{"lang": "sv"},
},
)
assert.NoError(err)

collected, err := collectFilenames(rfs, "content", "content")
assert.NoError(err)
assert.Equal([]string{"blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"}, collected)
bfs := afero.NewBasePathFs(rfs, "content")
collected, err = collectFilenames(bfs, "", "")
assert.NoError(err)
assert.Equal([]string{"blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"}, collected)
dirs, err := rfs.Dirs(filepath.FromSlash("content/blog"))
assert.NoError(err)
assert.Equal(4, len(dirs))
getDirnames := func(name string, rfs *RootMappingFs) []string {
filename := filepath.FromSlash(name)
f, err := rfs.Open(filename)
assert.NoError(err)
names, err := f.Readdirnames(-1)
f.Close()
assert.NoError(err)
info, err := rfs.Stat(filename)
assert.NoError(err)
f2, err := info.(FileMetaInfo).Meta().Open()
assert.NoError(err)
names2, err := f2.Readdirnames(-1)
assert.NoError(err)
assert.Equal(names, names2)
f2.Close()
return names
}
rfsEn := rfs.Filter(func(rm RootMapping) bool {
return rm.Meta.Lang() == "en"
})
assert.Equal([]string{"en-f.txt", "en-f2.txt"}, getDirnames("content/blog", rfsEn))
rfsSv := rfs.Filter(func(rm RootMapping) bool {
return rm.Meta.Lang() == "sv"
})
assert.Equal([]string{"sv-f.txt", "svdir"}, getDirnames("content/blog", rfsSv))
// Make sure we have not messed with the original
assert.Equal([]string{"sv-f.txt", "en-f.txt", "svdir", "en-f2.txt"}, getDirnames("content/blog", rfs))
assert.Equal([]string{"blog", "docs"}, getDirnames("content", rfsSv))
assert.Equal([]string{"blog", "docs"}, getDirnames("content", rfs))
}

func TestRootMappingFsDirnames(t *testing.T) {
assert := require.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())

testfile := "myfile.txt"
assert.NoError(fs.Mkdir("f1t", 0755))
@ -44,13 +135,14 @@ func TestRootMappingFsDirnames(t *testing.T) {
assert.NoError(fs.Mkdir("f3t", 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("f2t", testfile), []byte("some content"), 0755))

rfs, err := NewRootMappingFsFromFromTo(fs, "static/bf1", "f1t", "static/cf2", "f2t", "static/af3", "f3t")
assert.NoError(err)

fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
assert.NoError(err)
assert.Equal("myfile.txt", fif.Name())
fifm := fif.(FileMetaInfo).Meta()
assert.Equal(filepath.FromSlash("f2t/myfile.txt"), fifm.Filename())

root, err := rfs.Open(filepathSeparator)
assert.NoError(err)
@ -61,6 +153,91 @@ func TestRootMappingFsDirnames(t *testing.T) {
}
func TestRootMappingFsFilename(t *testing.T) {
assert := require.New(t)
workDir, clean, err := htesting.CreateTempDir(Os, "hugo-root-filename")
assert.NoError(err)
defer clean()
fs := NewBaseFileDecorator(Os)
testfilename := filepath.Join(workDir, "f1t/foo/file.txt")
assert.NoError(fs.MkdirAll(filepath.Join(workDir, "f1t/foo"), 0777))
assert.NoError(afero.WriteFile(fs, testfilename, []byte("content"), 0666))
rfs, err := NewRootMappingFsFromFromTo(fs, "static/f1", filepath.Join(workDir, "f1t"), "static/f2", filepath.Join(workDir, "f2t"))
assert.NoError(err)
fi, err := rfs.Stat(filepath.FromSlash("static/f1/foo/file.txt"))
assert.NoError(err)
fim := fi.(FileMetaInfo)
assert.Equal(testfilename, fim.Meta().Filename())
_, err = rfs.Stat(filepath.FromSlash("static/f1"))
assert.NoError(err)
}
func TestRootMappingFsMount(t *testing.T) {
assert := require.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
testfile := "test.txt"
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755))
bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs)
rm := []RootMapping{
RootMapping{From: "content/blog",
To: "mynoblogcontent",
Meta: FileMeta{"lang": "no"},
},
RootMapping{From: "content/blog",
To: "myenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{From: "content/blog",
To: "mysvblogcontent",
Meta: FileMeta{"lang": "sv"},
},
}
rfs, err := NewRootMappingFs(bfs, rm...)
assert.NoError(err)
blog, err := rfs.Stat(filepath.FromSlash("content/blog"))
assert.NoError(err)
blogm := blog.(FileMetaInfo).Meta()
assert.Equal("sv", blogm.Lang()) // Last match
f, err := blogm.Open()
assert.NoError(err)
defer f.Close()
dirs1, err := f.Readdirnames(-1)
assert.NoError(err)
// Union with duplicate dir names filtered.
assert.Equal([]string{"test.txt", "test.txt", "other.txt", "test.txt"}, dirs1)
files, err := afero.ReadDir(rfs, filepath.FromSlash("content/blog"))
assert.NoError(err)
assert.Equal(4, len(files))
testfilefi := files[1]
assert.Equal(testfile, testfilefi.Name())
testfilem := testfilefi.(FileMetaInfo).Meta()
assert.Equal(filepath.FromSlash("themes/a/mynoblogcontent/test.txt"), testfilem.Filename())
tf, err := testfilem.Open()
assert.NoError(err)
defer tf.Close()
c, err := ioutil.ReadAll(tf)
assert.NoError(err)
assert.Equal("some no content", string(c))
}
func TestRootMappingFsOs(t *testing.T) {
assert := require.New(t)
fs := afero.NewOsFs()
@ -77,10 +254,10 @@ func TestRootMappingFsOs(t *testing.T) {
assert.NoError(fs.Mkdir(filepath.Join(d, "f3t"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join(d, "f2t", testfile), []byte("some content"), 0755))

rfs, err := NewRootMappingFsFromFromTo(fs, "static/bf1", filepath.Join(d, "f1t"), "static/cf2", filepath.Join(d, "f2t"), "static/af3", filepath.Join(d, "f3t"))
assert.NoError(err)

fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
assert.NoError(err)
assert.Equal("myfile.txt", fif.Name())

293
hugofs/slice_fs.go Normal file
View file

@ -0,0 +1,293 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"os"
"syscall"
"time"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
var (
_ afero.Fs = (*SliceFs)(nil)
_ afero.Lstater = (*SliceFs)(nil)
_ afero.File = (*sliceDir)(nil)
)
func NewSliceFs(dirs ...FileMetaInfo) (afero.Fs, error) {
if len(dirs) == 0 {
return NoOpFs, nil
}
for _, dir := range dirs {
if !dir.IsDir() {
return nil, errors.New("this fs supports directories only")
}
}
fs := &SliceFs{
dirs: dirs,
}
return fs, nil
}
// SliceFs is an ordered composite filesystem.
type SliceFs struct {
dirs []FileMetaInfo
}
func (fs *SliceFs) Chmod(n string, m os.FileMode) error {
return syscall.EPERM
}
func (fs *SliceFs) Chtimes(n string, a, m time.Time) error {
return syscall.EPERM
}
func (fs *SliceFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, _, err := fs.pickFirst(name)
if err != nil {
return nil, false, err
}
if fi.IsDir() {
return decorateFileInfo(fi, fs, fs.getOpener(name), "", "", nil), false, nil
}
return nil, false, errors.Errorf("lstat: files not supported: %q", name)
}
func (fs *SliceFs) Mkdir(n string, p os.FileMode) error {
return syscall.EPERM
}
func (fs *SliceFs) MkdirAll(n string, p os.FileMode) error {
return syscall.EPERM
}
func (fs *SliceFs) Name() string {
return "SliceFs"
}
func (fs *SliceFs) Open(name string) (afero.File, error) {
fi, idx, err := fs.pickFirst(name)
if err != nil {
return nil, err
}
if !fi.IsDir() {
panic("currently only dirs in here")
}
return &sliceDir{
lfs: fs,
idx: idx,
dirname: name,
}, nil
}
func (fs *SliceFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
panic("not implemented")
}
func (fs *SliceFs) ReadDir(name string) ([]os.FileInfo, error) {
panic("not implemented")
}
func (fs *SliceFs) Remove(n string) error {
return syscall.EPERM
}
func (fs *SliceFs) RemoveAll(p string) error {
return syscall.EPERM
}
func (fs *SliceFs) Rename(o, n string) error {
return syscall.EPERM
}
func (fs *SliceFs) Stat(name string) (os.FileInfo, error) {
fi, _, err := fs.LstatIfPossible(name)
return fi, err
}
func (fs *SliceFs) Create(n string) (afero.File, error) {
return nil, syscall.EPERM
}
func (fs *SliceFs) getOpener(name string) func() (afero.File, error) {
return func() (afero.File, error) {
return fs.Open(name)
}
}
func (fs *SliceFs) pickFirst(name string) (os.FileInfo, int, error) {
for i, mfs := range fs.dirs {
meta := mfs.Meta()
fs := meta.Fs()
fi, _, err := lstatIfPossible(fs, name)
if err == nil {
// Gotta match!
return fi, i, nil
}
if !os.IsNotExist(err) {
// Real error
return nil, -1, err
}
}
// Not found
return nil, -1, os.ErrNotExist
}
func (fs *SliceFs) readDirs(name string, startIdx, count int) ([]os.FileInfo, error) {
collect := func(lfs FileMeta) ([]os.FileInfo, error) {
d, err := lfs.Fs().Open(name)
if err != nil {
if !os.IsNotExist(err) {
return nil, err
}
return nil, nil
} else {
defer d.Close()
dirs, err := d.Readdir(-1)
if err != nil {
return nil, err
}
return dirs, nil
}
}
var dirs []os.FileInfo
for i := startIdx; i < len(fs.dirs); i++ {
mfs := fs.dirs[i]
fis, err := collect(mfs.Meta())
if err != nil {
return nil, err
}
dirs = append(dirs, fis...)
}
seen := make(map[string]bool)
var duplicates []int
for i, fi := range dirs {
if !fi.IsDir() {
continue
}
if seen[fi.Name()] {
duplicates = append(duplicates, i)
} else {
// Make sure it's opened by this filesystem.
dirs[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename()), "", "", nil)
seen[fi.Name()] = true
}
}
// Remove duplicate directories, keep first.
if len(duplicates) > 0 {
for i := len(duplicates) - 1; i >= 0; i-- {
idx := duplicates[i]
dirs = append(dirs[:idx], dirs[idx+1:]...)
}
}
if count > 0 && len(dirs) >= count {
return dirs[:count], nil
}
return dirs, nil
}
type sliceDir struct {
lfs *SliceFs
idx int
dirname string
}
func (f *sliceDir) Close() error {
return nil
}
func (f *sliceDir) Name() string {
return f.dirname
}
func (f *sliceDir) Read(p []byte) (n int, err error) {
panic("not implemented")
}
func (f *sliceDir) ReadAt(p []byte, off int64) (n int, err error) {
panic("not implemented")
}
func (f *sliceDir) Readdir(count int) ([]os.FileInfo, error) {
return f.lfs.readDirs(f.dirname, f.idx, count)
}
func (f *sliceDir) Readdirnames(count int) ([]string, error) {
dirsi, err := f.Readdir(count)
if err != nil {
return nil, err
}
dirs := make([]string, len(dirsi))
for i, d := range dirsi {
dirs[i] = d.Name()
}
return dirs, nil
}
func (f *sliceDir) Seek(offset int64, whence int) (int64, error) {
panic("not implemented")
}
func (f *sliceDir) Stat() (os.FileInfo, error) {
panic("not implemented")
}
func (f *sliceDir) Sync() error {
panic("not implemented")
}
func (f *sliceDir) Truncate(size int64) error {
panic("not implemented")
}
func (f *sliceDir) Write(p []byte) (n int, err error) {
panic("not implemented")
}
func (f *sliceDir) WriteAt(p []byte, off int64) (n int, err error) {
panic("not implemented")
}
func (f *sliceDir) WriteString(s string) (ret int, err error) {
panic("not implemented")
}
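// Illustrative sketch only (not part of this commit): the heart of
// SliceFs.readDirs above is a "first directory entry wins" rule. A standalone
// version of that de-duplication step over plain os.FileInfo slices looks
// roughly like this; regular files are never dropped, only repeated directory
// names contributed by lower-priority filesystems in the slice.
func dedupeDirsKeepFirst(dirs []os.FileInfo) []os.FileInfo {
	seen := make(map[string]bool)
	result := dirs[:0]
	for _, fi := range dirs {
		if fi.IsDir() && seen[fi.Name()] {
			// Already contributed by an earlier filesystem in the slice.
			continue
		}
		if fi.IsDir() {
			seen[fi.Name()] = true
		}
		result = append(result, fi)
	}
	return result
}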

308
hugofs/walk.go Normal file
View file

@ -0,0 +1,308 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"path/filepath"
"sort"
"strings"
"github.com/gohugoio/hugo/common/loggers"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
type (
WalkFunc func(path string, info FileMetaInfo, err error) error
WalkHook func(dir FileMetaInfo, path string, readdir []FileMetaInfo) ([]FileMetaInfo, error)
)
type Walkway struct {
fs afero.Fs
root string
basePath string
logger *loggers.Logger
// May be pre-set
fi FileMetaInfo
dirEntries []FileMetaInfo
walkFn WalkFunc
walked bool
// We may traverse symbolic links and bite ourselves.
seen map[string]bool
// Optional hooks
hookPre WalkHook
hookPost WalkHook
}
type WalkwayConfig struct {
Fs afero.Fs
Root string
BasePath string
Logger *loggers.Logger
// One or both of these may be pre-set.
Info FileMetaInfo
DirEntries []FileMetaInfo
WalkFn WalkFunc
HookPre WalkHook
HookPost WalkHook
}
func NewWalkway(cfg WalkwayConfig) *Walkway {
var fs afero.Fs
if cfg.Info != nil {
fs = cfg.Info.Meta().Fs()
} else {
fs = cfg.Fs
}
basePath := cfg.BasePath
if basePath != "" && !strings.HasSuffix(basePath, filepathSeparator) {
basePath += filepathSeparator
}
logger := cfg.Logger
if logger == nil {
logger = loggers.NewWarningLogger()
}
return &Walkway{
fs: fs,
root: cfg.Root,
basePath: basePath,
fi: cfg.Info,
dirEntries: cfg.DirEntries,
walkFn: cfg.WalkFn,
hookPre: cfg.HookPre,
hookPost: cfg.HookPost,
logger: logger,
seen: make(map[string]bool)}
}
func (w *Walkway) Walk() error {
if w.walked {
panic("this walkway is already walked")
}
w.walked = true
if w.fs == NoOpFs {
return nil
}
var fi FileMetaInfo
if w.fi != nil {
fi = w.fi
} else {
info, _, err := lstatIfPossible(w.fs, w.root)
if err != nil {
if os.IsNotExist(err) {
return nil
}
if err == ErrPermissionSymlink {
w.logger.WARN.Printf("Unsupported symlink found in %q, skipping.", w.root)
return nil
}
return w.walkFn(w.root, nil, errors.Wrapf(err, "walk: %q", w.root))
}
fi = info.(FileMetaInfo)
}
if !fi.IsDir() {
return w.walkFn(w.root, nil, errors.New("file to walk must be a directory"))
}
return w.walk(w.root, fi, w.dirEntries, w.walkFn)
}
// if the filesystem supports it, use Lstat, else use fs.Stat
func lstatIfPossible(fs afero.Fs, path string) (os.FileInfo, bool, error) {
if lfs, ok := fs.(afero.Lstater); ok {
fi, b, err := lfs.LstatIfPossible(path)
return fi, b, err
}
fi, err := fs.Stat(path)
return fi, false, err
}
// walk recursively descends path, calling walkFn.
// It follows symlinks if supported by the filesystem, but only the same path once.
func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo, walkFn WalkFunc) error {
err := walkFn(path, info, nil)
if err != nil {
if info.IsDir() && err == filepath.SkipDir {
return nil
}
return err
}
if !info.IsDir() {
return nil
}
meta := info.Meta()
filename := meta.Filename()
if dirEntries == nil {
f, err := w.fs.Open(path)
if err != nil {
return walkFn(path, info, errors.Wrapf(err, "walk: open %q (%q)", path, w.root))
}
fis, err := f.Readdir(-1)
f.Close()
if err != nil {
if err == ErrPermissionSymlink {
w.logger.WARN.Printf("Unsupported symlink found in %q, skipping.", filename)
return nil
}
return walkFn(path, info, errors.Wrap(err, "walk: Readdir"))
}
dirEntries = fileInfosToFileMetaInfos(fis)
if !meta.IsOrdered() {
sort.Slice(dirEntries, func(i, j int) bool {
fii := dirEntries[i]
fij := dirEntries[j]
fim, fjm := fii.Meta(), fij.Meta()
// Pull bundle headers to the top.
ficlass, fjclass := fim.Classifier(), fjm.Classifier()
if ficlass != fjclass {
return ficlass < fjclass
}
// With multiple content dirs with different languages,
// there can be duplicate files, and a weight will be added
// to the closest one.
fiw, fjw := fim.Weight(), fjm.Weight()
if fiw != fjw {
return fiw > fjw
}
// Explicit order set.
fio, fjo := fim.Ordinal(), fjm.Ordinal()
if fio != fjo {
return fio < fjo
}
// When we walk into a symlink, we keep the reference to
// the original name.
fin, fjn := fim.Name(), fjm.Name()
if fin != "" && fjn != "" {
return fin < fjn
}
return fii.Name() < fij.Name()
})
}
}
// First add some metadata to the dir entries
for _, fi := range dirEntries {
fim := fi.(FileMetaInfo)
meta := fim.Meta()
// Note that we use the original Name even if it's a symlink.
name := meta.Name()
if name == "" {
name = fim.Name()
}
if name == "" {
panic(fmt.Sprintf("[%s] no name set in %v", path, meta))
}
pathn := filepath.Join(path, name)
pathMeta := pathn
if w.basePath != "" {
pathMeta = strings.TrimPrefix(pathn, w.basePath)
}
meta[metaKeyPath] = normalizeFilename(pathMeta)
meta[metaKeyPathWalk] = pathn
if fim.IsDir() && w.isSeen(meta.Filename()) {
// Prevent infinite recursion
// Possible cyclic reference
meta[metaKeySkipDir] = true
}
}
if w.hookPre != nil {
dirEntries, err = w.hookPre(info, path, dirEntries)
if err != nil {
if err == filepath.SkipDir {
return nil
}
return err
}
}
for _, fi := range dirEntries {
fim := fi.(FileMetaInfo)
meta := fim.Meta()
if meta.SkipDir() {
continue
}
err := w.walk(meta.GetString(metaKeyPathWalk), fim, nil, walkFn)
if err != nil {
if !fi.IsDir() || err != filepath.SkipDir {
return err
}
}
}
if w.hookPost != nil {
dirEntries, err = w.hookPost(info, path, dirEntries)
if err != nil {
if err == filepath.SkipDir {
return nil
}
return err
}
}
return nil
}
func (w *Walkway) isSeen(filename string) bool {
if filename == "" {
return false
}
if w.seen[filename] {
return true
}
w.seen[filename] = true
return false
}
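// Hypothetical sketch (not part of this commit) of a hook matching the
// WalkHook signature above. Assigned to WalkwayConfig.HookPre, it would drop
// directories whose names start with an underscore before the walker
// descends into them.
func skipUnderscoreDirs(dir FileMetaInfo, path string, readdir []FileMetaInfo) ([]FileMetaInfo, error) {
	var keep []FileMetaInfo
	for _, fi := range readdir {
		if fi.IsDir() && strings.HasPrefix(fi.Name(), "_") {
			continue
		}
		keep = append(keep, fi)
	}
	return keep, nil
}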

225
hugofs/walk_test.go Normal file
View file

@ -0,0 +1,225 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"testing"
"github.com/gohugoio/hugo/common/hugo"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/htesting"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func TestWalk(t *testing.T) {
assert := require.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
afero.WriteFile(fs, "b.txt", []byte("content"), 0777)
afero.WriteFile(fs, "c.txt", []byte("content"), 0777)
afero.WriteFile(fs, "a.txt", []byte("content"), 0777)
names, err := collectFilenames(fs, "", "")
assert.NoError(err)
assert.Equal([]string{"a.txt", "b.txt", "c.txt"}, names)
}
func TestWalkRootMappingFs(t *testing.T) {
assert := require.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
testfile := "test.txt"
assert.NoError(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0755))
assert.NoError(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0755))
rm := []RootMapping{
RootMapping{
From: "static/b",
To: "e/f",
},
RootMapping{
From: "static/a",
To: "c/d",
},
RootMapping{
From: "static/c",
To: "a/b",
},
}
rfs, err := NewRootMappingFs(fs, rm...)
assert.NoError(err)
bfs := afero.NewBasePathFs(rfs, "static")
names, err := collectFilenames(bfs, "", "")
assert.NoError(err)
assert.Equal([]string{"a/test.txt", "b/test.txt", "c/test.txt"}, names)
}
func skipSymlink() bool {
return runtime.GOOS == "windows" && os.Getenv("CI") == ""
}
func TestWalkSymbolicLink(t *testing.T) {
if skipSymlink() {
t.Skip("Skip; os.Symlink needs administrator rights on Windows")
}
assert := require.New(t)
workDir, clean, err := htesting.CreateTempDir(Os, "hugo-walk-sym")
assert.NoError(err)
defer clean()
wd, _ := os.Getwd()
defer func() {
os.Chdir(wd)
}()
fs := NewBaseFileDecorator(Os)
blogDir := filepath.Join(workDir, "blog")
docsDir := filepath.Join(workDir, "docs")
blogReal := filepath.Join(blogDir, "real")
blogRealSub := filepath.Join(blogReal, "sub")
assert.NoError(os.MkdirAll(blogRealSub, 0777))
assert.NoError(os.MkdirAll(docsDir, 0777))
afero.WriteFile(fs, filepath.Join(blogRealSub, "a.txt"), []byte("content"), 0777)
afero.WriteFile(fs, filepath.Join(docsDir, "b.txt"), []byte("content"), 0777)
os.Chdir(blogDir)
assert.NoError(os.Symlink("real", "symlinked"))
os.Chdir(blogReal)
assert.NoError(os.Symlink("../real", "cyclic"))
os.Chdir(docsDir)
assert.NoError(os.Symlink("../blog/real/cyclic", "docsreal"))
t.Run("OS Fs", func(t *testing.T) {
assert := require.New(t)
names, err := collectFilenames(fs, workDir, workDir)
assert.NoError(err)
assert.Equal([]string{"blog/real/sub/a.txt", "docs/b.txt"}, names)
})
t.Run("BasePath Fs", func(t *testing.T) {
if hugo.GoMinorVersion() < 12 {
// https://github.com/golang/go/issues/30520
// This is fixed in Go 1.13 and in the latest Go 1.12
t.Skip("skip this for Go <= 1.11 due to a bug in Go's stdlib")
}
assert := require.New(t)
docsFs := afero.NewBasePathFs(fs, docsDir)
names, err := collectFilenames(docsFs, "", "")
assert.NoError(err)
// Note: the docsreal folder is considered cyclic when walking from the root, but this works.
assert.Equal([]string{"b.txt", "docsreal/sub/a.txt"}, names)
})
}
func collectFilenames(fs afero.Fs, base, root string) ([]string, error) {
var names []string
walkFn := func(path string, info FileMetaInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
filename := info.Meta().Path()
filename = filepath.ToSlash(filename)
names = append(names, filename)
return nil
}
w := NewWalkway(WalkwayConfig{Fs: fs, BasePath: base, Root: root, WalkFn: walkFn})
err := w.Walk()
return names, err
}
func BenchmarkWalk(b *testing.B) {
assert := require.New(b)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
writeFiles := func(dir string, numfiles int) {
for i := 0; i < numfiles; i++ {
filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i))
assert.NoError(afero.WriteFile(fs, filename, []byte("content"), 0777))
}
}
const numFilesPerDir = 20
writeFiles("root", numFilesPerDir)
writeFiles("root/l1_1", numFilesPerDir)
writeFiles("root/l1_1/l2_1", numFilesPerDir)
writeFiles("root/l1_1/l2_2", numFilesPerDir)
writeFiles("root/l1_2", numFilesPerDir)
writeFiles("root/l1_2/l2_1", numFilesPerDir)
writeFiles("root/l1_3", numFilesPerDir)
walkFn := func(path string, info FileMetaInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
filename := info.Meta().Filename()
if !strings.HasPrefix(filename, "root") {
return errors.New(filename)
}
return nil
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
w := NewWalkway(WalkwayConfig{Fs: fs, Root: "root", WalkFn: walkFn})
if err := w.Walk(); err != nil {
b.Fatal(err)
}
}
}

View file

@ -14,21 +14,24 @@
package hugolib

import (
"os"
"path/filepath"
"strings"

"github.com/gohugoio/hugo/common/loggers"

"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/parser/metadecoders"

"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo"

"github.com/gohugoio/hugo/hugolib/paths"

"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/modules"

"github.com/pkg/errors"

"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/privacy"
@ -67,7 +70,8 @@ func loadSiteConfig(cfg config.Provider) (scfg SiteConfig, err error) {
// ConfigSourceDescriptor describes where to find the config (e.g. config.toml etc.).
type ConfigSourceDescriptor struct {
Fs afero.Fs
Logger *loggers.Logger

// Path to the config file to use, e.g. /my/project/config.toml
Filename string
@ -84,6 +88,9 @@ type ConfigSourceDescriptor struct {
// production, development
Environment string

// Defaults to os.Environ if not set.
Environ []string
}

func (d ConfigSourceDescriptor) configFilenames() []string {
@ -111,51 +118,43 @@ var ErrNoConfigFile = errors.New("Unable to locate config file or config directo
// LoadConfig loads Hugo configuration into a new Viper and then adds
// a set of defaults.
func LoadConfig(d ConfigSourceDescriptor, doWithConfig ...func(cfg config.Provider) error) (*viper.Viper, []string, error) {
if d.Environment == "" {
d.Environment = hugo.EnvironmentProduction
}
if len(d.Environ) == 0 {
d.Environ = os.Environ()
}

var configFiles []string

v := viper.New()
l := configLoader{ConfigSourceDescriptor: d}

for _, name := range d.configFilenames() {
var filename string
filename, err := l.loadConfig(name, v)
if err == nil {
configFiles = append(configFiles, filename)
} else if err != ErrNoConfigFile {
return nil, nil, err
}
}

if d.AbsConfigDir != "" {
dirnames, err := l.loadConfigFromConfigDir(v)
if err == nil {
configFiles = append(configFiles, dirnames...)
} else if err != ErrNoConfigFile {
return nil, nil, err
}
}

if err := loadDefaultSettingsFor(v); err != nil {
return v, configFiles, err
}

// We create languages based on the settings, so we need to make sure that
// all configuration is loaded/set before doing that.
for _, d := range doWithConfig {
@ -164,12 +163,75 @@ func LoadConfig(d ConfigSourceDescriptor, doWithConfig ...func(cfg config.Provid
}
}
// Apply environment overrides
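// For example, HUGO_ENABLEGITINFO=false overrides enableGitInfo, and
// HUGO_IMAGING_ANCHOR=top overrides the nested imaging.anchor setting
// (nested keys are resolved via the "_" separator below).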
if len(d.Environ) > 0 {
// Extract all that start with the HUGO_ prefix
const hugoEnvPrefix = "HUGO_"
var hugoEnv []string
for _, v := range d.Environ {
key, val := config.SplitEnvVar(v)
if strings.HasPrefix(key, hugoEnvPrefix) {
hugoEnv = append(hugoEnv, strings.ToLower(strings.TrimPrefix(key, hugoEnvPrefix)), val)
}
}
if len(hugoEnv) > 0 {
for i := 0; i < len(hugoEnv); i += 2 {
key, valStr := strings.ToLower(hugoEnv[i]), hugoEnv[i+1]
existing, nestedKey, owner, err := maps.GetNestedParamFn(key, "_", v.Get)
if err != nil {
return v, configFiles, err
}
if existing != nil {
val, err := metadecoders.Default.UnmarshalStringTo(valStr, existing)
if err != nil {
continue
}
if owner != nil {
owner[nestedKey] = val
} else {
v.Set(key, val)
}
} else {
v.Set(key, valStr)
}
}
}
}
modulesConfig, err := l.loadModulesConfig(v)
if err != nil {
return v, configFiles, err
}
mods, modulesConfigFiles, err := l.collectModules(modulesConfig, v)
if err != nil {
return v, configFiles, err
}
if err := loadLanguageSettings(v, nil); err != nil {
return v, configFiles, err
}

// Apply default project mounts.
if err := modules.ApplyProjectConfigDefaults(v, mods[len(mods)-1]); err != nil {
return v, configFiles, err
}
if len(modulesConfigFiles) > 0 {
configFiles = append(configFiles, modulesConfigFiles...)
}
return v, configFiles, nil
}
func loadLanguageSettings(cfg config.Provider, oldLangs langs.Languages) error {
_, err := langs.LoadLanguageSettings(cfg, oldLangs)
return err
}

type configLoader struct {
@ -334,145 +396,79 @@ func (l configLoader) loadConfigFromConfigDir(v *viper.Viper) ([]string, error)
return dirnames, nil
}
func (l configLoader) loadModulesConfig(v1 *viper.Viper) (modules.Config, error) {
modConfig, err := modules.DecodeConfig(v1)
if err != nil {
return modules.Config{}, err
}

return modConfig, nil
}
func (l configLoader) collectModules(modConfig modules.Config, v1 *viper.Viper) (modules.Modules, []string, error) {
workingDir := l.WorkingDir
if workingDir == "" {
workingDir = v1.GetString("workingDir")
}

themesDir := paths.AbsPathify(l.WorkingDir, v1.GetString("themesDir"))

ignoreVendor := v1.GetBool("ignoreVendor")

filecacheConfigs, err := filecache.DecodeConfig(l.Fs, v1)
if err != nil {
return nil, nil, err
}

v1.Set("filecacheConfigs", filecacheConfigs)

modulesClient := modules.NewClient(modules.ClientConfig{
Fs: l.Fs,
Logger: l.Logger,
WorkingDir: workingDir,
ThemesDir: themesDir,
CacheDir: filecacheConfigs.CacheDirModules(),
ModuleConfig: modConfig,
IgnoreVendor: ignoreVendor,
})

v1.Set("modulesClient", modulesClient)

moduleConfig, err := modulesClient.Collect()
if err != nil {
return nil, nil, err
}

// Avoid recreating these later.
v1.Set("allModules", moduleConfig.ActiveModules)

if len(moduleConfig.ActiveModules) == 0 {
return nil, nil, nil
}

var configFilenames []string
for _, tc := range moduleConfig.ActiveModules {
if tc.ConfigFilename() != "" {
if tc.Watch() {
configFilenames = append(configFilenames, tc.ConfigFilename())
}
if err := l.applyThemeConfig(v1, tc); err != nil {
return nil, nil, err
}
}
}

if moduleConfig.GoModulesFilename != "" {
// We want to watch this for changes and trigger rebuild on version
// changes etc.
configFilenames = append(configFilenames, moduleConfig.GoModulesFilename)
}

return moduleConfig.ActiveModules, configFilenames, nil
}
func (l configLoader) applyThemeConfig(v1 *viper.Viper, theme modules.Module) error {

const (
paramsKey = "params"
@ -480,22 +476,12 @@ func (l configLoader) applyThemeConfig(v1 *viper.Viper, theme paths.ThemeConfig)
menuKey = "menus"
)

v2 := theme.Cfg()

for _, key := range []string{paramsKey, "outputformats", "mediatypes"} {
l.mergeStringMapKeepLeft("", key, v1, v2)
}

// Only add params and new menu entries, we do not add language definitions.
if v1.IsSet(languagesKey) && v2.IsSet(languagesKey) {
v1Langs := v1.GetStringMap(languagesKey)
@ -508,12 +494,6 @@ func (l configLoader) applyThemeConfig(v1 *viper.Viper, theme paths.ThemeConfig)
if k == "" {
continue
}

langMenuKey := languagesKey + "." + k + "." + menuKey
if v2.IsSet(langMenuKey) {
@ -577,18 +557,23 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.RegisterAlias("indexes", "taxonomies") v.RegisterAlias("indexes", "taxonomies")
/*
TODO(bep) from 0.56 these are configured as module mounts.
v.SetDefault("contentDir", "content")
v.SetDefault("layoutDir", "layouts")
v.SetDefault("assetDir", "assets")
v.SetDefault("staticDir", "static")
v.SetDefault("dataDir", "data")
v.SetDefault("i18nDir", "i18n")
v.SetDefault("archetypeDir", "archetypes")
*/
v.SetDefault("cleanDestinationDir", false) v.SetDefault("cleanDestinationDir", false)
v.SetDefault("watch", false) v.SetDefault("watch", false)
v.SetDefault("metaDataFormat", "toml") v.SetDefault("metaDataFormat", "toml")
v.SetDefault("contentDir", "content")
v.SetDefault("layoutDir", "layouts")
v.SetDefault("assetDir", "assets")
v.SetDefault("staticDir", "static")
v.SetDefault("resourceDir", "resources") v.SetDefault("resourceDir", "resources")
v.SetDefault("archetypeDir", "archetypes")
v.SetDefault("publishDir", "public") v.SetDefault("publishDir", "public")
v.SetDefault("dataDir", "data")
v.SetDefault("i18nDir", "i18n")
v.SetDefault("themesDir", "themes") v.SetDefault("themesDir", "themes")
v.SetDefault("buildDrafts", false) v.SetDefault("buildDrafts", false)
v.SetDefault("buildFuture", false) v.SetDefault("buildFuture", false)
@ -635,5 +620,6 @@ func loadDefaultSettingsFor(v *viper.Viper) error {
v.SetDefault("disableFastRender", false) v.SetDefault("disableFastRender", false)
v.SetDefault("timeout", 10000) // 10 seconds v.SetDefault("timeout", 10000) // 10 seconds
v.SetDefault("enableInlineShortcodes", false) v.SetDefault("enableInlineShortcodes", false)
return nil return nil
} }

View file

@ -14,6 +14,9 @@
package hugolib

import (
"bytes"
"fmt"
"path/filepath"
"testing" "testing"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -40,10 +43,7 @@ func TestLoadConfig(t *testing.T) {
require.NoError(t, err)

assert.Equal("side", cfg.GetString("paginatePath"))
// default
assert.Equal("layouts", cfg.GetString("layoutDir"))
// no themes
assert.False(cfg.IsSet("allThemes"))
}

func TestLoadMultiConfig(t *testing.T) {
@ -188,11 +188,6 @@ map[string]interface {}{
"p1": "p1 main",
"p2": "p2 main",
"p3": "p3 theme",
"test-theme": map[string]interface {}{
"p1": "p1 theme",
"p2": "p2 theme",
"p3": "p3 theme",
},
"top": "top", "top": "top",
}`, got["params"]) }`, got["params"])
@ -257,10 +252,6 @@ map[string]interface {}{
"params": map[string]interface {}{ "params": map[string]interface {}{
"pl1": "p1-en-main", "pl1": "p1-en-main",
"pl2": "p2-en-theme", "pl2": "p2-en-theme",
"test-theme": map[string]interface {}{
"pl1": "p1-en-theme",
"pl2": "p2-en-theme",
},
},
},
"nb": map[string]interface {}{
@ -275,11 +266,6 @@ map[string]interface {}{
"params": map[string]interface {}{ "params": map[string]interface {}{
"pl1": "p1-nb-main", "pl1": "p1-nb-main",
"pl2": "p2-nb-theme", "pl2": "p2-nb-theme",
"test-theme": map[string]interface {}{
"pl1": "p1-nb-theme",
"pl2": "p2-nb-theme",
"top": "top-nb-theme",
},
},
},
}
@ -397,3 +383,142 @@ privacyEnhanced = true
assert.True(b.H.Sites[0].Info.Config().Privacy.YouTube.PrivacyEnhanced)
}
func TestLoadConfigModules(t *testing.T) {
t.Parallel()
assert := require.New(t)
// https://github.com/gohugoio/hugoThemes#themetoml
const (
// Before Hugo 0.56 each theme/component could have its own theme.toml
// with some settings, mostly used on the Hugo themes site.
// To preserve compatibility we read these files into the new "modules"
// section in config.toml.
o1t = `
name = "Component o1"
license = "MIT"
min_version = 0.38
`
// This is the component's config.toml, using the old theme syntax.
o1c = `
theme = ["n2"]
`
n1 = `
title = "Component n1"
[module]
description = "Component n1 description"
[module.hugoVersion]
min = "0.40.0"
max = "0.50.0"
extended = true
[[module.imports]]
path="o1"
[[module.imports]]
path="n3"
`
n2 = `
title = "Component n2"
`
n3 = `
title = "Component n3"
`
n4 = `
title = "Component n4"
`
)
b := newTestSitesBuilder(t)
writeThemeFiles := func(name, configTOML, themeTOML string) {
b.WithSourceFile(filepath.Join("themes", name, "data", "module.toml"), fmt.Sprintf("name=%q", name))
if configTOML != "" {
b.WithSourceFile(filepath.Join("themes", name, "config.toml"), configTOML)
}
if themeTOML != "" {
b.WithSourceFile(filepath.Join("themes", name, "theme.toml"), themeTOML)
}
}
writeThemeFiles("n1", n1, "")
writeThemeFiles("n2", n2, "")
writeThemeFiles("n3", n3, "")
writeThemeFiles("n4", n4, "")
writeThemeFiles("o1", o1c, o1t)
b.WithConfigFile("toml", `
[module]
[[module.imports]]
path="n1"
[[module.imports]]
path="n4"
`)
b.Build(BuildCfg{})
modulesClient := b.H.Paths.ModulesClient
var graphb bytes.Buffer
modulesClient.Graph(&graphb)
assert.Equal(`project n1
n1 o1
o1 n2
n1 n3
project n4
`, graphb.String())
}
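// Each line of the expected graph reads "owner dependency": the project
// imports n1 and n4 directly, n1 pulls in o1 and n3 via its [module.imports],
// and o1 brings in n2 through its legacy theme = ["n2"] setting.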
func TestLoadConfigWithOsEnvOverrides(t *testing.T) {
assert := require.New(t)
baseConfig := `
environment = "production"
enableGitInfo = true
intSlice = [5,7,9]
floatSlice = [3.14, 5.19]
stringSlice = ["a", "b"]
[imaging]
anchor = "smart"
quality = 75
resamplefilter = "CatmullRom"
`
b := newTestSitesBuilder(t).WithConfigFile("toml", baseConfig)
b.WithEnviron(
"HUGO_ENVIRONMENT", "test",
"HUGO_NEW", "new", // key not in config.toml
"HUGO_ENABLEGITINFO", "false",
"HUGO_IMAGING_ANCHOR", "top",
"HUGO_STRINGSLICE", `["c", "d"]`,
"HUGO_INTSLICE", `[5, 8, 9]`,
"HUGO_FLOATSLICE", `[5.32]`,
)
b.Build(BuildCfg{})
cfg := b.H.Cfg
assert.Equal("test", cfg.Get("environment"))
assert.Equal(false, cfg.GetBool("enablegitinfo"))
assert.Equal("new", cfg.Get("new"))
assert.Equal("top", cfg.Get("imaging.anchor"))
assert.Equal(int64(75), cfg.Get("imaging.quality"))
assert.Equal([]interface{}{"c", "d"}, cfg.Get("stringSlice"))
assert.Equal([]interface{}{5.32}, cfg.Get("floatSlice"))
assert.Equal([]interface{}{5, 8, 9}, cfg.Get("intSlice"))
}
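// Note how slice values are passed as literal arrays (HUGO_STRINGSLICE=["c", "d"])
// and unmarshalled onto the existing slice settings, while a key that does not
// exist in config.toml (HUGO_NEW) is simply set as a plain string.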

1
hugolib/data/hugo.toml Executable file
View file

@ -0,0 +1 @@
slogan = "Hugo Rocks!"

View file

@ -20,11 +20,7 @@ import (
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/deps"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -80,8 +76,6 @@ categories:
# Doc
`

disabledStr := "[]"

if len(disabled) > 0 {
@ -90,47 +84,41 @@ categories:
}

siteConfig := fmt.Sprintf(siteConfigTemplate, disabledStr)

b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)

b.WithTemplates(
"index.html", "Home|{{ .Title }}|{{ .Content }}",
"_default/single.html", "Single|{{ .Title }}|{{ .Content }}",
"_default/list.html", "List|{{ .Title }}|{{ .Content }}",
"_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
"layouts/404.html", "Page Not Found",
)

b.WithContent(
"sect/p1.md", fmt.Sprintf(pageTemplate, "P1", "- tag1"),
"categories/_index.md", newTestPage("Category Terms", "2017-01-01", 10),
"tags/tag1/_index.md", newTestPage("Tag1 List", "2017-01-01", 10),
)

b.Build(BuildCfg{})
h := b.H

require.Len(t, h.Sites, 1)

assertDisabledKinds(b, h.Sites[0], disabled...)
}
func assertDisabledKinds(b *sitesBuilder, s *Site, disabled ...string) {
assertDisabledKind(b,
func(isDisabled bool) bool {
if isDisabled {
return len(s.RegularPages()) == 0
}
return len(s.RegularPages()) > 0
}, disabled, page.KindPage, "public/sect/p1/index.html", "Single|P1")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindHome)
if isDisabled {
@ -138,7 +126,7 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
}
return p != nil
}, disabled, page.KindHome, "public/index.html", "Home")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindSection, "sect")
if isDisabled {
@ -146,7 +134,7 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
}
return p != nil
}, disabled, page.KindSection, "public/sect/index.html", "Sects")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomy, "tags", "tag1")
@ -156,7 +144,7 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
return p != nil
}, disabled, page.KindTaxonomy, "public/tags/tag1/index.html", "Tag1")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomyTerm, "tags")
if isDisabled {
@ -165,7 +153,7 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
return p != nil
}, disabled, page.KindTaxonomyTerm, "public/tags/index.html", "Tags")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomyTerm, "categories")
@ -175,7 +163,7 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
return p != nil
}, disabled, page.KindTaxonomyTerm, "public/categories/index.html", "Category Terms")
assertDisabledKind(b,
func(isDisabled bool) bool {
p := s.getPage(page.KindTaxonomy, "categories", "hugo")
if isDisabled {
@ -185,15 +173,15 @@ func assertDisabledKinds(th testHelper, s *Site, disabled ...string) {
}, disabled, page.KindTaxonomy, "public/categories/hugo/index.html", "Hugo")

// The below have no page in any collection.
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindRSS, "public/index.xml", "<link>")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindSitemap, "public/sitemap.xml", "sitemap")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kindRobotsTXT, "public/robots.txt", "User-agent")
assertDisabledKind(b, func(isDisabled bool) bool { return true }, disabled, kind404, "public/404.html", "Page Not Found")
}
func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []string, kind, path, matcher string) { func assertDisabledKind(b *sitesBuilder, kindAssert func(bool) bool, disabled []string, kind, path, matcher string) {
isDisabled := stringSliceContains(kind, disabled...) isDisabled := stringSliceContains(kind, disabled...)
require.True(th.T, kindAssert(isDisabled), fmt.Sprintf("%s: %t", kind, isDisabled)) require.True(b.T, kindAssert(isDisabled), fmt.Sprintf("%s: %t", kind, isDisabled))
if kind == kindRSS && !isDisabled { if kind == kindRSS && !isDisabled {
// If the home page is also disabled, there is no RSS to look for. // If the home page is also disabled, there is no RSS to look for.
@ -204,20 +192,11 @@ func assertDisabledKind(th testHelper, kindAssert func(bool) bool, disabled []st
if isDisabled { if isDisabled {
// Path should not exist // Path should not exist
fileExists, err := helpers.Exists(path, th.Fs.Destination) fileExists, err := helpers.Exists(path, b.Fs.Destination)
require.False(th.T, fileExists) require.False(b.T, fileExists)
require.NoError(th.T, err) require.NoError(b.T, err)
} else { } else {
th.assertFileContent(path, matcher) b.AssertFileContent(path, matcher)
} }
} }
func stringSliceContains(k string, values ...string) bool {
for _, v := range values {
if k == v {
return true
}
}
return false
}

View file

@ -16,82 +16,64 @@ package hugolib
import ( import (
"strings" "strings"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/hugofs/files"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/source" "github.com/gohugoio/hugo/source"
) )
// fileInfo implements the File and ReadableFile interface. // fileInfo implements the File and ReadableFile interface.
var ( var (
_ source.File = (*fileInfo)(nil) _ source.File = (*fileInfo)(nil)
_ source.ReadableFile = (*fileInfo)(nil)
_ pathLangFile = (*fileInfo)(nil)
) )
// A partial interface to prevent an ambiguous compiler error.
type basePather interface {
Filename() string
RealName() string
BaseDir() string
}
type fileInfo struct { type fileInfo struct {
bundleTp bundleDirType source.File
source.ReadableFile
basePather
overriddenLang string overriddenLang string
}
// Set if the content language for this file is disabled. func (fi *fileInfo) Open() (afero.File, error) {
disabled bool f, err := fi.FileInfo().Meta().Open()
if err != nil {
err = errors.Wrap(err, "fileInfo")
}
return f, err
} }
func (fi *fileInfo) Lang() string { func (fi *fileInfo) Lang() string {
if fi.overriddenLang != "" { if fi.overriddenLang != "" {
return fi.overriddenLang return fi.overriddenLang
} }
return fi.ReadableFile.Lang() return fi.File.Lang()
}
func (fi *fileInfo) Filename() string {
if fi == nil || fi.basePather == nil {
return ""
}
return fi.basePather.Filename()
} }
func (fi *fileInfo) String() string { func (fi *fileInfo) String() string {
if fi == nil || fi.ReadableFile == nil { if fi == nil || fi.File == nil {
return "" return ""
} }
return fi.Path() return fi.Path()
} }
func (fi *fileInfo) isOwner() bool { // TODO(bep) rename
return fi.bundleTp > bundleNot func newFileInfo(sp *source.SourceSpec, fi hugofs.FileMetaInfo) (*fileInfo, error) {
}
func IsContentFile(filename string) bool { baseFi, err := sp.NewFileInfo(fi)
return contentFileExtensionsSet[strings.TrimPrefix(helpers.Ext(filename), ".")] if err != nil {
} return nil, err
func (fi *fileInfo) isContentFile() bool {
return contentFileExtensionsSet[fi.Ext()]
}
func newFileInfo(sp *source.SourceSpec, baseDir, filename string, fi pathLangFileFi, tp bundleDirType) *fileInfo {
baseFi := sp.NewFileInfo(baseDir, filename, tp == bundleLeaf, fi)
f := &fileInfo{
bundleTp: tp,
ReadableFile: baseFi,
basePather: fi,
} }
lang := f.Lang() f := &fileInfo{
f.disabled = lang != "" && sp.DisabledLanguages[lang] File: baseFi,
}
return f return f, nil
} }
@ -108,7 +90,7 @@ const (
// Returns the given file name's bundle type and whether it is a content // Returns the given file name's bundle type and whether it is a content
// file or not. // file or not.
func classifyBundledFile(name string) (bundleDirType, bool) { func classifyBundledFile(name string) (bundleDirType, bool) {
if !IsContentFile(name) { if !files.IsContentFile(name) {
return bundleNot, false return bundleNot, false
} }
if strings.HasPrefix(name, "_index.") { if strings.HasPrefix(name, "_index.") {

View file

@ -22,6 +22,7 @@ import (
func TestFileInfo(t *testing.T) { func TestFileInfo(t *testing.T) {
t.Run("String", func(t *testing.T) { t.Run("String", func(t *testing.T) {
t.Parallel()
assert := require.New(t) assert := require.New(t)
fi := &fileInfo{} fi := &fileInfo{}
_, err := cast.ToStringE(fi) _, err := cast.ToStringE(fi)

View file

@ -16,27 +16,27 @@
package filesystems package filesystems
import ( import (
"errors" "io"
"os" "os"
"path"
"path/filepath" "path/filepath"
"strings" "strings"
"sync"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/hugofs/files"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/modules"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"fmt" "fmt"
"github.com/gohugoio/hugo/hugolib/paths" "github.com/gohugoio/hugo/hugolib/paths"
"github.com/gohugoio/hugo/langs"
"github.com/spf13/afero" "github.com/spf13/afero"
) )
// When we create a virtual filesystem with data and i18n bundles for the project and the themes,
// this is the name of the project's virtual root. It got its funky name to make it
// very unlikely that it collides with a theme name.
const projectVirtualFolder = "__h__project"
var filePathSeparator = string(filepath.Separator) var filePathSeparator = string(filepath.Separator)
// BaseFs contains the core base filesystems used by Hugo. The name "base" is used // BaseFs contains the core base filesystems used by Hugo. The name "base" is used
@ -51,16 +51,43 @@ type BaseFs struct {
// This usually maps to /my-project/public. // This usually maps to /my-project/public.
PublishFs afero.Fs PublishFs afero.Fs
themeFs afero.Fs theBigFs *filesystemsCollector
}
// TODO(bep) improve the "theme interaction" func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo {
AbsThemeDirs []string var dirs []hugofs.FileMetaInfo
for _, dir := range fs.AllDirs() {
if dir.Meta().Watch() {
dirs = append(dirs, dir)
}
}
return dirs
}
func (fs *BaseFs) AllDirs() []hugofs.FileMetaInfo {
var dirs []hugofs.FileMetaInfo
for _, dirSet := range [][]hugofs.FileMetaInfo{
fs.Archetypes.Dirs,
fs.I18n.Dirs,
fs.Data.Dirs,
fs.Content.Dirs,
fs.Assets.Dirs,
fs.Layouts.Dirs,
//fs.Resources.Dirs,
fs.StaticDirs,
} {
dirs = append(dirs, dirSet...)
}
return dirs
} }
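As a usage illustration (editorial, not part of the commit), the new WatchDirs accessor is what server code can iterate to decide which directories to watch. Only methods visible in this hunk are used; the helper name is hypothetical and it assumes "fmt" is imported:

// Hypothetical helper: list the directories a file watcher should track.
func printWatchDirs(fs *BaseFs) {
	for _, dir := range fs.WatchDirs() {
		fmt.Println("watching:", dir.Meta().Filename())
	}
}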
// RelContentDir tries to create a path relative to the content root from // RelContentDir tries to create a path relative to the content root from
// the given filename. The return value is the relative path. // the given filename. The return value is the relative path.
func (b *BaseFs) RelContentDir(filename string) string { func (b *BaseFs) RelContentDir(filename string) string {
for _, dirname := range b.SourceFilesystems.Content.Dirnames { for _, dir := range b.SourceFilesystems.Content.Dirs {
dirname := dir.Meta().Filename()
if strings.HasPrefix(filename, dirname) { if strings.HasPrefix(filename, dirname) {
rel := strings.TrimPrefix(filename, dirname) rel := strings.TrimPrefix(filename, dirname)
return strings.TrimPrefix(rel, filePathSeparator) return strings.TrimPrefix(rel, filePathSeparator)
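A quick worked example of the trimming above (editorial; the paths are hypothetical):

// With a content mount whose Meta().Filename() is "/my/work/content":
//
//   b.RelContentDir("/my/work/content/blog/post.md")  ->  "blog/post.md"
//
// Filenames matching no content directory fall through to the rest of the
// function, outside this hunk.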
@ -80,16 +107,22 @@ type SourceFilesystems struct {
Layouts *SourceFilesystem Layouts *SourceFilesystem
Archetypes *SourceFilesystem Archetypes *SourceFilesystem
Assets *SourceFilesystem Assets *SourceFilesystem
Resources *SourceFilesystem
// This is a unified read-only view of the project's and themes' workdir. // Writable filesystem on top of the project's resources directory,
Work *SourceFilesystem // with any sub module's resource fs layered below.
ResourcesCache afero.Fs
// The project folder.
Work afero.Fs
// When in multihost we have one static filesystem per language. The sync // When in multihost we have one static filesystem per language. The sync
// static files is currently done outside of the Hugo build (where there is // static files is currently done outside of the Hugo build (where there is
// a concept of a site per language). // a concept of a site per language).
// When in non-multihost mode there will be one entry in this map with a blank key. // When in non-multihost mode there will be one entry in this map with a blank key.
Static map[string]*SourceFilesystem Static map[string]*SourceFilesystem
// All the /static dirs (including themes/modules).
StaticDirs []hugofs.FileMetaInfo
} }
// A SourceFilesystem holds the filesystem for a given source type in Hugo (data, // A SourceFilesystem holds the filesystem for a given source type in Hugo (data,
@ -99,12 +132,9 @@ type SourceFilesystem struct {
// This is a virtual composite filesystem. It expects path relative to a context. // This is a virtual composite filesystem. It expects path relative to a context.
Fs afero.Fs Fs afero.Fs
// This is the base source filesystem. In real Hugo, this will be the OS filesystem. // This filesystem has separate root directories, starting from the project and down
// Use this if you need to resolve items in Dirnames below. // to the themes/modules.
SourceFs afero.Fs Dirs []hugofs.FileMetaInfo
// Dirnames is absolute filenames to the directories in this filesystem.
Dirnames []string
// When syncing a source folder to the target (e.g. /public), this may // When syncing a source folder to the target (e.g. /public), this may
// be set to publish into a subfolder. This is used for static syncing // be set to publish into a subfolder. This is used for static syncing
@ -207,7 +237,8 @@ func (s SourceFilesystems) MakeStaticPathRelative(filename string) string {
// MakePathRelative creates a relative path from the given filename. // MakePathRelative creates a relative path from the given filename.
// It will return an empty string if the filename is not a member of this filesystem. // It will return an empty string if the filename is not a member of this filesystem.
func (d *SourceFilesystem) MakePathRelative(filename string) string { func (d *SourceFilesystem) MakePathRelative(filename string) string {
for _, currentPath := range d.Dirnames { for _, dir := range d.Dirs {
currentPath := dir.(hugofs.FileMetaInfo).Meta().Filename()
if strings.HasPrefix(filename, currentPath) { if strings.HasPrefix(filename, currentPath) {
return strings.TrimPrefix(filename, currentPath) return strings.TrimPrefix(filename, currentPath)
} }
@ -220,8 +251,8 @@ func (d *SourceFilesystem) RealFilename(rel string) string {
if err != nil { if err != nil {
return rel return rel
} }
if realfi, ok := fi.(hugofs.RealFilenameInfo); ok { if realfi, ok := fi.(hugofs.FileMetaInfo); ok {
return realfi.RealFilename() return realfi.Meta().Filename()
} }
return rel return rel
@ -229,8 +260,8 @@ func (d *SourceFilesystem) RealFilename(rel string) string {
// Contains returns whether the given filename is a member of the current filesystem. // Contains returns whether the given filename is a member of the current filesystem.
func (d *SourceFilesystem) Contains(filename string) bool { func (d *SourceFilesystem) Contains(filename string) bool {
for _, dir := range d.Dirnames { for _, dir := range d.Dirs {
if strings.HasPrefix(filename, dir) { if strings.HasPrefix(filename, dir.Meta().Filename()) {
return true return true
} }
} }
@ -241,9 +272,12 @@ func (d *SourceFilesystem) Contains(filename string) bool {
// path. // path.
func (d *SourceFilesystem) RealDirs(from string) []string { func (d *SourceFilesystem) RealDirs(from string) []string {
var dirnames []string var dirnames []string
for _, dir := range d.Dirnames { for _, dir := range d.Dirs {
dirname := filepath.Join(dir, from) meta := dir.Meta()
if _, err := d.SourceFs.Stat(dirname); err == nil { dirname := filepath.Join(meta.Filename(), from)
_, err := meta.Fs().Stat(from)
if err == nil {
dirnames = append(dirnames, dirname) dirnames = append(dirnames, dirname)
} }
} }
@ -254,40 +288,18 @@ func (d *SourceFilesystem) RealDirs(from string) []string {
// the same across sites/languages. // the same across sites/languages.
func WithBaseFs(b *BaseFs) func(*BaseFs) error { func WithBaseFs(b *BaseFs) func(*BaseFs) error {
return func(bb *BaseFs) error { return func(bb *BaseFs) error {
bb.themeFs = b.themeFs bb.theBigFs = b.theBigFs
bb.AbsThemeDirs = b.AbsThemeDirs bb.SourceFilesystems = b.SourceFilesystems
return nil return nil
} }
} }
func newRealBase(base afero.Fs) afero.Fs {
return hugofs.NewBasePathRealFilenameFs(base.(*afero.BasePathFs))
}
// NewBase builds the filesystems used by Hugo given the paths and options provided. // NewBase builds the filesystems used by Hugo given the paths and options provided.
func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) { func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) {
fs := p.Fs fs := p.Fs
publishFs := afero.NewBasePathFs(fs.Destination, p.AbsPublishDir) publishFs := afero.NewBasePathFs(fs.Destination, p.AbsPublishDir)
contentFs, absContentDirs, err := createContentFs(fs.Source, p.WorkingDir, p.DefaultContentLanguage, p.Languages)
if err != nil {
return nil, err
}
// Make sure we don't have any overlapping content dirs. That will never work.
for i, d1 := range absContentDirs {
for j, d2 := range absContentDirs {
if i == j {
continue
}
if strings.HasPrefix(d1, d2) || strings.HasPrefix(d2, d1) {
return nil, fmt.Errorf("found overlapping content dirs (%q and %q)", d1, d2)
}
}
}
b := &BaseFs{ b := &BaseFs{
PublishFs: publishFs, PublishFs: publishFs,
} }
@ -298,463 +310,395 @@ func NewBase(p *paths.Paths, options ...func(*BaseFs) error) (*BaseFs, error) {
} }
} }
if b.theBigFs != nil && b.SourceFilesystems != nil {
return b, nil
}
builder := newSourceFilesystemsBuilder(p, b) builder := newSourceFilesystemsBuilder(p, b)
sourceFilesystems, err := builder.Build() sourceFilesystems, err := builder.Build()
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "build filesystems")
}
sourceFilesystems.Content = &SourceFilesystem{
SourceFs: fs.Source,
Fs: contentFs,
Dirnames: absContentDirs,
} }
b.SourceFilesystems = sourceFilesystems b.SourceFilesystems = sourceFilesystems
b.themeFs = builder.themeFs b.theBigFs = builder.theBigFs
b.AbsThemeDirs = builder.absThemeDirs
return b, nil return b, nil
} }
type sourceFilesystemsBuilder struct { type sourceFilesystemsBuilder struct {
p *paths.Paths p *paths.Paths
result *SourceFilesystems sourceFs afero.Fs
themeFs afero.Fs result *SourceFilesystems
hasTheme bool theBigFs *filesystemsCollector
absThemeDirs []string
} }
func newSourceFilesystemsBuilder(p *paths.Paths, b *BaseFs) *sourceFilesystemsBuilder { func newSourceFilesystemsBuilder(p *paths.Paths, b *BaseFs) *sourceFilesystemsBuilder {
return &sourceFilesystemsBuilder{p: p, themeFs: b.themeFs, absThemeDirs: b.AbsThemeDirs, result: &SourceFilesystems{}} sourceFs := hugofs.NewBaseFileDecorator(p.Fs.Source)
return &sourceFilesystemsBuilder{p: p, sourceFs: sourceFs, theBigFs: b.theBigFs, result: &SourceFilesystems{}}
} }
func (b *sourceFilesystemsBuilder) newSourceFilesystem(fs afero.Fs, dirs []hugofs.FileMetaInfo) *SourceFilesystem {
return &SourceFilesystem{
Fs: fs,
Dirs: dirs,
}
}
func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) { func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) {
if b.themeFs == nil && b.p.ThemeSet() {
themeFs, absThemeDirs, err := createThemesOverlayFs(b.p) if b.theBigFs == nil {
theBigFs, err := b.createMainOverlayFs(b.p)
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "create main fs")
} }
if themeFs == nil {
panic("createThemesFs returned nil") b.theBigFs = theBigFs
}
createView := func(componentID string) *SourceFilesystem {
if b.theBigFs == nil || b.theBigFs.overlayMounts == nil {
return b.newSourceFilesystem(hugofs.NoOpFs, nil)
} }
b.themeFs = themeFs
b.absThemeDirs = absThemeDirs dirs := b.theBigFs.overlayDirs[componentID]
return b.newSourceFilesystem(afero.NewBasePathFs(b.theBigFs.overlayMounts, componentID), dirs)
} }
b.hasTheme = len(b.absThemeDirs) > 0 b.theBigFs.finalizeDirs()
sfs, err := b.createRootMappingFs("dataDir", "data") b.result.Archetypes = createView(files.ComponentFolderArchetypes)
if err != nil { b.result.Layouts = createView(files.ComponentFolderLayouts)
return nil, err b.result.Assets = createView(files.ComponentFolderAssets)
} b.result.ResourcesCache = b.theBigFs.overlayResources
b.result.Data = sfs
sfs, err = b.createRootMappingFs("i18nDir", "i18n") // Data, i18n and content cannot use the overlay fs
if err != nil { dataDirs := b.theBigFs.overlayDirs[files.ComponentFolderData]
return nil, err dataFs, err := hugofs.NewSliceFs(dataDirs...)
}
b.result.I18n = sfs
sfs, err = b.createFs(false, true, "layoutDir", "layouts")
if err != nil {
return nil, err
}
b.result.Layouts = sfs
sfs, err = b.createFs(false, true, "archetypeDir", "archetypes")
if err != nil {
return nil, err
}
b.result.Archetypes = sfs
sfs, err = b.createFs(false, true, "assetDir", "assets")
if err != nil {
return nil, err
}
b.result.Assets = sfs
sfs, err = b.createFs(true, false, "resourceDir", "resources")
if err != nil { if err != nil {
return nil, err return nil, err
} }
b.result.Resources = sfs b.result.Data = b.newSourceFilesystem(dataFs, dataDirs)
sfs, err = b.createFs(false, true, "", "") i18nDirs := b.theBigFs.overlayDirs[files.ComponentFolderI18n]
i18nFs, err := hugofs.NewSliceFs(i18nDirs...)
if err != nil { if err != nil {
return nil, err return nil, err
} }
b.result.Work = sfs b.result.I18n = b.newSourceFilesystem(i18nFs, i18nDirs)
err = b.createStaticFs() contentDirs := b.theBigFs.overlayDirs[files.ComponentFolderContent]
contentBfs := afero.NewBasePathFs(b.theBigFs.overlayMountsContent, files.ComponentFolderContent)
contentFs, err := hugofs.NewLanguageFs(b.p.LanguagesDefaultFirst.AsOrdinalSet(), contentBfs)
if err != nil { if err != nil {
return nil, err return nil, errors.Wrap(err, "create content filesystem")
}
b.result.Content = b.newSourceFilesystem(contentFs, contentDirs)
b.result.Work = afero.NewReadOnlyFs(b.theBigFs.overlayFull)
// Create static filesystem(s)
ms := make(map[string]*SourceFilesystem)
b.result.Static = ms
b.result.StaticDirs = b.theBigFs.overlayDirs[files.ComponentFolderStatic]
if b.theBigFs.staticPerLanguage != nil {
// Multihost mode
for k, v := range b.theBigFs.staticPerLanguage {
sfs := b.newSourceFilesystem(v, b.result.StaticDirs)
sfs.PublishFolder = k
ms[k] = sfs
}
} else {
bfs := afero.NewBasePathFs(b.theBigFs.overlayMounts, files.ComponentFolderStatic)
ms[""] = b.newSourceFilesystem(bfs, b.result.StaticDirs)
} }
return b.result, nil return b.result, nil
} }
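For orientation, an editorial sketch (not in the commit): each component view built above is an ordinary afero filesystem, so a cross-module lookup is just an Open on the merged view. bfs stands in for the *BaseFs returned by NewBase:

// Editorial sketch: reading a layout through the merged overlay view.
f, err := bfs.Layouts.Fs.Open("index.html")
if err == nil {
	defer f.Close()
	// Resolves to the project's layouts/index.html if it exists,
	// otherwise to the first theme/module that provides one, because the
	// project is layered on top in createMainOverlayFs below.
}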
func (b *sourceFilesystemsBuilder) createFs( func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesystemsCollector, error) {
mkdir bool,
readOnly bool, var staticFsMap map[string]afero.Fs
dirKey, themeFolder string) (*SourceFilesystem, error) { if b.p.Cfg.GetBool("multihost") {
s := &SourceFilesystem{ staticFsMap = make(map[string]afero.Fs)
SourceFs: b.p.Fs.Source,
} }
if themeFolder == "" { collector := &filesystemsCollector{
themeFolder = filePathSeparator sourceProject: b.sourceFs,
sourceModules: hugofs.NewNoSymlinkFs(b.sourceFs),
overlayDirs: make(map[string][]hugofs.FileMetaInfo),
staticPerLanguage: staticFsMap,
} }
var dir string mods := p.AllModules
if dirKey != "" {
dir = b.p.Cfg.GetString(dirKey) if len(mods) == 0 {
if dir == "" { return collector, nil
return s, fmt.Errorf("config %q not set", dirKey) }
modsReversed := make([]mountsDescriptor, len(mods))
// The theme components are ordered from left to right.
// We need to reverse it to get the
// overlay logic below working as expected, with the project on top (last).
for i, mod := range mods {
dir := mod.Dir()
if i < len(mods)-1 {
i = len(mods) - 2 - i
}
isMainProject := mod.Owner() == nil
modsReversed[i] = mountsDescriptor{
mounts: mod.Mounts(),
dir: dir,
watch: mod.Watch(),
isMainProject: isMainProject,
} }
} }
var fs afero.Fs err := b.createOverlayFs(collector, modsReversed)
absDir := b.p.AbsPathify(dir) return collector, err
existsInSource := b.existsInSource(absDir)
if !existsInSource && mkdir {
// We really need this directory. Make it.
if err := b.p.Fs.Source.MkdirAll(absDir, 0777); err == nil {
existsInSource = true
}
}
if existsInSource {
fs = newRealBase(afero.NewBasePathFs(b.p.Fs.Source, absDir))
s.Dirnames = []string{absDir}
}
if b.hasTheme {
if !strings.HasPrefix(themeFolder, filePathSeparator) {
themeFolder = filePathSeparator + themeFolder
}
themeFolderFs := newRealBase(afero.NewBasePathFs(b.themeFs, themeFolder))
if fs == nil {
fs = themeFolderFs
} else {
fs = afero.NewCopyOnWriteFs(themeFolderFs, fs)
}
for _, absThemeDir := range b.absThemeDirs {
absThemeFolderDir := filepath.Join(absThemeDir, themeFolder)
if b.existsInSource(absThemeFolderDir) {
s.Dirnames = append(s.Dirnames, absThemeFolderDir)
}
}
}
if fs == nil {
s.Fs = hugofs.NoOpFs
} else if readOnly {
s.Fs = afero.NewReadOnlyFs(fs)
} else {
s.Fs = fs
}
return s, nil
} }
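The index remapping in createMainOverlayFs above is terse; here is a small worked example (editorial, assuming the project module is the last element of AllModules, as the test helper later in this diff suggests):

// mods:          [themeA, themeB, themeC, project]   // indices 0..3
// i < len-1  =>  i = len(mods) - 2 - i
//                0 -> 2, 1 -> 1, 2 -> 0; the project keeps index 3
// modsReversed:  [themeC, themeB, themeA, project]
//
// createOverlayFs then layers the descriptors in slice order, so the project
// ends up on top, matching the "project on top (last)" comment above.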
// Used for data, i18n -- we cannot use overlay filesystems for those, but we need func (b *sourceFilesystemsBuilder) isContentMount(mnt modules.Mount) bool {
// to keep a strict order. return strings.HasPrefix(mnt.Target, files.ComponentFolderContent)
func (b *sourceFilesystemsBuilder) createRootMappingFs(dirKey, themeFolder string) (*SourceFilesystem, error) {
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
}
projectDir := b.p.Cfg.GetString(dirKey)
if projectDir == "" {
return nil, fmt.Errorf("config %q not set", dirKey)
}
var fromTo []string
to := b.p.AbsPathify(projectDir)
if b.existsInSource(to) {
s.Dirnames = []string{to}
fromTo = []string{projectVirtualFolder, to}
}
for _, theme := range b.p.AllThemes {
to := b.p.AbsPathify(filepath.Join(b.p.ThemesDir, theme.Name, themeFolder))
if b.existsInSource(to) {
s.Dirnames = append(s.Dirnames, to)
from := theme
fromTo = append(fromTo, from.Name, to)
}
}
if len(fromTo) == 0 {
s.Fs = hugofs.NoOpFs
return s, nil
}
fs, err := hugofs.NewRootMappingFs(b.p.Fs.Source, fromTo...)
if err != nil {
return nil, err
}
s.Fs = afero.NewReadOnlyFs(fs)
return s, nil
} }
func (b *sourceFilesystemsBuilder) existsInSource(abspath string) bool { func (b *sourceFilesystemsBuilder) createModFs(
exists, _ := afero.Exists(b.p.Fs.Source, abspath) collector *filesystemsCollector,
return exists md mountsDescriptor) error {
}
func (b *sourceFilesystemsBuilder) createStaticFs() error { var (
isMultihost := b.p.Cfg.GetBool("multihost") fromTo []hugofs.RootMapping
ms := make(map[string]*SourceFilesystem) fromToContent []hugofs.RootMapping
b.result.Static = ms )
if isMultihost { absPathify := func(path string) string {
for _, l := range b.p.Languages { return paths.AbsPathify(md.dir, path)
s := &SourceFilesystem{
SourceFs: b.p.Fs.Source,
PublishFolder: l.Lang}
staticDirs := removeDuplicatesKeepRight(getStaticDirs(l))
if len(staticDirs) == 0 {
continue
}
for _, dir := range staticDirs {
absDir := b.p.AbsPathify(dir)
if !b.existsInSource(absDir) {
continue
}
s.Dirnames = append(s.Dirnames, absDir)
}
fs, err := createOverlayFs(b.p.Fs.Source, s.Dirnames)
if err != nil {
return err
}
if b.hasTheme {
themeFolder := "static"
fs = afero.NewCopyOnWriteFs(newRealBase(afero.NewBasePathFs(b.themeFs, themeFolder)), fs)
for _, absThemeDir := range b.absThemeDirs {
s.Dirnames = append(s.Dirnames, filepath.Join(absThemeDir, themeFolder))
}
}
s.Fs = fs
ms[l.Lang] = s
}
return nil
} }
s := &SourceFilesystem{ seen := make(map[string]bool)
SourceFs: b.p.Fs.Source,
}
var staticDirs []string var mounts []modules.Mount
for _, l := range b.p.Languages { OUTER:
staticDirs = append(staticDirs, getStaticDirs(l)...) for i, mount := range md.mounts {
} key := path.Join(mount.Lang, mount.Source, mount.Target)
if seen[key] {
staticDirs = removeDuplicatesKeepRight(staticDirs)
if len(staticDirs) == 0 {
return nil
}
for _, dir := range staticDirs {
absDir := b.p.AbsPathify(dir)
if !b.existsInSource(absDir) {
continue continue
} }
s.Dirnames = append(s.Dirnames, absDir) seen[key] = true
// Prevent overlapping mounts
for j, mount2 := range md.mounts {
if j == i || mount2.Target != mount.Target {
continue
}
source := mount.Source
if !strings.HasSuffix(source, filePathSeparator) {
source += filePathSeparator
}
if strings.HasPrefix(mount2.Source, source) {
continue OUTER
}
}
mounts = append(mounts, mount)
} }
fs, err := createOverlayFs(b.p.Fs.Source, s.Dirnames) for _, mount := range mounts {
mountWeight := 1
if md.isMainProject {
mountWeight++
}
rm := hugofs.RootMapping{
From: mount.Target,
To: absPathify(mount.Source),
Meta: hugofs.FileMeta{
"watch": md.watch,
"mountWeight": mountWeight,
},
}
isContentMount := b.isContentMount(mount)
lang := mount.Lang
if lang == "" && isContentMount {
lang = b.p.DefaultContentLanguage
}
rm.Meta["lang"] = lang
if isContentMount {
fromToContent = append(fromToContent, rm)
} else {
fromTo = append(fromTo, rm)
}
}
modBase := collector.sourceProject
if !md.isMainProject {
modBase = collector.sourceModules
}
rmfs, err := hugofs.NewRootMappingFs(modBase, fromTo...)
if err != nil {
return err
}
rmfsContent, err := hugofs.NewRootMappingFs(modBase, fromToContent...)
if err != nil { if err != nil {
return err return err
} }
if b.hasTheme { // We need to keep the ordered list of directories for watching and
themeFolder := "static" // some special merge operations (data, i18n).
fs = afero.NewCopyOnWriteFs(newRealBase(afero.NewBasePathFs(b.themeFs, themeFolder)), fs) collector.addDirs(rmfs)
for _, absThemeDir := range b.absThemeDirs { collector.addDirs(rmfsContent)
s.Dirnames = append(s.Dirnames, filepath.Join(absThemeDir, themeFolder))
if collector.staticPerLanguage != nil {
for _, l := range b.p.Languages {
lang := l.Lang
lfs := rmfs.Filter(func(rm hugofs.RootMapping) bool {
rlang := rm.Meta.Lang()
return rlang == "" || rlang == lang
})
bfs := afero.NewBasePathFs(lfs, files.ComponentFolderStatic)
sfs, found := collector.staticPerLanguage[lang]
if found {
collector.staticPerLanguage[lang] = afero.NewCopyOnWriteFs(sfs, bfs)
} else {
collector.staticPerLanguage[lang] = bfs
}
} }
} }
s.Fs = fs getResourcesDir := func() string {
ms[""] = s if md.isMainProject {
return b.p.AbsResourcesDir
}
return absPathify(files.FolderResources)
}
if collector.overlayMounts == nil {
collector.overlayMounts = rmfs
collector.overlayMountsContent = rmfsContent
collector.overlayFull = afero.NewBasePathFs(modBase, md.dir)
collector.overlayResources = afero.NewBasePathFs(modBase, getResourcesDir())
} else {
collector.overlayMounts = afero.NewCopyOnWriteFs(collector.overlayMounts, rmfs)
collector.overlayMountsContent = hugofs.NewLanguageCompositeFs(collector.overlayMountsContent, rmfsContent)
collector.overlayFull = afero.NewCopyOnWriteFs(collector.overlayFull, afero.NewBasePathFs(modBase, md.dir))
collector.overlayResources = afero.NewCopyOnWriteFs(collector.overlayResources, afero.NewBasePathFs(modBase, getResourcesDir()))
}
return nil return nil
} }
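To make the mapping concrete, a hedged sketch of what one [[module.imports.mounts]] entry becomes inside createModFs (editorial; the theme path is hypothetical, the field names are the ones in the hunk above):

// A module at /site/themes/a with:
//
//   [[mounts]]
//   source = "myacontent"
//   target = "content/blog"
//   lang   = "en"
//
// yields roughly:
//
//   hugofs.RootMapping{
//       From: "content/blog",              // mount.Target
//       To:   "/site/themes/a/myacontent", // absPathify(mount.Source)
//       Meta: hugofs.FileMeta{"watch": md.watch, "mountWeight": 1, "lang": "en"},
//   }
//
// and, because Target starts with the content component folder, it is added to
// fromToContent and ends up in overlayMountsContent rather than overlayMounts.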
func getStaticDirs(cfg config.Provider) []string { func printFs(fs afero.Fs, path string, w io.Writer) {
var staticDirs []string if fs == nil {
for i := -1; i <= 10; i++ { return
staticDirs = append(staticDirs, getStringOrStringSlice(cfg, "staticDir", i)...)
} }
return staticDirs afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
} if err != nil {
return err
func getStringOrStringSlice(cfg config.Provider, key string, id int) []string {
if id >= 0 {
key = fmt.Sprintf("%s%d", key, id)
}
return config.GetStringSlicePreserveString(cfg, key)
}
func createContentFs(fs afero.Fs,
workingDir,
defaultContentLanguage string,
languages langs.Languages) (afero.Fs, []string, error) {
var contentLanguages langs.Languages
var contentDirSeen = make(map[string]bool)
languageSet := make(map[string]bool)
// The default content language needs to be first.
for _, language := range languages {
if language.Lang == defaultContentLanguage {
contentLanguages = append(contentLanguages, language)
contentDirSeen[language.ContentDir] = true
} }
languageSet[language.Lang] = true if info.IsDir() {
} return nil
for _, language := range languages {
if contentDirSeen[language.ContentDir] {
continue
} }
if language.ContentDir == "" { var filename string
language.ContentDir = defaultContentLanguage if fim, ok := info.(hugofs.FileMetaInfo); ok {
filename = fim.Meta().Filename()
} }
contentDirSeen[language.ContentDir] = true fmt.Fprintf(w, " %q %q\n", path, filename)
contentLanguages = append(contentLanguages, language) return nil
})
}
type filesystemsCollector struct {
sourceProject afero.Fs // Source for project folders
sourceModules afero.Fs // Source for modules/themes
overlayMounts afero.Fs
overlayMountsContent afero.Fs
overlayFull afero.Fs
overlayResources afero.Fs
// Maps component type (layouts, static, content etc.) to an ordered list of
// directories representing the overlay filesystems above.
overlayDirs map[string][]hugofs.FileMetaInfo
// Set if in multihost mode
staticPerLanguage map[string]afero.Fs
finalizerInit sync.Once
}
func (c *filesystemsCollector) addDirs(rfs *hugofs.RootMappingFs) {
for _, componentFolder := range files.ComponentFolders {
dirs, err := rfs.Dirs(componentFolder)
if err == nil {
c.overlayDirs[componentFolder] = append(c.overlayDirs[componentFolder], dirs...)
}
} }
}
var absContentDirs []string func (c *filesystemsCollector) finalizeDirs() {
c.finalizerInit.Do(func() {
fs, err := createContentOverlayFs(fs, workingDir, contentLanguages, languageSet, &absContentDirs) // Order the directories from top to bottom (project, theme a, theme ...).
return fs, absContentDirs, err for _, dirs := range c.overlayDirs {
c.reverseFis(dirs)
}
})
} }
func createContentOverlayFs(source afero.Fs, func (c *filesystemsCollector) reverseFis(fis []hugofs.FileMetaInfo) {
workingDir string, for i := len(fis)/2 - 1; i >= 0; i-- {
languages langs.Languages, opp := len(fis) - 1 - i
languageSet map[string]bool, fis[i], fis[opp] = fis[opp], fis[i]
absContentDirs *[]string) (afero.Fs, error) { }
if len(languages) == 0 { }
return source, nil
type mountsDescriptor struct {
mounts []modules.Mount
dir string
watch bool // whether this is a candidate for watching in server mode.
isMainProject bool
}
func (b *sourceFilesystemsBuilder) createOverlayFs(collector *filesystemsCollector, mounts []mountsDescriptor) error {
if len(mounts) == 0 {
return nil
} }
language := languages[0] err := b.createModFs(collector, mounts[0])
contentDir := language.ContentDir
if contentDir == "" {
panic("missing contentDir")
}
absContentDir := paths.AbsPathify(workingDir, language.ContentDir)
if !strings.HasSuffix(absContentDir, paths.FilePathSeparator) {
absContentDir += paths.FilePathSeparator
}
// If root, remove the second '/'
if absContentDir == "//" {
absContentDir = paths.FilePathSeparator
}
if len(absContentDir) < 6 {
return nil, fmt.Errorf("invalid content dir %q: Path is too short", absContentDir)
}
*absContentDirs = append(*absContentDirs, absContentDir)
overlay := hugofs.NewLanguageFs(language.Lang, languageSet, afero.NewBasePathFs(source, absContentDir))
if len(languages) == 1 {
return overlay, nil
}
base, err := createContentOverlayFs(source, workingDir, languages[1:], languageSet, absContentDirs)
if err != nil { if err != nil {
return nil, err return err
} }
return hugofs.NewLanguageCompositeFs(base, overlay), nil if len(mounts) == 1 {
return nil
}
} return b.createOverlayFs(collector, mounts[1:])
func createThemesOverlayFs(p *paths.Paths) (afero.Fs, []string, error) {
themes := p.AllThemes
if len(themes) == 0 {
panic("AllThemes not set")
}
themesDir := p.AbsPathify(p.ThemesDir)
if themesDir == "" {
return nil, nil, errors.New("no themes dir set")
}
absPaths := make([]string, len(themes))
// The themes are ordered from left to right. We need to reverse it to get the
// overlay logic below working as expected.
for i := 0; i < len(themes); i++ {
absPaths[i] = filepath.Join(themesDir, themes[len(themes)-1-i].Name)
}
fs, err := createOverlayFs(p.Fs.Source, absPaths)
fs = hugofs.NewNoLstatFs(fs)
return fs, absPaths, err
}
func createOverlayFs(source afero.Fs, absPaths []string) (afero.Fs, error) {
if len(absPaths) == 0 {
return hugofs.NoOpFs, nil
}
if len(absPaths) == 1 {
return afero.NewReadOnlyFs(newRealBase(afero.NewBasePathFs(source, absPaths[0]))), nil
}
base := afero.NewReadOnlyFs(newRealBase(afero.NewBasePathFs(source, absPaths[0])))
overlay, err := createOverlayFs(source, absPaths[1:])
if err != nil {
return nil, err
}
return afero.NewCopyOnWriteFs(base, overlay), nil
}
func removeDuplicatesKeepRight(in []string) []string {
seen := make(map[string]bool)
var out []string
for i := len(in) - 1; i >= 0; i-- {
v := in[i]
if seen[v] {
continue
}
out = append([]string{v}, out...)
seen[v] = true
}
return out
} }

View file

@ -1,4 +1,4 @@
// Copyright 2018 The Hugo Authors. All rights reserved. // Copyright 2019 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,18 +18,59 @@ import (
"fmt" "fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings"
"testing" "testing"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/langs"
"github.com/spf13/afero" "github.com/spf13/afero"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugolib/paths" "github.com/gohugoio/hugo/hugolib/paths"
"github.com/gohugoio/hugo/modules"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func initConfig(fs afero.Fs, cfg config.Provider) error {
if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
return err
}
modConfig, err := modules.DecodeConfig(cfg)
if err != nil {
return err
}
workingDir := cfg.GetString("workingDir")
themesDir := cfg.GetString("themesDir")
if !filepath.IsAbs(themesDir) {
themesDir = filepath.Join(workingDir, themesDir)
}
modulesClient := modules.NewClient(modules.ClientConfig{
Fs: fs,
WorkingDir: workingDir,
ThemesDir: themesDir,
ModuleConfig: modConfig,
IgnoreVendor: true,
})
moduleConfig, err := modulesClient.Collect()
if err != nil {
return err
}
if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[len(moduleConfig.ActiveModules)-1]); err != nil {
return err
}
cfg.Set("allModules", moduleConfig.ActiveModules)
return nil
}
func TestNewBaseFs(t *testing.T) { func TestNewBaseFs(t *testing.T) {
assert := require.New(t) assert := require.New(t)
v := viper.New() v := viper.New()
@ -40,16 +81,21 @@ func TestNewBaseFs(t *testing.T) {
workingDir := filepath.FromSlash("/my/work") workingDir := filepath.FromSlash("/my/work")
v.Set("workingDir", workingDir) v.Set("workingDir", workingDir)
v.Set("contentDir", "content")
v.Set("themesDir", "themes") v.Set("themesDir", "themes")
v.Set("defaultContentLanguage", "en")
v.Set("theme", themes[:1]) v.Set("theme", themes[:1])
// Write some data to the themes // Write some data to the themes
for _, theme := range themes { for _, theme := range themes {
for _, dir := range []string{"i18n", "data", "archetypes", "layouts"} { for _, dir := range []string{"i18n", "data", "archetypes", "layouts"} {
base := filepath.Join(workingDir, "themes", theme, dir) base := filepath.Join(workingDir, "themes", theme, dir)
filename := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme)) filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme))
filenameOverlap := filepath.Join(base, "f3.txt")
fs.Source.Mkdir(base, 0755) fs.Source.Mkdir(base, 0755)
afero.WriteFile(fs.Source, filename, []byte(fmt.Sprintf("content:%s:%s", theme, dir)), 0755) content := []byte(fmt.Sprintf("content:%s:%s", theme, dir))
afero.WriteFile(fs.Source, filenameTheme, content, 0755)
afero.WriteFile(fs.Source, filenameOverlap, content, 0755)
} }
// Write some files to the root of the theme // Write some files to the root of the theme
base := filepath.Join(workingDir, "themes", theme) base := filepath.Join(workingDir, "themes", theme)
@ -73,6 +119,7 @@ theme = ["atheme"]
setConfigAndWriteSomeFilesTo(fs.Source, v, "resourceDir", "myrsesource", 10) setConfigAndWriteSomeFilesTo(fs.Source, v, "resourceDir", "myrsesource", 10)
v.Set("publishDir", "public") v.Set("publishDir", "public")
assert.NoError(initConfig(fs.Source, v))
p, err := paths.New(fs, v) p, err := paths.New(fs, v)
assert.NoError(err) assert.NoError(err)
@ -85,33 +132,26 @@ theme = ["atheme"]
assert.NoError(err) assert.NoError(err)
dirnames, err := root.Readdirnames(-1) dirnames, err := root.Readdirnames(-1)
assert.NoError(err) assert.NoError(err)
assert.Equal([]string{projectVirtualFolder, "btheme", "atheme"}, dirnames) assert.Equal([]string{"f1.txt", "f2.txt", "f3.txt", "f4.txt", "f3.txt", "theme-file-btheme.txt", "f3.txt", "theme-file-atheme.txt"}, dirnames)
ff, err := bfs.I18n.Fs.Open("myi18n")
assert.NoError(err)
_, err = ff.Readdirnames(-1)
assert.NoError(err)
root, err = bfs.Data.Fs.Open("") root, err = bfs.Data.Fs.Open("")
assert.NoError(err) assert.NoError(err)
dirnames, err = root.Readdirnames(-1) dirnames, err = root.Readdirnames(-1)
assert.NoError(err) assert.NoError(err)
assert.Equal([]string{projectVirtualFolder, "btheme", "atheme"}, dirnames) assert.Equal([]string{"f1.txt", "f2.txt", "f3.txt", "f4.txt", "f5.txt", "f6.txt", "f7.txt", "f3.txt", "theme-file-btheme.txt", "f3.txt", "theme-file-atheme.txt"}, dirnames)
ff, err = bfs.I18n.Fs.Open("mydata")
assert.NoError(err) //printFs(bfs.Work, "", os.Stdout)
_, err = ff.Readdirnames(-1)
assert.NoError(err) checkFileCount(bfs.Layouts.Fs, "", assert, 7)
checkFileCount(bfs.Content.Fs, "", assert, 3) checkFileCount(bfs.Content.Fs, "", assert, 3)
checkFileCount(bfs.I18n.Fs, "", assert, 6) // 4 + 2 themes checkFileCount(bfs.I18n.Fs, "", assert, 8) // 4 + 4 themes
checkFileCount(bfs.Layouts.Fs, "", assert, 7)
checkFileCount(bfs.Static[""].Fs, "", assert, 6) checkFileCount(bfs.Static[""].Fs, "", assert, 6)
checkFileCount(bfs.Data.Fs, "", assert, 9) // 7 + 2 themes checkFileCount(bfs.Data.Fs, "", assert, 11) // 7 + 4 themes
checkFileCount(bfs.Archetypes.Fs, "", assert, 10) // 8 + 2 themes checkFileCount(bfs.Archetypes.Fs, "", assert, 10) // 8 + 2 themes
checkFileCount(bfs.Assets.Fs, "", assert, 9) checkFileCount(bfs.Assets.Fs, "", assert, 9)
checkFileCount(bfs.Resources.Fs, "", assert, 10) checkFileCount(bfs.Work, "", assert, 82)
checkFileCount(bfs.Work.Fs, "", assert, 78)
assert.Equal([]string{filepath.FromSlash("/my/work/mydata"), filepath.FromSlash("/my/work/themes/btheme/data"), filepath.FromSlash("/my/work/themes/atheme/data")}, bfs.Data.Dirnames)
assert.True(bfs.IsData(filepath.Join(workingDir, "mydata", "file1.txt"))) assert.True(bfs.IsData(filepath.Join(workingDir, "mydata", "file1.txt")))
assert.True(bfs.IsI18n(filepath.Join(workingDir, "myi18n", "file1.txt"))) assert.True(bfs.IsI18n(filepath.Join(workingDir, "myi18n", "file1.txt")))
@ -125,13 +165,13 @@ theme = ["atheme"]
assert.Equal("file1.txt", rel) assert.Equal("file1.txt", rel)
// Check Work fs vs theme // Check Work fs vs theme
checkFileContent(bfs.Work.Fs, "file-root.txt", assert, "content-project") checkFileContent(bfs.Work, "file-root.txt", assert, "content-project")
checkFileContent(bfs.Work.Fs, "theme-root-atheme.txt", assert, "content:atheme") checkFileContent(bfs.Work, "theme-root-atheme.txt", assert, "content:atheme")
// https://github.com/gohugoio/hugo/issues/5318 // https://github.com/gohugoio/hugo/issues/5318
// Check both project and theme. // Check both project and theme.
for _, fs := range []afero.Fs{bfs.Archetypes.Fs, bfs.Layouts.Fs} { for _, fs := range []afero.Fs{bfs.Archetypes.Fs, bfs.Layouts.Fs} {
for _, filename := range []string{"/file1.txt", "/theme-file-atheme.txt"} { for _, filename := range []string{"/f1.txt", "/theme-file-atheme.txt"} {
filename = filepath.FromSlash(filename) filename = filepath.FromSlash(filename)
f, err := fs.Open(filename) f, err := fs.Open(filename)
assert.NoError(err) assert.NoError(err)
@ -153,6 +193,7 @@ func createConfig() *viper.Viper {
v.Set("assetDir", "myassets") v.Set("assetDir", "myassets")
v.Set("resourceDir", "resources") v.Set("resourceDir", "resources")
v.Set("publishDir", "public") v.Set("publishDir", "public")
v.Set("defaultContentLanguage", "en")
return v return v
} }
@ -161,17 +202,18 @@ func TestNewBaseFsEmpty(t *testing.T) {
assert := require.New(t) assert := require.New(t)
v := createConfig() v := createConfig()
fs := hugofs.NewMem(v) fs := hugofs.NewMem(v)
assert.NoError(initConfig(fs.Source, v))
p, err := paths.New(fs, v) p, err := paths.New(fs, v)
assert.NoError(err) assert.NoError(err)
bfs, err := NewBase(p) bfs, err := NewBase(p)
assert.NoError(err) assert.NoError(err)
assert.NotNil(bfs) assert.NotNil(bfs)
assert.Equal(hugofs.NoOpFs, bfs.Archetypes.Fs) assert.NotNil(bfs.Archetypes.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Layouts.Fs) assert.NotNil(bfs.Layouts.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Data.Fs) assert.NotNil(bfs.Data.Fs)
assert.Equal(hugofs.NoOpFs, bfs.Assets.Fs) assert.NotNil(bfs.I18n.Fs)
assert.Equal(hugofs.NoOpFs, bfs.I18n.Fs) assert.NotNil(bfs.Work)
assert.NotNil(bfs.Work.Fs)
assert.NotNil(bfs.Content.Fs) assert.NotNil(bfs.Content.Fs)
assert.NotNil(bfs.Static) assert.NotNil(bfs.Static)
} }
@ -217,11 +259,14 @@ func TestRealDirs(t *testing.T) {
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0755) afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "f2", "a1.js")), []byte("content"), 0755)
afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0755) afero.WriteFile(sfs, filepath.Join(filepath.Join(root, "myassets", "js", "a2.js")), []byte("content"), 0755)
assert.NoError(initConfig(fs.Source, v))
p, err := paths.New(fs, v) p, err := paths.New(fs, v)
assert.NoError(err) assert.NoError(err)
bfs, err := NewBase(p) bfs, err := NewBase(p)
assert.NoError(err) assert.NoError(err)
assert.NotNil(bfs) assert.NotNil(bfs)
checkFileCount(bfs.Assets.Fs, "", assert, 6) checkFileCount(bfs.Assets.Fs, "", assert, 6)
realDirs := bfs.Assets.RealDirs("scss") realDirs := bfs.Assets.RealDirs("scss")
@ -229,13 +274,7 @@ func TestRealDirs(t *testing.T) {
assert.Equal(filepath.Join(root, "myassets/scss"), realDirs[0]) assert.Equal(filepath.Join(root, "myassets/scss"), realDirs[0])
assert.Equal(filepath.Join(themesDir, "mytheme/assets/scss"), realDirs[len(realDirs)-1]) assert.Equal(filepath.Join(themesDir, "mytheme/assets/scss"), realDirs[len(realDirs)-1])
checkFileCount(bfs.Resources.Fs, "", assert, 3) assert.NotNil(bfs.theBigFs)
assert.NotNil(bfs.themeFs)
fi, b, err := bfs.themeFs.(afero.Lstater).LstatIfPossible(filepath.Join("resources", "t1.txt"))
assert.NoError(err)
assert.False(b)
assert.Equal("t1.txt", fi.Name())
} }
@ -245,20 +284,25 @@ func TestStaticFs(t *testing.T) {
workDir := "mywork" workDir := "mywork"
v.Set("workingDir", workDir) v.Set("workingDir", workDir)
v.Set("themesDir", "themes") v.Set("themesDir", "themes")
v.Set("theme", "t1") v.Set("theme", []string{"t1", "t2"})
fs := hugofs.NewMem(v) fs := hugofs.NewMem(v)
themeStaticDir := filepath.Join(workDir, "themes", "t1", "static") themeStaticDir := filepath.Join(workDir, "themes", "t1", "static")
themeStaticDir2 := filepath.Join(workDir, "themes", "t2", "static")
afero.WriteFile(fs.Source, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(workDir, "mystatic", "f1.txt"), []byte("Hugo Rocks!"), 0755)
afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755)
afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755)
afero.WriteFile(fs.Source, filepath.Join(themeStaticDir2, "f2.txt"), []byte("Hugo Themes Rocks in t2!"), 0755)
assert.NoError(initConfig(fs.Source, v))
p, err := paths.New(fs, v) p, err := paths.New(fs, v)
assert.NoError(err) assert.NoError(err)
bfs, err := NewBase(p) bfs, err := NewBase(p)
assert.NoError(err) assert.NoError(err)
sfs := bfs.StaticFs("en") sfs := bfs.StaticFs("en")
checkFileContent(sfs, "f1.txt", assert, "Hugo Rocks!") checkFileContent(sfs, "f1.txt", assert, "Hugo Rocks!")
checkFileContent(sfs, "f2.txt", assert, "Hugo Themes Still Rocks!") checkFileContent(sfs, "f2.txt", assert, "Hugo Themes Still Rocks!")
@ -272,21 +316,19 @@ func TestStaticFsMultiHost(t *testing.T) {
v.Set("workingDir", workDir) v.Set("workingDir", workDir)
v.Set("themesDir", "themes") v.Set("themesDir", "themes")
v.Set("theme", "t1") v.Set("theme", "t1")
v.Set("multihost", true) v.Set("defaultContentLanguage", "en")
vn := viper.New() langConfig := map[string]interface{}{
vn.Set("staticDir", "nn_static") "no": map[string]interface{}{
"staticDir": "static_no",
en := langs.NewLanguage("en", v) "baseURL": "https://example.org/no/",
no := langs.NewLanguage("no", v) },
no.Set("staticDir", "static_no") "en": map[string]interface{}{
"baseURL": "https://example.org/en/",
languages := langs.Languages{ },
en,
no,
} }
v.Set("languagesSorted", languages) v.Set("languages", langConfig)
fs := hugofs.NewMem(v) fs := hugofs.NewMem(v)
@ -298,6 +340,8 @@ func TestStaticFsMultiHost(t *testing.T) {
afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f1.txt"), []byte("Hugo Themes Rocks!"), 0755)
afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755) afero.WriteFile(fs.Source, filepath.Join(themeStaticDir, "f2.txt"), []byte("Hugo Themes Still Rocks!"), 0755)
assert.NoError(initConfig(fs.Source, v))
p, err := paths.New(fs, v) p, err := paths.New(fs, v)
assert.NoError(err) assert.NoError(err)
bfs, err := NewBase(p) bfs, err := NewBase(p)
@ -312,9 +356,9 @@ func TestStaticFsMultiHost(t *testing.T) {
} }
func checkFileCount(fs afero.Fs, dirname string, assert *require.Assertions, expected int) { func checkFileCount(fs afero.Fs, dirname string, assert *require.Assertions, expected int) {
count, _, err := countFileaAndGetDirs(fs, dirname) count, fnames, err := countFileaAndGetFilenames(fs, dirname)
assert.NoError(err) assert.NoError(err, fnames)
assert.Equal(expected, count) assert.Equal(expected, count, fnames)
} }
func checkFileContent(fs afero.Fs, filename string, assert *require.Assertions, expected ...string) { func checkFileContent(fs afero.Fs, filename string, assert *require.Assertions, expected ...string) {
@ -329,27 +373,38 @@ func checkFileContent(fs afero.Fs, filename string, assert *require.Assertions,
} }
} }
func countFileaAndGetDirs(fs afero.Fs, dirname string) (int, []string, error) { func countFileaAndGetFilenames(fs afero.Fs, dirname string) (int, []string, error) {
if fs == nil { if fs == nil {
return 0, nil, errors.New("no fs") return 0, nil, errors.New("no fs")
} }
counter := 0 counter := 0
var dirs []string var filenames []string
afero.Walk(fs, dirname, func(path string, info os.FileInfo, err error) error { wf := func(path string, info hugofs.FileMetaInfo, err error) error {
if info != nil { if err != nil {
if !info.IsDir() { return err
counter++ }
} else if info.Name() != "." { if !info.IsDir() {
dirs = append(dirs, filepath.Join(path, info.Name())) counter++
} }
if info.Name() != "." {
name := info.Name()
name = strings.Replace(name, filepath.FromSlash("/my/work"), "WORK_DIR", 1)
filenames = append(filenames, name)
} }
return nil return nil
}) }
return counter, dirs, nil w := hugofs.NewWalkway(hugofs.WalkwayConfig{Fs: fs, Root: dirname, WalkFn: wf})
if err := w.Walk(); err != nil {
return -1, nil, err
}
return counter, filenames, nil
} }
func setConfigAndWriteSomeFilesTo(fs afero.Fs, v *viper.Viper, key, val string, num int) { func setConfigAndWriteSomeFilesTo(fs afero.Fs, v *viper.Viper, key, val string, num int) {
@ -357,7 +412,7 @@ func setConfigAndWriteSomeFilesTo(fs afero.Fs, v *viper.Viper, key, val string,
v.Set(key, val) v.Set(key, val)
fs.Mkdir(val, 0755) fs.Mkdir(val, 0755)
for i := 0; i < num; i++ { for i := 0; i < num; i++ {
filename := filepath.Join(workingDir, val, fmt.Sprintf("file%d.txt", i+1)) filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1))
afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0755) afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0755)
} }
} }

View file

@ -0,0 +1,492 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"math/rand"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/gohugoio/hugo/common/loggers"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/testmodBuilder/mods"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
)
// TODO(bep) this fails when testmodBuilder is also building ...
func TestHugoModules(t *testing.T) {
t.Parallel()
if hugo.GoMinorVersion() < 12 {
// https://github.com/golang/go/issues/26794
// There were some concurrency issues with Go modules in Go < 1.12.
t.Skip("skip this for Go <= 1.11 due to a bug in Go's stdlib")
}
if testing.Short() {
t.Skip()
}
rnd := rand.New(rand.NewSource(time.Now().UnixNano()))
gooss := []string{"linux", "darwin", "windows"}
goos := gooss[rnd.Intn(len(gooss))]
ignoreVendor := rnd.Intn(2) == 0
testmods := mods.CreateModules(goos).Collect()
rnd.Shuffle(len(testmods), func(i, j int) { testmods[i], testmods[j] = testmods[j], testmods[i] })
for _, m := range testmods[:2] {
assert := require.New(t)
v := viper.New()
workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-modules-test")
assert.NoError(err)
defer clean()
configTemplate := `
baseURL = "https://example.com"
title = "My Modular Site"
workingDir = %q
theme = %q
ignoreVendor = %t
`
config := fmt.Sprintf(configTemplate, workingDir, m.Path(), ignoreVendor)
b := newTestSitesBuilder(t)
// Need to use OS fs for this.
b.Fs = hugofs.NewDefault(v)
b.WithWorkingDir(workingDir).WithConfigFile("toml", config)
b.WithContent("page.md", `
---
title: "Foo"
---
`)
b.WithTemplates("home.html", `
{{ $mod := .Site.Data.modinfo.module }}
Mod Name: {{ $mod.name }}
Mod Version: {{ $mod.version }}
----
{{ range $k, $v := .Site.Data.modinfo }}
- {{ $k }}: {{ range $kk, $vv := $v }}{{ $kk }}: {{ $vv }}|{{ end -}}
{{ end }}
`)
b.WithSourceFile("go.mod", `
module github.com/gohugoio/tests/testHugoModules
`)
b.Build(BuildCfg{})
// Verify that go.mod is autopopulated with all the modules in config.toml.
b.AssertFileContent("go.mod", m.Path())
b.AssertFileContent("public/index.html",
"Mod Name: "+m.Name(),
"Mod Version: v1.4.0")
b.AssertFileContent("public/index.html", createChildModMatchers(m, ignoreVendor, m.Vendor)...)
}
}
func createChildModMatchers(m *mods.Md, ignoreVendor, vendored bool) []string {
// Child dependencies are one minor version behind.
expectMinorVersion := 3
if !ignoreVendor && vendored {
// Vendored modules are stuck at v1.1.0.
expectMinorVersion = 1
}
expectVersion := fmt.Sprintf("v1.%d.0", expectMinorVersion)
var matchers []string
for _, mm := range m.Children {
matchers = append(
matchers,
fmt.Sprintf("%s: name: %s|version: %s", mm.Name(), mm.Name(), expectVersion))
matchers = append(matchers, createChildModMatchers(mm, ignoreVendor, vendored || mm.Vendor)...)
}
return matchers
}
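A short editorial example of the matcher strings the helper above produces (the module name is a placeholder; the real names come from testmodBuilder):

// With the parent at v1.4.0 (asserted in TestHugoModules above):
//
//   vendored chain, ignoreVendor=false:
//     "modchild: name: modchild|version: v1.1.0"
//
//   ignoreVendor=true, or nothing vendored up the chain:
//     "modchild: name: modchild|version: v1.3.0"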
func TestModulesWithContent(t *testing.T) {
t.Parallel()
b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
baseURL="https://example.org"
workingDir="/site"
defaultContentLanguage = "en"
[module]
[[module.imports]]
path="a"
[[module.imports.mounts]]
source="myacontent"
target="content/blog"
lang="en"
[[module.imports]]
path="b"
[[module.imports.mounts]]
source="mybcontent"
target="content/blog"
lang="nn"
[[module.imports]]
path="c"
[[module.imports]]
path="d"
[languages]
[languages.en]
title = "Title in English"
languageName = "English"
weight = 1
[languages.nn]
languageName = "Nynorsk"
weight = 2
title = "Tittel på nynorsk"
[languages.nb]
languageName = "Bokmål"
weight = 3
title = "Tittel på bokmål"
[languages.fr]
languageName = "French"
weight = 4
title = "French Title"
`)
b.WithTemplatesAdded("index.html", `
{{ range .Site.RegularPages }}
|{{ .Title }}|{{ .RelPermalink }}|{{ .Plain }}
{{ end }}
{{ $data := .Site.Data }}
Data Common: {{ $data.common.value }}
Data C: {{ $data.c.value }}
Data D: {{ $data.d.value }}
All Data: {{ $data }}
i18n hello: {{ i18n "hello" . }}
i18n theme: {{ i18n "theme" . }}
i18n theme2: {{ i18n "theme2" . }}
`)
content := func(id string) string {
return fmt.Sprintf(`---
title: Title %s
---
Content %s
`, id, id)
}
i18nContent := func(id, value string) string {
return fmt.Sprintf(`
[%s]
other = %q
`, id, value)
}
// Content files
b.WithSourceFile("themes/a/myacontent/page.md", content("theme-a-en"))
b.WithSourceFile("themes/b/mybcontent/page.md", content("theme-b-nn"))
b.WithSourceFile("themes/c/content/blog/c.md", content("theme-c-nn"))
// Data files
b.WithSourceFile("data/common.toml", `value="Project"`)
b.WithSourceFile("themes/c/data/common.toml", `value="Theme C"`)
b.WithSourceFile("themes/c/data/c.toml", `value="Hugo Rocks!"`)
b.WithSourceFile("themes/d/data/c.toml", `value="Hugo Rodcks!"`)
b.WithSourceFile("themes/d/data/d.toml", `value="Hugo Rodks!"`)
// i18n files
b.WithSourceFile("i18n/en.toml", i18nContent("hello", "Project"))
b.WithSourceFile("themes/c/en.toml", i18nContent("hello", "Theme C"))
b.WithSourceFile("themes/c/i18n/en.toml", i18nContent("theme", "Theme C"))
b.WithSourceFile("themes/d/i18n/en.toml", i18nContent("theme", "Theme D"))
b.WithSourceFile("themes/d/i18n/en.toml", i18nContent("theme2", "Theme2 D"))
// Static files
b.WithSourceFile("themes/c/static/hello.txt", `Hugo Rocks!"`)
b.Build(BuildCfg{})
b.AssertFileContent("public/index.html", "|Title theme-a-en|/blog/page/|Content theme-a-en")
b.AssertFileContent("public/nn/index.html", "|Title theme-b-nn|/nn/blog/page/|Content theme-b-nn")
// Data
b.AssertFileContent("public/index.html",
"Data Common: Project",
"Data C: Hugo Rocks!",
"Data D: Hugo Rodks!",
)
// i18n
b.AssertFileContent("public/index.html",
"i18n hello: Project",
"i18n theme: Theme C",
"i18n theme2: Theme2 D",
)
}
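An editorial summary of the merge precedence that the assertions above pin down, inferred from the fixtures in this test:

// data/common.toml  project ("Project") overrides themes/c ("Theme C")
// data/c.toml       themes/c ("Hugo Rocks!") overrides themes/d ("Hugo Rodcks!"),
//                   because c is imported before d
// i18n "theme"      themes/c ("Theme C") overrides themes/d ("Theme D")
// i18n "theme2"     only defined in themes/d, so "Theme2 D" is used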
func TestModulesIgnoreConfig(t *testing.T) {
b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
baseURL="https://example.org"
workingDir="/site"
[module]
[[module.imports]]
path="a"
ignoreConfig=true
`)
b.WithSourceFile("themes/a/config.toml", `
[params]
a = "Should Be Ignored!"
`)
b.WithTemplatesAdded("index.html", `Params: {{ .Site.Params }}`)
b.Build(BuildCfg{})
b.AssertFileContentFn("public/index.html", func(s string) bool {
return !strings.Contains(s, "Ignored")
})
}
func TestModulesDisabled(t *testing.T) {
b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
baseURL="https://example.org"
workingDir="/site"
[module]
[[module.imports]]
path="a"
[[module.imports]]
path="b"
disabled=true
`)
b.WithSourceFile("themes/a/config.toml", `
[params]
a = "A param"
`)
b.WithSourceFile("themes/b/config.toml", `
[params]
b = "B param"
`)
b.WithTemplatesAdded("index.html", `Params: {{ .Site.Params }}`)
b.Build(BuildCfg{})
b.AssertFileContentFn("public/index.html", func(s string) bool {
return strings.Contains(s, "A param") && !strings.Contains(s, "B param")
})
}
func TestModulesIncompatible(t *testing.T) {
b := newTestSitesBuilder(t).WithWorkingDir("/site").WithConfigFile("toml", `
baseURL="https://example.org"
workingDir="/site"
[module]
[[module.imports]]
path="ok"
[[module.imports]]
path="incompat1"
[[module.imports]]
path="incompat2"
`)
b.WithSourceFile("themes/ok/data/ok.toml", `title = "OK"`)
b.WithSourceFile("themes/incompat1/config.toml", `
[module]
[module.hugoVersion]
min = "0.33.2"
max = "0.45.0"
`)
// Old setup.
b.WithSourceFile("themes/incompat2/theme.toml", `
min_version = "5.0.0"
`)
logger := loggers.NewWarningLogger()
b.WithLogger(logger)
b.Build(BuildCfg{})
assert := require.New(t)
assert.Equal(uint64(2), logger.WarnCounter.Count())
}
func TestModulesSymlinks(t *testing.T) {
skipSymlink(t)
wd, _ := os.Getwd()
defer func() {
os.Chdir(wd)
}()
assert := require.New(t)
// We need to use the OS fs for this.
cfg := viper.New()
fs := hugofs.NewFrom(hugofs.Os, cfg)
workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-mod-sym")
assert.NoError(err)
defer clean()
const homeTemplate = `
Data: {{ .Site.Data }}
`
createDirsAndFiles := func(baseDir string) {
for _, dir := range files.ComponentFolders {
realDir := filepath.Join(baseDir, dir, "real")
assert.NoError(os.MkdirAll(realDir, 0777))
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(realDir, "data.toml"), []byte("[hello]\nother = \"hello\""), 0777))
}
assert.NoError(afero.WriteFile(fs.Source, filepath.Join(baseDir, "layouts", "index.html"), []byte(homeTemplate), 0777))
}
// Create project dirs and files.
createDirsAndFiles(workDir)
// Create one module inside the default themes folder.
themeDir := filepath.Join(workDir, "themes", "mymod")
createDirsAndFiles(themeDir)
createSymlinks := func(baseDir, id string) {
for _, dir := range files.ComponentFolders {
assert.NoError(os.Chdir(filepath.Join(baseDir, dir)))
assert.NoError(os.Symlink("real", fmt.Sprintf("realsym%s", id)))
assert.NoError(os.Chdir(filepath.Join(baseDir, dir, "real")))
assert.NoError(os.Symlink("data.toml", fmt.Sprintf(filepath.FromSlash("datasym%s.toml"), id)))
}
}
createSymlinks(workDir, "project")
createSymlinks(themeDir, "mod")
config := `
baseURL = "https://example.com"
theme="mymod"
defaultContentLanguage="nn"
defaultContentLanguageInSubDir=true
[languages]
[languages.nn]
weight = 1
[languages.en]
weight = 2
`
b := newTestSitesBuilder(t).WithNothingAdded().WithWorkingDir(workDir)
b.Fs = fs
b.WithConfigFile("toml", config)
assert.NoError(os.Chdir(workDir))
b.Build(BuildCfg{})
b.AssertFileContentFn(filepath.Join("public", "en", "index.html"), func(s string) bool {
// Symbolic links only followed in project. There should be WARNING logs.
return !strings.Contains(s, "symmod") && strings.Contains(s, "symproject")
})
bfs := b.H.BaseFs
for _, componentFs := range []afero.Fs{
bfs.Archetypes.Fs,
bfs.Content.Fs,
bfs.Data.Fs,
bfs.Assets.Fs,
bfs.Static[""].Fs,
bfs.I18n.Fs} {
for i, id := range []string{"mod", "project"} {
statCheck := func(fs afero.Fs, filename string) {
shouldFail := i == 0
_, err := fs.Stat(filepath.FromSlash(filename))
if err != nil {
if strings.HasSuffix(filename, "toml") && strings.Contains(err.Error(), "files not supported") {
// OK
return
}
}
if shouldFail {
assert.Error(err)
assert.Equal(hugofs.ErrPermissionSymlink, err)
} else {
assert.NoError(err)
}
}
statCheck(componentFs, fmt.Sprintf("realsym%s", id))
statCheck(componentFs, fmt.Sprintf("real/datasym%s.toml", id))
}
}
}
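
Note: the module tests above drive everything through the sitesBuilder helpers, so the shape of the [module] configuration they exercise (per-import path, ignoreConfig, disabled) is easy to miss. Below is a minimal, standalone sketch that decodes that shape with the BurntSushi TOML library instead of Hugo's own config loader; the moduleConfig type and its fields are invented for this illustration only.

package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// moduleConfig is a hypothetical, trimmed-down mirror of the import options
// the tests above exercise; it is not Hugo's own configuration type.
type moduleConfig struct {
	Module struct {
		Imports []struct {
			Path         string
			IgnoreConfig bool
			Disabled     bool
		}
	}
}

func main() {
	const cfg = `
[module]
[[module.imports]]
path = "a"
ignoreConfig = true
[[module.imports]]
path = "b"
disabled = true
`
	var mc moduleConfig
	if _, err := toml.Decode(cfg, &mc); err != nil {
		panic(err)
	}
	for _, imp := range mc.Module.Imports {
		fmt.Printf("%s ignoreConfig=%t disabled=%t\n", imp.Path, imp.IgnoreConfig, imp.Disabled)
	}
}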

View file

@@ -22,6 +22,8 @@ import (
 	"strings"
 	"sync"
 
+	radix "github.com/hashicorp/go-immutable-radix"
+
 	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/parser/metadecoders"
@@ -33,7 +35,6 @@ import (
 	"github.com/bep/gitmap"
 	"github.com/gohugoio/hugo/config"
-	"github.com/spf13/afero"
 	"github.com/gohugoio/hugo/publisher"
@@ -199,10 +200,11 @@ func (h *HugoSites) IsMultihost() bool {
 	return h != nil && h.multihost
 }
 
-func (h *HugoSites) LanguageSet() map[string]bool {
-	set := make(map[string]bool)
-	for _, s := range h.Sites {
-		set[s.language.Lang] = true
+// TODO(bep) consolidate
+func (h *HugoSites) LanguageSet() map[string]int {
+	set := make(map[string]int)
+	for i, s := range h.Sites {
+		set[s.language.Lang] = i
 	}
 	return set
 }
@@ -222,14 +224,6 @@ func (h *HugoSites) PrintProcessingStats(w io.Writer) {
 	helpers.ProcessingStatsTable(w, stats...)
 }
 
-func (h *HugoSites) langSite() map[string]*Site {
-	m := make(map[string]*Site)
-	for _, s := range h.Sites {
-		m[s.language.Lang] = s
-	}
-	return m
-}
-
 // GetContentPage finds a Page with content given the absolute filename.
 // Returns nil if none found.
 func (h *HugoSites) GetContentPage(filename string) page.Page {
@@ -265,7 +259,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 	langConfig, err := newMultiLingualFromSites(cfg.Cfg, sites...)
 	if err != nil {
-		return nil, err
+		return nil, errors.Wrap(err, "failed to create language config")
 	}
 
 	var contentChangeTracker *contentChangeMap
@@ -288,8 +282,11 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 	}
 
 	h.init.data.Add(func() (interface{}, error) {
-		err := h.loadData(h.PathSpec.BaseFs.Data.Fs)
-		return err, nil
+		err := h.loadData(h.PathSpec.BaseFs.Data.Dirs)
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to load data")
+		}
+		return nil, nil
 	})
 
 	h.init.translations.Add(func() (interface{}, error) {
@@ -303,7 +300,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 	h.init.gitInfo.Add(func() (interface{}, error) {
 		err := h.loadGitInfo()
-		return nil, err
+		if err != nil {
+			return nil, errors.Wrap(err, "failed to load Git info")
+		}
+		return nil, nil
 	})
 
 	for _, s := range sites {
@@ -311,7 +311,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 	}
 
 	if err := applyDeps(cfg, sites...); err != nil {
-		return nil, err
+		return nil, errors.Wrap(err, "add site dependencies")
 	}
 
 	h.Deps = sites[0].Deps
@@ -319,7 +319,12 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 	// Only needed in server mode.
 	// TODO(bep) clean up the running vs watching terms
 	if cfg.Running {
-		contentChangeTracker = &contentChangeMap{pathSpec: h.PathSpec, symContent: make(map[string]map[string]bool)}
+		contentChangeTracker = &contentChangeMap{
+			pathSpec:      h.PathSpec,
+			symContent:    make(map[string]map[string]bool),
+			leafBundles:   radix.New(),
+			branchBundles: make(map[string]bool),
+		}
 		h.ContentChanges = contentChangeTracker
 	}
@@ -371,7 +376,7 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
 		siteConfig, err := loadSiteConfig(s.language)
 		if err != nil {
-			return err
+			return errors.Wrap(err, "load site config")
 		}
 		s.siteConfigConfig = siteConfig
 		s.siteRefLinker, err = newSiteRefLinker(s.language, s)
@@ -388,17 +393,17 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
 			var err error
 			d, err = deps.New(cfg)
 			if err != nil {
-				return err
+				return errors.Wrap(err, "create deps")
 			}
 
 			d.OutputFormatsConfig = s.outputFormatsConfig
 
 			if err := onCreated(d); err != nil {
-				return err
+				return errors.Wrap(err, "on created")
 			}
 
 			if err = d.LoadResources(); err != nil {
-				return err
+				return errors.Wrap(err, "load resources")
 			}
 
 		} else {
@@ -418,7 +423,7 @@ func applyDeps(cfg deps.DepsCfg, sites ...*Site) error {
 func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
 	sites, err := createSitesFromConfig(cfg)
 	if err != nil {
-		return nil, err
+		return nil, errors.Wrap(err, "from config")
 	}
 	return newHugoSites(cfg, sites...)
 }
@@ -800,41 +805,45 @@ func (h *HugoSites) Pages() page.Pages {
 	return h.Sites[0].AllPages()
 }
 
-func (h *HugoSites) loadData(fs afero.Fs) (err error) {
-	spec := source.NewSourceSpec(h.PathSpec, fs)
-	fileSystem := spec.NewFilesystem("")
+func (h *HugoSites) loadData(fis []hugofs.FileMetaInfo) (err error) {
+	spec := source.NewSourceSpec(h.PathSpec, nil)
+
 	h.data = make(map[string]interface{})
-	for _, r := range fileSystem.Files() {
-		if err := h.handleDataFile(r); err != nil {
+	for _, fi := range fis {
+		fileSystem := spec.NewFilesystemFromFileMetaInfo(fi)
+		files, err := fileSystem.Files()
+		if err != nil {
 			return err
 		}
+		for _, r := range files {
+			if err := h.handleDataFile(r); err != nil {
+				return err
+			}
+		}
 	}
 
 	return
 }
 
-func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
+func (h *HugoSites) handleDataFile(r source.File) error {
 	var current map[string]interface{}
 
-	f, err := r.Open()
+	f, err := r.FileInfo().Meta().Open()
 	if err != nil {
-		return errors.Wrapf(err, "Failed to open data file %q:", r.LogicalName())
+		return errors.Wrapf(err, "data: failed to open %q:", r.LogicalName())
 	}
 	defer f.Close()
 
 	// Crawl in data tree to insert data
 	current = h.data
 	keyParts := strings.Split(r.Dir(), helpers.FilePathSeparator)
-	// The first path element is the virtual folder (typically theme name), which is
-	// not part of the key.
-	if len(keyParts) > 1 {
-		for _, key := range keyParts[1:] {
-			if key != "" {
-				if _, ok := current[key]; !ok {
-					current[key] = make(map[string]interface{})
-				}
-				current = current[key].(map[string]interface{})
+
+	for _, key := range keyParts {
+		if key != "" {
+			if _, ok := current[key]; !ok {
+				current[key] = make(map[string]interface{})
 			}
+			current = current[key].(map[string]interface{})
 		}
 	}
@@ -848,15 +857,10 @@ func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
 	}
 
 	// filepath.Walk walks the files in lexical order, '/' comes before '.'
-	// this warning could happen if
-	// 1. A theme uses the same key; the main data folder wins
-	// 2. A sub folder uses the same key: the sub folder wins
 	higherPrecedentData := current[r.BaseFileName()]
 
 	switch data.(type) {
 	case nil:
-		// hear the crickets?
 	case map[string]interface{}:
 
 		switch higherPrecedentData.(type) {
@@ -868,7 +872,11 @@ func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
 			higherPrecedentMap := higherPrecedentData.(map[string]interface{})
 			for key, value := range data.(map[string]interface{}) {
 				if _, exists := higherPrecedentMap[key]; exists {
-					h.Log.WARN.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
+					// this warning could happen if
+					// 1. A theme uses the same key; the main data folder wins
+					// 2. A sub folder uses the same key: the sub folder wins
+					// TODO(bep) figure out a way to detect 2) above and make that a WARN
+					h.Log.INFO.Printf("Data for key '%s' in path '%s' is overridden by higher precedence data already in the data tree", key, r.Path())
 				} else {
 					higherPrecedentMap[key] = value
 				}
@@ -896,12 +904,12 @@ func (h *HugoSites) handleDataFile(r source.ReadableFile) error {
 }
 
 func (h *HugoSites) errWithFileContext(err error, f source.File) error {
-	rfi, ok := f.FileInfo().(hugofs.RealFilenameInfo)
+	fim, ok := f.FileInfo().(hugofs.FileMetaInfo)
 	if !ok {
 		return err
 	}
 
-	realFilename := rfi.RealFilename()
+	realFilename := fim.Meta().Filename()
 
 	err, _ = herrors.WithFileContextForFile(
 		err,
@@ -913,8 +921,8 @@ func (h *HugoSites) errWithFileContext(err error, f source.File) error {
 	return err
 }
 
-func (h *HugoSites) readData(f source.ReadableFile) (interface{}, error) {
-	file, err := f.Open()
+func (h *HugoSites) readData(f source.File) (interface{}, error) {
+	file, err := f.FileInfo().Meta().Open()
 	if err != nil {
 		return nil, errors.Wrap(err, "readData: failed to open data file")
 	}
@@ -939,9 +947,14 @@ func (h *HugoSites) findPagesByShortcode(shortcode string) page.Pages {
 // Used in partial reloading to determine if the change is in a bundle.
 type contentChangeMap struct {
 	mu sync.RWMutex
-	branches []string
-	leafs    []string
+
+	// Holds directories with leaf bundles.
+	leafBundles    *radix.Tree
+	leafBundlesTxn *radix.Txn
+
+	// Holds directories with branch bundles.
+	branchBundles map[string]bool
 
 	pathSpec *helpers.PathSpec
@@ -950,9 +963,22 @@ type contentChangeMap struct {
 	// locations in /content -- which is really cool, but also means we have to
 	// go an extra mile to handle changes.
 	// This map is only used in watch mode.
-	// It maps either file to files or the real dir to a set of content directories where it is in use.
-	symContent   map[string]map[string]bool
+	// It maps either file to files or the real dir to a set of content directories
+	// where it is in use.
 	symContentMu sync.Mutex
+	symContent   map[string]map[string]bool
+}
+
+func (m *contentChangeMap) start() {
+	m.mu.Lock()
+	m.leafBundlesTxn = m.leafBundles.Txn()
+	m.mu.Unlock()
+}
+
+func (m *contentChangeMap) stop() {
+	m.mu.Lock()
+	m.leafBundles = m.leafBundlesTxn.Commit()
+	m.mu.Unlock()
 }
 
 func (m *contentChangeMap) add(filename string, tp bundleDirType) {
@@ -961,68 +987,63 @@ func (m *contentChangeMap) add(filename string, tp bundleDirType) {
 	dir = strings.TrimPrefix(dir, ".")
 	switch tp {
 	case bundleBranch:
-		m.branches = append(m.branches, dir)
+		m.branchBundles[dir] = true
 	case bundleLeaf:
-		m.leafs = append(m.leafs, dir)
+		m.leafBundlesTxn.Insert([]byte(dir), true)
 	default:
 		panic("invalid bundle type")
 	}
 	m.mu.Unlock()
 }
 
-// Track the addition of bundle dirs.
-func (m *contentChangeMap) handleBundles(b *bundleDirs) {
-	for _, bd := range b.bundles {
-		m.add(bd.fi.Path(), bd.tp)
-	}
-}
-
-// resolveAndRemove resolves the given filename to the root folder of a bundle, if relevant.
-// It also removes the entry from the map. It will be re-added again by the partial
-// build if it still is a bundle.
 func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bundleDirType) {
 	m.mu.RLock()
 	defer m.mu.RUnlock()
 
 	// Bundles share resources, so we need to start from the virtual root.
-	relPath := m.pathSpec.RelContentDir(filename)
-	dir, name := filepath.Split(relPath)
+	relFilename := m.pathSpec.RelContentDir(filename)
+	dir, name := filepath.Split(relFilename)
 	if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
 		dir += helpers.FilePathSeparator
 	}
 
+	if _, found := m.branchBundles[dir]; found {
+		delete(m.branchBundles, dir)
+		return dir, dir, bundleBranch
+	}
+
+	if key, _, found := m.leafBundles.Root().LongestPrefix([]byte(dir)); found {
+		m.leafBundlesTxn.Delete(key)
+		dir = string(key)
+		return dir, dir, bundleLeaf
+	}
+
 	fileTp, isContent := classifyBundledFile(name)
-
-	// This may be a member of a bundle. Start with branch bundles, the most specific.
-	if fileTp == bundleBranch || (fileTp == bundleNot && !isContent) {
-		for i, b := range m.branches {
-			if b == dir {
-				m.branches = append(m.branches[:i], m.branches[i+1:]...)
-				return dir, b, bundleBranch
-			}
-		}
-	}
-
-	// And finally the leaf bundles, which can contain anything.
-	for i, l := range m.leafs {
-		if strings.HasPrefix(dir, l) {
-			m.leafs = append(m.leafs[:i], m.leafs[i+1:]...)
-			return dir, l, bundleLeaf
-		}
-	}
-
 	if isContent && fileTp != bundleNot {
 		// A new bundle.
 		return dir, dir, fileTp
 	}
 
+	// Not part of any bundle
 	return dir, filename, bundleNot
 }
 
-func (m *contentChangeMap) addSymbolicLinkMapping(from, to string) {
+func (m *contentChangeMap) addSymbolicLinkMapping(fim hugofs.FileMetaInfo) {
+	meta := fim.Meta()
+	if !meta.IsSymlink() {
+		return
+	}
+
 	m.symContentMu.Lock()
+
+	from, to := meta.Filename(), meta.OriginalFilename()
+	if fim.IsDir() {
+		if !strings.HasSuffix(from, helpers.FilePathSeparator) {
+			from += helpers.FilePathSeparator
+		}
+	}
+
 	mm, found := m.symContent[from]
 	if !found {
 		mm = make(map[string]bool)
 		m.symContent[from] = mm
@@ -1044,5 +1065,6 @@ func (m *contentChangeMap) GetSymbolicLinkMappings(dir string) []string {
 	}
 	sort.Strings(dirs)
 	return dirs
 }
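
Note: the contentChangeMap changes above replace the old leafs slice with an immutable radix tree so a changed file deep inside a leaf bundle can be resolved to its bundle root with a longest-prefix lookup. Below is a standalone sketch of that lookup using the same github.com/hashicorp/go-immutable-radix package; the directory keys are made up for the example and this is not Hugo's own code.

package main

import (
	"fmt"

	radix "github.com/hashicorp/go-immutable-radix"
)

func main() {
	// Register two leaf-bundle directories inside a transaction,
	// mirroring what contentChangeMap.add does between start/stop.
	txn := radix.New().Txn()
	txn.Insert([]byte("blog/my-bundle/"), true)
	txn.Insert([]byte("docs/other-bundle/"), true)
	tree := txn.Commit()

	// A change to a nested bundle resource resolves to the owning bundle
	// root via a longest-prefix match, as in resolveAndRemove above.
	key, _, found := tree.Root().LongestPrefix([]byte("blog/my-bundle/images/logo.png"))
	fmt.Println(string(key), found) // prints: blog/my-bundle/ true
}

Using an immutable tree plus transactions keeps lookups cheap between rebuilds while the server batches bundle updates via the new start/stop calls.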

View file

@@ -22,7 +22,7 @@ import (
 	"github.com/gohugoio/hugo/output"
 
-	"errors"
+	"github.com/pkg/errors"
 
 	"github.com/fsnotify/fsnotify"
 	"github.com/gohugoio/hugo/helpers"
@@ -82,11 +82,11 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 	if len(events) > 0 {
 		// Rebuild
 		if err := h.initRebuild(conf); err != nil {
-			return err
+			return errors.Wrap(err, "initRebuild")
 		}
 	} else {
 		if err := h.initSites(conf); err != nil {
-			return err
+			return errors.Wrap(err, "initSites")
 		}
 	}
@@ -97,7 +97,7 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 	}
 
 	trace.WithRegion(ctx, "process", f)
 	if err != nil {
-		return err
+		return errors.Wrap(err, "process")
 	}
 
 	f = func() {
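
Note: most hunks in this file (and in hugo_sites.go above) swap a bare return err for errors.Wrap from github.com/pkg/errors so a failed build reports which phase broke. A small sketch of how the wrapped messages compose; the phase names here are just examples, not Hugo's actual call chain.

package main

import (
	"fmt"
	"os"

	"github.com/pkg/errors"
)

func initSites() error {
	// Pretend a lower-level step failed.
	return errors.Wrap(os.ErrNotExist, "initSites")
}

func main() {
	if err := initSites(); err != nil {
		// Each Wrap prefixes its message, so the error reads
		// outermost-to-innermost: "process: initSites: file does not exist".
		fmt.Println(errors.Wrap(err, "process"))
	}
}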

View file

@@ -40,7 +40,6 @@ func (t testSiteBuildErrorAsserter) assertErrorMessage(e1, e2 string) {
 }
 
 func TestSiteBuildErrors(t *testing.T) {
-	t.Parallel()
 	const (
 		yamlcontent = "yamlcontent"
@@ -226,7 +225,9 @@ func TestSiteBuildErrors(t *testing.T) {
 	}
 
 	for _, test := range tests {
+		test := test
 		t.Run(test.name, func(t *testing.T) {
+			t.Parallel()
 			assert := require.New(t)
 			errorAsserter := testSiteBuildErrorAsserter{
 				assert: assert,
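
Note: the added test := test line and the t.Parallel() moved into the subtest follow the standard pattern for parallel table-driven tests with the Go versions in use at the time of this commit: without rebinding the loop variable, every parallel subtest would observe the final row of the table. A generic sketch of the pattern, not tied to Hugo's test types:

package example

import "testing"

func TestTable(t *testing.T) {
	tests := []struct{ name string }{{"a"}, {"b"}, {"c"}}
	for _, test := range tests {
		test := test // rebind so each parallel subtest sees its own row
		t.Run(test.name, func(t *testing.T) {
			t.Parallel()
			if test.name == "" {
				t.Fatal("empty name")
			}
		})
	}
}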

View file

@@ -2,10 +2,10 @@ package hugolib
 
 import (
 	"fmt"
+	"os"
 	"strings"
 	"testing"
 
-	"os"
 	"path/filepath"
 	"time"
@@ -246,6 +246,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 	require.Equal(t, "en", enSite.language.Lang)
 
+	//dumpPages(enSite.RegularPages()...)
+
 	assert.Equal(5, len(enSite.RegularPages()))
 	assert.Equal(32, len(enSite.AllPages()))
@@ -447,7 +449,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 	require.NotNil(t, homeEn)
 	assert.Len(homeEn.Translations(), 3)
 
-	contentFs := b.H.BaseFs.Content.Fs
+	contentFs := b.H.Fs.Source
 
 	for i, this := range []struct {
 		preFunc    func(t *testing.T)
@@ -480,9 +482,9 @@ func TestMultiSitesRebuild(t *testing.T) {
 		},
 		{
 			func(t *testing.T) {
-				writeNewContentFile(t, contentFs, "new_en_1", "2016-07-31", "new1.en.md", -5)
-				writeNewContentFile(t, contentFs, "new_en_2", "1989-07-30", "new2.en.md", -10)
-				writeNewContentFile(t, contentFs, "new_fr_1", "2016-07-30", "new1.fr.md", 10)
+				writeNewContentFile(t, contentFs, "new_en_1", "2016-07-31", "content/new1.en.md", -5)
+				writeNewContentFile(t, contentFs, "new_en_2", "1989-07-30", "content/new2.en.md", -10)
+				writeNewContentFile(t, contentFs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10)
 			},
 			[]fsnotify.Event{
 				{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create},
@@ -503,7 +505,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 		},
 		{
 			func(t *testing.T) {
-				p := "sect/doc1.en.md"
+				p := "content/sect/doc1.en.md"
 				doc1 := readFileFromFs(t, contentFs, p)
 				doc1 += "CHANGED"
 				writeToFs(t, contentFs, p, doc1)
@@ -519,7 +521,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 		// Rename a file
 		{
 			func(t *testing.T) {
-				if err := contentFs.Rename("new1.en.md", "new1renamed.en.md"); err != nil {
+				if err := contentFs.Rename("content/new1.en.md", "content/new1renamed.en.md"); err != nil {
 					t.Fatalf("Rename failed: %s", err)
 				}
 			},
@@ -672,38 +674,6 @@ title = "Svenska"
 	}
 }
 
-func TestChangeDefaultLanguage(t *testing.T) {
-	t.Parallel()
-	assert := require.New(t)
-	b := newMultiSiteTestBuilder(t, "", "", map[string]interface{}{
-		"DefaultContentLanguage":         "fr",
-		"DefaultContentLanguageInSubdir": false,
-	})
-	b.CreateSites().Build(BuildCfg{})
-
-	b.AssertFileContent("public/sect/doc1/index.html", "Single", "Bonjour")
-	b.AssertFileContent("public/en/sect/doc2/index.html", "Single", "Hello")
-
-	// Switch language
-	b.WithNewConfigData(map[string]interface{}{
-		"DefaultContentLanguage":         "en",
-		"DefaultContentLanguageInSubdir": false,
-	})
-
-	assert.NoError(b.LoadConfig())
-	err := b.H.Build(BuildCfg{NewConfig: b.Cfg})
-	if err != nil {
-		t.Fatalf("Failed to rebuild sites: %s", err)
-	}
-
-	// Default language is now en, so that should now be the "root" language
-	b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Bonjour")
-	b.AssertFileContent("public/sect/doc2/index.html", "Single", "Hello")
-}
-
 // https://github.com/gohugoio/hugo/issues/4706
 func TestContentStressTest(t *testing.T) {
 	b := newTestSitesBuilder(t)
@@ -1261,16 +1231,19 @@ var multiSiteJSONConfigTemplate = `
 `
 
 func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {
+	t.Helper()
 	writeToFs(t, fs.Source, filename, content)
 }
 
 func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
+	t.Helper()
 	if err := afero.WriteFile(fs, filepath.FromSlash(filename), []byte(content), 0755); err != nil {
 		t.Fatalf("Failed to write file: %s", err)
 	}
 }
 
 func readDestination(t testing.TB, fs *hugofs.Fs, filename string) string {
+	t.Helper()
 	return readFileFromFs(t, fs.Destination, filename)
 }
@@ -1287,6 +1260,7 @@ func readSource(t *testing.T, fs *hugofs.Fs, filename string) string {
 }
 
 func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
+	t.Helper()
 	filename = filepath.Clean(filename)
 	b, err := afero.ReadFile(fs, filename)
 	if err != nil {
@@ -1309,7 +1283,7 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
 		}
 		helpers.PrintFs(fs, root, os.Stdout)
 
-		Fatalf(t, "Failed to read file: %s", err)
+		t.Fatalf("Failed to read file: %s", err)
 	}
 	return string(b)
 }
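
Note: the t.Helper() calls added to writeSource, writeToFs and the other helpers above make Go's testing package attribute failures to the test that called the helper rather than to the helper itself. A minimal sketch of the effect; the assertContains helper is invented for this illustration:

package example

import (
	"strings"
	"testing"
)

// assertContains reports failures at the caller's line because of t.Helper,
// which is what the helpers above gain from the added calls.
func assertContains(t testing.TB, s, substr string) {
	t.Helper()
	if !strings.Contains(s, substr) {
		t.Fatalf("%q does not contain %q", s, substr)
	}
}

func TestAssertContains(t *testing.T) {
	assertContains(t, "Hugo Rocks!", "Rocks")
}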

View file

@@ -1,268 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"os"
"path/filepath"
"testing"
"github.com/gohugoio/hugo/common/loggers"
)
func TestThemesGraph(t *testing.T) {
t.Parallel()
const (
themeStandalone = `
title = "Theme Standalone"
[params]
v1 = "v1s"
v2 = "v2s"
`
themeCyclic = `
title = "Theme Cyclic"
theme = "theme3"
[params]
v1 = "v1c"
v2 = "v2c"
`
theme1 = `
title = "Theme #1"
theme = "themeStandalone"
[params]
v2 = "v21"
`
theme2 = `
title = "Theme #2"
theme = "theme1"
[params]
v1 = "v12"
`
theme3 = `
title = "Theme #3"
theme = ["theme2", "themeStandalone", "themeCyclic"]
[params]
v1 = "v13"
v2 = "v24"
`
theme4 = `
title = "Theme #4"
theme = "theme3"
[params]
v1 = "v14"
v2 = "v24"
`
site1 = `
theme = "theme4"
[params]
v1 = "site"
`
site2 = `
theme = ["theme2", "themeStandalone"]
`
)
var (
testConfigs = []struct {
siteConfig string
// The name of theme somewhere in the middle to write custom key/files.
offset string
check func(b *sitesBuilder)
}{
{site1, "theme3", func(b *sitesBuilder) {
// site1: theme4 theme3 theme2 theme1 themeStandalone themeCyclic
// Check data
// theme3 should win the offset competition
b.AssertFileContent("public/index.html", "theme1o::[offset][v]theme3", "theme4o::[offset][v]theme3", "themeStandaloneo::[offset][v]theme3")
b.AssertFileContent("public/index.html", "nproject::[inner][other]project|[project][other]project|[theme][other]theme4|[theme1][other]theme1")
b.AssertFileContent("public/index.html", "ntheme::[inner][other]theme4|[theme][other]theme4|[theme1][other]theme1|[theme2][other]theme2|[theme3][other]theme3")
b.AssertFileContent("public/index.html", "theme1::[inner][other]project|[project][other]project|[theme][other]theme1|[theme1][other]theme1|")
b.AssertFileContent("public/index.html", "theme4::[inner][other]project|[project][other]project|[theme][other]theme4|[theme4][other]theme4|")
// Check layouts
b.AssertFileContent("public/index.html", "partial ntheme: theme4", "partial theme2o: theme3")
// Check i18n
b.AssertFileContent("public/index.html", "i18n: project theme4")
// Check static files
// TODO(bep) static files not currently part of the build b.AssertFileContent("public/nproject.txt", "TODO")
// Check site params
b.AssertFileContent("public/index.html", "v1::site", "v2::v24")
}},
{site2, "", func(b *sitesBuilder) {
// site2: theme2 theme1 themeStandalone
b.AssertFileContent("public/index.html", "nproject::[inner][other]project|[project][other]project|[theme][other]theme2|[theme1][other]theme1|[theme2][other]theme2|[themeStandalone][other]themeStandalone|")
b.AssertFileContent("public/index.html", "ntheme::[inner][other]theme2|[theme][other]theme2|[theme1][other]theme1|[theme2][other]theme2|[themeStandalone][other]themeStandalone|")
b.AssertFileContent("public/index.html", "i18n: project theme2")
b.AssertFileContent("public/index.html", "partial ntheme: theme2")
// Params only set in themes
b.AssertFileContent("public/index.html", "v1::v12", "v2::v21")
}},
}
themeConfigs = []struct {
name string
config string
}{
{"themeStandalone", themeStandalone},
{"themeCyclic", themeCyclic},
{"theme1", theme1},
{"theme2", theme2},
{"theme3", theme3},
{"theme4", theme4},
}
)
for i, testConfig := range testConfigs {
t.Log(fmt.Sprintf("Test %d", i))
b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
b.WithConfigFile("toml", testConfig.siteConfig)
for _, tc := range themeConfigs {
var variationsNameBase = []string{"nproject", "ntheme", tc.name}
themeRoot := filepath.Join("themes", tc.name)
b.WithSourceFile(filepath.Join(themeRoot, "config.toml"), tc.config)
b.WithSourceFile(filepath.Join("layouts", "partials", "m.html"), `{{- range $k, $v := . }}{{ $k }}::{{ template "printv" $v }}
{{ end }}
{{ define "printv" }}
{{- $tp := printf "%T" . -}}
{{- if (strings.HasSuffix $tp "map[string]interface {}") -}}
{{- range $k, $v := . }}[{{ $k }}]{{ template "printv" $v }}{{ end -}}
{{- else -}}
{{- . }}|
{{- end -}}
{{ end }}
`)
for _, nameVariaton := range variationsNameBase {
roots := []string{"", themeRoot}
for _, root := range roots {
name := tc.name
if root == "" {
name = "project"
}
if nameVariaton == "ntheme" && name == "project" {
continue
}
// static
b.WithSourceFile(filepath.Join(root, "static", nameVariaton+".txt"), name)
// layouts
if i == 1 {
b.WithSourceFile(filepath.Join(root, "layouts", "partials", "theme2o.html"), "Not Set")
}
b.WithSourceFile(filepath.Join(root, "layouts", "partials", nameVariaton+".html"), name)
if root != "" && testConfig.offset == tc.name {
for _, tc2 := range themeConfigs {
b.WithSourceFile(filepath.Join(root, "layouts", "partials", tc2.name+"o.html"), name)
}
}
// i18n + data
var dataContent string
if root == "" {
dataContent = fmt.Sprintf(`
[%s]
other = %q
[inner]
other = %q
`, name, name, name)
} else {
dataContent = fmt.Sprintf(`
[%s]
other = %q
[inner]
other = %q
[theme]
other = %q
`, name, name, name, name)
}
b.WithSourceFile(filepath.Join(root, "data", nameVariaton+".toml"), dataContent)
b.WithSourceFile(filepath.Join(root, "i18n", "en.toml"), dataContent)
// If an offset is set, duplicate a data key with a winner in the middle.
if root != "" && testConfig.offset == tc.name {
for _, tc2 := range themeConfigs {
dataContent := fmt.Sprintf(`
[offset]
v = %q
`, tc.name)
b.WithSourceFile(filepath.Join(root, "data", tc2.name+"o.toml"), dataContent)
}
}
}
}
}
for _, themeConfig := range themeConfigs {
b.WithSourceFile(filepath.Join("themes", "config.toml"), themeConfig.config)
}
b.WithContent(filepath.Join("content", "page.md"), `---
title: "Page"
---
`)
homeTpl := `
data: {{ partial "m" .Site.Data }}
i18n: {{ i18n "inner" }} {{ i18n "theme" }}
partial ntheme: {{ partial "ntheme" . }}
partial theme2o: {{ partial "theme2o" . }}
params: {{ partial "m" .Site.Params }}
`
b.WithTemplates(filepath.Join("layouts", "home.html"), homeTpl)
b.Build(BuildCfg{})
var _ = os.Stdout
// printFs(b.H.Deps.BaseFs.LayoutsFs, "", os.Stdout)
testConfig.check(b)
}
}

View file

@@ -211,7 +211,11 @@ Content.
 	_ = os.Stdout
 
-	b.Build(BuildCfg{})
+	err := b.BuildE(BuildCfg{})
+
+	//dumpPages(b.H.Sites[1].RegularPages()...)
+
+	assert.NoError(err)
 
 	assert.Equal(3, len(b.H.Sites))
@@ -222,7 +226,8 @@ Content.
 	b.AssertFileContent("/my/project/public/en/mystatic/file1.yaml", "en")
 	b.AssertFileContent("/my/project/public/nn/mystatic/file1.yaml", "nn")
 
-	//dumpPages(nnSite.RegularPages...)
+	//dumpPages(nnSite.RegularPages()...)
+
 	assert.Equal(12, len(nnSite.RegularPages()))
 	assert.Equal(13, len(enSite.RegularPages()))
@@ -281,9 +286,9 @@ Content.
 	assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink())
 	assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink())
 
-	assert.Equal(4, len(bundleEn.Resources()))
 	assert.Equal(4, len(bundleNn.Resources()))
 	assert.Equal(4, len(bundleSv.Resources()))
+	assert.Equal(4, len(bundleEn.Resources()))
 
 	b.AssertFileContent("/my/project/public/en/sect/mybundle/index.html", "image/png: /en/sect/mybundle/logo.png")
 	b.AssertFileContent("/my/project/public/nn/sect/mybundle/index.html", "image/png: /nn/sect/mybundle/logo.png")

View file

@@ -18,8 +18,6 @@ import (
 	"fmt"
 
-	"github.com/spf13/afero"
-
 	"github.com/stretchr/testify/require"
 )
@@ -45,11 +43,10 @@ title = "Section Menu"
 sectionPagesMenu = "sect"
 `
 
-	th, h := newTestSitesFromConfig(
-		t,
-		afero.NewMemMapFs(),
-		siteConfig,
-		"layouts/partials/menu.html",
+	b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
+
+	b.WithTemplates(
+		"partials/menu.html",
 		`{{- $p := .page -}}
 {{- $m := .menu -}}
 {{ range (index $p.Site.Menus $m) -}}
@@ -58,28 +55,25 @@ sectionPagesMenu = "sect"
 {{- if $p.HasMenuCurrent $m . }}HasMenuCurrent{{ else }}-{{ end -}}|
 {{- end -}}
 `,
-		"layouts/_default/single.html",
+		"_default/single.html",
 		`Single|{{ .Title }}
 Menu Sect: {{ partial "menu.html" (dict "page" . "menu" "sect") }}
 Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
-		"layouts/_default/list.html", "List|{{ .Title }}|{{ .Content }}",
+		"_default/list.html", "List|{{ .Title }}|{{ .Content }}",
 	)
 
-	require.Len(t, h.Sites, 1)
-
-	fs := th.Fs
-
-	writeSource(t, fs, "content/sect1/p1.md", fmt.Sprintf(menuPageTemplate, "p1", 1, "main", "atitle1", 40))
-	writeSource(t, fs, "content/sect1/p2.md", fmt.Sprintf(menuPageTemplate, "p2", 2, "main", "atitle2", 30))
-	writeSource(t, fs, "content/sect2/p3.md", fmt.Sprintf(menuPageTemplate, "p3", 3, "main", "atitle3", 20))
-	writeSource(t, fs, "content/sect2/p4.md", fmt.Sprintf(menuPageTemplate, "p4", 4, "main", "atitle4", 10))
-	writeSource(t, fs, "content/sect3/p5.md", fmt.Sprintf(menuPageTemplate, "p5", 5, "main", "atitle5", 5))
-	writeNewContentFile(t, fs.Source, "Section One", "2017-01-01", "content/sect1/_index.md", 100)
-	writeNewContentFile(t, fs.Source, "Section Five", "2017-01-01", "content/sect5/_index.md", 10)
-
-	err := h.Build(BuildCfg{})
-	require.NoError(t, err)
+	b.WithContent(
+		"sect1/p1.md", fmt.Sprintf(menuPageTemplate, "p1", 1, "main", "atitle1", 40),
+		"sect1/p2.md", fmt.Sprintf(menuPageTemplate, "p2", 2, "main", "atitle2", 30),
+		"sect2/p3.md", fmt.Sprintf(menuPageTemplate, "p3", 3, "main", "atitle3", 20),
+		"sect2/p4.md", fmt.Sprintf(menuPageTemplate, "p4", 4, "main", "atitle4", 10),
+		"sect3/p5.md", fmt.Sprintf(menuPageTemplate, "p5", 5, "main", "atitle5", 5),
+		"sect1/_index.md", newTestPage("Section One", "2017-01-01", 100),
+		"sect5/_index.md", newTestPage("Section Five", "2017-01-01", 10),
+	)
+
+	b.Build(BuildCfg{})
+	h := b.H
 
 	s := h.Sites[0]
@@ -90,7 +84,7 @@ Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
 	// There is only one menu in the page, but it is "member of" 2
 	require.Len(t, p1, 1)
 
-	th.assertFileContent("public/sect1/p1/index.html", "Single",
+	b.AssertFileContent("public/sect1/p1/index.html", "Single",
 		"Menu Sect: "+
 			"/sect5/|Section Five|Section Five|10|-|-|"+
 			"/sect1/|Section One|Section One|100|-|HasMenuCurrent|"+
@@ -104,7 +98,7 @@ Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
 			"/sect1/p1/|p1|atitle1|40|IsMenuCurrent|-|",
 	)
 
-	th.assertFileContent("public/sect2/p3/index.html", "Single",
+	b.AssertFileContent("public/sect2/p3/index.html", "Single",
 		"Menu Sect: "+
 			"/sect5/|Section Five|Section Five|10|-|-|"+
 			"/sect1/|Section One|Section One|100|-|-|"+

View file

@@ -16,17 +16,11 @@ package hugolib
 import (
 	"sync"
 
-	"github.com/gohugoio/hugo/common/maps"
-
-	"sort"
-
 	"errors"
-	"fmt"
 
 	"github.com/gohugoio/hugo/langs"
 
 	"github.com/gohugoio/hugo/config"
-	"github.com/spf13/cast"
 )
 
 // Multilingual manages the all languages used in a multilingual site.
@@ -88,53 +82,3 @@ func (s *Site) multilingualEnabled() bool {
 	}
 	return s.h.multilingual != nil && s.h.multilingual.enabled()
 }
func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (langs.Languages, error) {
languages := make(langs.Languages, len(l))
i := 0
for lang, langConf := range l {
langsMap, err := cast.ToStringMapE(langConf)
if err != nil {
return nil, fmt.Errorf("Language config is not a map: %T", langConf)
}
language := langs.NewLanguage(lang, cfg)
for loki, v := range langsMap {
switch loki {
case "title":
language.Title = cast.ToString(v)
case "languagename":
language.LanguageName = cast.ToString(v)
case "weight":
language.Weight = cast.ToInt(v)
case "contentdir":
language.ContentDir = cast.ToString(v)
case "disabled":
language.Disabled = cast.ToBool(v)
case "params":
m := cast.ToStringMap(v)
// Needed for case insensitive fetching of params values
maps.ToLower(m)
for k, vv := range m {
language.SetParam(k, vv)
}
}
// Put all into the Params map
language.SetParam(loki, v)
// Also set it in the configuration map (for baseURL etc.)
language.Set(loki, v)
}
languages[i] = language
i++
}
sort.Sort(languages)
return languages, nil
}

View file

@@ -23,6 +23,8 @@ import (
 	"sort"
 	"strings"
 
+	"github.com/gohugoio/hugo/hugofs/files"
+
 	"github.com/bep/gitmap"
 
 	"github.com/gohugoio/hugo/helpers"
@@ -290,7 +292,9 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
 		switch p.Kind() {
 		case page.KindSection:
-			section = sections[0]
+			if len(sections) > 0 {
+				section = sections[0]
+			}
 		case page.KindTaxonomyTerm:
 			section = p.getTaxonomyNodeInfo().singular
 		case page.KindTaxonomy:
@@ -365,6 +369,7 @@ func (p *pageState) renderResources() (err error) {
 		var toBeDeleted []int
 
 		for i, r := range p.Resources() {
 			if _, ok := r.(page.Page); ok {
+				// Pages gets rendered with the owning page but we count them here.
 				p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
@@ -491,14 +496,6 @@ func (p *pageState) addSectionToParent() {
 	p.parent.subSections = append(p.parent.subSections, p)
 }
 
-func (p *pageState) contentMarkupType() string {
-	if p.m.markup != "" {
-		return p.m.markup
-	}
-
-	return p.File().Ext()
-}
-
 func (p *pageState) mapContent(meta *pageMeta) error {
 	s := p.shortcodeState
@@ -843,6 +840,7 @@ func (ps pageStatePages) findPagePosByFilnamePrefix(prefix string) int {
 func (s *Site) sectionsFromFile(fi source.File) []string {
 	dirname := fi.Dir()
+
 	dirname = strings.Trim(dirname, helpers.FilePathSeparator)
 	if dirname == "" {
 		return nil
@@ -850,7 +848,7 @@ func (s *Site) sectionsFromFile(fi source.File) []string {
 	parts := strings.Split(dirname, helpers.FilePathSeparator)
 
 	if fii, ok := fi.(*fileInfo); ok {
-		if fii.bundleTp == bundleLeaf && len(parts) > 0 {
+		if len(parts) > 0 && fii.FileInfo().Meta().Classifier() == files.ContentClassLeaf {
 			// my-section/mybundle/index.md => my-section
 			return parts[:len(parts)-1]
 		}

View file

@@ -21,6 +21,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/gohugoio/hugo/hugofs/files"
+
 	"github.com/gohugoio/hugo/common/hugo"
 
 	"github.com/gohugoio/hugo/related"
@@ -591,15 +593,14 @@ func (p *pageMeta) applyDefaultValues() error {
 	}
 
 	if p.IsNode() {
-		p.bundleType = "branch"
+		p.bundleType = files.ContentClassBranch
 	} else {
 		source := p.File()
 		if fi, ok := source.(*fileInfo); ok {
-			switch fi.bundleTp {
-			case bundleBranch:
-				p.bundleType = "branch"
-			case bundleLeaf:
-				p.bundleType = "leaf"
+			class := fi.FileInfo().Meta().Classifier()
+			switch class {
+			case files.ContentClassBranch, files.ContentClassLeaf:
+				p.bundleType = class
 			}
 		}
 	}

View file

@@ -63,8 +63,9 @@ func TestPermalink(t *testing.T) {
 	}
 
 	for i, test := range tests {
+		test := test
 		t.Run(fmt.Sprintf("%s-%d", test.file, i), func(t *testing.T) {
+			t.Parallel()
 			cfg, fs := newTestCfg()
 
 			cfg.Set("uglyURLs", test.uglyURLs)

View file

@@ -18,6 +18,8 @@ import (
 	"html/template"
 	"os"
 
+	"github.com/gohugoio/hugo/config"
+
 	"github.com/gohugoio/hugo/common/loggers"
 
 	"path/filepath"
@@ -29,7 +31,6 @@ import (
 	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/resources/resource"
 
 	"github.com/spf13/afero"
-
 	"github.com/spf13/viper"
@@ -301,6 +302,7 @@ func checkPageTitle(t *testing.T, page page.Page, title string) {
 }
 
 func checkPageContent(t *testing.T, page page.Page, expected string, msg ...interface{}) {
+	t.Helper()
 	a := normalizeContent(expected)
 	b := normalizeContent(content(page))
 	if a != b {
@@ -387,11 +389,13 @@ func testAllMarkdownEnginesForPages(t *testing.T,
 			continue
 		}
 
-		cfg, fs := newTestCfg()
+		cfg, fs := newTestCfg(func(cfg config.Provider) error {
+			for k, v := range settings {
+				cfg.Set(k, v)
+			}
+			return nil
+		})
 
-		for k, v := range settings {
-			cfg.Set(k, v)
-		}
 
 		contentDir := "content"
@@ -413,7 +417,10 @@ func testAllMarkdownEnginesForPages(t *testing.T,
 		homePath := fmt.Sprintf("_index.%s", e.ext)
 		writeSource(t, fs, filepath.Join(contentDir, homePath), homePage)
 
-		s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+		b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+		b.Build(BuildCfg{SkipRender: true})
+		s := b.H.Sites[0]
+
 		require.Len(t, s.RegularPages(), len(pageSources))
@@ -770,6 +777,9 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) {
 	fs := hugofs.NewFrom(hugofs.Os, cfg)
 	fs.Destination = &afero.MemMapFs{}
 
+	wd, err := os.Getwd()
+	assrt.NoError(err)
+
 	cfg.Set("frontmatter", map[string]interface{}{
 		"lastmod": []string{":git", "lastmod"},
 	})
@@ -791,20 +801,15 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) {
 	cfg.Set("languages", langConfig)
 	cfg.Set("enableGitInfo", true)
 
-	assrt.NoError(loadDefaultSettingsFor(cfg))
-	assrt.NoError(loadLanguageSettings(cfg, nil))
-
-	wd, err := os.Getwd()
-	assrt.NoError(err)
 	cfg.Set("workingDir", filepath.Join(wd, "testsite"))
 
-	h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
+	b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+	b.Build(BuildCfg{SkipRender: true})
+	h := b.H
 
-	assrt.NoError(err)
 	assrt.Len(h.Sites, 2)
 
-	require.NoError(t, h.Build(BuildCfg{SkipRender: true}))
-
 	enSite := h.Sites[0]
 	assrt.Len(enSite.RegularPages(), 1)
@@ -820,10 +825,10 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) {
 }
 
 func TestPageWithFrontMatterConfig(t *testing.T) {
-	t.Parallel()
 	for _, dateHandler := range []string{":filename", ":fileModTime"} {
+		dateHandler := dateHandler
 		t.Run(fmt.Sprintf("dateHandler=%q", dateHandler), func(t *testing.T) {
+			t.Parallel()
 			assrt := require.New(t)
 			cfg, fs := newTestCfg()
@@ -852,8 +857,10 @@ Content
 			c2fi, err := fs.Source.Stat(c2)
 			assrt.NoError(err)
 
-			s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
+			b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
+			b.Build(BuildCfg{SkipRender: true})
 
+			s := b.H.Sites[0]
 			assrt.Len(s.RegularPages(), 2)
 
 			noSlug := s.RegularPages()[0]
@@ -1051,10 +1058,8 @@ func TestPageWithEmoji(t *testing.T) {
 	for _, enableEmoji := range []bool{true, false} {
 		v := viper.New()
 		v.Set("enableEmoji", enableEmoji)
-		b := newTestSitesBuilder(t)
-		b.WithViper(v)
 
-		b.WithSimpleConfigFile()
+		b := newTestSitesBuilder(t).WithViper(v)
 
 		b.WithContent("page-emoji.md", `---
 title: "Hugo Smile"
@@ -1329,11 +1334,12 @@ func TestShouldBuild(t *testing.T) {
 // "dot" in path: #1885 and #2110
 // disablePathToLower regression: #3374
 func TestPathIssues(t *testing.T) {
-	t.Parallel()
 	for _, disablePathToLower := range []bool{false, true} {
 		for _, uglyURLs := range []bool{false, true} {
+			disablePathToLower := disablePathToLower
+			uglyURLs := uglyURLs
 			t.Run(fmt.Sprintf("disablePathToLower=%t,uglyURLs=%t", disablePathToLower, uglyURLs), func(t *testing.T) {
+				t.Parallel()
 				cfg, fs := newTestCfg()
 				th := testHelper{cfg, fs, t}
View file

@@ -1,206 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"context"
"fmt"
"math"
"path/filepath"
"github.com/gohugoio/hugo/config"
_errors "github.com/pkg/errors"
"golang.org/x/sync/errgroup"
)
type siteContentProcessor struct {
site *Site
handleContent contentHandler
ctx context.Context
// The input file bundles.
fileBundlesChan chan *bundleDir
// The input file singles.
fileSinglesChan chan *fileInfo
// These assets should be just copied to destination.
fileAssetsChan chan pathLangFile
numWorkers int
// The output Pages
pagesChan chan *pageState
// Used for partial rebuilds (aka. live reload)
// Will signal replacement of pages in the site collection.
partialBuild bool
}
func (s *siteContentProcessor) processBundle(b *bundleDir) {
select {
case s.fileBundlesChan <- b:
case <-s.ctx.Done():
}
}
func (s *siteContentProcessor) processSingle(fi *fileInfo) {
select {
case s.fileSinglesChan <- fi:
case <-s.ctx.Done():
}
}
func (s *siteContentProcessor) processAsset(asset pathLangFile) {
select {
case s.fileAssetsChan <- asset:
case <-s.ctx.Done():
}
}
func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *siteContentProcessor {
numWorkers := config.GetNumWorkerMultiplier() * 3
numWorkers = int(math.Ceil(float64(numWorkers) / float64(len(s.h.Sites))))
return &siteContentProcessor{
ctx: ctx,
partialBuild: partialBuild,
site: s,
handleContent: newHandlerChain(s),
fileBundlesChan: make(chan *bundleDir, numWorkers),
fileSinglesChan: make(chan *fileInfo, numWorkers),
fileAssetsChan: make(chan pathLangFile, numWorkers),
numWorkers: numWorkers,
pagesChan: make(chan *pageState, numWorkers),
}
}
func (s *siteContentProcessor) closeInput() {
close(s.fileSinglesChan)
close(s.fileBundlesChan)
close(s.fileAssetsChan)
}
func (s *siteContentProcessor) process(ctx context.Context) error {
g1, ctx := errgroup.WithContext(ctx)
g2, ctx := errgroup.WithContext(ctx)
// There can be only one of these per site.
g1.Go(func() error {
for p := range s.pagesChan {
if p.s != s.site {
panic(fmt.Sprintf("invalid page site: %v vs %v", p.s, s))
}
p.forceRender = s.partialBuild
if p.forceRender {
s.site.replacePage(p)
} else {
s.site.addPage(p)
}
}
return nil
})
for i := 0; i < s.numWorkers; i++ {
g2.Go(func() error {
for {
select {
case f, ok := <-s.fileSinglesChan:
if !ok {
return nil
}
err := s.readAndConvertContentFile(f)
if err != nil {
return err
}
case <-ctx.Done():
return ctx.Err()
}
}
})
g2.Go(func() error {
for {
select {
case file, ok := <-s.fileAssetsChan:
if !ok {
return nil
}
f, err := s.site.BaseFs.Content.Fs.Open(file.Filename())
if err != nil {
return _errors.Wrap(err, "failed to open assets file")
}
filename := filepath.Join(s.site.GetTargetLanguageBasePath(), file.Path())
err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, filename, f)
f.Close()
if err != nil {
return err
}
case <-ctx.Done():
return ctx.Err()
}
}
})
g2.Go(func() error {
for {
select {
case bundle, ok := <-s.fileBundlesChan:
if !ok {
return nil
}
err := s.readAndConvertContentBundle(bundle)
if err != nil {
return err
}
case <-ctx.Done():
return ctx.Err()
}
}
})
}
err := g2.Wait()
close(s.pagesChan)
if err != nil {
return err
}
if err := g1.Wait(); err != nil {
return err
}
return nil
}
func (s *siteContentProcessor) readAndConvertContentFile(file *fileInfo) error {
ctx := &handlerContext{source: file, pages: s.pagesChan}
return s.handleContent(ctx).err
}
func (s *siteContentProcessor) readAndConvertContentBundle(bundle *bundleDir) error {
ctx := &handlerContext{bundle: bundle, pages: s.pagesChan}
return s.handleContent(ctx).err
}

View file

@@ -1,773 +0,0 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"errors"
"fmt"
"os"
"path"
"path/filepath"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/common/loggers"
_errors "github.com/pkg/errors"
"sort"
"strings"
"sync"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/helpers"
"golang.org/x/sync/errgroup"
"github.com/gohugoio/hugo/source"
)
var errSkipCyclicDir = errors.New("skip potential cyclic dir")
type capturer struct {
// To prevent symbolic link cycles: Visit same folder only once.
seen map[string]bool
seenMu sync.Mutex
handler captureResultHandler
sourceSpec *source.SourceSpec
fs afero.Fs
logger *loggers.Logger
// Filenames limits the content to process to a list of filenames/directories.
// This is used for partial building in server mode.
filenames []string
// Used to determine how to handle content changes in server mode.
contentChanges *contentChangeMap
// Semaphore used to throttle the concurrent sub directory handling.
sem chan bool
}
func newCapturer(
logger *loggers.Logger,
sourceSpec *source.SourceSpec,
handler captureResultHandler,
contentChanges *contentChangeMap,
filenames ...string) *capturer {
numWorkers := config.GetNumWorkerMultiplier()
// TODO(bep) the "index" vs "_index" check/strings should be moved in one place.
isBundleHeader := func(filename string) bool {
base := filepath.Base(filename)
name := helpers.Filename(base)
return IsContentFile(base) && (name == "index" || name == "_index")
}
// Make sure that any bundle header files are processed before the others. This makes
// sure that any bundle head is processed before its resources.
sort.Slice(filenames, func(i, j int) bool {
a, b := filenames[i], filenames[j]
ac, bc := isBundleHeader(a), isBundleHeader(b)
if ac {
return true
}
if bc {
return false
}
return a < b
})
c := &capturer{
sem: make(chan bool, numWorkers),
handler: handler,
sourceSpec: sourceSpec,
fs: sourceSpec.SourceFs,
logger: logger,
contentChanges: contentChanges,
seen: make(map[string]bool),
filenames: filenames}
return c
}
// Captured files and bundles ready to be processed will be passed on to
// these channels.
type captureResultHandler interface {
handleSingles(fis ...*fileInfo)
handleCopyFile(fi pathLangFile)
captureBundlesHandler
}
type captureBundlesHandler interface {
handleBundles(b *bundleDirs)
}
type captureResultHandlerChain struct {
handlers []captureBundlesHandler
}
func (c *captureResultHandlerChain) handleSingles(fis ...*fileInfo) {
for _, h := range c.handlers {
if hh, ok := h.(captureResultHandler); ok {
hh.handleSingles(fis...)
}
}
}
func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
for _, h := range c.handlers {
h.handleBundles(b)
}
}
func (c *captureResultHandlerChain) handleCopyFile(file pathLangFile) {
for _, h := range c.handlers {
if hh, ok := h.(captureResultHandler); ok {
hh.handleCopyFile(file)
}
}
}
func (c *capturer) capturePartial(filenames ...string) error {
handled := make(map[string]bool)
for _, filename := range filenames {
dir, resolvedFilename, tp := c.contentChanges.resolveAndRemove(filename)
if handled[resolvedFilename] {
continue
}
handled[resolvedFilename] = true
switch tp {
case bundleLeaf:
if err := c.handleDir(resolvedFilename); err != nil {
// Directory may have been deleted.
if !os.IsNotExist(err) {
return err
}
}
case bundleBranch:
if err := c.handleBranchDir(resolvedFilename); err != nil {
// Directory may have been deleted.
if !os.IsNotExist(err) {
return err
}
}
default:
fi, err := c.resolveRealPath(resolvedFilename)
if os.IsNotExist(err) {
// File has been deleted.
continue
}
// Just in case the owning dir is a new symlink -- this will
// create the proper mapping for it.
c.resolveRealPath(dir)
f, active := c.newFileInfo(fi, tp)
if active {
c.copyOrHandleSingle(f)
}
}
}
return nil
}
func (c *capturer) capture() error {
if len(c.filenames) > 0 {
return c.capturePartial(c.filenames...)
}
err := c.handleDir(helpers.FilePathSeparator)
if err != nil {
return err
}
return nil
}
func (c *capturer) handleNestedDir(dirname string) error {
select {
case c.sem <- true:
var g errgroup.Group
g.Go(func() error {
defer func() {
<-c.sem
}()
return c.handleDir(dirname)
})
return g.Wait()
default:
// For deeply nested file trees, waiting for a semaphore will deadlock.
return c.handleDir(dirname)
}
}
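// A minimal standalone sketch of the try-acquire pattern above: take a
// semaphore slot when one is free, otherwise run the work inline so deeply
// nested recursion cannot deadlock waiting on slots held by its own callers.
// (exampleTryAcquire is a hypothetical helper, not used elsewhere.)
func exampleTryAcquire(sem chan bool, work func() error) error {
	select {
	case sem <- true:
		// Got a slot; release it when the work is done.
		defer func() { <-sem }()
		return work()
	default:
		// All slots are busy; run inline instead of blocking.
		return work()
	}
}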
// This handles a bundle branch and its resources only. It is used
// in server mode on changes. If this dir no longer represents a bundle
// branch, the handling is upgraded to the full handleDir method.
func (c *capturer) handleBranchDir(dirname string) error {
files, err := c.readDir(dirname)
if err != nil {
return err
}
var (
dirType bundleDirType
)
for _, fi := range files {
if !fi.IsDir() {
tp, _ := classifyBundledFile(fi.RealName())
if dirType == bundleNot {
dirType = tp
}
if dirType == bundleLeaf {
return c.handleDir(dirname)
}
}
}
if dirType != bundleBranch {
return c.handleDir(dirname)
}
dirs := newBundleDirs(bundleBranch, c)
var secondPass []*fileInfo
// Handle potential bundle headers first.
for _, fi := range files {
if fi.IsDir() {
continue
}
tp, isContent := classifyBundledFile(fi.RealName())
f, active := c.newFileInfo(fi, tp)
if !active {
continue
}
if !f.isOwner() {
if !isContent {
// This is a partial update -- we only care about the files that
// are in this bundle.
secondPass = append(secondPass, f)
}
continue
}
dirs.addBundleHeader(f)
}
for _, f := range secondPass {
dirs.addBundleFiles(f)
}
c.handler.handleBundles(dirs)
return nil
}
func (c *capturer) handleDir(dirname string) error {
files, err := c.readDir(dirname)
if err != nil {
return err
}
type dirState int
const (
dirStateDefault dirState = iota
dirStateAssetsOnly
dirStateSinglesOnly
)
var (
fileBundleTypes = make([]bundleDirType, len(files))
// Start with the assumption that this dir contains only non-content assets (images etc.).
// If that is still true after we have had a first look at the list of files, we
// can just copy the files to the destination. We will still have to look at the
// sub-folders for potential bundles.
state = dirStateAssetsOnly
// Start with the assumption that this dir is not a bundle.
// A directory is a bundle if it contains an index content file,
// e.g. index.md (a leaf bundle) or _index.md (a branch bundle).
bundleType = bundleNot
)
/* First check for any content files.
- If there are none, then this is an assets-only folder (images etc.)
and we can just copy the files to the destination.
- If this is a section with no images or similar, we can just handle it
as if it were a single content file.
*/
var hasNonContent, isBranch bool
for i, fi := range files {
if !fi.IsDir() {
tp, isContent := classifyBundledFile(fi.RealName())
fileBundleTypes[i] = tp
if !isBranch {
isBranch = tp == bundleBranch
}
if isContent {
// This is not an assets-only folder.
state = dirStateDefault
} else {
hasNonContent = true
}
}
}
if isBranch && !hasNonContent {
// This is a section or similar with no need for any bundle handling.
state = dirStateSinglesOnly
}
if state > dirStateDefault {
return c.handleNonBundle(dirname, files, state == dirStateSinglesOnly)
}
var fileInfos = make([]*fileInfo, 0, len(files))
for i, fi := range files {
currentType := bundleNot
if !fi.IsDir() {
currentType = fileBundleTypes[i]
if bundleType == bundleNot && currentType != bundleNot {
bundleType = currentType
}
}
if bundleType == bundleNot && currentType != bundleNot {
bundleType = currentType
}
f, active := c.newFileInfo(fi, currentType)
if !active {
continue
}
fileInfos = append(fileInfos, f)
}
var todo []*fileInfo
if bundleType != bundleLeaf {
for _, fi := range fileInfos {
if fi.FileInfo().IsDir() {
// Handle potential nested bundles.
if err := c.handleNestedDir(fi.Path()); err != nil {
return err
}
} else if bundleType == bundleNot || (!fi.isOwner() && fi.isContentFile()) {
// Not in a bundle.
c.copyOrHandleSingle(fi)
} else {
// This is a section folder or similar with non-content files in it.
todo = append(todo, fi)
}
}
} else {
todo = fileInfos
}
if len(todo) == 0 {
return nil
}
dirs, err := c.createBundleDirs(todo, bundleType)
if err != nil {
return err
}
// Send the bundle to the next step in the processor chain.
c.handler.handleBundles(dirs)
return nil
}
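// A minimal standalone sketch of the directory classification above, using the
// package's content check but none of the bundle bookkeeping; it only
// illustrates how the three dir states are chosen (the helper is hypothetical):
func exampleClassifyDir(filenames []string) string {
	hasContent, hasNonContent, isBranch := false, false, false
	for _, name := range filenames {
		base := filepath.Base(name)
		if IsContentFile(base) {
			hasContent = true
			if helpers.Filename(base) == "_index" {
				isBranch = true
			}
		} else {
			hasNonContent = true
		}
	}
	switch {
	case !hasContent:
		return "assets only: copy straight to the destination"
	case isBranch && !hasNonContent:
		return "singles only: handle each content file on its own"
	default:
		return "default: full bundle handling"
	}
}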
func (c *capturer) handleNonBundle(
dirname string,
fileInfos pathLangFileFis,
singlesOnly bool) error {
for _, fi := range fileInfos {
if fi.IsDir() {
if err := c.handleNestedDir(fi.Filename()); err != nil {
return err
}
} else {
if singlesOnly {
f, active := c.newFileInfo(fi, bundleNot)
if !active {
continue
}
c.handler.handleSingles(f)
} else {
c.handler.handleCopyFile(fi)
}
}
}
return nil
}
func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
if fi.isContentFile() {
c.handler.handleSingles(fi)
} else {
// These do not currently need any further processing.
c.handler.handleCopyFile(fi)
}
}
func (c *capturer) createBundleDirs(fileInfos []*fileInfo, bundleType bundleDirType) (*bundleDirs, error) {
dirs := newBundleDirs(bundleType, c)
for _, fi := range fileInfos {
if fi.FileInfo().IsDir() {
var collector func(fis ...*fileInfo)
if bundleType == bundleBranch {
// All files in the current directory are part of this bundle.
// Trying to include sub folders in these bundles would be ambiguous.
collector = func(fis ...*fileInfo) {
for _, fi := range fis {
c.copyOrHandleSingle(fi)
}
}
} else {
// All nested files and directories are part of this bundle.
collector = func(fis ...*fileInfo) {
fileInfos = append(fileInfos, fis...)
}
}
err := c.collectFiles(fi.Path(), collector)
if err != nil {
return nil, err
}
} else if fi.isOwner() {
// There can be more than one language, so:
// 1. Content files must be attached to their language's bundle.
// 2. Other files must be attached to all languages.
// 3. Every content file needs a bundle header.
dirs.addBundleHeader(fi)
}
}
for _, fi := range fileInfos {
if fi.FileInfo().IsDir() || fi.isOwner() {
continue
}
if fi.isContentFile() {
if bundleType != bundleBranch {
dirs.addBundleContentFile(fi)
}
} else {
dirs.addBundleFiles(fi)
}
}
return dirs, nil
}
func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInfo)) error {
filesInDir, err := c.readDir(dirname)
if err != nil {
return err
}
for _, fi := range filesInDir {
if fi.IsDir() {
err := c.collectFiles(fi.Filename(), handleFiles)
if err != nil {
return err
}
} else {
f, active := c.newFileInfo(fi, bundleNot)
if active {
handleFiles(f)
}
}
}
return nil
}
func (c *capturer) readDir(dirname string) (pathLangFileFis, error) {
if c.sourceSpec.IgnoreFile(dirname) {
return nil, nil
}
dir, err := c.fs.Open(dirname)
if err != nil {
return nil, err
}
defer dir.Close()
fis, err := dir.Readdir(-1)
if err != nil {
return nil, err
}
pfis := make(pathLangFileFis, 0, len(fis))
for _, fi := range fis {
fip := fi.(pathLangFileFi)
if !c.sourceSpec.IgnoreFile(fip.Filename()) {
err := c.resolveRealPathIn(fip)
if err != nil {
// It may have been deleted in the meantime.
if err == errSkipCyclicDir || os.IsNotExist(err) {
continue
}
return nil, err
}
pfis = append(pfis, fip)
}
}
return pfis, nil
}
func (c *capturer) newFileInfo(fi pathLangFileFi, tp bundleDirType) (*fileInfo, bool) {
f := newFileInfo(c.sourceSpec, "", "", fi, tp)
return f, !f.disabled
}
type pathLangFile interface {
hugofs.LanguageAnnouncer
hugofs.FilePather
}
type pathLangFileFi interface {
os.FileInfo
pathLangFile
}
type pathLangFileFis []pathLangFileFi
type bundleDirs struct {
tp bundleDirType
// Maps languages to bundles.
bundles map[string]*bundleDir
// Keeps track of language overrides for non-content files, e.g. logo.en.png.
langOverrides map[string]bool
c *capturer
}
func newBundleDirs(tp bundleDirType, c *capturer) *bundleDirs {
return &bundleDirs{tp: tp, bundles: make(map[string]*bundleDir), langOverrides: make(map[string]bool), c: c}
}
type bundleDir struct {
tp bundleDirType
fi *fileInfo
resources map[string]*fileInfo
}
func (b bundleDir) clone() *bundleDir {
b.resources = make(map[string]*fileInfo)
fic := *b.fi
b.fi = &fic
return &b
}
func newBundleDir(fi *fileInfo, bundleType bundleDirType) *bundleDir {
return &bundleDir{fi: fi, tp: bundleType, resources: make(map[string]*fileInfo)}
}
func (b *bundleDirs) addBundleContentFile(fi *fileInfo) {
dir, found := b.bundles[fi.Lang()]
if !found {
// Every bundled content file needs a bundle header.
// If one does not exist in its language, we pick the default
// language version, or a random one if that doesn't exist either.
tl := b.c.sourceSpec.DefaultContentLanguage
ldir, found := b.bundles[tl]
if !found {
// Just pick one.
for _, v := range b.bundles {
ldir = v
break
}
}
if ldir == nil {
panic(fmt.Sprintf("bundle not found for file %q", fi.Filename()))
}
dir = ldir.clone()
dir.fi.overriddenLang = fi.Lang()
b.bundles[fi.Lang()] = dir
}
dir.resources[fi.Path()] = fi
}
func (b *bundleDirs) addBundleFiles(fi *fileInfo) {
dir := filepath.ToSlash(fi.Dir())
p := dir + fi.TranslationBaseName() + "." + fi.Ext()
for lang, bdir := range b.bundles {
key := path.Join(lang, p)
// Given mypage.de.md (German translation) and mypage.md we pick the most
// specific for that language.
if fi.Lang() == lang || !b.langOverrides[key] {
bdir.resources[key] = fi
}
b.langOverrides[key] = true
}
}
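// A minimal standalone sketch of the most-specific-wins rule above for
// non-content resources like logo.en.png vs. logo.png, ignoring the
// langOverrides bookkeeping (inputs and the helper name are hypothetical):
func examplePickResourcePerLang(langs []string, available map[string]bool, base, ext string) map[string]string {
	picked := make(map[string]string)
	for _, lang := range langs {
		specific := base + "." + lang + "." + ext
		if available[specific] {
			picked[lang] = specific
		} else {
			picked[lang] = base + "." + ext
		}
	}
	// With langs {"en", "nn"} and available {"logo.png", "logo.en.png"}:
	// en => logo.en.png, nn => logo.png.
	return picked
}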
func (b *bundleDirs) addBundleHeader(fi *fileInfo) {
b.bundles[fi.Lang()] = newBundleDir(fi, b.tp)
}
func (c *capturer) isSeen(dirname string) bool {
c.seenMu.Lock()
defer c.seenMu.Unlock()
seen := c.seen[dirname]
c.seen[dirname] = true
if seen {
c.logger.INFO.Printf("Content dir %q already processed; skipped to avoid infinite recursion.", dirname)
return true
}
return false
}
func (c *capturer) resolveRealPath(path string) (pathLangFileFi, error) {
fileInfo, err := c.lstatIfPossible(path)
if err != nil {
return nil, err
}
return fileInfo, c.resolveRealPathIn(fileInfo)
}
func (c *capturer) resolveRealPathIn(fileInfo pathLangFileFi) error {
basePath := fileInfo.BaseDir()
path := fileInfo.Filename()
realPath := path
if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
link, err := filepath.EvalSymlinks(path)
if err != nil {
return _errors.Wrapf(err, "Cannot read symbolic link %q, error was:", path)
}
// This is a file on the outside of any base fs, so we have to use the os package.
sfi, err := os.Stat(link)
if err != nil {
return _errors.Wrapf(err, "Cannot stat %q, error was:", link)
}
// TODO(bep) improve all of this.
if a, ok := fileInfo.(*hugofs.LanguageFileInfo); ok {
a.FileInfo = sfi
}
realPath = link
if realPath != path && sfi.IsDir() && c.isSeen(realPath) {
// Avoid cyclic symlinks.
// Note that this may prevent some uses that aren't cyclic and are
// potentially useful, but this implementation is both robust and simple:
// We stop at the first directory that we have seen before, e.g.
// /content/blog will only be processed once.
return errSkipCyclicDir
}
if c.contentChanges != nil {
// Keep track of symbolic links in watch mode.
var from, to string
if sfi.IsDir() {
from = realPath
to = path
if !strings.HasSuffix(to, helpers.FilePathSeparator) {
to = to + helpers.FilePathSeparator
}
if !strings.HasSuffix(from, helpers.FilePathSeparator) {
from = from + helpers.FilePathSeparator
}
if !strings.HasSuffix(basePath, helpers.FilePathSeparator) {
basePath = basePath + helpers.FilePathSeparator
}
if strings.HasPrefix(from, basePath) {
// With symbolic links inside /content we need to keep
// a reference to both. This may be confusing with --navigateToChanged,
// but the user has chosen this themselves.
c.contentChanges.addSymbolicLinkMapping(from, from)
}
} else {
from = realPath
to = path
}
c.contentChanges.addSymbolicLinkMapping(from, to)
}
}
return nil
}
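// A minimal standalone sketch of the symlink handling above: resolve a link
// with the standard library and skip directories whose real path has already
// been visited, which is what breaks cycles like content -> symlink -> content.
// (The seen map mirrors capturer.seen; the helper itself is hypothetical.)
func exampleResolveSymlink(path string, seen map[string]bool) (realPath string, skip bool, err error) {
	fi, err := os.Lstat(path)
	if err != nil {
		return "", false, err
	}
	if fi.Mode()&os.ModeSymlink == 0 {
		return path, false, nil
	}
	realPath, err = filepath.EvalSymlinks(path)
	if err != nil {
		return "", false, err
	}
	fiReal, err := os.Stat(realPath)
	if err != nil {
		return "", false, err
	}
	if fiReal.IsDir() {
		if seen[realPath] {
			// The target directory was already processed: skip to avoid a cycle.
			return realPath, true, nil
		}
		seen[realPath] = true
	}
	return realPath, false, nil
}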
func (c *capturer) lstatIfPossible(path string) (pathLangFileFi, error) {
fi, err := helpers.LstatIfPossible(c.fs, path)
if err != nil {
return nil, err
}
return fi.(pathLangFileFi), nil
}

View file

@ -1,272 +0,0 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
"os"
"path"
"path/filepath"
"sort"
"github.com/gohugoio/hugo/common/loggers"
"runtime"
"strings"
"sync"
"testing"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/source"
"github.com/stretchr/testify/require"
)
type storeFilenames struct {
sync.Mutex
filenames []string
copyNames []string
dirKeys []string
}
func (s *storeFilenames) handleSingles(fis ...*fileInfo) {
s.Lock()
defer s.Unlock()
for _, fi := range fis {
s.filenames = append(s.filenames, filepath.ToSlash(fi.Filename()))
}
}
func (s *storeFilenames) handleBundles(d *bundleDirs) {
s.Lock()
defer s.Unlock()
var keys []string
for _, b := range d.bundles {
res := make([]string, len(b.resources))
i := 0
for _, r := range b.resources {
res[i] = path.Join(r.Lang(), filepath.ToSlash(r.Filename()))
i++
}
sort.Strings(res)
keys = append(keys, path.Join("__bundle", b.fi.Lang(), filepath.ToSlash(b.fi.Filename()), "resources", strings.Join(res, "|")))
}
s.dirKeys = append(s.dirKeys, keys...)
}
func (s *storeFilenames) handleCopyFile(file pathLangFile) {
s.Lock()
defer s.Unlock()
s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
}
func (s *storeFilenames) sortedStr() string {
s.Lock()
defer s.Unlock()
sort.Strings(s.filenames)
sort.Strings(s.dirKeys)
sort.Strings(s.copyNames)
return "\nF:\n" + strings.Join(s.filenames, "\n") + "\nD:\n" + strings.Join(s.dirKeys, "\n") +
"\nC:\n" + strings.Join(s.copyNames, "\n") + "\n"
}
func TestPageBundlerCaptureSymlinks(t *testing.T) {
if runtime.GOOS == "windows" && os.Getenv("CI") == "" {
t.Skip("Skip TestPageBundlerCaptureSymlinks as os.Symlink needs administrator rights on Windows")
}
assert := require.New(t)
ps, clean, workDir := newTestBundleSymbolicSources(t)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
defer clean()
fileStore := &storeFilenames{}
logger := loggers.NewErrorLogger()
c := newCapturer(logger, sourceSpec, fileStore, nil)
assert.NoError(c.capture())
expected := `
F:
/base/a/page_s.md
/base/a/regular.md
/base/symbolic1/s1.md
/base/symbolic1/s2.md
/base/symbolic3/circus/a/page_s.md
/base/symbolic3/circus/a/regular.md
D:
__bundle/en/base/symbolic2/a1/index.md/resources/en/base/symbolic2/a1/logo.png|en/base/symbolic2/a1/page.md
C:
/base/symbolic3/s1.png
/base/symbolic3/s2.png
`
got := strings.Replace(fileStore.sortedStr(), filepath.ToSlash(workDir), "", -1)
got = strings.Replace(got, "//", "/", -1)
if expected != got {
diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
t.Log(got)
t.Fatalf("Failed:\n%s", diff)
}
}
func TestPageBundlerCaptureBasic(t *testing.T) {
t.Parallel()
assert := require.New(t)
fs, cfg := newTestBundleSources(t)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))
ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
fileStore := &storeFilenames{}
c := newCapturer(loggers.NewErrorLogger(), sourceSpec, fileStore, nil)
assert.NoError(c.capture())
expected := `
F:
/work/base/_1.md
/work/base/a/1.md
/work/base/a/2.md
/work/base/assets/pages/mypage.md
D:
__bundle/en/work/base/_index.md/resources/en/work/base/_1.png
__bundle/en/work/base/a/b/index.md/resources/en/work/base/a/b/ab1.md
__bundle/en/work/base/b/my-bundle/index.md/resources/en/work/base/b/my-bundle/1.md|en/work/base/b/my-bundle/2.md|en/work/base/b/my-bundle/c/logo.png|en/work/base/b/my-bundle/custom-mime.bep|en/work/base/b/my-bundle/sunset1.jpg|en/work/base/b/my-bundle/sunset2.jpg
__bundle/en/work/base/c/bundle/index.md/resources/en/work/base/c/bundle/logo-은행.png
__bundle/en/work/base/root/index.md/resources/en/work/base/root/1.md|en/work/base/root/c/logo.png
C:
/work/base/assets/pic1.png
/work/base/assets/pic2.png
/work/base/images/hugo-logo.png
`
got := fileStore.sortedStr()
if expected != got {
diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
t.Log(got)
t.Fatalf("Failed:\n%s", diff)
}
}
func TestPageBundlerCaptureMultilingual(t *testing.T) {
t.Parallel()
assert := require.New(t)
fs, cfg := newTestBundleSourcesMultilingual(t)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))
ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.Content.Fs)
fileStore := &storeFilenames{}
c := newCapturer(loggers.NewErrorLogger(), sourceSpec, fileStore, nil)
assert.NoError(c.capture())
expected := `
F:
/work/base/1s/mypage.md
/work/base/1s/mypage.nn.md
/work/base/bb/_1.md
/work/base/bb/_1.nn.md
/work/base/bb/en.md
/work/base/bc/page.md
/work/base/bc/page.nn.md
/work/base/be/_index.md
/work/base/be/page.md
/work/base/be/page.nn.md
D:
__bundle/en/work/base/bb/_index.md/resources/en/work/base/bb/a.png|en/work/base/bb/b.png|nn/work/base/bb/c.nn.png
__bundle/en/work/base/bc/_index.md/resources/en/work/base/bc/logo-bc.png
__bundle/en/work/base/bd/index.md/resources/en/work/base/bd/page.md
__bundle/en/work/base/bf/my-bf-bundle/index.md/resources/en/work/base/bf/my-bf-bundle/page.md
__bundle/en/work/base/lb/index.md/resources/en/work/base/lb/1.md|en/work/base/lb/2.md|en/work/base/lb/c/d/deep.png|en/work/base/lb/c/logo.png|en/work/base/lb/c/one.png|en/work/base/lb/c/page.md
__bundle/nn/work/base/bb/_index.nn.md/resources/en/work/base/bb/a.png|nn/work/base/bb/b.nn.png|nn/work/base/bb/c.nn.png
__bundle/nn/work/base/bd/index.md/resources/nn/work/base/bd/page.nn.md
__bundle/nn/work/base/bf/my-bf-bundle/index.nn.md/resources
__bundle/nn/work/base/lb/index.nn.md/resources/en/work/base/lb/c/d/deep.png|en/work/base/lb/c/one.png|nn/work/base/lb/2.nn.md|nn/work/base/lb/c/logo.nn.png
C:
/work/base/1s/mylogo.png
/work/base/bb/b/d.nn.png
`
got := fileStore.sortedStr()
if expected != got {
diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
t.Log(got)
t.Fatalf("Failed:\n%s", strings.Join(diff, "\n"))
}
}
type noOpFileStore int
func (noOpFileStore) handleSingles(fis ...*fileInfo) {}
func (noOpFileStore) handleBundles(b *bundleDirs) {}
func (noOpFileStore) handleCopyFile(file pathLangFile) {}
func BenchmarkPageBundlerCapture(b *testing.B) {
capturers := make([]*capturer, b.N)
for i := 0; i < b.N; i++ {
cfg, fs := newTestCfg()
ps, _ := helpers.NewPathSpec(fs, cfg)
sourceSpec := source.NewSourceSpec(ps, fs.Source)
base := fmt.Sprintf("base%d", i)
for j := 1; j <= 5; j++ {
js := fmt.Sprintf("j%d", j)
writeSource(b, fs, filepath.Join(base, js, "index.md"), "content")
writeSource(b, fs, filepath.Join(base, js, "logo1.png"), "content")
writeSource(b, fs, filepath.Join(base, js, "sub", "logo2.png"), "content")
writeSource(b, fs, filepath.Join(base, js, "section", "_index.md"), "content")
writeSource(b, fs, filepath.Join(base, js, "section", "logo.png"), "content")
writeSource(b, fs, filepath.Join(base, js, "section", "sub", "logo.png"), "content")
for k := 1; k <= 5; k++ {
ks := fmt.Sprintf("k%d", k)
writeSource(b, fs, filepath.Join(base, js, ks, "logo1.png"), "content")
writeSource(b, fs, filepath.Join(base, js, "section", ks, "logo.png"), "content")
}
}
for i := 1; i <= 5; i++ {
writeSource(b, fs, filepath.Join(base, "assetsonly", fmt.Sprintf("image%d.png", i)), "image")
}
for i := 1; i <= 5; i++ {
writeSource(b, fs, filepath.Join(base, "contentonly", fmt.Sprintf("c%d.md", i)), "content")
}
capturers[i] = newCapturer(loggers.NewErrorLogger(), sourceSpec, new(noOpFileStore), nil, base)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
err := capturers[i].capture()
if err != nil {
b.Fatal(err)
}
}
}

View file

@ -1,305 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"errors"
"fmt"
"path/filepath"
"github.com/gohugoio/hugo/common/hugio"
"strings"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/resources/resource"
)
var (
// This should be the only list of valid extensions for content files.
contentFileExtensions = []string{
"html", "htm",
"mdown", "markdown", "md",
"asciidoc", "adoc", "ad",
"rest", "rst",
"mmark",
"org",
"pandoc", "pdc"}
contentFileExtensionsSet map[string]bool
)
func init() {
contentFileExtensionsSet = make(map[string]bool)
for _, ext := range contentFileExtensions {
contentFileExtensionsSet[ext] = true
}
}
func newHandlerChain(s *Site) contentHandler {
c := &contentHandlers{s: s}
contentFlow := c.parsePage(
c.handlePageContent(),
)
c.rootHandler = c.processFirstMatch(
contentFlow,
// Creates a file resource (image, CSS etc.) if there is a parent
// page set on the current context.
c.createResource(),
// Everything that isn't handled above will just be copied
// to the destination.
c.copyFile(),
)
return c.rootHandler
}
type contentHandlers struct {
s *Site
rootHandler contentHandler
}
func (c *contentHandlers) processFirstMatch(handlers ...contentHandler) func(ctx *handlerContext) handlerResult {
return func(ctx *handlerContext) handlerResult {
for _, h := range handlers {
res := h(ctx)
if res.handled || res.err != nil {
return res
}
}
return handlerResult{err: errors.New("no matching handler found")}
}
}
type handlerContext struct {
// These are the pages stored in Site.
pages chan<- *pageState
doNotAddToSiteCollections bool
currentPage *pageState
parentPage *pageState
bundle *bundleDir
source *fileInfo
// Relative path to the target.
target string
}
func (c *handlerContext) ext() string {
if c.currentPage != nil {
return c.currentPage.contentMarkupType()
}
if c.bundle != nil {
return c.bundle.fi.Ext()
} else {
return c.source.Ext()
}
}
func (c *handlerContext) targetPath() string {
if c.target != "" {
return c.target
}
return c.source.Filename()
}
func (c *handlerContext) file() *fileInfo {
if c.bundle != nil {
return c.bundle.fi
}
return c.source
}
// Create a copy with the current context as its parent.
func (c handlerContext) childCtx(fi *fileInfo) *handlerContext {
if c.currentPage == nil {
panic("Need a Page to create a child context")
}
c.target = strings.TrimPrefix(fi.Path(), c.bundle.fi.Dir())
c.source = fi
c.doNotAddToSiteCollections = c.bundle != nil && c.bundle.tp != bundleBranch
c.bundle = nil
c.parentPage = c.currentPage
c.currentPage = nil
return &c
}
func (c *handlerContext) supports(exts ...string) bool {
ext := c.ext()
for _, s := range exts {
if s == ext {
return true
}
}
return false
}
func (c *handlerContext) isContentFile() bool {
return contentFileExtensionsSet[c.ext()]
}
type (
handlerResult struct {
err error
handled bool
result interface{}
}
contentHandler func(ctx *handlerContext) handlerResult
)
var (
notHandled handlerResult
)
func (c *contentHandlers) parsePage(h contentHandler) contentHandler {
return func(ctx *handlerContext) handlerResult {
if !ctx.isContentFile() {
return notHandled
}
result := handlerResult{handled: true}
fi := ctx.file()
content := func() (hugio.ReadSeekCloser, error) {
f, err := fi.Open()
if err != nil {
return nil, fmt.Errorf("failed to open content file %q: %s", fi.Filename(), err)
}
return f, nil
}
ps, err := newPageWithContent(fi, c.s, ctx.parentPage != nil, content)
if err != nil {
return handlerResult{err: err}
}
if !c.s.shouldBuild(ps) {
if !ctx.doNotAddToSiteCollections {
ctx.pages <- ps
}
return result
}
ctx.currentPage = ps
if ctx.bundle != nil {
// Add the bundled files
for _, fi := range ctx.bundle.resources {
childCtx := ctx.childCtx(fi)
res := c.rootHandler(childCtx)
if res.err != nil {
return res
}
if res.result != nil {
switch resv := res.result.(type) {
case *pageState:
resv.m.resourcePath = filepath.ToSlash(childCtx.target)
resv.parent = ps
ps.addResources(resv)
case resource.Resource:
ps.addResources(resv)
default:
panic("Unknown type")
}
}
}
}
return h(ctx)
}
}
func (c *contentHandlers) handlePageContent() contentHandler {
return func(ctx *handlerContext) handlerResult {
p := ctx.currentPage
if !ctx.doNotAddToSiteCollections {
ctx.pages <- p
}
return handlerResult{handled: true, result: p}
}
}
func (c *contentHandlers) createResource() contentHandler {
return func(ctx *handlerContext) handlerResult {
if ctx.parentPage == nil {
return notHandled
}
// TODO(bep) consolidate with multihost logic + clean up
outputFormats := ctx.parentPage.m.outputFormats()
seen := make(map[string]bool)
var targetBasePaths []string
// Make sure bundled resources are published to all of the output formats'
// sub paths.
for _, f := range outputFormats {
p := f.Path
if seen[p] {
continue
}
seen[p] = true
targetBasePaths = append(targetBasePaths, p)
}
resource, err := c.s.ResourceSpec.New(
resources.ResourceSourceDescriptor{
TargetPaths: ctx.parentPage.getTargetPaths,
SourceFile: ctx.source,
RelTargetFilename: ctx.target,
TargetBasePaths: targetBasePaths,
})
return handlerResult{err: err, handled: true, result: resource}
}
}
func (c *contentHandlers) copyFile() contentHandler {
return func(ctx *handlerContext) handlerResult {
f, err := c.s.BaseFs.Content.Fs.Open(ctx.source.Filename())
if err != nil {
err := fmt.Errorf("failed to open file in copyFile: %s", err)
return handlerResult{err: err}
}
target := ctx.targetPath()
defer f.Close()
if err := c.s.publish(&c.s.PathSpec.ProcessingStats.Files, target, f); err != nil {
return handlerResult{err: err}
}
return handlerResult{handled: true}
}
}

View file

@ -16,18 +16,18 @@ package hugolib
import ( import (
"os" "os"
"path" "path"
"runtime" "regexp"
"strings" "strings"
"testing" "testing"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/resources/page" "github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/helpers"
"io" "io"
"github.com/spf13/afero" "github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/media" "github.com/gohugoio/hugo/media"
@ -36,7 +36,6 @@ import (
"fmt" "fmt"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -54,8 +53,11 @@ func TestPageBundlerSiteRegular(t *testing.T) {
if baseURLPathId == "" { if baseURLPathId == "" {
baseURLPathId = "NONE" baseURLPathId = "NONE"
} }
ugly := ugly
canonify := canonify
t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId), t.Run(fmt.Sprintf("ugly=%t,canonify=%t,path=%s", ugly, canonify, baseURLPathId),
func(t *testing.T) { func(t *testing.T) {
t.Parallel()
baseURL := baseBaseURL + baseURLPath baseURL := baseBaseURL + baseURLPath
relURLBase := baseURLPath relURLBase := baseURLPath
if canonify { if canonify {
@ -65,14 +67,12 @@ func TestPageBundlerSiteRegular(t *testing.T) {
fs, cfg := newTestBundleSources(t) fs, cfg := newTestBundleSources(t)
cfg.Set("baseURL", baseURL) cfg.Set("baseURL", baseURL)
cfg.Set("canonifyURLs", canonify) cfg.Set("canonifyURLs", canonify)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))
cfg.Set("permalinks", map[string]string{ cfg.Set("permalinks", map[string]string{
"a": ":sections/:filename", "a": ":sections/:filename",
"b": ":year/:slug/", "b": ":year/:slug/",
"c": ":sections/:slug", "c": ":sections/:slug",
"": ":filename/", "/": ":filename/",
}) })
cfg.Set("outputFormats", map[string]interface{}{ cfg.Set("outputFormats", map[string]interface{}{
@ -92,9 +92,11 @@ func TestPageBundlerSiteRegular(t *testing.T) {
cfg.Set("uglyURLs", ugly) cfg.Set("uglyURLs", ugly)
s := buildSingleSite(t, deps.DepsCfg{Logger: loggers.NewErrorLogger(), Fs: fs, Cfg: cfg}, BuildCfg{}) b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Logger: loggers.NewErrorLogger(), Fs: fs, Cfg: cfg}).WithNothingAdded()
th := testHelper{s.Cfg, s.Fs, t} b.Build(BuildCfg{})
s := b.H.Sites[0]
assert.Len(s.RegularPages(), 8) assert.Len(s.RegularPages(), 8)
@ -127,22 +129,22 @@ func TestPageBundlerSiteRegular(t *testing.T) {
// Check both output formats // Check both output formats
rel, filename := relFilename("/a/1/", "index.html") rel, filename := relFilename("/a/1/", "index.html")
th.assertFileContent(filepath.Join("/work/public", filename), b.AssertFileContent(filepath.Join("/work/public", filename),
"TheContent", "TheContent",
"Single RelPermalink: "+rel, "Single RelPermalink: "+rel,
) )
rel, filename = relFilename("/cpath/a/1/", "cindex.html") rel, filename = relFilename("/cpath/a/1/", "cindex.html")
th.assertFileContent(filepath.Join("/work/public", filename), b.AssertFileContent(filepath.Join("/work/public", filename),
"TheContent", "TheContent",
"Single RelPermalink: "+rel, "Single RelPermalink: "+rel,
) )
th.assertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content") b.AssertFileContent(filepath.FromSlash("/work/public/images/hugo-logo.png"), "content")
// This should be just copied to destination. // This should be just copied to destination.
th.assertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content") b.AssertFileContent(filepath.FromSlash("/work/public/assets/pic1.png"), "content")
leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md") leafBundle1 := s.getPage(page.KindPage, "b/my-bundle/index.md")
assert.NotNil(leafBundle1) assert.NotNil(leafBundle1)
@ -159,8 +161,8 @@ func TestPageBundlerSiteRegular(t *testing.T) {
assert.NotNil(rootBundle) assert.NotNil(rootBundle)
assert.True(rootBundle.Parent().IsHome()) assert.True(rootBundle.Parent().IsHome())
if !ugly { if !ugly {
th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/") b.AssertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single RelPermalink: "+relURLBase+"/root/")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/") b.AssertFileContent(filepath.FromSlash("/work/public/cpath/root/cindex.html"), "Single RelPermalink: "+relURLBase+"/cpath/root/")
} }
leafBundle2 := s.getPage(page.KindPage, "a/b/index.md") leafBundle2 := s.getPage(page.KindPage, "a/b/index.md")
@ -172,6 +174,7 @@ func TestPageBundlerSiteRegular(t *testing.T) {
assert.Len(pageResources, 2) assert.Len(pageResources, 2)
firstPage := pageResources[0].(page.Page) firstPage := pageResources[0].(page.Page)
secondPage := pageResources[1].(page.Page) secondPage := pageResources[1].(page.Page)
assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.File().Filename(), secondPage.File().Filename()) assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.File().Filename(), secondPage.File().Filename())
assert.Contains(content(firstPage), "TheContent") assert.Contains(content(firstPage), "TheContent")
assert.Equal(6, len(leafBundle1.Resources())) assert.Equal(6, len(leafBundle1.Resources()))
@ -201,17 +204,17 @@ func TestPageBundlerSiteRegular(t *testing.T) {
} }
if ugly { if ugly {
th.assertFileContent("/work/public/2017/pageslug.html", b.AssertFileContent("/work/public/2017/pageslug.html",
relPermalinker("Single RelPermalink: %s/2017/pageslug.html"), relPermalinker("Single RelPermalink: %s/2017/pageslug.html"),
permalinker("Single Permalink: %s/2017/pageslug.html"), permalinker("Single Permalink: %s/2017/pageslug.html"),
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
} else { } else {
th.assertFileContent("/work/public/2017/pageslug/index.html", b.AssertFileContent("/work/public/2017/pageslug/index.html",
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg")) permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"))
th.assertFileContent("/work/public/cpath/2017/pageslug/cindex.html", b.AssertFileContent("/work/public/cpath/2017/pageslug/cindex.html",
relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"), relPermalinker("Single RelPermalink: %s/cpath/2017/pageslug/"),
relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"), relPermalinker("Short Sunset RelPermalink: %s/cpath/2017/pageslug/sunset2.jpg"),
relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"), relPermalinker("Sunset RelPermalink: %s/cpath/2017/pageslug/sunset1.jpg"),
@ -219,15 +222,15 @@ func TestPageBundlerSiteRegular(t *testing.T) {
) )
} }
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content") b.AssertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content") b.AssertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")
th.assertFileNotExist("/work/public/cpath/cpath/2017/pageslug/c/logo.png") assert.False(b.CheckExists("/work/public/cpath/cpath/2017/pageslug/c/logo.png"))
// Custom media type defined in site config. // Custom media type defined in site config.
assert.Len(leafBundle1.Resources().ByType("bepsays"), 1) assert.Len(leafBundle1.Resources().ByType("bepsays"), 1)
if ugly { if ugly {
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"), b.AssertFileContent(filepath.FromSlash("/work/public/2017/pageslug.html"),
"TheContent", "TheContent",
relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"), relPermalinker("Sunset RelPermalink: %s/2017/pageslug/sunset1.jpg"),
permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"), permalinker("Sunset Permalink: %s/2017/pageslug/sunset1.jpg"),
@ -245,19 +248,19 @@ func TestPageBundlerSiteRegular(t *testing.T) {
) )
// https://github.com/gohugoio/hugo/issues/5882 // https://github.com/gohugoio/hugo/issues/5882
th.assertFileContent( b.AssertFileContent(
filepath.FromSlash("/work/public/2017/pageslug.html"), "0: Page RelPermalink: |") filepath.FromSlash("/work/public/2017/pageslug.html"), "0: Page RelPermalink: |")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent") b.AssertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")
// 은행 // 은행
th.assertFileContent(filepath.FromSlash("/work/public/c/은행/logo-은행.png"), "은행 PNG") b.AssertFileContent(filepath.FromSlash("/work/public/c/은행/logo-은행.png"), "은행 PNG")
} else { } else {
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent") b.AssertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "TheContent")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent") b.AssertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/cindex.html"), "TheContent")
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "Single Title") b.AssertFileContent(filepath.FromSlash("/work/public/2017/pageslug/index.html"), "Single Title")
th.assertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single Title") b.AssertFileContent(filepath.FromSlash("/work/public/root/index.html"), "Single Title")
} }
@ -272,25 +275,26 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
t.Parallel() t.Parallel()
for _, ugly := range []bool{false, true} { for _, ugly := range []bool{false, true} {
ugly := ugly
t.Run(fmt.Sprintf("ugly=%t", ugly), t.Run(fmt.Sprintf("ugly=%t", ugly),
func(t *testing.T) { func(t *testing.T) {
t.Parallel()
assert := require.New(t) assert := require.New(t)
fs, cfg := newTestBundleSourcesMultilingual(t) fs, cfg := newTestBundleSourcesMultilingual(t)
cfg.Set("uglyURLs", ugly) cfg.Set("uglyURLs", ugly)
assert.NoError(loadDefaultSettingsFor(cfg)) b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
assert.NoError(loadLanguageSettings(cfg, nil)) b.Build(BuildCfg{SkipRender: true})
sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
assert.NoError(err)
assert.Equal(2, len(sites.Sites))
assert.NoError(sites.Build(BuildCfg{})) sites := b.H
assert.Equal(2, len(sites.Sites))
s := sites.Sites[0] s := sites.Sites[0]
assert.Equal(8, len(s.RegularPages())) assert.Equal(8, len(s.RegularPages()))
assert.Equal(16, len(s.Pages())) assert.Equal(16, len(s.Pages()))
//dumpPages(s.AllPages()...)
assert.Equal(31, len(s.AllPages())) assert.Equal(31, len(s.AllPages()))
bundleWithSubPath := s.getPage(page.KindPage, "lb/index") bundleWithSubPath := s.getPage(page.KindPage, "lb/index")
@ -358,15 +362,12 @@ func TestMultilingualDisableLanguage(t *testing.T) {
fs, cfg := newTestBundleSourcesMultilingual(t) fs, cfg := newTestBundleSourcesMultilingual(t)
cfg.Set("disableLanguages", []string{"nn"}) cfg.Set("disableLanguages", []string{"nn"})
assert.NoError(loadDefaultSettingsFor(cfg)) b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg}).WithNothingAdded()
assert.NoError(loadLanguageSettings(cfg, nil)) b.Build(BuildCfg{})
sites := b.H
sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
assert.NoError(err)
assert.Equal(1, len(sites.Sites)) assert.Equal(1, len(sites.Sites))
assert.NoError(sites.Build(BuildCfg{}))
s := sites.Sites[0] s := sites.Sites[0]
assert.Equal(8, len(s.RegularPages())) assert.Equal(8, len(s.RegularPages()))
@ -383,20 +384,98 @@ func TestMultilingualDisableLanguage(t *testing.T) {
} }
func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
if runtime.GOOS == "windows" && os.Getenv("CI") == "" { skipSymlink(t)
t.Skip("Skip TestPageBundlerSiteWitSymbolicLinksInContent as os.Symlink needs administrator rights on Windows")
} wd, _ := os.Getwd()
defer func() {
os.Chdir(wd)
}()
assert := require.New(t) assert := require.New(t)
ps, clean, workDir := newTestBundleSymbolicSources(t) // We need to use the OS fs for this.
cfg := viper.New()
fs := hugofs.NewFrom(hugofs.Os, cfg)
workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugosym")
assert.NoError(err)
contentDirName := "content"
contentDir := filepath.Join(workDir, contentDirName)
assert.NoError(os.MkdirAll(filepath.Join(contentDir, "a"), 0777))
for i := 1; i <= 3; i++ {
assert.NoError(os.MkdirAll(filepath.Join(workDir, fmt.Sprintf("symcontent%d", i)), 0777))
}
assert.NoError(os.MkdirAll(filepath.Join(workDir, "symcontent2", "a1"), 0777))
// Symlinked sections inside content.
os.Chdir(contentDir)
for i := 1; i <= 3; i++ {
assert.NoError(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)))
}
assert.NoError(os.Chdir(filepath.Join(contentDir, "a")))
// Create a symlink to one single content file
assert.NoError(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"))
assert.NoError(os.Chdir(filepath.FromSlash("../../symcontent3")))
// Create a circular symlink. Will print some warnings.
assert.NoError(os.Symlink(filepath.Join("..", contentDirName), filepath.FromSlash("circus")))
assert.NoError(os.Chdir(workDir))
defer clean() defer clean()
cfg := ps.Cfg cfg.Set("workingDir", workDir)
fs := ps.Fs cfg.Set("contentDir", contentDirName)
cfg.Set("baseURL", "https://example.com")
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: loggers.NewErrorLogger()}, BuildCfg{}) layout := `{{ .Title }}|{{ .Content }}`
pageContent := `---
slug: %s
date: 2017-10-09
---
th := testHelper{s.Cfg, s.Fs, t} TheContent.
`
b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{
Fs: fs,
Cfg: cfg,
})
b.WithTemplates(
"_default/single.html", layout,
"_default/list.html", layout,
)
b.WithContent(
"a/regular.md", fmt.Sprintf(pageContent, "a1"),
)
b.WithSourceFile(
"symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"),
"symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"),
// Regular files inside symlinked folder.
"symcontent1/s1.md", fmt.Sprintf(pageContent, "s1"),
"symcontent1/s2.md", fmt.Sprintf(pageContent, "s2"),
// A bundle
"symcontent2/a1/index.md", fmt.Sprintf(pageContent, ""),
"symcontent2/a1/page.md", fmt.Sprintf(pageContent, "page"),
"symcontent2/a1/logo.png", "image",
// Assets
"symcontent3/s1.png", "image",
"symcontent3/s2.png", "image",
)
b.Build(BuildCfg{})
s := b.H.Sites[0]
assert.Equal(7, len(s.RegularPages())) assert.Equal(7, len(s.RegularPages()))
a1Bundle := s.getPage(page.KindPage, "symbolic2/a1/index.md") a1Bundle := s.getPage(page.KindPage, "symbolic2/a1/index.md")
@ -404,9 +483,9 @@ func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
assert.Equal(2, len(a1Bundle.Resources())) assert.Equal(2, len(a1Bundle.Resources()))
assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType))) assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType)))
th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent") b.AssertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent")
th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent") b.AssertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent")
th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic2/a1/index.html"), "TheContent") b.AssertFileContent(filepath.FromSlash(workDir+"/public/symbolic2/a1/index.html"), "TheContent")
} }
@ -576,7 +655,7 @@ Single content.
} }
func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) { func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) {
cfg, fs := newTestCfg() cfg, fs := newTestCfgBasic()
assert := require.New(t) assert := require.New(t)
workDir := "/work" workDir := "/work"
@ -743,7 +822,7 @@ Content for 은행.
} }
func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, *viper.Viper) { func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, *viper.Viper) {
cfg, fs := newTestCfg() cfg, fs := newTestCfgBasic()
workDir := "/work" workDir := "/work"
cfg.Set("workingDir", workDir) cfg.Set("workingDir", workDir)
@ -825,90 +904,6 @@ TheContent.
return fs, cfg return fs, cfg
} }
func newTestBundleSymbolicSources(t *testing.T) (*helpers.PathSpec, func(), string) {
assert := require.New(t)
// We need to use the OS fs for this.
cfg := viper.New()
fs := hugofs.NewFrom(hugofs.Os, cfg)
fs.Destination = &afero.MemMapFs{}
loadDefaultSettingsFor(cfg)
workDir, clean, err := createTempDir("hugosym")
assert.NoError(err)
contentDir := "base"
cfg.Set("workingDir", workDir)
cfg.Set("contentDir", contentDir)
cfg.Set("baseURL", "https://example.com")
if err := loadLanguageSettings(cfg, nil); err != nil {
t.Fatal(err)
}
layout := `{{ .Title }}|{{ .Content }}`
pageContent := `---
slug: %s
date: 2017-10-09
---
TheContent.
`
fs.Source.MkdirAll(filepath.Join(workDir, "layouts", "_default"), 0777)
fs.Source.MkdirAll(filepath.Join(workDir, contentDir), 0777)
fs.Source.MkdirAll(filepath.Join(workDir, contentDir, "a"), 0777)
for i := 1; i <= 3; i++ {
fs.Source.MkdirAll(filepath.Join(workDir, fmt.Sprintf("symcontent%d", i)), 0777)
}
fs.Source.MkdirAll(filepath.Join(workDir, "symcontent2", "a1"), 0777)
writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "single.html"), layout)
writeSource(t, fs, filepath.Join(workDir, "layouts", "_default", "list.html"), layout)
writeSource(t, fs, filepath.Join(workDir, contentDir, "a", "regular.md"), fmt.Sprintf(pageContent, "a1"))
// Regular files inside symlinked folder.
writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s1.md"), fmt.Sprintf(pageContent, "s1"))
writeSource(t, fs, filepath.Join(workDir, "symcontent1", "s2.md"), fmt.Sprintf(pageContent, "s2"))
// A bundle
writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "index.md"), fmt.Sprintf(pageContent, ""))
writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "page.md"), fmt.Sprintf(pageContent, "page"))
writeSource(t, fs, filepath.Join(workDir, "symcontent2", "a1", "logo.png"), "image")
// Assets
writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s1.png"), "image")
writeSource(t, fs, filepath.Join(workDir, "symcontent3", "s2.png"), "image")
wd, _ := os.Getwd()
defer func() {
os.Chdir(wd)
}()
// Symlinked sections inside content.
os.Chdir(filepath.Join(workDir, contentDir))
for i := 1; i <= 3; i++ {
assert.NoError(os.Symlink(filepath.FromSlash(fmt.Sprintf(("../symcontent%d"), i)), fmt.Sprintf("symbolic%d", i)))
}
os.Chdir(filepath.Join(workDir, contentDir, "a"))
// Create a symlink to one single content file
assert.NoError(os.Symlink(filepath.FromSlash("../../symcontent2/a1/page.md"), "page_s.md"))
os.Chdir(filepath.FromSlash("../../symcontent3"))
// Create a circular symlink. Will print some warnings.
assert.NoError(os.Symlink(filepath.Join("..", contentDir), filepath.FromSlash("circus")))
os.Chdir(workDir)
assert.NoError(err)
ps, _ := helpers.NewPathSpec(fs, cfg)
return ps, clean, workDir
}
// https://github.com/gohugoio/hugo/issues/5858 // https://github.com/gohugoio/hugo/issues/5858
func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) { func TestBundledResourcesWhenMultipleOutputFormats(t *testing.T) {
t.Parallel() t.Parallel()
@ -971,3 +966,112 @@ slug: %s
assert.True(b.CheckExists("public/about/services2/this-is-another-slug/index.html")) assert.True(b.CheckExists("public/about/services2/this-is-another-slug/index.html"))
} }
func TestBundleMisc(t *testing.T) {
config := `
baseURL = "https://example.com"
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true
ignoreFiles = ["README\\.md", "content/en/ignore"]
[Languages]
[Languages.en]
weight = 99999
contentDir = "content/en"
[Languages.nn]
weight = 20
contentDir = "content/nn"
[Languages.sv]
weight = 30
contentDir = "content/sv"
[Languages.nb]
weight = 40
contentDir = "content/nb"
`
const pageContent = `---
title: %q
---
`
createPage := func(s string) string {
return fmt.Sprintf(pageContent, s)
}
b := newTestSitesBuilder(t).WithConfigFile("toml", config)
b.WithLogger(loggers.NewWarningLogger())
b.WithTemplates("_default/list.html", `{{ range .Site.Pages }}
{{ .Kind }}|{{ .Path }}|{{ with .CurrentSection }}CurrentSection: {{ .Path }}{{ end }}|{{ .RelPermalink }}{{ end }}
`)
b.WithTemplates("_default/single.html", `Single: {{ .Title }}`)
b.WithContent("en/sect1/sect2/_index.md", createPage("en: Sect 2"))
b.WithContent("en/sect1/sect2/page.md", createPage("en: Page"))
b.WithContent("en/sect1/sect2/data-branch.json", "mydata")
b.WithContent("nn/sect1/sect2/page.md", createPage("nn: Page"))
b.WithContent("nn/sect1/sect2/data-branch.json", "my nn data")
// En only
b.WithContent("en/enonly/myen.md", createPage("en: Page"))
b.WithContent("en/enonly/myendata.json", "mydata")
// Leaf
b.WithContent("nn/b1/index.md", createPage("nn: leaf"))
b.WithContent("en/b1/index.md", createPage("en: leaf"))
b.WithContent("sv/b1/index.md", createPage("sv: leaf"))
b.WithContent("nb/b1/index.md", createPage("nb: leaf"))
// Should be ignored
b.WithContent("en/ignore/page.md", createPage("en: ignore"))
b.WithContent("en/README.md", createPage("en: ignore"))
// Both leaf and branch bundle in same dir
b.WithContent("en/b2/index.md", `---
slug: leaf
---
`)
b.WithContent("en/b2/_index.md", createPage("en: branch"))
b.WithContent("en/b1/data1.json", "en: data")
b.WithContent("sv/b1/data1.json", "sv: data")
b.WithContent("sv/b1/data2.json", "sv: data2")
b.WithContent("nb/b1/data2.json", "nb: data2")
b.Build(BuildCfg{})
b.AssertFileContent("public/en/index.html",
filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"),
"myen.md|CurrentSection: enonly")
b.AssertFileContentFn("public/en/index.html", func(s string) bool {
// Check ignored files
return !regexp.MustCompile("README|ignore").MatchString(s)
})
b.AssertFileContent("public/nn/index.html", filepath.FromSlash("page|sect1/sect2/page.md|CurrentSection: sect1"))
b.AssertFileContentFn("public/nn/index.html", func(s string) bool {
return !strings.Contains(s, "enonly")
})
// Check order of inherited data file
b.AssertFileContent("public/nb/b1/data1.json", "en: data") // Default content
b.AssertFileContent("public/nn/b1/data2.json", "sv: data") // First match
b.AssertFileContent("public/en/enonly/myen/index.html", "Single: en: Page")
b.AssertFileContent("public/en/enonly/myendata.json", "mydata")
assert := require.New(t)
assert.False(b.CheckExists("public/sv/enonly/myen/index.html"))
// Both leaf and branch bundle in same dir
// We log a warning about it, but we keep both.
b.AssertFileContent("public/en/b2/index.html",
"/en/b2/leaf/",
filepath.FromSlash("section|sect1/sect2/_index.md|CurrentSection: sect1/sect2/_index.md"))
}

View file

@ -148,7 +148,6 @@ func newPageCollectionsFromPages(pages pageStatePages) *PageCollections {
for _, p := range pageCollection { for _, p := range pageCollection {
if p.IsPage() { if p.IsPage() {
sourceRef := p.sourceRef() sourceRef := p.sourceRef()
if sourceRef != "" { if sourceRef != "" {
// index the canonical ref // index the canonical ref
// e.g. /section/article.md // e.g. /section/article.md

779
hugolib/pages_capture.go Normal file
View file

@ -0,0 +1,779 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"context"
"fmt"
"os"
pth "path"
"path/filepath"
"strings"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/resources"
"github.com/pkg/errors"
"golang.org/x/sync/errgroup"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/common/loggers"
"github.com/spf13/afero"
)
func newPagesCollector(
sp *source.SourceSpec,
logger *loggers.Logger,
contentTracker *contentChangeMap,
proc pagesCollectorProcessorProvider, filenames ...string) *pagesCollector {
return &pagesCollector{
fs: sp.SourceFs,
proc: proc,
sp: sp,
logger: logger,
filenames: filenames,
tracker: contentTracker,
}
}
func newPagesProcessor(h *HugoSites, sp *source.SourceSpec, partialBuild bool) *pagesProcessor {
return &pagesProcessor{
h: h,
sp: sp,
partialBuild: partialBuild,
numWorkers: config.GetNumWorkerMultiplier() * 3,
}
}
type fileinfoBundle struct {
header hugofs.FileMetaInfo
resources []hugofs.FileMetaInfo
}
func (b *fileinfoBundle) containsResource(name string) bool {
for _, r := range b.resources {
if r.Name() == name {
return true
}
}
return false
}
type pageBundles map[string]*fileinfoBundle
type pagesCollector struct {
sp *source.SourceSpec
fs afero.Fs
logger *loggers.Logger
// Ordered list (bundle headers first) used in partial builds.
filenames []string
// Content files tracker used in partial builds.
tracker *contentChangeMap
proc pagesCollectorProcessorProvider
}
type contentDirKey struct {
dirname string
filename string
tp bundleDirType
}
// Collect.
func (c *pagesCollector) Collect() error {
c.proc.Start(context.Background())
if c.tracker != nil {
c.tracker.start()
defer c.tracker.stop()
}
var collectErr error
if len(c.filenames) == 0 {
// Collect everything.
collectErr = c.collectDir("", false, nil)
} else {
dirs := make(map[contentDirKey]bool)
for _, filename := range c.filenames {
dir, filename, btype := c.tracker.resolveAndRemove(filename)
dirs[contentDirKey{dir, filename, btype}] = true
}
for dir := range dirs {
switch dir.tp {
case bundleLeaf, bundleBranch:
collectErr = c.collectDir(dir.dirname, true, nil)
default:
// We always start from a directory.
collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool {
return strings.HasSuffix(dir.filename, fim.Meta().Path())
})
}
if collectErr != nil {
break
}
}
}
err := c.proc.Wait()
if collectErr != nil {
return collectErr
}
return err
}
func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error {
fi, err := c.fs.Stat(dirname)
if err != nil {
if os.IsNotExist(err) {
// May have been deleted.
return nil
}
return err
}
handleDir := func(
btype bundleDirType,
dir hugofs.FileMetaInfo,
path string,
readdir []hugofs.FileMetaInfo) error {
if btype > bundleNot && c.tracker != nil {
c.tracker.add(path, btype)
}
if btype == bundleBranch {
if err := c.handleBundleBranch(readdir); err != nil {
return err
}
// A branch bundle is only this directory level, so keep walking.
return nil
} else if btype == bundleLeaf {
if err := c.handleBundleLeaf(dir, path, readdir); err != nil {
return err
}
return nil
}
if err := c.handleFiles(readdir...); err != nil {
return err
}
return nil
}
filter := func(fim hugofs.FileMetaInfo) bool {
if fim.Meta().SkipDir() {
return false
}
if c.sp.IgnoreFile(fim.Meta().Filename()) {
return false
}
if inFilter != nil {
return inFilter(fim)
}
return true
}
preHook := func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
var btype bundleDirType
filtered := readdir[:0]
for _, fi := range readdir {
if filter(fi) {
filtered = append(filtered, fi)
if c.tracker != nil {
// Track symlinks.
c.tracker.addSymbolicLinkMapping(fi)
}
}
}
readdir = filtered
// We merge language directories, so there can be duplicates, but they
// will be ordered, most important first.
var duplicates []int
seen := make(map[string]bool)
for i, fi := range readdir {
if fi.IsDir() {
continue
}
meta := fi.Meta()
class := meta.Classifier()
translationBase := meta.TranslationBaseNameWithExt()
key := pth.Join(meta.Lang(), translationBase)
if seen[key] {
duplicates = append(duplicates, i)
continue
}
seen[key] = true
var thisBtype bundleDirType
switch class {
case files.ContentClassLeaf:
thisBtype = bundleLeaf
case files.ContentClassBranch:
thisBtype = bundleBranch
}
// Folders containing both index.* and _index.* files have
// undefined behaviour and can never work.
// The branch variant will win because of sort order, but log
// a warning about it.
if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype {
c.logger.WARN.Printf("Content directory %q has both index.* and _index.* files, pick one.", dir.Meta().Filename())
// Reclassify it so it will be handled as a content file inside the
// section, which is in line with the <= 0.55 behaviour.
meta["classifier"] = files.ContentClassContent
} else if thisBtype > bundleNot {
btype = thisBtype
}
}
if len(duplicates) > 0 {
for i := len(duplicates) - 1; i >= 0; i-- {
idx := duplicates[i]
readdir = append(readdir[:idx], readdir[idx+1:]...)
}
}
err := handleDir(btype, dir, path, readdir)
if err != nil {
return nil, err
}
if btype == bundleLeaf || partial {
return nil, filepath.SkipDir
}
// Keep walking.
return readdir, nil
}
var postHook hugofs.WalkHook
if c.tracker != nil {
postHook = func(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) ([]hugofs.FileMetaInfo, error) {
if c.tracker == nil {
// Nothing to do.
return readdir, nil
}
return readdir, nil
}
}
wfn := func(path string, info hugofs.FileMetaInfo, err error) error {
if err != nil {
return err
}
return nil
}
w := hugofs.NewWalkway(hugofs.WalkwayConfig{
Fs: c.fs,
Logger: c.logger,
Root: dirname,
Info: fi.(hugofs.FileMetaInfo),
HookPre: preHook,
HookPost: postHook,
WalkFn: wfn})
return w.Walk()
}
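// A minimal standalone sketch of the duplicate removal in the pre-hook above:
// collect duplicate indexes on a first pass keyed by (lang, translation base),
// then delete them from the end so the earlier indexes stay valid.
// (exampleDedupe and its string key are hypothetical simplifications.)
func exampleDedupe(items []string, key func(string) string) []string {
	seen := make(map[string]bool)
	var duplicates []int
	for i, item := range items {
		k := key(item)
		if seen[k] {
			duplicates = append(duplicates, i)
			continue
		}
		seen[k] = true
	}
	for i := len(duplicates) - 1; i >= 0; i-- {
		idx := duplicates[i]
		items = append(items[:idx], items[idx+1:]...)
	}
	return items
}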
func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool {
class := fi.Meta().Classifier()
return class == files.ContentClassLeaf || class == files.ContentClassBranch
}
func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string {
lang := fi.Meta().Lang()
if lang != "" {
return lang
}
return c.sp.DefaultContentLanguage
}
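// addToBundle adds the given file to the correct per-language bundle in bundles.
// Illustration (hypothetical filenames, not part of this commit): for a leaf
// bundle containing index.md (en), index.fr.md (fr) and data.json, the
// data.json resource has no French translation, so a clone of its
// FileMetaInfo with lang set to "fr" is appended to the French bundle as
// well; a language that has resources but no content header gets its header
// cloned from the default content language bundle.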
func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirType, bundles pageBundles) error {
getBundle := func(lang string) *fileinfoBundle {
return bundles[lang]
}
cloneBundle := func(lang string) *fileinfoBundle {
// Every bundled file needs a content file header.
// Use the default content language if found, else just
// pick one.
var (
source *fileinfoBundle
found bool
)
source, found = bundles[c.sp.DefaultContentLanguage]
if !found {
for _, b := range bundles {
source = b
break
}
}
if source == nil {
panic(fmt.Sprintf("no source found, %d", len(bundles)))
}
clone := c.cloneFileInfo(source.header)
clone.Meta()["lang"] = lang
return &fileinfoBundle{
header: clone,
}
}
lang := c.getLang(info)
bundle := getBundle(lang)
isBundleHeader := c.isBundleHeader(info)
classifier := info.Meta().Classifier()
if bundle == nil {
if isBundleHeader {
bundle = &fileinfoBundle{header: info}
bundles[lang] = bundle
} else {
if btyp == bundleBranch {
// No special logic for branch bundles.
// Every language needs its own _index.md file.
return c.handleFiles(info)
}
bundle = cloneBundle(lang)
bundles[lang] = bundle
}
}
if !isBundleHeader {
bundle.resources = append(bundle.resources, info)
}
if classifier == files.ContentClassFile {
translations := info.Meta().Translations()
if len(translations) < len(bundles) {
for lang, b := range bundles {
if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) {
// Clone and add it to the bundle.
clone := c.cloneFileInfo(info)
clone.Meta()["lang"] = lang
b.resources = append(b.resources, clone)
}
}
}
}
return nil
}
func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo {
cm := hugofs.FileMeta{}
meta := fi.Meta()
if meta == nil {
panic(fmt.Sprintf("no meta: %v", fi.Name()))
}
for k, v := range meta {
cm[k] = v
}
return hugofs.NewFileMetaInfo(fi, cm)
}
func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error {
// Maps each language to its bundle.
bundles := pageBundles{}
for _, fim := range readdir {
if fim.IsDir() {
continue
}
meta := fim.Meta()
switch meta.Classifier() {
case files.ContentClassContent:
if err := c.handleFiles(fim); err != nil {
return err
}
default:
if err := c.addToBundle(fim, bundleBranch, bundles); err != nil {
return err
}
}
}
return c.proc.Process(bundles)
}
func (c *pagesCollector) handleBundleLeaf(dir hugofs.FileMetaInfo, path string, readdir []hugofs.FileMetaInfo) error {
// Maps each language to its bundle.
bundles := pageBundles{}
walk := func(path string, info hugofs.FileMetaInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
return c.addToBundle(info, bundleLeaf, bundles)
}
// Start a new walker from the given path.
w := hugofs.NewWalkway(hugofs.WalkwayConfig{
Root: path,
Fs: c.fs,
Logger: c.logger,
Info: dir,
DirEntries: readdir,
WalkFn: walk})
if err := w.Walk(); err != nil {
return err
}
return c.proc.Process(bundles)
}
func (c *pagesCollector) handleFiles(fis ...hugofs.FileMetaInfo) error {
for _, fi := range fis {
if fi.IsDir() {
continue
}
if err := c.proc.Process(fi); err != nil {
return err
}
}
return nil
}
type pagesCollectorProcessorProvider interface {
Process(item interface{}) error
Start(ctx context.Context) context.Context
Wait() error
}
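// For illustration only: a minimal (hypothetical) implementation of this
// interface, similar in spirit to the testPagesCollectorProcessor helper in
// the pages capture test added in this commit. It records every item it is
// handed and does no work on Start/Wait.
//
//	type recordingProcessor struct {
//		items []interface{}
//	}
//
//	func (p *recordingProcessor) Process(item interface{}) error {
//		p.items = append(p.items, item)
//		return nil
//	}
//
//	func (p *recordingProcessor) Start(ctx context.Context) context.Context { return ctx }
//	func (p *recordingProcessor) Wait() error                               { return nil }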
type pagesProcessor struct {
h *HugoSites
sp *source.SourceSpec
itemChan chan interface{}
itemGroup *errgroup.Group
// The output Pages
pagesChan chan *pageState
pagesGroup *errgroup.Group
numWorkers int
partialBuild bool
}
func (proc *pagesProcessor) Process(item interface{}) error {
proc.itemChan <- item
return nil
}
func (proc *pagesProcessor) Start(ctx context.Context) context.Context {
proc.pagesChan = make(chan *pageState, proc.numWorkers)
proc.pagesGroup, ctx = errgroup.WithContext(ctx)
proc.itemChan = make(chan interface{}, proc.numWorkers)
proc.itemGroup, ctx = errgroup.WithContext(ctx)
proc.pagesGroup.Go(func() error {
for p := range proc.pagesChan {
s := p.s
p.forceRender = proc.partialBuild
if p.forceRender {
s.replacePage(p)
} else {
s.addPage(p)
}
}
return nil
})
for i := 0; i < proc.numWorkers; i++ {
proc.itemGroup.Go(func() error {
for item := range proc.itemChan {
select {
case <-proc.h.Done():
return nil
default:
if err := proc.process(item); err != nil {
proc.h.SendError(err)
}
}
}
return nil
})
}
return ctx
}
func (proc *pagesProcessor) Wait() error {
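// Close the item channel and wait for the item workers to drain it before
// closing the pages channel; the page-collecting goroutine started in Start
// must have received every page before it can finish.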
close(proc.itemChan)
err := proc.itemGroup.Wait()
close(proc.pagesChan)
if err != nil {
return err
}
return proc.pagesGroup.Wait()
}
func (proc *pagesProcessor) newPageFromBundle(b *fileinfoBundle) (*pageState, error) {
p, err := proc.newPageFromFi(b.header, nil)
if err != nil {
return nil, err
}
if len(b.resources) > 0 {
resources := make(resource.Resources, len(b.resources))
for i, rfi := range b.resources {
meta := rfi.Meta()
classifier := meta.Classifier()
var r resource.Resource
switch classifier {
case files.ContentClassContent:
rp, err := proc.newPageFromFi(rfi, p)
if err != nil {
return nil, err
}
rp.m.resourcePath = filepath.ToSlash(strings.TrimPrefix(rp.Path(), p.File().Dir()))
r = rp
case files.ContentClassFile:
r, err = proc.newResource(rfi, p)
if err != nil {
return nil, err
}
default:
panic(fmt.Sprintf("invalid classifier: %q", classifier))
}
resources[i] = r
}
p.addResources(resources...)
}
return p, nil
}
func (proc *pagesProcessor) newPageFromFi(fim hugofs.FileMetaInfo, owner *pageState) (*pageState, error) {
fi, err := newFileInfo(proc.sp, fim)
if err != nil {
return nil, err
}
var s *Site
meta := fim.Meta()
if owner != nil {
s = owner.s
} else {
lang := meta.Lang()
s = proc.getSite(lang)
}
r := func() (hugio.ReadSeekCloser, error) {
return meta.Open()
}
p, err := newPageWithContent(fi, s, owner != nil, r)
if err != nil {
return nil, err
}
p.parent = owner
return p, nil
}
func (proc *pagesProcessor) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resource.Resource, error) {
// TODO(bep) consolidate with multihost logic + clean up
outputFormats := owner.m.outputFormats()
seen := make(map[string]bool)
var targetBasePaths []string
// Make sure bundled resources are published to all of the output formats'
// sub paths.
for _, f := range outputFormats {
p := f.Path
if seen[p] {
continue
}
seen[p] = true
targetBasePaths = append(targetBasePaths, p)
}
meta := fim.Meta()
r := func() (hugio.ReadSeekCloser, error) {
return meta.Open()
}
target := strings.TrimPrefix(meta.Path(), owner.File().Dir())
return owner.s.ResourceSpec.New(
resources.ResourceSourceDescriptor{
TargetPaths: owner.getTargetPaths,
OpenReadSeekCloser: r,
FileInfo: fim,
RelTargetFilename: target,
TargetBasePaths: targetBasePaths,
})
}
func (proc *pagesProcessor) getSite(lang string) *Site {
if lang == "" {
return proc.h.Sites[0]
}
for _, s := range proc.h.Sites {
if lang == s.Lang() {
return s
}
}
return proc.h.Sites[0]
}
func (proc *pagesProcessor) copyFile(fim hugofs.FileMetaInfo) error {
meta := fim.Meta()
s := proc.getSite(meta.Lang())
f, err := meta.Open()
if err != nil {
return errors.Wrap(err, "copyFile: failed to open")
}
target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path())
defer f.Close()
return s.publish(&s.PathSpec.ProcessingStats.Files, target, f)
}
func (proc *pagesProcessor) process(item interface{}) error {
send := func(p *pageState, err error) {
if err != nil {
proc.sendError(err)
} else {
proc.pagesChan <- p
}
}
switch v := item.(type) {
// Page bundles mapped to their language.
case pageBundles:
for _, bundle := range v {
if proc.shouldSkip(bundle.header) {
continue
}
send(proc.newPageFromBundle(bundle))
}
case hugofs.FileMetaInfo:
if proc.shouldSkip(v) {
return nil
}
meta := v.Meta()
classifier := meta.Classifier()
switch classifier {
case files.ContentClassContent:
send(proc.newPageFromFi(v, nil))
case files.ContentClassFile:
proc.sendError(proc.copyFile(v))
default:
panic(fmt.Sprintf("invalid classifier: %q", classifier))
}
default:
panic(fmt.Sprintf("unrecognized item type in Process: %T", item))
}
return nil
}
func (proc *pagesProcessor) sendError(err error) {
if err == nil {
return
}
proc.h.SendError(err)
}
func (proc *pagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool {
return proc.sp.DisabledLanguages[fim.Meta().Lang()]
}
func stringSliceContains(k string, values ...string) bool {
for _, v := range values {
if k == v {
return true
}
}
return false
}
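A rough sketch of how the collector and processor in this file fit together, as exercised by the tests added in this commit (the helper below is hypothetical; judging by those tests, Collect drives the processor's Start/Process/Wait lifecycle itself):

// collectAll is a hypothetical wrapper, for illustration only.
func collectAll(sp *source.SourceSpec, logger *loggers.Logger, proc pagesCollectorProcessorProvider) error {
	c := newPagesCollector(sp, logger, nil, proc)
	// Collect walks the content filesystems, groups the files it finds into
	// bundles and hands them to proc, returning the first error from the
	// walk, Process or Wait.
	return c.Collect()
}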

View file

@ -0,0 +1,88 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"context"
"fmt"
"path/filepath"
"testing"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
func TestPagesCapture(t *testing.T) {
cfg, hfs := newTestCfg()
fs := hfs.Source
assert := require.New(t)
var writeFile = func(filename string) {
assert.NoError(afero.WriteFile(fs, filepath.FromSlash(filename), []byte(fmt.Sprintf("content-%s", filename)), 0755))
}
writeFile("_index.md")
writeFile("logo.png")
writeFile("root.md")
writeFile("blog/index.md")
writeFile("blog/hello.md")
writeFile("blog/images/sunset.png")
writeFile("pages/page1.md")
writeFile("pages/page2.md")
writeFile("pages/page.png")
ps, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg)
assert.NoError(err)
sourceSpec := source.NewSourceSpec(ps, fs)
t.Run("Collect", func(t *testing.T) {
assert := require.New(t)
proc := &testPagesCollectorProcessor{}
c := newPagesCollector(sourceSpec, loggers.NewErrorLogger(), nil, proc)
assert.NoError(c.Collect())
assert.Equal(4, len(proc.items))
})
t.Run("error in Wait", func(t *testing.T) {
assert := require.New(t)
c := newPagesCollector(sourceSpec, loggers.NewErrorLogger(), nil,
&testPagesCollectorProcessor{waitErr: errors.New("failed")})
assert.Error(c.Collect())
})
}
type testPagesCollectorProcessor struct {
items []interface{}
waitErr error
}
func (proc *testPagesCollectorProcessor) Process(item interface{}) error {
proc.items = append(proc.items, item)
return nil
}
func (proc *testPagesCollectorProcessor) Start(ctx context.Context) context.Context {
return ctx
}
func (proc *testPagesCollectorProcessor) Wait() error { return proc.waitErr }

View file

@@ -20,6 +20,7 @@ import (
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/langs"
+	"github.com/gohugoio/hugo/modules"

 	"github.com/pkg/errors"

 	"github.com/gohugoio/hugo/hugofs"
@@ -39,7 +40,6 @@ type Paths struct {
 	// Directories
 	// TODO(bep) when we have trimmed down mos of the dirs usage outside of this package, make
 	// these into an interface.
-	ContentDir string
 	ThemesDir  string
 	WorkingDir string
@@ -62,8 +62,9 @@ type Paths struct {
 	UglyURLs     bool
 	CanonifyURLs bool

 	Language              *langs.Language
 	Languages             langs.Languages
+	LanguagesDefaultFirst langs.Languages

 	// The PathSpec looks up its config settings in both the current language
 	// and then in the global Viper config.
@@ -74,8 +75,8 @@ type Paths struct {
 	DefaultContentLanguage string
 	multilingual           bool

-	themes    []string
-	AllThemes []ThemeConfig
+	AllModules    modules.Modules
+	ModulesClient *modules.Client
 }

 func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
@@ -91,12 +92,6 @@ func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
 	resourceDir := filepath.Clean(cfg.GetString("resourceDir"))
 	publishDir := filepath.Clean(cfg.GetString("publishDir"))

-	if contentDir == "" {
-		return nil, fmt.Errorf("contentDir not set")
-	}
-	if resourceDir == "" {
-		return nil, fmt.Errorf("resourceDir not set")
-	}
 	if publishDir == "" {
 		return nil, fmt.Errorf("publishDir not set")
 	}
@@ -104,8 +99,9 @@ func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
 	defaultContentLanguage := cfg.GetString("defaultContentLanguage")

 	var (
 		language              *langs.Language
 		languages             langs.Languages
+		languagesDefaultFirst langs.Languages
 	)

 	if l, ok := cfg.(*langs.Language); ok {
@@ -117,6 +113,12 @@ func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
 		languages = l
 	}

+	if l, ok := cfg.Get("languagesSortedDefaultFirst").(langs.Languages); ok {
+		languagesDefaultFirst = l
+	}
+
+	//
+
 	if len(languages) == 0 {
 		// We have some old tests that does not test the entire chain, hence
 		// they have no languages. So create one so we get the proper filesystem.
@@ -156,33 +158,30 @@ func New(fs *hugofs.Fs, cfg config.Provider) (*Paths, error) {
 		UglyURLs:     cfg.GetBool("uglyURLs"),
 		CanonifyURLs: cfg.GetBool("canonifyURLs"),

-		ContentDir: contentDir,
 		ThemesDir:  cfg.GetString("themesDir"),
 		WorkingDir: workingDir,

 		AbsResourcesDir: absResourcesDir,
 		AbsPublishDir:   absPublishDir,

-		themes:       config.GetStringSlicePreserveString(cfg, "theme"),
 		multilingual:                   cfg.GetBool("multilingual"),
 		defaultContentLanguageInSubdir: cfg.GetBool("defaultContentLanguageInSubdir"),
 		DefaultContentLanguage:         defaultContentLanguage,

 		Language:                 language,
 		Languages:                languages,
+		LanguagesDefaultFirst:    languagesDefaultFirst,
 		MultihostTargetBasePaths: multihostTargetBasePaths,

 		PaginatePath: cfg.GetString("paginatePath"),
 	}

-	if !cfg.IsSet("theme") && cfg.IsSet("allThemes") {
-		p.AllThemes = cfg.Get("allThemes").([]ThemeConfig)
-	} else {
-		p.AllThemes, err = collectThemeNames(p)
-		if err != nil {
-			return nil, err
-		}
+	if cfg.IsSet("allModules") {
+		p.AllModules = cfg.Get("allModules").(modules.Modules)
+	}
+
+	if cfg.IsSet("modulesClient") {
+		p.ModulesClient = cfg.Get("modulesClient").(*modules.Client)
 	}

 	// TODO(bep) remove this, eventually
@@ -207,15 +206,6 @@ func (p *Paths) Lang() string {
 	return p.Language.Lang
 }

-// ThemeSet checks whether a theme is in use or not.
-func (p *Paths) ThemeSet() bool {
-	return len(p.themes) > 0
-}
-
-func (p *Paths) Themes() []string {
-	return p.themes
-}
-
 func (p *Paths) GetTargetLanguageBasePath() string {
 	if p.Languages.IsMultihost() {
 		// In a multihost configuration all assets will be published below the language code.
@@ -269,6 +259,18 @@ func (p *Paths) AbsPathify(inPath string) string {
 	return AbsPathify(p.WorkingDir, inPath)
 }

+// RelPathify trims any WorkingDir prefix from the given filename. If
+// the filename is not considered to be absolute, the path is just cleaned.
+func (p *Paths) RelPathify(filename string) string {
+	filename = filepath.Clean(filename)
+	if !filepath.IsAbs(filename) {
+		return filename
+	}
+
+	return strings.TrimPrefix(strings.TrimPrefix(filename, p.WorkingDir), FilePathSeparator)
+}
+
 // AbsPathify creates an absolute path if given a working dir and arelative path.
 // If already absolute, the path is just cleaned.
 func AbsPathify(workingDir, inPath string) string {
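For context, a sketch of what the new RelPathify is expected to do (paths are hypothetical and assume WorkingDir is "/work" on a Unix filesystem):

p := &Paths{WorkingDir: "/work"}
p.RelPathify("/work/content/post.md") // "content/post.md"
p.RelPathify("content/post.md")       // already relative: just cleaned
p.RelPathify("/elsewhere/file.md")    // "elsewhere/file.md" (prefix trim is a no-op, leading separator removed)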

View file

@@ -16,6 +16,8 @@ package paths
 import (
 	"testing"

+	"github.com/gohugoio/hugo/langs"
+
 	"github.com/gohugoio/hugo/hugofs"
 	"github.com/spf13/viper"
 	"github.com/stretchr/testify/require"
@@ -27,14 +29,19 @@ func TestNewPaths(t *testing.T) {
 	v := viper.New()
 	fs := hugofs.NewMem(v)

+	v.Set("languages", map[string]interface{}{
+		"no": map[string]interface{}{},
+		"en": map[string]interface{}{},
+	})
 	v.Set("defaultContentLanguageInSubdir", true)
 	v.Set("defaultContentLanguage", "no")
-	v.Set("multilingual", true)
 	v.Set("contentDir", "content")
 	v.Set("workingDir", "work")
 	v.Set("resourceDir", "resources")
 	v.Set("publishDir", "public")

+	langs.LoadLanguageSettings(v, nil)
+
 	p, err := New(fs, v)
 	assert.NoError(err)

View file

@ -1,154 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package paths
import (
"path/filepath"
"strings"
"github.com/gohugoio/hugo/config"
"github.com/spf13/afero"
"github.com/spf13/cast"
)
type ThemeConfig struct {
// The theme name as provided by the folder name below /themes.
Name string
// Optional configuration filename (e.g. "/themes/mytheme/config.json").
ConfigFilename string
// Optional config read from the ConfigFile above.
Cfg config.Provider
}
// Create file system, an ordered theme list from left to right, no duplicates.
type themesCollector struct {
themesDir string
fs afero.Fs
seen map[string]bool
themes []ThemeConfig
}
func (c *themesCollector) isSeen(theme string) bool {
loki := strings.ToLower(theme)
if c.seen[loki] {
return true
}
c.seen[loki] = true
return false
}
func (c *themesCollector) addAndRecurse(themes ...string) error {
for i := 0; i < len(themes); i++ {
theme := themes[i]
configFilename := c.getConfigFileIfProvided(theme)
if !c.isSeen(theme) {
tc, err := c.add(theme, configFilename)
if err != nil {
return err
}
if err := c.addThemeNamesFromTheme(tc); err != nil {
return err
}
}
}
return nil
}
func (c *themesCollector) add(name, configFilename string) (ThemeConfig, error) {
var cfg config.Provider
var tc ThemeConfig
if configFilename != "" {
var err error
cfg, err = config.FromFile(c.fs, configFilename)
if err != nil {
return tc, err
}
}
tc = ThemeConfig{Name: name, ConfigFilename: configFilename, Cfg: cfg}
c.themes = append(c.themes, tc)
return tc, nil
}
func collectThemeNames(p *Paths) ([]ThemeConfig, error) {
return CollectThemes(p.Fs.Source, p.AbsPathify(p.ThemesDir), p.Themes())
}
func CollectThemes(fs afero.Fs, themesDir string, themes []string) ([]ThemeConfig, error) {
if len(themes) == 0 {
return nil, nil
}
c := &themesCollector{
fs: fs,
themesDir: themesDir,
seen: make(map[string]bool)}
for i := 0; i < len(themes); i++ {
theme := themes[i]
if err := c.addAndRecurse(theme); err != nil {
return nil, err
}
}
return c.themes, nil
}
func (c *themesCollector) getConfigFileIfProvided(theme string) string {
configDir := filepath.Join(c.themesDir, theme)
var (
configFilename string
exists bool
)
// Viper supports more, but this is the sub-set supported by Hugo.
for _, configFormats := range config.ValidConfigFileExtensions {
configFilename = filepath.Join(configDir, "config."+configFormats)
exists, _ = afero.Exists(c.fs, configFilename)
if exists {
break
}
}
if !exists {
// No theme config set.
return ""
}
return configFilename
}
func (c *themesCollector) addThemeNamesFromTheme(theme ThemeConfig) error {
if theme.Cfg != nil && theme.Cfg.IsSet("theme") {
v := theme.Cfg.Get("theme")
switch vv := v.(type) {
case []string:
return c.addAndRecurse(vv...)
case []interface{}:
return c.addAndRecurse(cast.ToStringSlice(vv)...)
default:
return c.addAndRecurse(cast.ToString(vv))
}
}
return nil
}

View file

@ -18,6 +18,8 @@ import (
"path/filepath" "path/filepath"
"testing" "testing"
"github.com/gohugoio/hugo/htesting"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -33,17 +35,17 @@ func TestSCSSWithIncludePaths(t *testing.T) {
t.Skip("Skip SCSS") t.Skip("Skip SCSS")
} }
assert := require.New(t) assert := require.New(t)
workDir, clean, err := createTempDir("hugo-scss-include") workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-scss-include")
assert.NoError(err) assert.NoError(err)
defer clean() defer clean()
v := viper.New() v := viper.New()
v.Set("workingDir", workDir) v.Set("workingDir", workDir)
b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger()) b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
b.WithViper(v)
b.WithWorkingDir(workDir)
// Need to use OS fs for this. // Need to use OS fs for this.
b.Fs = hugofs.NewDefault(v) b.Fs = hugofs.NewDefault(v)
b.WithWorkingDir(workDir)
b.WithViper(v)
fooDir := filepath.Join(workDir, "node_modules", "foo") fooDir := filepath.Join(workDir, "node_modules", "foo")
scssDir := filepath.Join(workDir, "assets", "scss") scssDir := filepath.Join(workDir, "assets", "scss")
@ -84,7 +86,7 @@ func TestSCSSWithThemeOverrides(t *testing.T) {
t.Skip("Skip SCSS") t.Skip("Skip SCSS")
} }
assert := require.New(t) assert := require.New(t)
workDir, clean, err := createTempDir("hugo-scss-include") workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-scss-include")
assert.NoError(err) assert.NoError(err)
defer clean() defer clean()
@ -95,10 +97,10 @@ func TestSCSSWithThemeOverrides(t *testing.T) {
v.Set("workingDir", workDir) v.Set("workingDir", workDir)
v.Set("theme", theme) v.Set("theme", theme)
b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger()) b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
b.WithViper(v)
b.WithWorkingDir(workDir)
// Need to use OS fs for this. // Need to use OS fs for this.
b.Fs = hugofs.NewDefault(v) b.Fs = hugofs.NewDefault(v)
b.WithWorkingDir(workDir)
b.WithViper(v)
fooDir := filepath.Join(workDir, "node_modules", "foo") fooDir := filepath.Join(workDir, "node_modules", "foo")
scssDir := filepath.Join(workDir, "assets", "scss") scssDir := filepath.Join(workDir, "assets", "scss")
@ -385,14 +387,15 @@ CSV2: {{ $csv2 }}
} }
for _, test := range tests { for _, test := range tests {
if !test.shouldRun() { test := test
t.Log("Skip", test.name) t.Run(test.name, func(t *testing.T) {
continue if !test.shouldRun() {
} t.Skip()
}
t.Parallel()
b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger()) b := newTestSitesBuilder(t).WithLogger(loggers.NewErrorLogger())
b.WithSimpleConfigFile() b.WithContent("_index.md", `
b.WithContent("_index.md", `
--- ---
title: Home title: Home
--- ---
@ -400,37 +403,37 @@ title: Home
Home. Home.
`, `,
"page1.md", ` "page1.md", `
--- ---
title: Hello1 title: Hello1
--- ---
Hello1 Hello1
`, `,
"page2.md", ` "page2.md", `
--- ---
title: Hello2 title: Hello2
--- ---
Hello2 Hello2
`, `,
"t1.txt", "t1t|", "t1.txt", "t1t|",
"t2.txt", "t2t|", "t2.txt", "t2t|",
) )
b.WithSourceFile(filepath.Join("assets", "css", "styles1.css"), ` b.WithSourceFile(filepath.Join("assets", "css", "styles1.css"), `
h1 { h1 {
font-style: bold; font-style: bold;
} }
`) `)
b.WithSourceFile(filepath.Join("assets", "js", "script1.js"), ` b.WithSourceFile(filepath.Join("assets", "js", "script1.js"), `
var x; var x;
x = 5; x = 5;
document.getElementById("demo").innerHTML = x * 10; document.getElementById("demo").innerHTML = x * 10;
`) `)
b.WithSourceFile(filepath.Join("assets", "mydata", "json1.json"), ` b.WithSourceFile(filepath.Join("assets", "mydata", "json1.json"), `
{ {
"employees":[ "employees":[
{"firstName":"John", "lastName":"Doe"}, {"firstName":"John", "lastName":"Doe"},
@ -440,19 +443,19 @@ document.getElementById("demo").innerHTML = x * 10;
} }
`) `)
b.WithSourceFile(filepath.Join("assets", "mydata", "svg1.svg"), ` b.WithSourceFile(filepath.Join("assets", "mydata", "svg1.svg"), `
<svg height="100" width="100"> <svg height="100" width="100">
<line x1="5" y1="10" x2="20" y2="40"/> <line x1="5" y1="10" x2="20" y2="40"/>
</svg> </svg>
`) `)
b.WithSourceFile(filepath.Join("assets", "mydata", "xml1.xml"), ` b.WithSourceFile(filepath.Join("assets", "mydata", "xml1.xml"), `
<hello> <hello>
<world>Hugo Rocks!</<world> <world>Hugo Rocks!</<world>
</hello> </hello>
`) `)
b.WithSourceFile(filepath.Join("assets", "mydata", "html1.html"), ` b.WithSourceFile(filepath.Join("assets", "mydata", "html1.html"), `
<html> <html>
<a href="#"> <a href="#">
Cool Cool
@ -460,7 +463,7 @@ Cool
</html> </html>
`) `)
b.WithSourceFile(filepath.Join("assets", "scss", "styles2.scss"), ` b.WithSourceFile(filepath.Join("assets", "scss", "styles2.scss"), `
$color: #333; $color: #333;
body { body {
@ -468,7 +471,7 @@ body {
} }
`) `)
b.WithSourceFile(filepath.Join("assets", "sass", "styles3.sass"), ` b.WithSourceFile(filepath.Join("assets", "sass", "styles3.sass"), `
$color: #333; $color: #333;
.content-navigation .content-navigation
@ -476,10 +479,11 @@ $color: #333;
`) `)
t.Log("Test", test.name) test.prepare(b)
test.prepare(b) b.Build(BuildCfg{})
b.Build(BuildCfg{}) test.verify(b)
test.verify(b)
})
} }
} }

View file

@ -26,10 +26,6 @@ import (
"strings" "strings"
"testing" "testing"
"github.com/spf13/viper"
"github.com/spf13/afero"
"github.com/gohugoio/hugo/deps" "github.com/gohugoio/hugo/deps"
"github.com/gohugoio/hugo/helpers" "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl"
@ -54,12 +50,8 @@ title: "Title"
writeSource(t, fs, "content/simple.md", contentFile) writeSource(t, fs, "content/simple.md", contentFile)
h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}) b := newTestSitesBuilderFromDepsCfg(t, deps.DepsCfg{Fs: fs, Cfg: cfg, WithTemplate: withTemplate}).WithNothingAdded()
err := b.BuildE(BuildCfg{})
require.NoError(t, err)
require.Len(t, h.Sites, 1)
err = h.Build(BuildCfg{})
if err != nil && !expectError { if err != nil && !expectError {
t.Fatalf("Shortcode rendered error %s.", err) t.Fatalf("Shortcode rendered error %s.", err)
@ -69,6 +61,9 @@ title: "Title"
t.Fatalf("No error from shortcode") t.Fatalf("No error from shortcode")
} }
h := b.H
require.Len(t, h.Sites, 1)
require.Len(t, h.Sites[0].RegularPages(), 1) require.Len(t, h.Sites[0].RegularPages(), 1)
output := strings.TrimSpace(content(h.Sites[0].RegularPages()[0])) output := strings.TrimSpace(content(h.Sites[0].RegularPages()[0]))
@ -78,7 +73,7 @@ title: "Title"
expected = strings.TrimSpace(expected) expected = strings.TrimSpace(expected)
if output != expected { if output != expected {
Fatalf(t, "Shortcode render didn't match. got \n%q but expected \n%q", output, expected) t.Fatalf("Shortcode render didn't match. got \n%q but expected \n%q", output, expected)
} }
} }
@ -341,7 +336,6 @@ func TestShortcodeWrappedInPIssue(t *testing.T) {
} }
func TestExtractShortcodes(t *testing.T) { func TestExtractShortcodes(t *testing.T) {
t.Parallel()
b := newTestSitesBuilder(t).WithSimpleConfigFile() b := newTestSitesBuilder(t).WithSimpleConfigFile()
b.WithTemplates( b.WithTemplates(
@ -413,7 +407,10 @@ title: "Shortcodes Galore!"
{"inline", `{{< my.inline >}}Hi{{< /my.inline >}}`, regexpCheck("my.inline;inline:true;closing:true;inner:{Hi};")}, {"inline", `{{< my.inline >}}Hi{{< /my.inline >}}`, regexpCheck("my.inline;inline:true;closing:true;inner:{Hi};")},
} { } {
test := test
t.Run(test.name, func(t *testing.T) { t.Run(test.name, func(t *testing.T) {
t.Parallel()
assert := require.New(t) assert := require.New(t)
counter := 0 counter := 0
@ -437,7 +434,6 @@ title: "Shortcodes Galore!"
} }
func TestShortcodesInSite(t *testing.T) { func TestShortcodesInSite(t *testing.T) {
t.Parallel()
baseURL := "http://foo/bar" baseURL := "http://foo/bar"
tests := []struct { tests := []struct {
@ -577,7 +573,9 @@ title: "Foo"
s := buildSingleSite(t, deps.DepsCfg{WithTemplate: addTemplates, Fs: fs, Cfg: cfg}, BuildCfg{}) s := buildSingleSite(t, deps.DepsCfg{WithTemplate: addTemplates, Fs: fs, Cfg: cfg}, BuildCfg{})
for i, test := range tests { for i, test := range tests {
test := test
t.Run(fmt.Sprintf("test=%d;contentPath=%s", i, test.contentPath), func(t *testing.T) { t.Run(fmt.Sprintf("test=%d;contentPath=%s", i, test.contentPath), func(t *testing.T) {
t.Parallel()
if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() { if strings.HasSuffix(test.contentPath, ".ad") && !helpers.HasAsciidoc() {
t.Skip("Skip Asciidoc test case as no Asciidoc present.") t.Skip("Skip Asciidoc test case as no Asciidoc present.")
} else if strings.HasSuffix(test.contentPath, ".rst") && !helpers.HasRst() { } else if strings.HasSuffix(test.contentPath, ".rst") && !helpers.HasRst() {
@ -632,9 +630,8 @@ outputs: ["CSV"]
CSV: {{< myShort >}} CSV: {{< myShort >}}
` `
mf := afero.NewMemMapFs() b := newTestSitesBuilder(t).WithConfigFile("toml", siteConfig)
b.WithTemplates(
th, h := newTestSitesFromConfig(t, mf, siteConfig,
"layouts/_default/single.html", `Single HTML: {{ .Title }}|{{ .Content }}`, "layouts/_default/single.html", `Single HTML: {{ .Title }}|{{ .Content }}`,
"layouts/_default/single.json", `Single JSON: {{ .Title }}|{{ .Content }}`, "layouts/_default/single.json", `Single JSON: {{ .Title }}|{{ .Content }}`,
"layouts/_default/single.csv", `Single CSV: {{ .Title }}|{{ .Content }}`, "layouts/_default/single.csv", `Single CSV: {{ .Title }}|{{ .Content }}`,
@ -651,14 +648,13 @@ CSV: {{< myShort >}}
"layouts/shortcodes/myInner.html", `myInner:--{{- .Inner -}}--`, "layouts/shortcodes/myInner.html", `myInner:--{{- .Inner -}}--`,
) )
fs := th.Fs b.WithContent("_index.md", fmt.Sprintf(pageTemplate, "Home"),
"sect/mypage.md", fmt.Sprintf(pageTemplate, "Single"),
"sect/mycsvpage.md", fmt.Sprintf(pageTemplateCSVOnly, "Single CSV"),
)
writeSource(t, fs, "content/_index.md", fmt.Sprintf(pageTemplate, "Home")) b.Build(BuildCfg{})
writeSource(t, fs, "content/sect/mypage.md", fmt.Sprintf(pageTemplate, "Single")) h := b.H
writeSource(t, fs, "content/sect/mycsvpage.md", fmt.Sprintf(pageTemplateCSVOnly, "Single CSV"))
err := h.Build(BuildCfg{})
require.NoError(t, err)
require.Len(t, h.Sites, 1) require.Len(t, h.Sites, 1)
s := h.Sites[0] s := h.Sites[0]
@ -666,7 +662,7 @@ CSV: {{< myShort >}}
require.NotNil(t, home) require.NotNil(t, home)
require.Len(t, home.OutputFormats(), 3) require.Len(t, home.OutputFormats(), 3)
th.assertFileContent("public/index.html", b.AssertFileContent("public/index.html",
"Home HTML", "Home HTML",
"ShortHTML", "ShortHTML",
"ShortNoExt", "ShortNoExt",
@ -674,7 +670,7 @@ CSV: {{< myShort >}}
"myInner:--ShortHTML--", "myInner:--ShortHTML--",
) )
th.assertFileContent("public/amp/index.html", b.AssertFileContent("public/amp/index.html",
"Home AMP", "Home AMP",
"ShortAMP", "ShortAMP",
"ShortNoExt", "ShortNoExt",
@ -682,7 +678,7 @@ CSV: {{< myShort >}}
"myInner:--ShortAMP--", "myInner:--ShortAMP--",
) )
th.assertFileContent("public/index.ics", b.AssertFileContent("public/index.ics",
"Home Calendar", "Home Calendar",
"ShortCalendar", "ShortCalendar",
"ShortNoExt", "ShortNoExt",
@ -690,7 +686,7 @@ CSV: {{< myShort >}}
"myInner:--ShortCalendar--", "myInner:--ShortCalendar--",
) )
th.assertFileContent("public/sect/mypage/index.html", b.AssertFileContent("public/sect/mypage/index.html",
"Single HTML", "Single HTML",
"ShortHTML", "ShortHTML",
"ShortNoExt", "ShortNoExt",
@ -698,7 +694,7 @@ CSV: {{< myShort >}}
"myInner:--ShortHTML--", "myInner:--ShortHTML--",
) )
th.assertFileContent("public/sect/mypage/index.json", b.AssertFileContent("public/sect/mypage/index.json",
"Single JSON", "Single JSON",
"ShortJSON", "ShortJSON",
"ShortNoExt", "ShortNoExt",
@ -706,7 +702,7 @@ CSV: {{< myShort >}}
"myInner:--ShortJSON--", "myInner:--ShortJSON--",
) )
th.assertFileContent("public/amp/sect/mypage/index.html", b.AssertFileContent("public/amp/sect/mypage/index.html",
// No special AMP template // No special AMP template
"Single HTML", "Single HTML",
"ShortAMP", "ShortAMP",
@ -715,7 +711,7 @@ CSV: {{< myShort >}}
"myInner:--ShortAMP--", "myInner:--ShortAMP--",
) )
th.assertFileContent("public/sect/mycsvpage/index.csv", b.AssertFileContent("public/sect/mycsvpage/index.csv",
"Single CSV", "Single CSV",
"ShortCSV", "ShortCSV",
) )
@ -864,10 +860,6 @@ weight: %d
--- ---
C-%s` C-%s`
v := viper.New()
v.Set("timeout", 500)
templates = append(templates, []string{"shortcodes/c.html", contentShortcode}...) templates = append(templates, []string{"shortcodes/c.html", contentShortcode}...)
templates = append(templates, []string{"_default/single.html", "Single Content: {{ .Content }}"}...) templates = append(templates, []string{"_default/single.html", "Single Content: {{ .Content }}"}...)
templates = append(templates, []string{"_default/list.html", "List Content: {{ .Content }}"}...) templates = append(templates, []string{"_default/list.html", "List Content: {{ .Content }}"}...)
@ -884,21 +876,21 @@ C-%s`
builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig() builder := newTestSitesBuilder(t).WithDefaultMultiSiteConfig()
builder.WithViper(v).WithContent(content...).WithTemplates(templates...).CreateSites().Build(BuildCfg{}) builder.WithContent(content...).WithTemplates(templates...).CreateSites().Build(BuildCfg{})
s := builder.H.Sites[0] s := builder.H.Sites[0]
assert.Equal(3, len(s.RegularPages())) assert.Equal(3, len(s.RegularPages()))
builder.AssertFileContent("public/section1/index.html", builder.AssertFileContent("public/en/section1/index.html",
"List Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|", "List Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
"BP1:P1:|P2:docbp1/<p>C-bp1</p>", "BP1:P1:|P2:docbp1/<p>C-bp1</p>",
) )
builder.AssertFileContent("public/b1/index.html", builder.AssertFileContent("public/en/b1/index.html",
"Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|", "Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
"P2:docbp1/<p>C-bp1</p>", "P2:docbp1/<p>C-bp1</p>",
) )
builder.AssertFileContent("public/section2/s2p1/index.html", builder.AssertFileContent("public/en/section2/s2p1/index.html",
"Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|", "Single Content: <p>Logo:P1:|P2:logo.png/PNG logo|:P1: P1:|P2:docs1p1/<p>C-s1p1</p>\n|",
"P2:docbp1/<p>C-bp1</p>", "P2:docbp1/<p>C-bp1</p>",
) )
@ -1062,8 +1054,10 @@ String: {{ . | safeHTML }}
func TestInlineShortcodes(t *testing.T) { func TestInlineShortcodes(t *testing.T) {
for _, enableInlineShortcodes := range []bool{true, false} { for _, enableInlineShortcodes := range []bool{true, false} {
enableInlineShortcodes := enableInlineShortcodes
t.Run(fmt.Sprintf("enableInlineShortcodes=%t", enableInlineShortcodes), t.Run(fmt.Sprintf("enableInlineShortcodes=%t", enableInlineShortcodes),
func(t *testing.T) { func(t *testing.T) {
t.Parallel()
conf := fmt.Sprintf(` conf := fmt.Sprintf(`
baseURL = "https://example.com" baseURL = "https://example.com"
enableInlineShortcodes = %t enableInlineShortcodes = %t

Some files were not shown because too many files have changed in this diff.