Create a struct with all of Hugo's config options

Primary motivation is documentation, but it will also hopefully simplify the code.

Also:

* Lower-case the default output format names; this is in line with the custom ones (map keys) and with how they are treated everywhere else. This avoids doing `strings.EqualFold` all over the place.

Closes #10896
Closes #10620

This commit is contained in:
parent 6aededf6b4
commit 241b21b0fd

337 changed files with 13,377 additions and 14,898 deletions
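A note on the output-format point in the message above: once the default format names are stored lower-cased, a single lower-cased map lookup replaces the case-insensitive comparisons. A minimal, illustrative sketch of that idea (the Format type and fields below are placeholders, not Hugo's real definitions):

package main

import (
    "fmt"
    "strings"
)

// Format stands in for an output-format definition; the real Hugo type has more fields.
type Format struct {
    Name      string
    MediaType string
}

// formats is keyed by the lower-cased format name, matching how custom
// formats (map keys) are already stored.
var formats = map[string]Format{
    "html": {Name: "html", MediaType: "text/html"},
    "json": {Name: "json", MediaType: "application/json"},
}

// lookup no longer needs strings.EqualFold against every entry;
// lower-casing the key once is enough.
func lookup(name string) (Format, bool) {
    f, ok := formats[strings.ToLower(name)]
    return f, ok
}

func main() {
    if f, ok := lookup("HTML"); ok {
        fmt.Println(f.MediaType) // text/html
    }
}

Custom output formats were already keyed this way, which is why aligning the defaults lets the case-insensitive comparisons disappear from the lookup paths.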
.gitignore | 1 changed line

@@ -1,2 +1,3 @@
 
 *.test
+imports.*
cache/docs.go | 2 changed lines (new file)

@@ -0,0 +1,2 @@
+// Package cache contains the differenct cache implementations.
+package cache
cache/filecache/filecache.go | 28 changed lines

@@ -35,7 +35,7 @@ import (
 var ErrFatal = errors.New("fatal filecache error")

 const (
-    filecacheRootDirname = "filecache"
+    FilecacheRootDirname = "filecache"
 )

 // Cache caches a set of files in a directory. This is usually a file on
@@ -301,7 +301,7 @@ func (c *Cache) isExpired(modTime time.Time) bool {
 }

 // For testing
-func (c *Cache) getString(id string) string {
+func (c *Cache) GetString(id string) string {
     id = cleanID(id)

     c.nlocker.Lock(id)
@@ -328,38 +328,24 @@ func (f Caches) Get(name string) *Cache {
 // NewCaches creates a new set of file caches from the given
 // configuration.
 func NewCaches(p *helpers.PathSpec) (Caches, error) {
-    var dcfg Configs
-    if c, ok := p.Cfg.Get("filecacheConfigs").(Configs); ok {
-        dcfg = c
-    } else {
-        var err error
-        dcfg, err = DecodeConfig(p.Fs.Source, p.Cfg)
-        if err != nil {
-            return nil, err
-        }
-    }
+    dcfg := p.Cfg.GetConfigSection("caches").(Configs)

     fs := p.Fs.Source

     m := make(Caches)
     for k, v := range dcfg {
         var cfs afero.Fs

-        if v.isResourceDir {
+        if v.IsResourceDir {
             cfs = p.BaseFs.ResourcesCache
         } else {
             cfs = fs
         }

         if cfs == nil {
-            // TODO(bep) we still have some places that do not initialize the
-            // full dependencies of a site, e.g. the import Jekyll command.
-            // That command does not need these caches, so let us just continue
-            // for now.
-            continue
+            panic("nil fs")
         }

-        baseDir := v.Dir
+        baseDir := v.DirCompiled

         if err := cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
             return nil, err
@@ -368,7 +354,7 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) {
         bfs := afero.NewBasePathFs(cfs, baseDir)

         var pruneAllRootDir string
-        if k == cacheKeyModules {
+        if k == CacheKeyModules {
             pruneAllRootDir = "pkg"
         }

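The NewCaches change above shifts decoding out of the cache package: the already-decoded "caches" section is fetched from the config and type-asserted. A rough, standalone sketch of that decode-once / fetch-typed-section shape (the Provider interface here is a stand-in, not Hugo's actual config.Provider):

package main

import "fmt"

// Provider mimics the relevant slice of a config provider that hands out
// already-decoded sections; Hugo's real interface is much richer.
type Provider interface {
    GetConfigSection(name string) any
}

type CacheConfig struct{ Dir string }
type Configs map[string]CacheConfig

type provider struct{ sections map[string]any }

func (p provider) GetConfigSection(name string) any { return p.sections[name] }

func main() {
    var p Provider = provider{sections: map[string]any{
        "caches": Configs{"images": {Dir: "/tmp/hugo_cache/images"}},
    }}
    // The caller type-asserts the section it owns instead of decoding raw
    // config maps itself (the old DecodeConfig-inside-NewCaches flow).
    dcfg := p.GetConfigSection("caches").(Configs)
    fmt.Println(dcfg["images"].Dir)
}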
cache/filecache/filecache_config.go | 103 changed lines

@@ -11,6 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

+// Package filecache provides a file based cache for Hugo.
 package filecache

 import (
@@ -21,11 +22,8 @@ import (
     "time"

     "github.com/gohugoio/hugo/common/maps"
-
     "github.com/gohugoio/hugo/config"
-
-    "github.com/gohugoio/hugo/helpers"

     "errors"

     "github.com/mitchellh/mapstructure"
@@ -33,98 +31,102 @@ import (
 )

 const (
-    cachesConfigKey = "caches"
-
     resourcesGenDir = ":resourceDir/_gen"
     cacheDirProject = ":cacheDir/:project"
 )

-var defaultCacheConfig = Config{
+var defaultCacheConfig = FileCacheConfig{
     MaxAge: -1, // Never expire
     Dir:    cacheDirProject,
 }

 const (
-    cacheKeyGetJSON     = "getjson"
-    cacheKeyGetCSV      = "getcsv"
-    cacheKeyImages      = "images"
-    cacheKeyAssets      = "assets"
-    cacheKeyModules     = "modules"
-    cacheKeyGetResource = "getresource"
+    CacheKeyGetJSON     = "getjson"
+    CacheKeyGetCSV      = "getcsv"
+    CacheKeyImages      = "images"
+    CacheKeyAssets      = "assets"
+    CacheKeyModules     = "modules"
+    CacheKeyGetResource = "getresource"
 )

-type Configs map[string]Config
+type Configs map[string]FileCacheConfig

+// For internal use.
 func (c Configs) CacheDirModules() string {
-    return c[cacheKeyModules].Dir
+    return c[CacheKeyModules].DirCompiled
 }

 var defaultCacheConfigs = Configs{
-    cacheKeyModules: {
+    CacheKeyModules: {
         MaxAge: -1,
         Dir:    ":cacheDir/modules",
     },
-    cacheKeyGetJSON: defaultCacheConfig,
-    cacheKeyGetCSV:  defaultCacheConfig,
-    cacheKeyImages: {
+    CacheKeyGetJSON: defaultCacheConfig,
+    CacheKeyGetCSV:  defaultCacheConfig,
+    CacheKeyImages: {
         MaxAge: -1,
         Dir:    resourcesGenDir,
     },
-    cacheKeyAssets: {
+    CacheKeyAssets: {
         MaxAge: -1,
         Dir:    resourcesGenDir,
     },
-    cacheKeyGetResource: Config{
+    CacheKeyGetResource: FileCacheConfig{
         MaxAge: -1, // Never expire
         Dir:    cacheDirProject,
     },
 }

-type Config struct {
+type FileCacheConfig struct {
     // Max age of cache entries in this cache. Any items older than this will
     // be removed and not returned from the cache.
-    // a negative value means forever, 0 means cache is disabled.
+    // A negative value means forever, 0 means cache is disabled.
+    // Hugo is leninent with what types it accepts here, but we recommend using
+    // a duration string, a sequence of decimal numbers, each with optional fraction and a unit suffix,
+    // such as "300ms", "1.5h" or "2h45m".
+    // Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".
     MaxAge time.Duration

     // The directory where files are stored.
     Dir string
+    DirCompiled string `json:"-"`

     // Will resources/_gen will get its own composite filesystem that
     // also checks any theme.
-    isResourceDir bool
+    IsResourceDir bool
 }

 // GetJSONCache gets the file cache for getJSON.
 func (f Caches) GetJSONCache() *Cache {
-    return f[cacheKeyGetJSON]
+    return f[CacheKeyGetJSON]
 }

 // GetCSVCache gets the file cache for getCSV.
 func (f Caches) GetCSVCache() *Cache {
-    return f[cacheKeyGetCSV]
+    return f[CacheKeyGetCSV]
 }

 // ImageCache gets the file cache for processed images.
 func (f Caches) ImageCache() *Cache {
-    return f[cacheKeyImages]
+    return f[CacheKeyImages]
 }

 // ModulesCache gets the file cache for Hugo Modules.
 func (f Caches) ModulesCache() *Cache {
-    return f[cacheKeyModules]
+    return f[CacheKeyModules]
 }

 // AssetsCache gets the file cache for assets (processed resources, SCSS etc.).
 func (f Caches) AssetsCache() *Cache {
-    return f[cacheKeyAssets]
+    return f[CacheKeyAssets]
 }

 // GetResourceCache gets the file cache for remote resources.
 func (f Caches) GetResourceCache() *Cache {
-    return f[cacheKeyGetResource]
+    return f[CacheKeyGetResource]
 }

-func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
+func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Configs, error) {
     c := make(Configs)
     valid := make(map[string]bool)
     // Add defaults
@@ -133,8 +135,6 @@ func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
         valid[k] = true
     }

-    m := cfg.GetStringMap(cachesConfigKey)
-
     _, isOsFs := fs.(*afero.OsFs)

     for k, v := range m {
@@ -170,9 +170,6 @@ func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
         c[name] = cc
     }

-    // This is a very old flag in Hugo, but we need to respect it.
-    disabled := cfg.GetBool("ignoreCache")
-
     for k, v := range c {
         dir := filepath.ToSlash(filepath.Clean(v.Dir))
         hadSlash := strings.HasPrefix(dir, "/")
@@ -180,12 +177,12 @@ func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {

         for i, part := range parts {
             if strings.HasPrefix(part, ":") {
-                resolved, isResource, err := resolveDirPlaceholder(fs, cfg, part)
+                resolved, isResource, err := resolveDirPlaceholder(fs, bcfg, part)
                 if err != nil {
                     return c, err
                 }
                 if isResource {
-                    v.isResourceDir = true
+                    v.IsResourceDir = true
                 }
                 parts[i] = resolved
             }
@@ -195,33 +192,29 @@ func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
         if hadSlash {
             dir = "/" + dir
         }
-        v.Dir = filepath.Clean(filepath.FromSlash(dir))
+        v.DirCompiled = filepath.Clean(filepath.FromSlash(dir))

-        if !v.isResourceDir {
-            if isOsFs && !filepath.IsAbs(v.Dir) {
-                return c, fmt.Errorf("%q must resolve to an absolute directory", v.Dir)
+        if !v.IsResourceDir {
+            if isOsFs && !filepath.IsAbs(v.DirCompiled) {
+                return c, fmt.Errorf("%q must resolve to an absolute directory", v.DirCompiled)
             }

             // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
-            if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
-                return c, fmt.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+            if len(strings.TrimPrefix(v.DirCompiled, filepath.VolumeName(v.DirCompiled))) == 1 {
+                return c, fmt.Errorf("%q is a root folder and not allowed as cache dir", v.DirCompiled)
             }
         }

-        if !strings.HasPrefix(v.Dir, "_gen") {
+        if !strings.HasPrefix(v.DirCompiled, "_gen") {
             // We do cache eviction (file removes) and since the user can set
             // his/hers own cache directory, we really want to make sure
             // we do not delete any files that do not belong to this cache.
             // We do add the cache name as the root, but this is an extra safe
             // guard. We skip the files inside /resources/_gen/ because
             // that would be breaking.
-            v.Dir = filepath.Join(v.Dir, filecacheRootDirname, k)
+            v.DirCompiled = filepath.Join(v.DirCompiled, FilecacheRootDirname, k)
         } else {
-            v.Dir = filepath.Join(v.Dir, k)
-        }
-
-        if disabled {
-            v.MaxAge = 0
+            v.DirCompiled = filepath.Join(v.DirCompiled, k)
         }

         c[k] = v
@@ -231,17 +224,15 @@ func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
 }

 // Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
-func resolveDirPlaceholder(fs afero.Fs, cfg config.Provider, placeholder string) (cacheDir string, isResource bool, err error) {
-    workingDir := cfg.GetString("workingDir")
-
+func resolveDirPlaceholder(fs afero.Fs, bcfg config.BaseConfig, placeholder string) (cacheDir string, isResource bool, err error) {
     switch strings.ToLower(placeholder) {
     case ":resourcedir":
         return "", true, nil
     case ":cachedir":
-        d, err := helpers.GetCacheDir(fs, cfg)
-        return d, false, err
+        return bcfg.CacheDir, false, nil
     case ":project":
-        return filepath.Base(workingDir), false, nil
+        return filepath.Base(bcfg.WorkingDir), false, nil
     }

     return "", false, fmt.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
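The new MaxAge comment added above points at Go duration strings. A small standalone illustration of how such values map onto time.Duration (not Hugo code):

package main

import (
    "fmt"
    "time"
)

func main() {
    // The MaxAge comment above documents values like these; a negative
    // duration means "never expire" and 0 disables the cache.
    for _, s := range []string{"300ms", "1.5h", "2h45m"} {
        d, err := time.ParseDuration(s)
        if err != nil {
            panic(err)
        }
        fmt.Printf("%-6s -> %v\n", s, d)
    }
}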
cache/filecache/filecache_config_test.go | 88 changed lines

@@ -11,18 +11,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package filecache
+package filecache_test

 import (
     "path/filepath"
     "runtime"
-    "strings"
     "testing"
     "time"

     "github.com/spf13/afero"

+    "github.com/gohugoio/hugo/cache/filecache"
     "github.com/gohugoio/hugo/config"
+    "github.com/gohugoio/hugo/config/testconfig"

     qt "github.com/frankban/quicktest"
 )
@@ -57,22 +58,20 @@ dir = "/path/to/c4"
     cfg, err := config.FromConfigString(configStr, "toml")
     c.Assert(err, qt.IsNil)
     fs := afero.NewMemMapFs()
-    decoded, err := DecodeConfig(fs, cfg)
-    c.Assert(err, qt.IsNil)
+    decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches

     c.Assert(len(decoded), qt.Equals, 6)

     c2 := decoded["getcsv"]
     c.Assert(c2.MaxAge.String(), qt.Equals, "11h0m0s")
-    c.Assert(c2.Dir, qt.Equals, filepath.FromSlash("/path/to/c2/filecache/getcsv"))
+    c.Assert(c2.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c2/filecache/getcsv"))

     c3 := decoded["images"]
     c.Assert(c3.MaxAge, qt.Equals, time.Duration(-1))
-    c.Assert(c3.Dir, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
+    c.Assert(c3.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))

     c4 := decoded["getresource"]
     c.Assert(c4.MaxAge, qt.Equals, time.Duration(-1))
-    c.Assert(c4.Dir, qt.Equals, filepath.FromSlash("/path/to/c4/filecache/getresource"))
+    c.Assert(c4.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c4/filecache/getresource"))
 }

 func TestDecodeConfigIgnoreCache(t *testing.T) {
@@ -106,9 +105,7 @@ dir = "/path/to/c4"
     cfg, err := config.FromConfigString(configStr, "toml")
     c.Assert(err, qt.IsNil)
     fs := afero.NewMemMapFs()
-    decoded, err := DecodeConfig(fs, cfg)
-    c.Assert(err, qt.IsNil)
+    decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches

     c.Assert(len(decoded), qt.Equals, 6)

     for _, v := range decoded {
@@ -118,7 +115,7 @@ dir = "/path/to/c4"

 func TestDecodeConfigDefault(t *testing.T) {
     c := qt.New(t)
-    cfg := newTestConfig()
+    cfg := config.New()

     if runtime.GOOS == "windows" {
         cfg.Set("resourceDir", "c:\\cache\\resources")
@@ -128,71 +125,22 @@ func TestDecodeConfigDefault(t *testing.T) {
         cfg.Set("resourceDir", "/cache/resources")
         cfg.Set("cacheDir", "/cache/thecache")
     }
+    cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))

     fs := afero.NewMemMapFs()
-    decoded, err := DecodeConfig(fs, cfg)
-
-    c.Assert(err, qt.IsNil)
+    decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches

     c.Assert(len(decoded), qt.Equals, 6)

-    imgConfig := decoded[cacheKeyImages]
-    jsonConfig := decoded[cacheKeyGetJSON]
+    imgConfig := decoded[filecache.CacheKeyImages]
+    jsonConfig := decoded[filecache.CacheKeyGetJSON]

     if runtime.GOOS == "windows" {
-        c.Assert(imgConfig.Dir, qt.Equals, filepath.FromSlash("_gen/images"))
+        c.Assert(imgConfig.DirCompiled, qt.Equals, filepath.FromSlash("_gen/images"))
     } else {
-        c.Assert(imgConfig.Dir, qt.Equals, "_gen/images")
-        c.Assert(jsonConfig.Dir, qt.Equals, "/cache/thecache/hugoproject/filecache/getjson")
+        c.Assert(imgConfig.DirCompiled, qt.Equals, "_gen/images")
+        c.Assert(jsonConfig.DirCompiled, qt.Equals, "/cache/thecache/hugoproject/filecache/getjson")
     }

-    c.Assert(imgConfig.isResourceDir, qt.Equals, true)
-    c.Assert(jsonConfig.isResourceDir, qt.Equals, false)
-}
-
-func TestDecodeConfigInvalidDir(t *testing.T) {
-    t.Parallel()
-
-    c := qt.New(t)
-
-    configStr := `
-resourceDir = "myresources"
-contentDir = "content"
-dataDir = "data"
-i18nDir = "i18n"
-layoutDir = "layouts"
-assetDir = "assets"
-archeTypedir = "archetypes"
-
-[caches]
-[caches.getJSON]
-maxAge = "10m"
-dir = "/"
-
-`
-    if runtime.GOOS == "windows" {
-        configStr = strings.Replace(configStr, "/", "c:\\\\", 1)
-    }
-
-    cfg, err := config.FromConfigString(configStr, "toml")
-    c.Assert(err, qt.IsNil)
-    fs := afero.NewMemMapFs()
-
-    _, err = DecodeConfig(fs, cfg)
-    c.Assert(err, qt.Not(qt.IsNil))
-}
-
-func newTestConfig() config.Provider {
-    cfg := config.NewWithTestDefaults()
-    cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
-    cfg.Set("contentDir", "content")
-    cfg.Set("dataDir", "data")
-    cfg.Set("resourceDir", "resources")
-    cfg.Set("i18nDir", "i18n")
-    cfg.Set("layoutDir", "layouts")
-    cfg.Set("archetypeDir", "archetypes")
-    cfg.Set("assetDir", "assets")
-
-    return cfg
+    c.Assert(imgConfig.IsResourceDir, qt.Equals, true)
+    c.Assert(jsonConfig.IsResourceDir, qt.Equals, false)
 }
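The DirCompiled assertions above encode the directory-composition rule visible in DecodeConfig: the configured dir gets "filecache/<cache name>" appended unless it already points into resources/_gen. A standalone illustration of just that rule (placeholder resolution and validation from the real code are omitted):

package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

const filecacheRootDirname = "filecache" // exported as FilecacheRootDirname in the diff

// compiledDir mirrors the DirCompiled composition seen in DecodeConfig:
// user dir + "filecache" + cache name, unless the dir already starts with _gen.
func compiledDir(userDir, cacheName string) string {
    if strings.HasPrefix(userDir, "_gen") {
        return filepath.Join(userDir, cacheName)
    }
    return filepath.Join(userDir, filecacheRootDirname, cacheName)
}

func main() {
    fmt.Println(compiledDir("/path/to/c2", "getcsv")) // /path/to/c2/filecache/getcsv
    fmt.Println(compiledDir("_gen", "images"))        // _gen/images
}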
cache/filecache/filecache_pruner.go | 2 changed lines

@@ -31,7 +31,6 @@ import (
 func (c Caches) Prune() (int, error) {
     counter := 0
     for k, cache := range c {
-
         count, err := cache.Prune(false)

         counter += count
@@ -58,6 +57,7 @@ func (c *Cache) Prune(force bool) (int, error) {
     counter := 0

     err := afero.Walk(c.Fs, "", func(name string, info os.FileInfo, err error) error {
+
         if info == nil {
             return nil
         }
cache/filecache/filecache_pruner_test.go | 13 changed lines

@@ -11,13 +11,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package filecache
+package filecache_test

 import (
     "fmt"
     "testing"
     "time"

+    "github.com/gohugoio/hugo/cache/filecache"
     "github.com/spf13/afero"

     qt "github.com/frankban/quicktest"
@@ -52,10 +53,10 @@ maxAge = "200ms"
 dir = ":resourceDir/_gen"
 `

-    for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
+    for _, name := range []string{filecache.CacheKeyGetCSV, filecache.CacheKeyGetJSON, filecache.CacheKeyAssets, filecache.CacheKeyImages} {
         msg := qt.Commentf("cache: %s", name)
         p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
-        caches, err := NewCaches(p)
+        caches, err := filecache.NewCaches(p)
         c.Assert(err, qt.IsNil)
         cache := caches[name]
         for i := 0; i < 10; i++ {
@@ -75,7 +76,7 @@ dir = ":resourceDir/_gen"

         for i := 0; i < 10; i++ {
             id := fmt.Sprintf("i%d", i)
-            v := cache.getString(id)
+            v := cache.GetString(id)
             if i < 5 {
                 c.Assert(v, qt.Equals, "")
             } else {
@@ -83,7 +84,7 @@ dir = ":resourceDir/_gen"
             }
         }

-        caches, err = NewCaches(p)
+        caches, err = filecache.NewCaches(p)
         c.Assert(err, qt.IsNil)
         cache = caches[name]
         // Touch one and then prune.
@@ -98,7 +99,7 @@ dir = ":resourceDir/_gen"
         // Now only the i5 should be left.
         for i := 0; i < 10; i++ {
             id := fmt.Sprintf("i%d", i)
-            v := cache.getString(id)
+            v := cache.GetString(id)
             if i != 5 {
                 c.Assert(v, qt.Equals, "")
             } else {
cache/filecache/filecache_test.go | 88 changed lines

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package filecache
+package filecache_test

 import (
     "errors"
@@ -23,13 +23,10 @@ import (
     "testing"
     "time"

-    "github.com/gobwas/glob"
-
-    "github.com/gohugoio/hugo/langs"
-    "github.com/gohugoio/hugo/modules"
-
+    "github.com/gohugoio/hugo/cache/filecache"
     "github.com/gohugoio/hugo/common/hugio"
     "github.com/gohugoio/hugo/config"
+    "github.com/gohugoio/hugo/config/testconfig"
     "github.com/gohugoio/hugo/helpers"

     "github.com/gohugoio/hugo/hugofs"
@@ -83,27 +80,19 @@ dir = ":cacheDir/c"

     p := newPathsSpec(t, osfs, configStr)

-    caches, err := NewCaches(p)
+    caches, err := filecache.NewCaches(p)
     c.Assert(err, qt.IsNil)

     cache := caches.Get("GetJSON")
     c.Assert(cache, qt.Not(qt.IsNil))
-    c.Assert(cache.maxAge.String(), qt.Equals, "10h0m0s")

     bfs, ok := cache.Fs.(*afero.BasePathFs)
     c.Assert(ok, qt.Equals, true)
     filename, err := bfs.RealPath("key")
     c.Assert(err, qt.IsNil)
-    if test.cacheDir != "" {
-        c.Assert(filename, qt.Equals, filepath.Join(test.cacheDir, "c/"+filecacheRootDirname+"/getjson/key"))
-    } else {
-        // Temp dir.
-        c.Assert(filename, qt.Matches, ".*hugo_cache.*"+filecacheRootDirname+".*key")
-    }

     cache = caches.Get("Images")
     c.Assert(cache, qt.Not(qt.IsNil))
-    c.Assert(cache.maxAge, qt.Equals, time.Duration(-1))
     bfs, ok = cache.Fs.(*afero.BasePathFs)
     c.Assert(ok, qt.Equals, true)
     filename, _ = bfs.RealPath("key")
@@ -125,7 +114,7 @@ dir = ":cacheDir/c"
         return []byte("bcd"), nil
     }

-    for _, ca := range []*Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
+    for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
         for i := 0; i < 2; i++ {
             info, r, err := ca.GetOrCreate("a", rf("abc"))
             c.Assert(err, qt.IsNil)
@@ -160,7 +149,7 @@ dir = ":cacheDir/c"
     c.Assert(info.Name, qt.Equals, "mykey")
     io.WriteString(w, "Hugo is great!")
     w.Close()
-    c.Assert(caches.ImageCache().getString("mykey"), qt.Equals, "Hugo is great!")
+    c.Assert(caches.ImageCache().GetString("mykey"), qt.Equals, "Hugo is great!")

     info, r, err := caches.ImageCache().Get("mykey")
     c.Assert(err, qt.IsNil)
@@ -201,7 +190,7 @@ dir = "/cache/c"

     p := newPathsSpec(t, afero.NewMemMapFs(), configStr)

-    caches, err := NewCaches(p)
+    caches, err := filecache.NewCaches(p)
     c.Assert(err, qt.IsNil)

     const cacheName = "getjson"
@@ -244,11 +233,11 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {

     var result string

-    rf := func(failLevel int) func(info ItemInfo, r io.ReadSeeker) error {
-        return func(info ItemInfo, r io.ReadSeeker) error {
+    rf := func(failLevel int) func(info filecache.ItemInfo, r io.ReadSeeker) error {
+        return func(info filecache.ItemInfo, r io.ReadSeeker) error {
             if failLevel > 0 {
                 if failLevel > 1 {
-                    return ErrFatal
+                    return filecache.ErrFatal
                 }
                 return errors.New("fail")
             }
@@ -260,8 +249,8 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
         }
     }

-    bf := func(s string) func(info ItemInfo, w io.WriteCloser) error {
-        return func(info ItemInfo, w io.WriteCloser) error {
+    bf := func(s string) func(info filecache.ItemInfo, w io.WriteCloser) error {
+        return func(info filecache.ItemInfo, w io.WriteCloser) error {
             defer w.Close()
             result = s
             _, err := w.Write([]byte(s))
@@ -269,7 +258,7 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
         }
     }

-    cache := NewCache(afero.NewMemMapFs(), 100*time.Hour, "")
+    cache := filecache.NewCache(afero.NewMemMapFs(), 100*time.Hour, "")

     const id = "a32"

@@ -283,60 +272,15 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
     c.Assert(err, qt.IsNil)
     c.Assert(result, qt.Equals, "v3")
     _, err = cache.ReadOrCreate(id, rf(2), bf("v3"))
-    c.Assert(err, qt.Equals, ErrFatal)
-}
-
-func TestCleanID(t *testing.T) {
-    c := qt.New(t)
-    c.Assert(cleanID(filepath.FromSlash("/a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
-    c.Assert(cleanID(filepath.FromSlash("a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
-}
-
-func initConfig(fs afero.Fs, cfg config.Provider) error {
-    if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
-        return err
-    }
-
-    modConfig, err := modules.DecodeConfig(cfg)
-    if err != nil {
-        return err
-    }
-
-    workingDir := cfg.GetString("workingDir")
-    themesDir := cfg.GetString("themesDir")
-    if !filepath.IsAbs(themesDir) {
-        themesDir = filepath.Join(workingDir, themesDir)
-    }
-    globAll := glob.MustCompile("**", '/')
-    modulesClient := modules.NewClient(modules.ClientConfig{
-        Fs:           fs,
-        WorkingDir:   workingDir,
-        ThemesDir:    themesDir,
-        ModuleConfig: modConfig,
-        IgnoreVendor: globAll,
-    })
-
-    moduleConfig, err := modulesClient.Collect()
-    if err != nil {
-        return err
-    }
-
-    if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[len(moduleConfig.ActiveModules)-1]); err != nil {
-        return err
-    }
-
-    cfg.Set("allModules", moduleConfig.ActiveModules)
-
-    return nil
+    c.Assert(err, qt.Equals, filecache.ErrFatal)
 }

 func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec {
     c := qt.New(t)
     cfg, err := config.FromConfigString(configStr, "toml")
     c.Assert(err, qt.IsNil)
-    initConfig(fs, cfg)
-    config.SetBaseTestDefaults(cfg)
-    p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, nil)
+    acfg := testconfig.GetTestConfig(fs, cfg)
+    p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, acfg.BaseConfig()), acfg, nil)
     c.Assert(err, qt.IsNil)
     return p
 }
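With the tests moved to an external filecache_test package, only exported identifiers are reachable, which is what motivates GetString, FilecacheRootDirname and the CacheKey* constants above. A rough sketch of driving a cache through that exported surface, reusing the call signatures visible in this diff (everything else is illustrative and error handling is minimal):

package main

import (
    "fmt"
    "io"
    "time"

    "github.com/gohugoio/hugo/cache/filecache"
    "github.com/spf13/afero"
)

func main() {
    // An in-memory cache with a generous max age, as in the test above.
    cache := filecache.NewCache(afero.NewMemMapFs(), 100*time.Hour, "")

    read := func(info filecache.ItemInfo, r io.ReadSeeker) error {
        b, err := io.ReadAll(r)
        fmt.Println("read from cache:", string(b))
        return err
    }
    create := func(info filecache.ItemInfo, w io.WriteCloser) error {
        defer w.Close()
        _, err := w.Write([]byte("hello"))
        return err
    }

    // The first call creates the entry; later calls read it back.
    if _, err := cache.ReadOrCreate("greeting", read, create); err != nil {
        panic(err)
    }
    fmt.Println(cache.GetString("greeting")) // "hello"
}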
cache/filecache/integration_test.go | 9 changed lines

@@ -15,6 +15,9 @@ package filecache_test

 import (
     "path/filepath"
+
+    jww "github.com/spf13/jwalterweatherman"
+
     "testing"
     "time"

@@ -62,6 +65,7 @@ title: "Home"
 -- assets/a/pixel.png --
 iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==
 -- layouts/index.html --
+{{ warnf "HOME!" }}
 {{ $img := resources.GetMatch "**.png" }}
 {{ $img = $img.Resize "3x3" }}
 {{ $img.RelPermalink }}
@@ -71,10 +75,11 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA
 `

     b := hugolib.NewIntegrationTestBuilder(
-        hugolib.IntegrationTestConfig{T: t, TxtarString: files, RunGC: true, NeedsOsFS: true},
+        hugolib.IntegrationTestConfig{T: t, TxtarString: files, Running: true, RunGC: true, NeedsOsFS: true, LogLevel: jww.LevelInfo},
     ).Build()

     b.Assert(b.GCCount, qt.Equals, 0)
+    b.Assert(b.H, qt.IsNotNil)

     imagesCacheDir := filepath.Join("_gen", "images")
     _, err := b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir)
@@ -86,9 +91,11 @@ iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAA
     time.Sleep(300 * time.Millisecond)

     b.RenameFile("assets/a/pixel.png", "assets/b/pixel2.png").Build()

     b.Assert(b.GCCount, qt.Equals, 1)
     // Build it again to GC the empty a dir.
     b.Build()

     _, err = b.H.BaseFs.ResourcesCache.Stat(filepath.Join(imagesCacheDir, "a"))
     b.Assert(err, qt.Not(qt.IsNil))
     _, err = b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir)
(File diff suppressed because it is too large.)
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2023 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -14,331 +14,28 @@
 package commands

 import (
-    "fmt"
-    "os"
-    "time"
-
-    "github.com/gohugoio/hugo/common/hugo"
-    "github.com/gohugoio/hugo/common/loggers"
-    hpaths "github.com/gohugoio/hugo/common/paths"
-    "github.com/gohugoio/hugo/config"
-    "github.com/gohugoio/hugo/helpers"
-    "github.com/spf13/cobra"
+    "github.com/bep/simplecobra"
 )

-type commandsBuilder struct {
-    hugoBuilderCommon
-
-    commands []cmder
-}
-
-func newCommandsBuilder() *commandsBuilder {
-    return &commandsBuilder{}
-}
-
-func (b *commandsBuilder) addCommands(commands ...cmder) *commandsBuilder {
-    b.commands = append(b.commands, commands...)
-    return b
-}
-
-func (b *commandsBuilder) addAll() *commandsBuilder {
-    b.addCommands(
-        b.newServerCmd(),
-        newVersionCmd(),
-        newEnvCmd(),
-        b.newConfigCmd(),
-        b.newDeployCmd(),
-        b.newConvertCmd(),
-        b.newNewCmd(),
-        b.newListCmd(),
-        newImportCmd(),
-        newGenCmd(),
-        createReleaser(),
-        b.newModCmd(),
-    )
-
-    return b
-}
-
-func (b *commandsBuilder) build() *hugoCmd {
-    h := b.newHugoCmd()
-    addCommands(h.getCommand(), b.commands...)
-    return h
-}
-
-func addCommands(root *cobra.Command, commands ...cmder) {
-    for _, command := range commands {
-        cmd := command.getCommand()
-        if cmd == nil {
-            continue
-        }
-        root.AddCommand(cmd)
-    }
-}
-
-type baseCmd struct {
-    cmd *cobra.Command
-}
-
-var _ commandsBuilderGetter = (*baseBuilderCmd)(nil)
-
-// Used in tests.
-type commandsBuilderGetter interface {
-    getCommandsBuilder() *commandsBuilder
-}
-
-type baseBuilderCmd struct {
-    *baseCmd
-    *commandsBuilder
-}
-
-func (b *baseBuilderCmd) getCommandsBuilder() *commandsBuilder {
-    return b.commandsBuilder
-}
-
-func (c *baseCmd) getCommand() *cobra.Command {
-    return c.cmd
-}
-
-func newBaseCmd(cmd *cobra.Command) *baseCmd {
-    return &baseCmd{cmd: cmd}
-}
-
-func (b *commandsBuilder) newBuilderCmd(cmd *cobra.Command) *baseBuilderCmd {
-    bcmd := &baseBuilderCmd{commandsBuilder: b, baseCmd: &baseCmd{cmd: cmd}}
-    bcmd.hugoBuilderCommon.handleFlags(cmd)
-    return bcmd
-}
-
-func (b *commandsBuilder) newBuilderBasicCmd(cmd *cobra.Command) *baseBuilderCmd {
-    bcmd := &baseBuilderCmd{commandsBuilder: b, baseCmd: &baseCmd{cmd: cmd}}
-    bcmd.hugoBuilderCommon.handleCommonBuilderFlags(cmd)
-    return bcmd
-}
-
-func (c *baseCmd) flagsToConfig(cfg config.Provider) {
-    initializeFlags(c.cmd, cfg)
-}
-
-type hugoCmd struct {
-    *baseBuilderCmd
-
-    // Need to get the sites once built.
-    c *commandeer
-}
-
-var _ cmder = (*nilCommand)(nil)
-
-type nilCommand struct{}
-
-func (c *nilCommand) getCommand() *cobra.Command {
-    return nil
-}
-
-func (c *nilCommand) flagsToConfig(cfg config.Provider) {
-}
-
-func (b *commandsBuilder) newHugoCmd() *hugoCmd {
-    cc := &hugoCmd{}
-
-    cc.baseBuilderCmd = b.newBuilderCmd(&cobra.Command{
-        Use:   "hugo",
-        Short: "hugo builds your site",
-        Long: `hugo is the main command, used to build your Hugo site.
-
-Hugo is a Fast and Flexible Static Site Generator
-built with love by spf13 and friends in Go.
-
-Complete documentation is available at https://gohugo.io/.`,
-        RunE: func(cmd *cobra.Command, args []string) error {
-            defer cc.timeTrack(time.Now(), "Total")
-            cfgInit := func(c *commandeer) error {
-                if cc.buildWatch {
-                    c.Set("disableLiveReload", true)
-                }
-                return nil
-            }
-
-            // prevent cobra printing error so it can be handled here (before the timeTrack prints)
-            cmd.SilenceErrors = true
-
-            c, err := initializeConfig(true, true, cc.buildWatch, &cc.hugoBuilderCommon, cc, cfgInit)
-            if err != nil {
-                cmd.PrintErrln("Error:", err.Error())
-                return err
-            }
-            cc.c = c
-
-            err = c.build()
-            if err != nil {
-                cmd.PrintErrln("Error:", err.Error())
-            }
-            return err
-        },
-    })
-
-    cc.cmd.PersistentFlags().StringVar(&cc.cfgFile, "config", "", "config file (default is hugo.yaml|json|toml)")
-    cc.cmd.PersistentFlags().StringVar(&cc.cfgDir, "configDir", "config", "config dir")
-    cc.cmd.PersistentFlags().BoolVar(&cc.quiet, "quiet", false, "build in quiet mode")
-
-    // Set bash-completion
-    _ = cc.cmd.PersistentFlags().SetAnnotation("config", cobra.BashCompFilenameExt, config.ValidConfigFileExtensions)
-
-    cc.cmd.PersistentFlags().BoolVarP(&cc.verbose, "verbose", "v", false, "verbose output")
-    cc.cmd.PersistentFlags().BoolVarP(&cc.debug, "debug", "", false, "debug output")
-    cc.cmd.PersistentFlags().BoolVar(&cc.logging, "log", false, "enable Logging")
-    cc.cmd.PersistentFlags().StringVar(&cc.logFile, "logFile", "", "log File path (if set, logging enabled automatically)")
-    cc.cmd.PersistentFlags().BoolVar(&cc.verboseLog, "verboseLog", false, "verbose logging")
-
-    cc.cmd.Flags().BoolVarP(&cc.buildWatch, "watch", "w", false, "watch filesystem for changes and recreate as needed")
-
-    cc.cmd.Flags().Bool("renderToMemory", false, "render to memory (only useful for benchmark testing)")
-
-    // Set bash-completion
-    _ = cc.cmd.PersistentFlags().SetAnnotation("logFile", cobra.BashCompFilenameExt, []string{})
-
-    cc.cmd.SetGlobalNormalizationFunc(helpers.NormalizeHugoFlags)
-    cc.cmd.SilenceUsage = true
-
-    return cc
-}
-
-type hugoBuilderCommon struct {
-    source      string
-    baseURL     string
-    environment string
-
-    buildWatch     bool
-    panicOnWarning bool
-    poll           string
-    clock          string
-
-    gc bool
-
-    // Profile flags (for debugging of performance problems)
-    cpuprofile   string
-    memprofile   string
-    mutexprofile string
-    traceprofile string
-    printm       bool
-
-    // TODO(bep) var vs string
-    logging    bool
-    verbose    bool
-    verboseLog bool
-    debug      bool
-    quiet      bool
-
-    cfgFile string
-    cfgDir  string
-    logFile string
-}
-
-func (cc *hugoBuilderCommon) timeTrack(start time.Time, name string) {
-    if cc.quiet {
-        return
-    }
-    elapsed := time.Since(start)
-    fmt.Printf("%s in %v ms\n", name, int(1000*elapsed.Seconds()))
-}
-
-func (cc *hugoBuilderCommon) getConfigDir(baseDir string) string {
-    if cc.cfgDir != "" {
-        return hpaths.AbsPathify(baseDir, cc.cfgDir)
-    }
-
-    if v, found := os.LookupEnv("HUGO_CONFIGDIR"); found {
-        return hpaths.AbsPathify(baseDir, v)
-    }
-
-    return hpaths.AbsPathify(baseDir, "config")
-}
-
-func (cc *hugoBuilderCommon) getEnvironment(isServer bool) string {
-    if cc.environment != "" {
-        return cc.environment
-    }
-
-    if v, found := os.LookupEnv("HUGO_ENVIRONMENT"); found {
-        return v
-    }
-
-    // Used by Netlify and Forestry
-    if v, found := os.LookupEnv("HUGO_ENV"); found {
-        return v
-    }
-
-    if isServer {
-        return hugo.EnvironmentDevelopment
-    }
-
-    return hugo.EnvironmentProduction
-}
-
-func (cc *hugoBuilderCommon) handleCommonBuilderFlags(cmd *cobra.Command) {
-    cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
-    cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
-    cmd.PersistentFlags().StringVarP(&cc.environment, "environment", "e", "", "build environment")
-    cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory")
-    cmd.PersistentFlags().StringP("ignoreVendorPaths", "", "", "ignores any _vendor for module paths matching the given Glob pattern")
-    cmd.PersistentFlags().StringVar(&cc.clock, "clock", "", "set the clock used by Hugo, e.g. --clock 2021-11-06T22:30:00.00+09:00")
-}
-
-func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
-    cc.handleCommonBuilderFlags(cmd)
-    cmd.Flags().Bool("cleanDestinationDir", false, "remove files from destination not found in static directories")
-    cmd.Flags().BoolP("buildDrafts", "D", false, "include content marked as draft")
-    cmd.Flags().BoolP("buildFuture", "F", false, "include content with publishdate in the future")
-    cmd.Flags().BoolP("buildExpired", "E", false, "include expired content")
-    cmd.Flags().StringP("contentDir", "c", "", "filesystem path to content directory")
-    cmd.Flags().StringP("layoutDir", "l", "", "filesystem path to layout directory")
-    cmd.Flags().StringP("cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/")
-    cmd.Flags().BoolP("ignoreCache", "", false, "ignores the cache directory")
-    cmd.Flags().StringP("destination", "d", "", "filesystem path to write files to")
-    cmd.Flags().StringSliceP("theme", "t", []string{}, "themes to use (located in /themes/THEMENAME/)")
-    cmd.Flags().StringVarP(&cc.baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. https://spf13.com/")
-    cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date, author, and CODEOWNERS info to the pages")
-    cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
-    cmd.Flags().StringVar(&cc.poll, "poll", "", "set this to a poll interval, e.g --poll 700ms, to use a poll based approach to watch for file system changes")
-    cmd.Flags().BoolVar(&cc.panicOnWarning, "panicOnWarning", false, "panic on first WARNING log")
-    cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
-    cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
-    cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
-    cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
-    cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
-    cmd.Flags().BoolP("noBuildLock", "", false, "don't create .hugo_build.lock file")
-    cmd.Flags().BoolP("printI18nWarnings", "", false, "print missing translations")
-    cmd.Flags().BoolP("printPathWarnings", "", false, "print warnings on duplicate target paths etc.")
-    cmd.Flags().BoolP("printUnusedTemplates", "", false, "print warnings on unused templates.")
-    cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
-    cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
-    cmd.Flags().BoolVarP(&cc.printm, "printMemoryUsage", "", false, "print memory usage to screen at intervals")
-    cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
-    cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
-
-    // Hide these for now.
-    cmd.Flags().MarkHidden("profile-cpu")
-    cmd.Flags().MarkHidden("profile-mem")
-    cmd.Flags().MarkHidden("profile-mutex")
-
-    cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")
-
-    cmd.Flags().Bool("minify", false, "minify any supported output format (HTML, XML etc.)")
-
-    // Set bash-completion.
-    // Each flag must first be defined before using the SetAnnotation() call.
-    _ = cmd.Flags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
-    _ = cmd.Flags().SetAnnotation("cacheDir", cobra.BashCompSubdirsInDir, []string{})
-    _ = cmd.Flags().SetAnnotation("destination", cobra.BashCompSubdirsInDir, []string{})
-    _ = cmd.Flags().SetAnnotation("theme", cobra.BashCompSubdirsInDir, []string{"themes"})
-}
-
-func checkErr(logger loggers.Logger, err error, s ...string) {
-    if err == nil {
-        return
-    }
-    for _, message := range s {
-        logger.Errorln(message)
-    }
-    logger.Errorln(err)
+// newExec wires up all of Hugo's CLI.
+func newExec() (*simplecobra.Exec, error) {
+    rootCmd := &rootCommand{
+        commands: []simplecobra.Commander{
+            newVersionCmd(),
+            newEnvCommand(),
+            newServerCommand(),
+            newDeployCommand(),
+            newConfigCommand(),
+            newNewCommand(),
+            newConvertCommand(),
+            newImportCommand(),
+            newListCommand(),
+            newModCommands(),
+            newGenCommand(),
+            newReleaseCommand(),
+        },
+    }
+
+    return simplecobra.New(rootCmd)
 }
@@ -1,411 +0,0 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package commands
-
-import (
-    "fmt"
-    "os"
-    "path/filepath"
-    "testing"
-
-    "github.com/gohugoio/hugo/config"
-
-    "github.com/spf13/afero"
-
-    "github.com/gohugoio/hugo/hugofs"
-
-    "github.com/gohugoio/hugo/common/types"
-
-    "github.com/spf13/cobra"
-
-    qt "github.com/frankban/quicktest"
-)
-
-func TestExecute(t *testing.T) {
-    c := qt.New(t)
-
-    createSite := func(c *qt.C) string {
-        dir := createSimpleTestSite(t, testSiteConfig{})
-        return dir
-    }
-
-    c.Run("hugo", func(c *qt.C) {
-        dir := createSite(c)
-        resp := Execute([]string{"-s=" + dir})
-        c.Assert(resp.Err, qt.IsNil)
-        result := resp.Result
-        c.Assert(len(result.Sites) == 1, qt.Equals, true)
-        c.Assert(len(result.Sites[0].RegularPages()) == 2, qt.Equals, true)
-        c.Assert(result.Sites[0].Info.Params()["myparam"], qt.Equals, "paramproduction")
-    })
-
-    c.Run("hugo, set environment", func(c *qt.C) {
-        dir := createSite(c)
-        resp := Execute([]string{"-s=" + dir, "-e=staging"})
-        c.Assert(resp.Err, qt.IsNil)
-        result := resp.Result
-        c.Assert(result.Sites[0].Info.Params()["myparam"], qt.Equals, "paramstaging")
-    })
-
-    c.Run("convert toJSON", func(c *qt.C) {
-        dir := createSite(c)
-        output := filepath.Join(dir, "myjson")
-        resp := Execute([]string{"convert", "toJSON", "-s=" + dir, "-e=staging", "-o=" + output})
-        c.Assert(resp.Err, qt.IsNil)
-        converted := readFileFrom(c, filepath.Join(output, "content", "p1.md"))
-        c.Assert(converted, qt.Equals, "{\n \"title\": \"P1\",\n \"weight\": 1\n}\n\nContent\n\n", qt.Commentf(converted))
-    })
-
-    c.Run("config, set environment", func(c *qt.C) {
-        dir := createSite(c)
-        out, err := captureStdout(func() error {
-            resp := Execute([]string{"config", "-s=" + dir, "-e=staging"})
-            return resp.Err
-        })
-        c.Assert(err, qt.IsNil)
-        c.Assert(out, qt.Contains, "params = map[myparam:paramstaging]", qt.Commentf(out))
-    })
-
-    c.Run("deploy, environment set", func(c *qt.C) {
-        dir := createSite(c)
-        resp := Execute([]string{"deploy", "-s=" + dir, "-e=staging", "--target=mydeployment", "--dryRun"})
-        c.Assert(resp.Err, qt.Not(qt.IsNil))
-        c.Assert(resp.Err.Error(), qt.Contains, `no driver registered for "hugocloud"`)
-    })
-
-    c.Run("list", func(c *qt.C) {
-        dir := createSite(c)
-        out, err := captureStdout(func() error {
-            resp := Execute([]string{"list", "all", "-s=" + dir, "-e=staging"})
-            return resp.Err
-        })
-        c.Assert(err, qt.IsNil)
-        c.Assert(out, qt.Contains, "p1.md")
-    })
-
-    c.Run("new theme", func(c *qt.C) {
-        dir := createSite(c)
-        themesDir := filepath.Join(dir, "mythemes")
-        resp := Execute([]string{"new", "theme", "mytheme", "-s=" + dir, "-e=staging", "--themesDir=" + themesDir})
-        c.Assert(resp.Err, qt.IsNil)
-        themeTOML := readFileFrom(c, filepath.Join(themesDir, "mytheme", "theme.toml"))
-        c.Assert(themeTOML, qt.Contains, "name = \"Mytheme\"")
-    })
-
-    c.Run("new site", func(c *qt.C) {
-        dir := createSite(c)
-        siteDir := filepath.Join(dir, "mysite")
-        resp := Execute([]string{"new", "site", siteDir, "-e=staging"})
-        c.Assert(resp.Err, qt.IsNil)
-        config := readFileFrom(c, filepath.Join(siteDir, "config.toml"))
-        c.Assert(config, qt.Contains, "baseURL = 'http://example.org/'")
-        checkNewSiteInited(c, siteDir)
-    })
-}
-
-func checkNewSiteInited(c *qt.C, basepath string) {
-    paths := []string{
-        filepath.Join(basepath, "archetypes"),
-        filepath.Join(basepath, "assets"),
-        filepath.Join(basepath, "content"),
-        filepath.Join(basepath, "data"),
-        filepath.Join(basepath, "layouts"),
-        filepath.Join(basepath, "static"),
-        filepath.Join(basepath, "themes"),
-        filepath.Join(basepath, "config.toml"),
-    }
-
-    for _, path := range paths {
-        _, err := os.Stat(path)
-        c.Assert(err, qt.IsNil)
-    }
-}
-
-func readFileFrom(c *qt.C, filename string) string {
-    c.Helper()
-    filename = filepath.Clean(filename)
-    b, err := afero.ReadFile(hugofs.Os, filename)
-    c.Assert(err, qt.IsNil)
-    return string(b)
-}
-
-func TestFlags(t *testing.T) {
-    c := qt.New(t)
-
-    noOpRunE := func(cmd *cobra.Command, args []string) error {
-        return nil
-    }
-
-    tests := []struct {
-        name  string
-        args  []string
-        check func(c *qt.C, cmd *serverCmd)
-    }{
-        {
-            // https://github.com/gohugoio/hugo/issues/7642
-            name: "ignoreVendorPaths",
-            args: []string{"server", "--ignoreVendorPaths=github.com/**"},
-            check: func(c *qt.C, cmd *serverCmd) {
-                cfg := config.NewWithTestDefaults()
-                cmd.flagsToConfig(cfg)
-                c.Assert(cfg.Get("ignoreVendorPaths"), qt.Equals, "github.com/**")
-            },
-        },
-        {
-            name: "Persistent flags",
-            args: []string{
-                "server",
-                "--config=myconfig.toml",
-                "--configDir=myconfigdir",
-                "--contentDir=mycontent",
-                "--disableKinds=page,home",
-                "--environment=testing",
-                "--configDir=myconfigdir",
-                "--layoutDir=mylayouts",
-                "--theme=mytheme",
-                "--gc",
"--themesDir=mythemes",
|
|
||||||
"--cleanDestinationDir",
|
|
||||||
"--navigateToChanged",
|
|
||||||
"--disableLiveReload",
|
|
||||||
"--noHTTPCache",
|
|
||||||
"--printI18nWarnings",
|
|
||||||
"--destination=/tmp/mydestination",
|
|
||||||
"-b=https://example.com/b/",
|
|
||||||
"--port=1366",
|
|
||||||
"--renderToDisk",
|
|
||||||
"--source=mysource",
|
|
||||||
"--printPathWarnings",
|
|
||||||
"--printUnusedTemplates",
|
|
||||||
},
|
|
||||||
check: func(c *qt.C, sc *serverCmd) {
|
|
||||||
c.Assert(sc, qt.Not(qt.IsNil))
|
|
||||||
c.Assert(sc.navigateToChanged, qt.Equals, true)
|
|
||||||
c.Assert(sc.disableLiveReload, qt.Equals, true)
|
|
||||||
c.Assert(sc.noHTTPCache, qt.Equals, true)
|
|
||||||
c.Assert(sc.renderToDisk, qt.Equals, true)
|
|
||||||
c.Assert(sc.serverPort, qt.Equals, 1366)
|
|
||||||
c.Assert(sc.environment, qt.Equals, "testing")
|
|
||||||
|
|
||||||
cfg := config.NewWithTestDefaults()
|
|
||||||
sc.flagsToConfig(cfg)
|
|
||||||
c.Assert(cfg.GetString("publishDir"), qt.Equals, "/tmp/mydestination")
|
|
||||||
c.Assert(cfg.GetString("contentDir"), qt.Equals, "mycontent")
|
|
||||||
c.Assert(cfg.GetString("layoutDir"), qt.Equals, "mylayouts")
|
|
||||||
c.Assert(cfg.GetStringSlice("theme"), qt.DeepEquals, []string{"mytheme"})
|
|
||||||
c.Assert(cfg.GetString("themesDir"), qt.Equals, "mythemes")
|
|
||||||
c.Assert(cfg.GetString("baseURL"), qt.Equals, "https://example.com/b/")
|
|
||||||
|
|
||||||
c.Assert(cfg.Get("disableKinds"), qt.DeepEquals, []string{"page", "home"})
|
|
||||||
|
|
||||||
c.Assert(cfg.GetBool("gc"), qt.Equals, true)
|
|
||||||
|
|
||||||
// The flag is named printPathWarnings
|
|
||||||
c.Assert(cfg.GetBool("logPathWarnings"), qt.Equals, true)
|
|
||||||
|
|
||||||
// The flag is named printI18nWarnings
|
|
||||||
c.Assert(cfg.GetBool("logI18nWarnings"), qt.Equals, true)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
c.Run(test.name, func(c *qt.C) {
|
|
||||||
b := newCommandsBuilder()
|
|
||||||
root := b.addAll().build()
|
|
||||||
|
|
||||||
for _, cmd := range b.commands {
|
|
||||||
if cmd.getCommand() == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
// We are only interested in the flag handling here.
|
|
||||||
cmd.getCommand().RunE = noOpRunE
|
|
||||||
}
|
|
||||||
rootCmd := root.getCommand()
|
|
||||||
rootCmd.SetArgs(test.args)
|
|
||||||
c.Assert(rootCmd.Execute(), qt.IsNil)
|
|
||||||
test.check(c, b.commands[0].(*serverCmd))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCommandsExecute(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
dir := createSimpleTestSite(t, testSiteConfig{})
|
|
||||||
dirOut := t.TempDir()
|
|
||||||
|
|
||||||
sourceFlag := fmt.Sprintf("-s=%s", dir)
|
|
||||||
|
|
||||||
tests := []struct {
|
|
||||||
commands []string
|
|
||||||
flags []string
|
|
||||||
expectErrToContain string
|
|
||||||
}{
|
|
||||||
// TODO(bep) permission issue on my OSX? "operation not permitted" {[]string{"check", "ulimit"}, nil, false},
|
|
||||||
{[]string{"env"}, nil, ""},
|
|
||||||
{[]string{"version"}, nil, ""},
|
|
||||||
// no args = hugo build
|
|
||||||
{nil, []string{sourceFlag}, ""},
|
|
||||||
{nil, []string{sourceFlag, "--renderToMemory"}, ""},
|
|
||||||
{[]string{"completion", "bash"}, nil, ""},
|
|
||||||
{[]string{"completion", "fish"}, nil, ""},
|
|
||||||
{[]string{"completion", "powershell"}, nil, ""},
|
|
||||||
{[]string{"completion", "zsh"}, nil, ""},
|
|
||||||
{[]string{"config"}, []string{sourceFlag}, ""},
|
|
||||||
{[]string{"convert", "toTOML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "toml")}, ""},
|
|
||||||
{[]string{"convert", "toYAML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "yaml")}, ""},
|
|
||||||
{[]string{"convert", "toJSON"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "json")}, ""},
|
|
||||||
{[]string{"gen", "chromastyles"}, []string{"--style=manni"}, ""},
|
|
||||||
{[]string{"gen", "doc"}, []string{"--dir=" + filepath.Join(dirOut, "doc")}, ""},
|
|
||||||
{[]string{"gen", "man"}, []string{"--dir=" + filepath.Join(dirOut, "man")}, ""},
|
|
||||||
{[]string{"list", "drafts"}, []string{sourceFlag}, ""},
|
|
||||||
{[]string{"list", "expired"}, []string{sourceFlag}, ""},
|
|
||||||
{[]string{"list", "future"}, []string{sourceFlag}, ""},
|
|
||||||
{[]string{"new", "new-page.md"}, []string{sourceFlag}, ""},
|
|
||||||
{[]string{"new", "site", filepath.Join(dirOut, "new-site")}, nil, ""},
|
|
||||||
{[]string{"unknowncommand"}, nil, "unknown command"},
|
|
||||||
// TODO(bep) cli refactor fix https://github.com/gohugoio/hugo/issues/4450
|
|
||||||
//{[]string{"new", "theme", filepath.Join(dirOut, "new-theme")}, nil,false},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
name := "hugo"
|
|
||||||
if len(test.commands) > 0 {
|
|
||||||
name = test.commands[0]
|
|
||||||
}
|
|
||||||
c.Run(name, func(c *qt.C) {
|
|
||||||
b := newCommandsBuilder().addAll().build()
|
|
||||||
hugoCmd := b.getCommand()
|
|
||||||
test.flags = append(test.flags, "--quiet")
|
|
||||||
hugoCmd.SetArgs(append(test.commands, test.flags...))
|
|
||||||
|
|
||||||
// TODO(bep) capture output and add some simple asserts
|
|
||||||
// TODO(bep) misspelled subcommands does not return an error. We should investigate this
|
|
||||||
// but before that, check for "Error: unknown command".
|
|
||||||
|
|
||||||
_, err := hugoCmd.ExecuteC()
|
|
||||||
if test.expectErrToContain != "" {
|
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
|
||||||
c.Assert(err.Error(), qt.Contains, test.expectErrToContain)
|
|
||||||
} else {
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Assert that we have not left any development debug artifacts in
|
|
||||||
// the code.
|
|
||||||
if b.c != nil {
|
|
||||||
_, ok := b.c.publishDirFs.(types.DevMarker)
|
|
||||||
c.Assert(ok, qt.Equals, false)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type testSiteConfig struct {
|
|
||||||
configTOML string
|
|
||||||
contentDir string
|
|
||||||
}
|
|
||||||
|
|
||||||
func createSimpleTestSite(t testing.TB, cfg testSiteConfig) string {
|
|
||||||
dir := t.TempDir()
|
|
||||||
|
|
||||||
cfgStr := `
|
|
||||||
|
|
||||||
baseURL = "https://example.org"
|
|
||||||
title = "Hugo Commands"
|
|
||||||
|
|
||||||
|
|
||||||
`
|
|
||||||
|
|
||||||
contentDir := "content"
|
|
||||||
|
|
||||||
if cfg.configTOML != "" {
|
|
||||||
cfgStr = cfg.configTOML
|
|
||||||
}
|
|
||||||
if cfg.contentDir != "" {
|
|
||||||
contentDir = cfg.contentDir
|
|
||||||
}
|
|
||||||
|
|
||||||
os.MkdirAll(filepath.Join(dir, "public"), 0777)
|
|
||||||
|
|
||||||
// Just the basic. These are for CLI tests, not site testing.
|
|
||||||
writeFile(t, filepath.Join(dir, "config.toml"), cfgStr)
|
|
||||||
writeFile(t, filepath.Join(dir, "config", "staging", "params.toml"), `myparam="paramstaging"`)
|
|
||||||
writeFile(t, filepath.Join(dir, "config", "staging", "deployment.toml"), `
|
|
||||||
[[targets]]
|
|
||||||
name = "mydeployment"
|
|
||||||
URL = "hugocloud://hugotestbucket"
|
|
||||||
`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, "config", "testing", "params.toml"), `myparam="paramtesting"`)
|
|
||||||
writeFile(t, filepath.Join(dir, "config", "production", "params.toml"), `myparam="paramproduction"`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, "static", "myfile.txt"), `Hello World!`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, contentDir, "p1.md"), `
|
|
||||||
---
|
|
||||||
title: "P1"
|
|
||||||
weight: 1
|
|
||||||
---
|
|
||||||
|
|
||||||
Content
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, contentDir, "hügö.md"), `
|
|
||||||
---
|
|
||||||
weight: 2
|
|
||||||
---
|
|
||||||
|
|
||||||
This is hügö.
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, "layouts", "_default", "single.html"), `
|
|
||||||
|
|
||||||
Single: {{ .Title }}|{{ .Content }}
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, "layouts", "404.html"), `
|
|
||||||
404: {{ .Title }}|Not Found.
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
writeFile(t, filepath.Join(dir, "layouts", "_default", "list.html"), `
|
|
||||||
|
|
||||||
List: {{ .Title }}
|
|
||||||
Environment: {{ hugo.Environment }}
|
|
||||||
|
|
||||||
For issue 9788:
|
|
||||||
{{ $foo :="abc" | resources.FromString "foo.css" | minify | resources.PostProcess }}
|
|
||||||
PostProcess: {{ $foo.RelPermalink }}
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
return dir
|
|
||||||
}
|
|
||||||
|
|
||||||
func writeFile(t testing.TB, filename, content string) {
|
|
||||||
must(t, os.MkdirAll(filepath.Dir(filename), os.FileMode(0755)))
|
|
||||||
must(t, os.WriteFile(filename, []byte(content), os.FileMode(0755)))
|
|
||||||
}
|
|
||||||
|
|
||||||
func must(t testing.TB, err error) {
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -9,129 +9,93 @@
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.Print the version number of Hug
|
// limitations under the License.
|
||||||
|
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
|
||||||
"os"
|
"os"
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/maps"
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/modules"
|
||||||
"github.com/gohugoio/hugo/parser"
|
"github.com/gohugoio/hugo/parser"
|
||||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/modules"
|
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*configCmd)(nil)
|
// newConfigCommand creates a new config command and its subcommands.
|
||||||
|
func newConfigCommand() *configCommand {
|
||||||
type configCmd struct {
|
return &configCommand{
|
||||||
*baseBuilderCmd
|
commands: []simplecobra.Commander{
|
||||||
}
|
&configMountsCommand{},
|
||||||
|
},
|
||||||
func (b *commandsBuilder) newConfigCmd() *configCmd {
|
|
||||||
cc := &configCmd{}
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "config",
|
|
||||||
Short: "Print the site configuration",
|
|
||||||
Long: `Print the site configuration, both default and custom settings.`,
|
|
||||||
RunE: cc.printConfig,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
printMountsCmd := &cobra.Command{
|
|
||||||
Use: "mounts",
|
|
||||||
Short: "Print the configured file mounts",
|
|
||||||
RunE: cc.printMounts,
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.AddCommand(printMountsCmd)
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *configCmd) printMounts(cmd *cobra.Command, args []string) error {
|
type configCommand struct {
|
||||||
cfg, err := initializeConfig(true, false, false, &c.hugoBuilderCommon, c, nil)
|
r *rootCommand
|
||||||
|
|
||||||
|
commands []simplecobra.Commander
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configCommand) Commands() []simplecobra.Commander {
|
||||||
|
return c.commands
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configCommand) Name() string {
|
||||||
|
return "config"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
conf, err := c.r.ConfigFromProvider(c.r.configVersionID.Load(), flagsToCfg(cd, nil))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
config := conf.configs.Base
|
||||||
|
|
||||||
allModules := cfg.Cfg.Get("allmodules").(modules.Modules)
|
// Print it as JSON.
|
||||||
|
dec := json.NewEncoder(os.Stdout)
|
||||||
|
dec.SetIndent("", " ")
|
||||||
|
dec.SetEscapeHTML(false)
|
||||||
|
|
||||||
for _, m := range allModules {
|
if err := dec.Encode(parser.ReplacingJSONMarshaller{Value: config, KeysToLower: true, OmitEmpty: true}); err != nil {
|
||||||
if err := parser.InterfaceToConfig(&modMounts{m: m, verbose: c.verbose}, metadecoders.JSON, os.Stdout); err != nil {
|
return err
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *configCmd) printConfig(cmd *cobra.Command, args []string) error {
|
func (c *configCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
cfg, err := initializeConfig(true, false, false, &c.hugoBuilderCommon, c, nil)
|
cmd.Short = "Print the site configuration"
|
||||||
if err != nil {
|
cmd.Long = `Print the site configuration, both default and custom settings.`
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
allSettings := cfg.Cfg.Get("").(maps.Params)
|
|
||||||
|
|
||||||
// We need to clean up this, but we store objects in the config that
|
|
||||||
// isn't really interesting to the end user, so filter these.
|
|
||||||
ignoreKeysRe := regexp.MustCompile("client|sorted|filecacheconfigs|allmodules|multilingual")
|
|
||||||
|
|
||||||
separator := ": "
|
|
||||||
|
|
||||||
if len(cfg.configFiles) > 0 && strings.HasSuffix(cfg.configFiles[0], ".toml") {
|
|
||||||
separator = " = "
|
|
||||||
}
|
|
||||||
|
|
||||||
var keys []string
|
|
||||||
for k := range allSettings {
|
|
||||||
if ignoreKeysRe.MatchString(k) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
keys = append(keys, k)
|
|
||||||
}
|
|
||||||
sort.Strings(keys)
|
|
||||||
for _, k := range keys {
|
|
||||||
kv := reflect.ValueOf(allSettings[k])
|
|
||||||
if kv.Kind() == reflect.String {
|
|
||||||
fmt.Printf("%s%s\"%+v\"\n", k, separator, allSettings[k])
|
|
||||||
} else {
|
|
||||||
fmt.Printf("%s%s%+v\n", k, separator, allSettings[k])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type modMounts struct {
|
func (c *configCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
verbose bool
|
c.r = cd.Root.Command.(*rootCommand)
|
||||||
m modules.Module
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type modMount struct {
|
type configModMount struct {
|
||||||
Source string `json:"source"`
|
Source string `json:"source"`
|
||||||
Target string `json:"target"`
|
Target string `json:"target"`
|
||||||
Lang string `json:"lang,omitempty"`
|
Lang string `json:"lang,omitempty"`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type configModMounts struct {
|
||||||
|
verbose bool
|
||||||
|
m modules.Module
|
||||||
|
}
|
||||||
|
|
||||||
// MarshalJSON is for internal use only.
|
// MarshalJSON is for internal use only.
|
||||||
func (m *modMounts) MarshalJSON() ([]byte, error) {
|
func (m *configModMounts) MarshalJSON() ([]byte, error) {
|
||||||
var mounts []modMount
|
var mounts []configModMount
|
||||||
|
|
||||||
for _, mount := range m.m.Mounts() {
|
for _, mount := range m.m.Mounts() {
|
||||||
mounts = append(mounts, modMount{
|
mounts = append(mounts, configModMount{
|
||||||
Source: mount.Source,
|
Source: mount.Source,
|
||||||
Target: mount.Target,
|
Target: mount.Target,
|
||||||
Lang: mount.Lang,
|
Lang: mount.Lang,
|
||||||
|
@ -154,7 +118,7 @@ func (m *modMounts) MarshalJSON() ([]byte, error) {
|
||||||
Meta map[string]any `json:"meta"`
|
Meta map[string]any `json:"meta"`
|
||||||
HugoVersion modules.HugoVersion `json:"hugoVersion"`
|
HugoVersion modules.HugoVersion `json:"hugoVersion"`
|
||||||
|
|
||||||
Mounts []modMount `json:"mounts"`
|
Mounts []configModMount `json:"mounts"`
|
||||||
}{
|
}{
|
||||||
Path: m.m.Path(),
|
Path: m.m.Path(),
|
||||||
Version: m.m.Version(),
|
Version: m.m.Version(),
|
||||||
|
@ -168,12 +132,12 @@ func (m *modMounts) MarshalJSON() ([]byte, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
return json.Marshal(&struct {
|
return json.Marshal(&struct {
|
||||||
Path string `json:"path"`
|
Path string `json:"path"`
|
||||||
Version string `json:"version"`
|
Version string `json:"version"`
|
||||||
Time time.Time `json:"time"`
|
Time time.Time `json:"time"`
|
||||||
Owner string `json:"owner"`
|
Owner string `json:"owner"`
|
||||||
Dir string `json:"dir"`
|
Dir string `json:"dir"`
|
||||||
Mounts []modMount `json:"mounts"`
|
Mounts []configModMount `json:"mounts"`
|
||||||
}{
|
}{
|
||||||
Path: m.m.Path(),
|
Path: m.m.Path(),
|
||||||
Version: m.m.Version(),
|
Version: m.m.Version(),
|
||||||
|
@ -184,3 +148,40 @@ func (m *modMounts) MarshalJSON() ([]byte, error) {
|
||||||
})
|
})
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
type configMountsCommand struct {
|
||||||
|
configCmd *configCommand
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configMountsCommand) Commands() []simplecobra.Commander {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configMountsCommand) Name() string {
|
||||||
|
return "mounts"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configMountsCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
r := c.configCmd.r
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, m := range conf.configs.Modules {
|
||||||
|
if err := parser.InterfaceToConfig(&configModMounts{m: m, verbose: r.verbose}, metadecoders.JSON, os.Stdout); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configMountsCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
|
cmd.Short = "Print the configured file mounts"
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *configMountsCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
|
c.configCmd = cd.Parent.Command.(*configCommand)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
|
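The config command rewrite above shows the pattern this commit applies across the CLI: instead of assembling *cobra.Command values through commandsBuilder, each command now implements the simplecobra.Commander method set that appears in the diff (Commands, Name, Run, WithCobraCommand, Init). A minimal sketch of that shape, assuming the surrounding commands package (rootCommand, simplecobra and cobra imported as in the files above); helloCommand is hypothetical and not part of this commit:

    // Sketch only: mirrors the method set implemented by configCommand and
    // configMountsCommand above; helloCommand does not exist in the commit.
    type helloCommand struct {
        r *rootCommand // resolved in Init, as in configCommand
    }

    func (c *helloCommand) Commands() []simplecobra.Commander { return nil }

    func (c *helloCommand) Name() string { return "hello" }

    func (c *helloCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
        c.r.Println("hello from", c.Name())
        return nil
    }

    func (c *helloCommand) WithCobraCommand(cmd *cobra.Command) error {
        cmd.Short = "Print a short greeting"
        return nil
    }

    func (c *helloCommand) Init(cd, runner *simplecobra.Commandeer) error {
        c.r = cd.Root.Command.(*rootCommand)
        return nil
    }

Nested commands hang off the parent through the Commands slice, which is how configMountsCommand is attached to configCommand above.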
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -15,122 +15,119 @@ package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/parser/pageparser"
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/resources/page"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
|
"github.com/gohugoio/hugo/hugofs"
|
||||||
|
"github.com/gohugoio/hugo/hugolib"
|
||||||
"github.com/gohugoio/hugo/parser"
|
"github.com/gohugoio/hugo/parser"
|
||||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||||
|
"github.com/gohugoio/hugo/parser/pageparser"
|
||||||
"github.com/gohugoio/hugo/hugolib"
|
"github.com/gohugoio/hugo/resources/page"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*convertCmd)(nil)
|
func newConvertCommand() *convertCommand {
|
||||||
|
var c *convertCommand
|
||||||
|
c = &convertCommand{
|
||||||
|
commands: []simplecobra.Commander{
|
||||||
|
&simpleCommand{
|
||||||
|
name: "toJSON",
|
||||||
|
short: "Convert front matter to JSON",
|
||||||
|
long: `toJSON converts all front matter in the content directory
|
||||||
|
to use JSON for the front matter.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
return c.convertContents(metadecoders.JSON)
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "toTOML",
|
||||||
|
short: "Convert front matter to TOML",
|
||||||
|
long: `toTOML converts all front matter in the content directory
|
||||||
|
to use TOML for the front matter.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
return c.convertContents(metadecoders.TOML)
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "toYAML",
|
||||||
|
short: "Convert front matter to YAML",
|
||||||
|
long: `toYAML converts all front matter in the content directory
|
||||||
|
to use YAML for the front matter.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
return c.convertContents(metadecoders.YAML)
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
return c
|
||||||
|
}
|
||||||
|
|
||||||
type convertCmd struct {
|
type convertCommand struct {
|
||||||
|
// Flags.
|
||||||
outputDir string
|
outputDir string
|
||||||
unsafe bool
|
unsafe bool
|
||||||
|
|
||||||
*baseBuilderCmd
|
// Deps.
|
||||||
|
r *rootCommand
|
||||||
|
h *hugolib.HugoSites
|
||||||
|
|
||||||
|
// Commands.
|
||||||
|
commands []simplecobra.Commander
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *commandsBuilder) newConvertCmd() *convertCmd {
|
func (c *convertCommand) Commands() []simplecobra.Commander {
|
||||||
cc := &convertCmd{}
|
return c.commands
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "convert",
|
|
||||||
Short: "Convert your content to different formats",
|
|
||||||
Long: `Convert your content (e.g. front matter) to different formats.
|
|
||||||
|
|
||||||
See convert's subcommands toJSON, toTOML and toYAML for more information.`,
|
|
||||||
RunE: nil,
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.AddCommand(
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "toJSON",
|
|
||||||
Short: "Convert front matter to JSON",
|
|
||||||
Long: `toJSON converts all front matter in the content directory
|
|
||||||
to use JSON for the front matter.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return cc.convertContents(metadecoders.JSON)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "toTOML",
|
|
||||||
Short: "Convert front matter to TOML",
|
|
||||||
Long: `toTOML converts all front matter in the content directory
|
|
||||||
to use TOML for the front matter.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return cc.convertContents(metadecoders.TOML)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "toYAML",
|
|
||||||
Short: "Convert front matter to YAML",
|
|
||||||
Long: `toYAML converts all front matter in the content directory
|
|
||||||
to use YAML for the front matter.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return cc.convertContents(metadecoders.YAML)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
cmd.PersistentFlags().StringVarP(&cc.outputDir, "output", "o", "", "filesystem path to write files to")
|
|
||||||
cmd.PersistentFlags().BoolVar(&cc.unsafe, "unsafe", false, "enable less safe operations, please backup first")
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cc *convertCmd) convertContents(format metadecoders.Format) error {
|
func (c *convertCommand) Name() string {
|
||||||
if cc.outputDir == "" && !cc.unsafe {
|
return "convert"
|
||||||
return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
|
}
|
||||||
}
|
|
||||||
|
|
||||||
c, err := initializeConfig(true, false, false, &cc.hugoBuilderCommon, cc, nil)
|
func (c *convertCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
c.Cfg.Set("buildDrafts", true)
|
|
||||||
|
|
||||||
h, err := hugolib.NewHugoSites(*c.DepsCfg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
site := h.Sites[0]
|
|
||||||
|
|
||||||
site.Log.Println("processing", len(site.AllPages()), "content files")
|
|
||||||
for _, p := range site.AllPages() {
|
|
||||||
if err := cc.convertAndSavePage(p, site, format); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
|
func (c *convertCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
|
cmd.Short = "Convert your content to different formats"
|
||||||
|
cmd.Long = `Convert your content (e.g. front matter) to different formats.
|
||||||
|
|
||||||
|
See convert's subcommands toJSON, toTOML and toYAML for more information.`
|
||||||
|
|
||||||
|
cmd.PersistentFlags().StringVarP(&c.outputDir, "output", "o", "", "filesystem path to write files to")
|
||||||
|
cmd.PersistentFlags().BoolVar(&c.unsafe, "unsafe", false, "enable less safe operations, please backup first")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *convertCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
|
c.r = cd.Root.Command.(*rootCommand)
|
||||||
|
cfg := config.New()
|
||||||
|
cfg.Set("buildDrafts", true)
|
||||||
|
h, err := c.r.Hugo(flagsToCfg(cd, cfg))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
c.h = h
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
|
||||||
// The resources are not in .Site.AllPages.
|
// The resources are not in .Site.AllPages.
|
||||||
for _, r := range p.Resources().ByType("page") {
|
for _, r := range p.Resources().ByType("page") {
|
||||||
if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
|
if err := c.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -140,9 +137,9 @@ func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, target
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
errMsg := fmt.Errorf("Error processing file %q", p.File().Path())
|
errMsg := fmt.Errorf("error processing file %q", p.File().Path())
|
||||||
|
|
||||||
site.Log.Infoln("Attempting to convert", p.File().Filename())
|
site.Log.Infoln("ttempting to convert", p.File().Filename())
|
||||||
|
|
||||||
f := p.File()
|
f := p.File()
|
||||||
file, err := f.FileInfo().Meta().Open()
|
file, err := f.FileInfo().Meta().Open()
|
||||||
|
@ -182,26 +179,45 @@ func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, target
|
||||||
|
|
||||||
newFilename := p.File().Filename()
|
newFilename := p.File().Filename()
|
||||||
|
|
||||||
if cc.outputDir != "" {
|
if c.outputDir != "" {
|
||||||
contentDir := strings.TrimSuffix(newFilename, p.File().Path())
|
contentDir := strings.TrimSuffix(newFilename, p.File().Path())
|
||||||
contentDir = filepath.Base(contentDir)
|
contentDir = filepath.Base(contentDir)
|
||||||
|
|
||||||
newFilename = filepath.Join(cc.outputDir, contentDir, p.File().Path())
|
newFilename = filepath.Join(c.outputDir, contentDir, p.File().Path())
|
||||||
}
|
}
|
||||||
|
|
||||||
fs := hugofs.Os
|
fs := hugofs.Os
|
||||||
if err := helpers.WriteToDisk(newFilename, &newContent, fs); err != nil {
|
if err := helpers.WriteToDisk(newFilename, &newContent, fs); err != nil {
|
||||||
return fmt.Errorf("Failed to save file %q:: %w", newFilename, err)
|
return fmt.Errorf("failed to save file %q:: %w", newFilename, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
type parsedFile struct {
|
func (c *convertCommand) convertContents(format metadecoders.Format) error {
|
||||||
frontMatterFormat metadecoders.Format
|
if c.outputDir == "" && !c.unsafe {
|
||||||
frontMatterSource []byte
|
return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
|
||||||
frontMatter map[string]any
|
}
|
||||||
|
|
||||||
// Everything after Front Matter
|
if err := c.h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
||||||
content []byte
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
site := c.h.Sites[0]
|
||||||
|
|
||||||
|
var pagesBackedByFile page.Pages
|
||||||
|
for _, p := range site.AllPages() {
|
||||||
|
if p.File().IsZero() {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pagesBackedByFile = append(pagesBackedByFile, p)
|
||||||
|
}
|
||||||
|
|
||||||
|
site.Log.Println("processing", len(pagesBackedByFile), "content files")
|
||||||
|
for _, p := range site.AllPages() {
|
||||||
|
if err := c.convertAndSavePage(p, site, format); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
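One Go detail in newConvertCommand above is worth noting: c is declared and then assigned in a second statement so that the run closures of the toJSON, toTOML and toYAML subcommands can capture the pointer and later call c.convertContents with their format, sharing the persistent --output and --unsafe flags and the HugoSites that Init stores on c. The same capture pattern in isolation, as a sketch that assumes the commands package helpers shown in this diff (simpleCommand, rootCommand); greetCommand and newGreetCommand are made-up names:

    // Sketch only: "declare, then assign" so subcommand closures can share
    // the parent command's state, as newConvertCommand does above.
    type greetCommand struct {
        loud     bool
        commands []simplecobra.Commander
    }

    func (c *greetCommand) greet(r *rootCommand) error {
        msg := "hello"
        if c.loud {
            msg = "HELLO"
        }
        r.Println(msg)
        return nil
    }

    func newGreetCommand() *greetCommand {
        var c *greetCommand // declared first; the closure below refers to it
        c = &greetCommand{
            commands: []simplecobra.Commander{
                &simpleCommand{
                    name:  "hello",
                    short: "Print a greeting",
                    run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
                        return c.greet(r) // runs only after c has been assigned
                    },
                },
            },
        }
        return c
    }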
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -14,76 +14,58 @@
|
||||||
//go:build !nodeploy
|
//go:build !nodeploy
|
||||||
// +build !nodeploy
|
// +build !nodeploy
|
||||||
|
|
||||||
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
|
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
"github.com/gohugoio/hugo/deploy"
|
"github.com/gohugoio/hugo/deploy"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*deployCmd)(nil)
|
func newDeployCommand() simplecobra.Commander {
|
||||||
|
|
||||||
// deployCmd supports deploying sites to Cloud providers.
|
return &simpleCommand{
|
||||||
type deployCmd struct {
|
name: "deploy",
|
||||||
*baseBuilderCmd
|
short: "Deploy your site to a Cloud provider.",
|
||||||
|
long: `Deploy your site to a Cloud provider.
|
||||||
invalidateCDN bool
|
|
||||||
maxDeletes int
|
|
||||||
workers int
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: In addition to the "deploy" command, consider adding a "--deploy"
|
|
||||||
// flag for the default command; this would build the site and then deploy it.
|
|
||||||
// It's not obvious how to do this; would all of the deploy-specific flags
|
|
||||||
// have to exist at the top level as well?
|
|
||||||
|
|
||||||
// TODO: The output files change every time "hugo" is executed, it looks
|
|
||||||
// like because of map order randomization. This means that you can
|
|
||||||
// run "hugo && hugo deploy" again and again and upload new stuff every time. Is
|
|
||||||
// this intended?
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newDeployCmd() *deployCmd {
|
|
||||||
cc := &deployCmd{}
|
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "deploy",
|
|
||||||
Short: "Deploy your site to a Cloud provider.",
|
|
||||||
Long: `Deploy your site to a Cloud provider.
|
|
||||||
|
|
||||||
See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
|
See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
|
||||||
documentation.
|
documentation.
|
||||||
`,
|
`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
h, err := r.Hugo(flagsToCfgWithAdditionalConfigBase(cd, nil, "deployment"))
|
||||||
cfgInit := func(c *commandeer) error {
|
|
||||||
c.Set("invalidateCDN", cc.invalidateCDN)
|
|
||||||
c.Set("maxDeletes", cc.maxDeletes)
|
|
||||||
c.Set("workers", cc.workers)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
comm, err := initializeConfig(true, true, false, &cc.hugoBuilderCommon, cc, cfgInit)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
deployer, err := deploy.New(comm.Cfg, comm.hugo().PathSpec.PublishFs)
|
deployer, err := deploy.New(h.Configs.GetFirstLanguageConfig(), h.PathSpec.PublishFs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return deployer.Deploy(context.Background())
|
return deployer.Deploy(ctx)
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().String("target", "", "target deployment from deployments section in config file; defaults to the first one")
|
||||||
|
cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
|
||||||
|
cmd.Flags().Bool("dryRun", false, "dry run")
|
||||||
|
cmd.Flags().Bool("force", false, "force upload of all files")
|
||||||
|
cmd.Flags().Bool("invalidateCDN", true, "invalidate the CDN cache listed in the deployment target")
|
||||||
|
cmd.Flags().Int("maxDeletes", 256, "maximum # of files to delete, or -1 to disable")
|
||||||
|
cmd.Flags().Int("workers", 10, "number of workers to transfer files. defaults to 10")
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd.Flags().String("target", "", "target deployment from deployments section in config file; defaults to the first one")
|
|
||||||
cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
|
|
||||||
cmd.Flags().Bool("dryRun", false, "dry run")
|
|
||||||
cmd.Flags().Bool("force", false, "force upload of all files")
|
|
||||||
cmd.Flags().BoolVar(&cc.invalidateCDN, "invalidateCDN", true, "invalidate the CDN cache listed in the deployment target")
|
|
||||||
cmd.Flags().IntVar(&cc.maxDeletes, "maxDeletes", 256, "maximum # of files to delete, or -1 to disable")
|
|
||||||
cmd.Flags().IntVar(&cc.workers, "workers", 10, "number of workers to transfer files. defaults to 10")
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
}
|
||||||
|
|
48
commands/deploy_off.go
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
//go:build nodeploy
|
||||||
|
// +build nodeploy
|
||||||
|
|
||||||
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package commands
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
|
||||||
|
"github.com/spf13/cobra"
|
||||||
|
)
|
||||||
|
|
||||||
|
func newDeployCommand() simplecobra.Commander {
|
||||||
|
return &simpleCommand{
|
||||||
|
name: "deploy",
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Hidden = true
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
|
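deploy.go above is guarded by //go:build !nodeploy and the new deploy_off.go by //go:build nodeploy, so exactly one definition of newDeployCommand is compiled in: a normal build gets the cloud deployer, while a build with -tags nodeploy gets the hidden no-op stub. A compressed sketch of that pairing, with hypothetical names; in practice the two halves live in separate files, and simpleCommand is the helper used throughout this diff:

    // feature.go, compiled unless the "nofeature" tag is set.
    //go:build !nofeature

    package commands

    func newFeatureCommand() simplecobra.Commander {
        return &simpleCommand{
            name: "feature",
            run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
                return nil // the real implementation would go here
            },
        }
    }

    // feature_off.go, compiled only with -tags nofeature; same constructor, hidden stub.
    //go:build nofeature

    package commands

    func newFeatureCommand() simplecobra.Commander {
        return &simpleCommand{
            name: "feature",
            run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
                return nil
            },
            withc: func(cmd *cobra.Command) {
                cmd.Hidden = true // keep the stub out of help output, as deploy_off.go does
            },
        }
    }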
@ -1,4 +1,4 @@
|
||||||
// Copyright 2016 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -14,55 +14,50 @@
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"runtime"
|
"runtime"
|
||||||
|
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
"github.com/gohugoio/hugo/common/hugo"
|
"github.com/gohugoio/hugo/common/hugo"
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*envCmd)(nil)
|
func newEnvCommand() simplecobra.Commander {
|
||||||
|
return &simpleCommand{
|
||||||
type envCmd struct {
|
name: "env",
|
||||||
*baseCmd
|
short: "Print Hugo version and environment info",
|
||||||
}
|
long: "Print Hugo version and environment info. This is useful in Hugo bug reports",
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
func newEnvCmd() *envCmd {
|
r.Printf("%s\n", hugo.BuildVersionString())
|
||||||
return &envCmd{
|
r.Printf("GOOS=%q\n", runtime.GOOS)
|
||||||
baseCmd: newBaseCmd(&cobra.Command{
|
r.Printf("GOARCH=%q\n", runtime.GOARCH)
|
||||||
Use: "env",
|
r.Printf("GOVERSION=%q\n", runtime.Version())
|
||||||
Short: "Print Hugo version and environment info",
|
|
||||||
Long: `Print Hugo version and environment info. This is useful in Hugo bug reports.
|
|
||||||
|
|
||||||
If you add the -v flag, you will get a full dependency list.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
printHugoVersion()
|
|
||||||
jww.FEEDBACK.Printf("GOOS=%q\n", runtime.GOOS)
|
|
||||||
jww.FEEDBACK.Printf("GOARCH=%q\n", runtime.GOARCH)
|
|
||||||
jww.FEEDBACK.Printf("GOVERSION=%q\n", runtime.Version())
|
|
||||||
|
|
||||||
isVerbose, _ := cmd.Flags().GetBool("verbose")
|
|
||||||
|
|
||||||
if isVerbose {
|
|
||||||
deps := hugo.GetDependencyList()
|
|
||||||
for _, dep := range deps {
|
|
||||||
jww.FEEDBACK.Printf("%s\n", dep)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// These are also included in the GetDependencyList above;
|
|
||||||
// always print these as these are most likely the most useful to know about.
|
|
||||||
deps := hugo.GetDependencyListNonGo()
|
|
||||||
for _, dep := range deps {
|
|
||||||
jww.FEEDBACK.Printf("%s\n", dep)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if r.verbose {
|
||||||
|
deps := hugo.GetDependencyList()
|
||||||
|
for _, dep := range deps {
|
||||||
|
r.Printf("%s\n", dep)
|
||||||
}
|
}
|
||||||
|
} else {
|
||||||
return nil
|
// These are also included in the GetDependencyList above;
|
||||||
},
|
// always print these as these are most likely the most useful to know about.
|
||||||
}),
|
deps := hugo.GetDependencyListNonGo()
|
||||||
|
for _, dep := range deps {
|
||||||
|
r.Printf("%s\n", dep)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func newVersionCmd() simplecobra.Commander {
|
||||||
|
return &simpleCommand{
|
||||||
|
name: "version",
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
r.Println(hugo.BuildVersionString())
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
short: "Print Hugo version and environment info",
|
||||||
|
long: "Print Hugo version and environment info. This is useful in Hugo bug reports.",
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
207
commands/gen.go
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -14,27 +14,200 @@
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/alecthomas/chroma/v2"
|
||||||
|
"github.com/alecthomas/chroma/v2/formatters/html"
|
||||||
|
"github.com/alecthomas/chroma/v2/styles"
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/common/hugo"
|
||||||
|
"github.com/gohugoio/hugo/helpers"
|
||||||
|
"github.com/gohugoio/hugo/hugofs"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
"github.com/spf13/cobra/doc"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*genCmd)(nil)
|
func newGenCommand() *genCommand {
|
||||||
|
var (
|
||||||
|
// Flags.
|
||||||
|
gendocdir string
|
||||||
|
genmandir string
|
||||||
|
|
||||||
|
// Chroma flags.
|
||||||
|
style string
|
||||||
|
highlightStyle string
|
||||||
|
linesStyle string
|
||||||
|
)
|
||||||
|
|
||||||
|
newChromaStyles := func() simplecobra.Commander {
|
||||||
|
return &simpleCommand{
|
||||||
|
name: "chromastyles",
|
||||||
|
short: "Generate CSS stylesheet for the Chroma code highlighter",
|
||||||
|
long: `Generate CSS stylesheet for the Chroma code highlighter for a given style. This stylesheet is needed if markup.highlight.noClasses is disabled in config.
|
||||||
|
|
||||||
|
See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`,
|
||||||
|
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
builder := styles.Get(style).Builder()
|
||||||
|
if highlightStyle != "" {
|
||||||
|
builder.Add(chroma.LineHighlight, highlightStyle)
|
||||||
|
}
|
||||||
|
if linesStyle != "" {
|
||||||
|
builder.Add(chroma.LineNumbers, linesStyle)
|
||||||
|
}
|
||||||
|
style, err := builder.Build()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
formatter := html.New(html.WithAllClasses(true))
|
||||||
|
formatter.WriteCSS(os.Stdout, style)
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.PersistentFlags().StringVar(&style, "style", "friendly", "highlighter style (see https://xyproto.github.io/splash/docs/)")
|
||||||
|
cmd.PersistentFlags().StringVar(&highlightStyle, "highlightStyle", "bg:#ffffcc", "style used for highlighting lines (see https://github.com/alecthomas/chroma)")
|
||||||
|
cmd.PersistentFlags().StringVar(&linesStyle, "linesStyle", "", "style used for line numbers (see https://github.com/alecthomas/chroma)")
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
newMan := func() simplecobra.Commander {
|
||||||
|
return &simpleCommand{
|
||||||
|
name: "man",
|
||||||
|
short: "Generate man pages for the Hugo CLI",
|
||||||
|
long: `This command automatically generates up-to-date man pages of Hugo's
|
||||||
|
command-line interface. By default, it creates the man page files
|
||||||
|
in the "man" directory under the current directory.`,
|
||||||
|
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
header := &doc.GenManHeader{
|
||||||
|
Section: "1",
|
||||||
|
Manual: "Hugo Manual",
|
||||||
|
Source: fmt.Sprintf("Hugo %s", hugo.CurrentVersion),
|
||||||
|
}
|
||||||
|
if !strings.HasSuffix(genmandir, helpers.FilePathSeparator) {
|
||||||
|
genmandir += helpers.FilePathSeparator
|
||||||
|
}
|
||||||
|
if found, _ := helpers.Exists(genmandir, hugofs.Os); !found {
|
||||||
|
r.Println("Directory", genmandir, "does not exist, creating...")
|
||||||
|
if err := hugofs.Os.MkdirAll(genmandir, 0777); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cd.CobraCommand.Root().DisableAutoGenTag = true
|
||||||
|
|
||||||
|
r.Println("Generating Hugo man pages in", genmandir, "...")
|
||||||
|
doc.GenManTree(cd.CobraCommand.Root(), header, genmandir)
|
||||||
|
|
||||||
|
r.Println("Done.")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.PersistentFlags().StringVar(&genmandir, "dir", "man/", "the directory to write the man pages.")
|
||||||
|
// For bash-completion
|
||||||
|
cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
newGen := func() simplecobra.Commander {
|
||||||
|
const gendocFrontmatterTemplate = `---
|
||||||
|
title: "%s"
|
||||||
|
slug: %s
|
||||||
|
url: %s
|
||||||
|
---
|
||||||
|
`
|
||||||
|
|
||||||
|
return &simpleCommand{
|
||||||
|
name: "doc",
|
||||||
|
short: "Generate Markdown documentation for the Hugo CLI.",
|
||||||
|
long: `Generate Markdown documentation for the Hugo CLI.
|
||||||
|
This command is, mostly, used to create up-to-date documentation
|
||||||
|
of Hugo's command-line interface for https://gohugo.io/.
|
||||||
|
|
||||||
|
It creates one Markdown file per command with front matter suitable
|
||||||
|
for rendering in Hugo.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
cd.CobraCommand.VisitParents(func(c *cobra.Command) {
|
||||||
|
// Disable the "Auto generated by spf13/cobra on DATE"
|
||||||
|
// as it creates a lot of diffs.
|
||||||
|
c.DisableAutoGenTag = true
|
||||||
|
})
|
||||||
|
if !strings.HasSuffix(gendocdir, helpers.FilePathSeparator) {
|
||||||
|
gendocdir += helpers.FilePathSeparator
|
||||||
|
}
|
||||||
|
if found, _ := helpers.Exists(gendocdir, hugofs.Os); !found {
|
||||||
|
r.Println("Directory", gendocdir, "does not exist, creating...")
|
||||||
|
if err := hugofs.Os.MkdirAll(gendocdir, 0777); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
prepender := func(filename string) string {
|
||||||
|
name := filepath.Base(filename)
|
||||||
|
base := strings.TrimSuffix(name, path.Ext(name))
|
||||||
|
url := "/commands/" + strings.ToLower(base) + "/"
|
||||||
|
return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url)
|
||||||
|
}
|
||||||
|
|
||||||
|
linkHandler := func(name string) string {
|
||||||
|
base := strings.TrimSuffix(name, path.Ext(name))
|
||||||
|
return "/commands/" + strings.ToLower(base) + "/"
|
||||||
|
}
|
||||||
|
r.Println("Generating Hugo command-line documentation in", gendocdir, "...")
|
||||||
|
doc.GenMarkdownTreeCustom(cd.CobraCommand.Root(), gendocdir, prepender, linkHandler)
|
||||||
|
r.Println("Done.")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.PersistentFlags().StringVar(&gendocdir, "dir", "/tmp/hugodoc/", "the directory to write the doc.")
|
||||||
|
// For bash-completion
|
||||||
|
cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
return &genCommand{
|
||||||
|
commands: []simplecobra.Commander{
|
||||||
|
newChromaStyles(),
|
||||||
|
newGen(),
|
||||||
|
newMan(),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
type genCmd struct {
|
|
||||||
*baseCmd
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func newGenCmd() *genCmd {
|
type genCommand struct {
|
||||||
cc := &genCmd{}
|
rootCmd *rootCommand
|
||||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
|
||||||
Use: "gen",
|
|
||||||
Short: "A collection of several useful generators.",
|
|
||||||
})
|
|
||||||
|
|
||||||
cc.cmd.AddCommand(
|
commands []simplecobra.Commander
|
||||||
newGenDocCmd().getCommand(),
|
}
|
||||||
newGenManCmd().getCommand(),
|
|
||||||
createGenDocsHelper().getCommand(),
|
func (c *genCommand) Commands() []simplecobra.Commander {
|
||||||
createGenChromaStyles().getCommand())
|
return c.commands
|
||||||
|
}
|
||||||
return cc
|
|
||||||
|
func (c *genCommand) Name() string {
|
||||||
|
return "gen"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *genCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *genCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
|
cmd.Short = "A collection of several useful generators."
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *genCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
|
c.rootCmd = cd.Root.Command.(*rootCommand)
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,72 +0,0 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"os"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/styles"
	"github.com/spf13/cobra"
)

var _ cmder = (*genChromaStyles)(nil)

type genChromaStyles struct {
	style          string
	highlightStyle string
	linesStyle     string
	*baseCmd
}

// TODO(bep) highlight
func createGenChromaStyles() *genChromaStyles {
	g := &genChromaStyles{
		baseCmd: newBaseCmd(&cobra.Command{
			Use:   "chromastyles",
			Short: "Generate CSS stylesheet for the Chroma code highlighter",
			Long: `Generate CSS stylesheet for the Chroma code highlighter for a given style. This stylesheet is needed if markup.highlight.noClasses is disabled in config.

See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`,
		}),
	}

	g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
		return g.generate()
	}

	g.cmd.PersistentFlags().StringVar(&g.style, "style", "friendly", "highlighter style (see https://xyproto.github.io/splash/docs/)")
	g.cmd.PersistentFlags().StringVar(&g.highlightStyle, "highlightStyle", "bg:#ffffcc", "style used for highlighting lines (see https://github.com/alecthomas/chroma)")
	g.cmd.PersistentFlags().StringVar(&g.linesStyle, "linesStyle", "", "style used for line numbers (see https://github.com/alecthomas/chroma)")

	return g
}

func (g *genChromaStyles) generate() error {
	builder := styles.Get(g.style).Builder()
	if g.highlightStyle != "" {
		builder.Add(chroma.LineHighlight, g.highlightStyle)
	}
	if g.linesStyle != "" {
		builder.Add(chroma.LineNumbers, g.linesStyle)
	}
	style, err := builder.Build()
	if err != nil {
		return err
	}
	formatter := html.New(html.WithAllClasses(true))
	formatter.WriteCSS(os.Stdout, style)
	return nil
}
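The same generator survives as the chromastyles leaf under the new gen command above; assuming the replacement keeps the flag names shown here, an invocation like `hugo gen chromastyles --style=monokai > syntax.css` still writes the stylesheet to stdout for redirection into the site's assets.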
@ -1,98 +0,0 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"fmt"
	"path"
	"path/filepath"
	"strings"

	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
	jww "github.com/spf13/jwalterweatherman"
)

var _ cmder = (*genDocCmd)(nil)

type genDocCmd struct {
	gendocdir string
	*baseCmd
}

func newGenDocCmd() *genDocCmd {
	const gendocFrontmatterTemplate = `---
title: "%s"
slug: %s
url: %s
---
`

	cc := &genDocCmd{}

	cc.baseCmd = newBaseCmd(&cobra.Command{
		Use:   "doc",
		Short: "Generate Markdown documentation for the Hugo CLI.",
		Long: `Generate Markdown documentation for the Hugo CLI.

This command is, mostly, used to create up-to-date documentation
of Hugo's command-line interface for https://gohugo.io/.

It creates one Markdown file per command with front matter suitable
for rendering in Hugo.`,

		RunE: func(cmd *cobra.Command, args []string) error {
			cmd.VisitParents(func(c *cobra.Command) {
				// Disable the "Auto generated by spf13/cobra on DATE"
				// as it creates a lot of diffs.
				c.DisableAutoGenTag = true
			})

			if !strings.HasSuffix(cc.gendocdir, helpers.FilePathSeparator) {
				cc.gendocdir += helpers.FilePathSeparator
			}
			if found, _ := helpers.Exists(cc.gendocdir, hugofs.Os); !found {
				jww.FEEDBACK.Println("Directory", cc.gendocdir, "does not exist, creating...")
				if err := hugofs.Os.MkdirAll(cc.gendocdir, 0777); err != nil {
					return err
				}
			}
			prepender := func(filename string) string {
				name := filepath.Base(filename)
				base := strings.TrimSuffix(name, path.Ext(name))
				url := "/commands/" + strings.ToLower(base) + "/"
				return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url)
			}

			linkHandler := func(name string) string {
				base := strings.TrimSuffix(name, path.Ext(name))
				return "/commands/" + strings.ToLower(base) + "/"
			}
			jww.FEEDBACK.Println("Generating Hugo command-line documentation in", cc.gendocdir, "...")
			doc.GenMarkdownTreeCustom(cmd.Root(), cc.gendocdir, prepender, linkHandler)
			jww.FEEDBACK.Println("Done.")

			return nil
		},
	})

	cc.cmd.PersistentFlags().StringVar(&cc.gendocdir, "dir", "/tmp/hugodoc/", "the directory to write the doc.")

	// For bash-completion
	cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})

	return cc
}
@ -1,71 +0,0 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"

	"github.com/gohugoio/hugo/docshelper"
	"github.com/spf13/cobra"
)

var _ cmder = (*genDocsHelper)(nil)

type genDocsHelper struct {
	target string
	*baseCmd
}

func createGenDocsHelper() *genDocsHelper {
	g := &genDocsHelper{
		baseCmd: newBaseCmd(&cobra.Command{
			Use:    "docshelper",
			Short:  "Generate some data files for the Hugo docs.",
			Hidden: true,
		}),
	}

	g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
		return g.generate()
	}

	g.cmd.PersistentFlags().StringVarP(&g.target, "dir", "", "docs/data", "data dir")

	return g
}

func (g *genDocsHelper) generate() error {
	fmt.Println("Generate docs data to", g.target)

	targetFile := filepath.Join(g.target, "docs.json")

	f, err := os.Create(targetFile)
	if err != nil {
		return err
	}
	defer f.Close()

	enc := json.NewEncoder(f)
	enc.SetIndent("", "  ")

	if err := enc.Encode(docshelper.GetDocProvider()); err != nil {
		return err
	}

	fmt.Println("Done!")
	return nil
}
@ -1,77 +0,0 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"fmt"
	"strings"

	"github.com/gohugoio/hugo/common/hugo"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
	jww "github.com/spf13/jwalterweatherman"
)

var _ cmder = (*genManCmd)(nil)

type genManCmd struct {
	genmandir string
	*baseCmd
}

func newGenManCmd() *genManCmd {
	cc := &genManCmd{}

	cc.baseCmd = newBaseCmd(&cobra.Command{
		Use:   "man",
		Short: "Generate man pages for the Hugo CLI",
		Long: `This command automatically generates up-to-date man pages of Hugo's
command-line interface. By default, it creates the man page files
in the "man" directory under the current directory.`,

		RunE: func(cmd *cobra.Command, args []string) error {
			header := &doc.GenManHeader{
				Section: "1",
				Manual:  "Hugo Manual",
				Source:  fmt.Sprintf("Hugo %s", hugo.CurrentVersion),
			}
			if !strings.HasSuffix(cc.genmandir, helpers.FilePathSeparator) {
				cc.genmandir += helpers.FilePathSeparator
			}
			if found, _ := helpers.Exists(cc.genmandir, hugofs.Os); !found {
				jww.FEEDBACK.Println("Directory", cc.genmandir, "does not exist, creating...")
				if err := hugofs.Os.MkdirAll(cc.genmandir, 0777); err != nil {
					return err
				}
			}
			cmd.Root().DisableAutoGenTag = true

			jww.FEEDBACK.Println("Generating Hugo man pages in", cc.genmandir, "...")
			doc.GenManTree(cmd.Root(), header, cc.genmandir)

			jww.FEEDBACK.Println("Done.")

			return nil
		},
	})

	cc.cmd.PersistentFlags().StringVar(&cc.genmandir, "dir", "man/", "the directory to write the man pages.")

	// For bash-completion
	cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})

	return cc
}
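The man-page generation this removed command wrapped is essentially one call into cobra's doc package. A small standalone sketch of that call, with a throwaway command tree (the toy commands here are illustrative, not Hugo's real tree):

package main

import (
	"log"
	"os"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	// doc.GenManTree writes one man page per command into the target directory.
	root := &cobra.Command{Use: "hugo", Short: "hugo builds your site"}
	root.AddCommand(&cobra.Command{Use: "server", Short: "start the embedded web server"})

	header := &doc.GenManHeader{Section: "1", Manual: "Hugo Manual"}

	if err := os.MkdirAll("man", 0777); err != nil {
		log.Fatal(err)
	}
	if err := doc.GenManTree(root, header, "man/"); err != nil {
		log.Fatal(err)
	}
}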
@ -1,4 +1,4 @@
-// Copyright 2018 The Hugo Authors. All rights reserved.
+// Copyright 2023 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@ -11,16 +11,22 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-// Package commands defines and implements command-line commands and flags
-// used by Hugo. Commands and flags are implemented using Cobra.
 package commands

 import (
+	"bytes"
+	"errors"
 	"fmt"
-	"regexp"
+	"log"
+	"os"
+	"path/filepath"
+	"strings"

+	"github.com/bep/simplecobra"
 	"github.com/gohugoio/hugo/config"
-	"github.com/spf13/cobra"
+	"github.com/gohugoio/hugo/helpers"
+	"github.com/spf13/afero"
+	"github.com/spf13/pflag"
 )

@ -30,50 +36,101 @@ const (
 	showCursor = ansiEsc + "[?25h"
 )

-type flagsToConfigHandler interface {
-	flagsToConfig(cfg config.Provider)
-}
-
-type cmder interface {
-	flagsToConfigHandler
-	getCommand() *cobra.Command
-}
-
-// commandError is an error used to signal different error situations in command handling.
-type commandError struct {
-	s         string
-	userError bool
-}
-
-func (c commandError) Error() string {
-	return c.s
-}
-
-func (c commandError) isUserError() bool {
-	return c.userError
-}
-
-func newUserError(a ...any) commandError {
-	return commandError{s: fmt.Sprintln(a...), userError: true}
-}
-
-func newSystemError(a ...any) commandError {
-	return commandError{s: fmt.Sprintln(a...), userError: false}
-}
-
-func newSystemErrorF(format string, a ...any) commandError {
-	return commandError{s: fmt.Sprintf(format, a...), userError: false}
-}
-
-// Catch some of the obvious user errors from Cobra.
-// We don't want to show the usage message for every error.
-// The below may be to generic. Time will show.
-var userErrorRegexp = regexp.MustCompile("unknown flag")
-
-func isUserError(err error) bool {
-	if cErr, ok := err.(commandError); ok && cErr.isUserError() {
-		return true
-	}
-
-	return userErrorRegexp.MatchString(err.Error())
-}
+func newUserError(a ...any) *simplecobra.CommandError {
+	return &simplecobra.CommandError{Err: errors.New(fmt.Sprint(a...))}
+}
+
+func setValueFromFlag(flags *pflag.FlagSet, key string, cfg config.Provider, targetKey string, force bool) {
+	key = strings.TrimSpace(key)
+	if (force && flags.Lookup(key) != nil) || flags.Changed(key) {
+		f := flags.Lookup(key)
+		configKey := key
+		if targetKey != "" {
+			configKey = targetKey
+		}
+		// Gotta love this API.
+		switch f.Value.Type() {
+		case "bool":
+			bv, _ := flags.GetBool(key)
+			cfg.Set(configKey, bv)
+		case "string":
+			cfg.Set(configKey, f.Value.String())
+		case "stringSlice":
+			bv, _ := flags.GetStringSlice(key)
+			cfg.Set(configKey, bv)
+		case "int":
+			iv, _ := flags.GetInt(key)
+			cfg.Set(configKey, iv)
+		default:
+			panic(fmt.Sprintf("update switch with %s", f.Value.Type()))
+		}
+
+	}
+}
+
+func flagsToCfg(cd *simplecobra.Commandeer, cfg config.Provider) config.Provider {
+	return flagsToCfgWithAdditionalConfigBase(cd, cfg, "")
+}
+
+func flagsToCfgWithAdditionalConfigBase(cd *simplecobra.Commandeer, cfg config.Provider, additionalConfigBase string) config.Provider {
+	if cfg == nil {
+		cfg = config.New()
+	}
+
+	// Flags with a different name in the config.
+	keyMap := map[string]string{
+		"minify":            "minifyOutput",
+		"destination":       "publishDir",
+		"printI18nWarnings": "logI18nWarnings",
+		"printPathWarnings": "logPathWarnings",
+		"editor":            "newContentEditor",
+	}
+
+	// Flags that we for some reason don't want to expose in the site config.
+	internalKeySet := map[string]bool{
+		"quiet":             true,
+		"verbose":           true,
+		"watch":             true,
+		"disableLiveReload": true,
+		"liveReloadPort":    true,
+		"renderToMemory":    true,
+		"clock":             true,
+	}
+
+	cmd := cd.CobraCommand
+	flags := cmd.Flags()
+
+	flags.VisitAll(func(f *pflag.Flag) {
+		if f.Changed {
+			targetKey := f.Name
+			if internalKeySet[targetKey] {
+				targetKey = "internal." + targetKey
+			} else if mapped, ok := keyMap[targetKey]; ok {
+				targetKey = mapped
+			}
+			setValueFromFlag(flags, f.Name, cfg, targetKey, false)
+			if additionalConfigBase != "" {
+				setValueFromFlag(flags, f.Name, cfg, additionalConfigBase+"."+targetKey, true)
+			}
+		}
+	})
+
+	return cfg
+}
+
+func mkdir(x ...string) {
+	p := filepath.Join(x...)
+	err := os.MkdirAll(p, 0777) // before umask
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+
+func touchFile(fs afero.Fs, filename string) {
+	mkdir(filepath.Dir(filename))
+	err := helpers.WriteToDisk(filename, bytes.NewReader([]byte{}), fs)
+	if err != nil {
+		log.Fatal(err)
+	}
+}
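The two helpers added above are the bridge between cobra flags and the site configuration: only flags the user actually changed are copied across, and a handful are stored under a different key. A self-contained sketch of that idea, using spf13/pflag directly with a plain map standing in for config.Provider (the map and the tiny keyMap are illustrative, not Hugo's real config):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	// Stand-in for config.Provider: a plain map.
	cfg := map[string]any{}

	flags := pflag.NewFlagSet("hugo", pflag.ContinueOnError)
	flags.Bool("minify", false, "minify any supported output format")
	flags.String("destination", "", "filesystem path to write files to")
	_ = flags.Parse([]string{"--minify", "--destination", "public"})

	// Flags whose config key differs from the flag name, as in keyMap above.
	keyMap := map[string]string{"minify": "minifyOutput", "destination": "publishDir"}

	// Visit only walks flags that were actually set on the command line.
	flags.Visit(func(f *pflag.Flag) {
		key := f.Name
		if mapped, ok := keyMap[key]; ok {
			key = mapped
		}
		switch f.Value.Type() {
		case "bool":
			v, _ := flags.GetBool(f.Name)
			cfg[key] = v
		default:
			cfg[key] = f.Value.String()
		}
	})

	fmt.Println(cfg) // map[minifyOutput:true publishDir:public]
}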
@ -1,206 +0,0 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"bytes"
	"fmt"
	"math/rand"
	"path/filepath"
	"strings"
	"testing"

	"github.com/bep/clock"
	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/common/htime"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
	"golang.org/x/tools/txtar"
)

// Issue #5662
func TestHugoWithContentDirOverride(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	files := `
-- config.toml --
baseURL = "https://example.org"
title = "Hugo Commands"
-- mycontent/p1.md --
---
title: "P1"
---
-- layouts/_default/single.html --
Page: {{ .Title }}|

`
	s := newTestHugoCmdBuilder(c, files, []string{"-c", "mycontent"}).Build()
	s.AssertFileContent("public/p1/index.html", `Page: P1|`)

}

// Issue #9794
func TestHugoStaticFilesMultipleStaticAndManyFolders(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	files := `
-- config.toml --
baseURL = "https://example.org"
theme = "mytheme"
-- layouts/index.html --
Home.

`
	const (
		numDirs     = 33
		numFilesMax = 12
	)

	r := rand.New(rand.NewSource(32))

	for i := 0; i < numDirs; i++ {
		for j := 0; j < r.Intn(numFilesMax); j++ {
			if j%3 == 0 {
				files += fmt.Sprintf("-- themes/mytheme/static/d%d/f%d.txt --\nHellot%d-%d\n", i, j, i, j)
				files += fmt.Sprintf("-- themes/mytheme/static/d%d/ft%d.txt --\nHellot%d-%d\n", i, j, i, j)
			}
			files += fmt.Sprintf("-- static/d%d/f%d.txt --\nHello%d-%d\n", i, j, i, j)
		}
	}

	r = rand.New(rand.NewSource(32))

	s := newTestHugoCmdBuilder(c, files, []string{"-c", "mycontent"}).Build()
	for i := 0; i < numDirs; i++ {
		for j := 0; j < r.Intn(numFilesMax); j++ {
			if j%3 == 0 {
				if j%3 == 0 {
					s.AssertFileContent(fmt.Sprintf("public/d%d/ft%d.txt", i, j), fmt.Sprintf("Hellot%d-%d", i, j))
				}
				s.AssertFileContent(fmt.Sprintf("public/d%d/f%d.txt", i, j), fmt.Sprintf("Hello%d-%d", i, j))
			}
		}
	}

}

// Issue #8787
func TestHugoListCommandsWithClockFlag(t *testing.T) {
	t.Cleanup(func() { htime.Clock = clock.System() })

	c := qt.New(t)

	files := `
-- config.toml --
baseURL = "https://example.org"
title = "Hugo Commands"
timeZone = "UTC"
-- content/past.md --
---
title: "Past"
date: 2000-11-06
---
-- content/future.md --
---
title: "Future"
date: 2200-11-06
---
-- layouts/_default/single.html --
Page: {{ .Title }}|

`
	s := newTestHugoCmdBuilder(c, files, []string{"list", "future"})
	s.captureOut = true
	s.Build()
	p := filepath.Join("content", "future.md")
	s.AssertStdout(p + ",2200-11-06T00:00:00Z")

	s = newTestHugoCmdBuilder(c, files, []string{"list", "future", "--clock", "2300-11-06"}).Build()
	s.AssertStdout("")
}

type testHugoCmdBuilder struct {
	*qt.C

	fs    afero.Fs
	dir   string
	files string
	args  []string

	captureOut bool
	out        string
}

func newTestHugoCmdBuilder(c *qt.C, files string, args []string) *testHugoCmdBuilder {
	s := &testHugoCmdBuilder{C: c, files: files, args: args}
	s.dir = s.TempDir()
	s.fs = afero.NewBasePathFs(hugofs.Os, s.dir)

	return s
}

func (s *testHugoCmdBuilder) Build() *testHugoCmdBuilder {
	data := txtar.Parse([]byte(s.files))

	for _, f := range data.Files {
		filename := filepath.Clean(f.Name)
		data := bytes.TrimSuffix(f.Data, []byte("\n"))
		s.Assert(s.fs.MkdirAll(filepath.Dir(filename), 0777), qt.IsNil)
		s.Assert(afero.WriteFile(s.fs, filename, data, 0666), qt.IsNil)
	}

	hugoCmd := newCommandsBuilder().addAll().build()
	cmd := hugoCmd.getCommand()
	args := append(s.args, "-s="+s.dir, "--quiet")
	cmd.SetArgs(args)

	if s.captureOut {
		out, err := captureStdout(func() error {
			_, err := cmd.ExecuteC()
			return err
		})
		s.Assert(err, qt.IsNil)
		s.out = out
	} else {
		_, err := cmd.ExecuteC()
		s.Assert(err, qt.IsNil)
	}

	return s
}

func (s *testHugoCmdBuilder) AssertFileContent(filename string, matches ...string) {
	s.Helper()
	data, err := afero.ReadFile(s.fs, filename)
	s.Assert(err, qt.IsNil)
	content := strings.TrimSpace(string(data))
	for _, m := range matches {
		lines := strings.Split(m, "\n")
		for _, match := range lines {
			match = strings.TrimSpace(match)
			if match == "" || strings.HasPrefix(match, "#") {
				continue
			}
			s.Assert(content, qt.Contains, match, qt.Commentf(m))
		}
	}
}

func (s *testHugoCmdBuilder) AssertStdout(match string) {
	s.Helper()
	content := strings.TrimSpace(s.out)
	s.Assert(content, qt.Contains, strings.TrimSpace(match))
}
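The deleted test file above drove the CLI through txtar archives: a single string describes a whole throwaway site, one "-- name --" header per file. A minimal sketch of that fixture style (the file names are illustrative):

package main

import (
	"fmt"

	"golang.org/x/tools/txtar"
)

func main() {
	// Each "-- name --" header starts a new file; the body runs to the next header.
	files := `
-- config.toml --
baseURL = "https://example.org"
-- content/p1.md --
---
title: "P1"
---
`
	archive := txtar.Parse([]byte(files))
	for _, f := range archive.Files {
		fmt.Printf("%s (%d bytes)\n", f.Name, len(f.Data))
	}
}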
@ -1,4 +1,4 @@
-// Copyright 2015 The Hugo Authors. All rights reserved.
+// Copyright 2023 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

File diff suppressed because it is too large
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -15,252 +15,96 @@ package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
"regexp"
|
||||||
|
|
||||||
|
jww "github.com/spf13/jwalterweatherman"
|
||||||
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
"unicode"
|
"unicode"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/parser/pageparser"
|
"github.com/bep/simplecobra"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/htime"
|
"github.com/gohugoio/hugo/common/htime"
|
||||||
"github.com/gohugoio/hugo/common/hugio"
|
"github.com/gohugoio/hugo/common/hugio"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/maps"
|
"github.com/gohugoio/hugo/common/maps"
|
||||||
"github.com/gohugoio/hugo/helpers"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
"github.com/gohugoio/hugo/hugofs"
|
||||||
"github.com/gohugoio/hugo/parser"
|
"github.com/gohugoio/hugo/parser"
|
||||||
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||||
|
"github.com/gohugoio/hugo/parser/pageparser"
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*importCmd)(nil)
|
func newImportCommand() *importCommand {
|
||||||
|
var c *importCommand
|
||||||
type importCmd struct {
|
c = &importCommand{
|
||||||
*baseCmd
|
commands: []simplecobra.Commander{
|
||||||
}
|
&simpleCommand{
|
||||||
|
name: "jekyll",
|
||||||
func newImportCmd() *importCmd {
|
short: "hugo import from Jekyll",
|
||||||
cc := &importCmd{}
|
long: `hugo import from Jekyll.
|
||||||
|
|
||||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
|
||||||
Use: "import",
|
|
||||||
Short: "Import your site from others.",
|
|
||||||
Long: `Import your site from other web site generators like Jekyll.
|
|
||||||
|
|
||||||
Import requires a subcommand, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
|
||||||
RunE: nil,
|
|
||||||
})
|
|
||||||
|
|
||||||
importJekyllCmd := &cobra.Command{
|
|
||||||
Use: "jekyll",
|
|
||||||
Short: "hugo import from Jekyll",
|
|
||||||
Long: `hugo import from Jekyll.
|
|
||||||
|
|
||||||
Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
||||||
RunE: cc.importFromJekyll,
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
}
|
if len(args) < 2 {
|
||||||
|
return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
|
||||||
importJekyllCmd.Flags().Bool("force", false, "allow import into non-empty target directory")
|
|
||||||
|
|
||||||
cc.cmd.AddCommand(importJekyllCmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
|
|
||||||
if len(args) < 2 {
|
|
||||||
return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
|
|
||||||
}
|
|
||||||
|
|
||||||
jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
|
|
||||||
if err != nil {
|
|
||||||
return newUserError("path error:", args[0])
|
|
||||||
}
|
|
||||||
|
|
||||||
targetDir, err := filepath.Abs(filepath.Clean(args[1]))
|
|
||||||
if err != nil {
|
|
||||||
return newUserError("path error:", args[1])
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.INFO.Println("Import Jekyll from:", jekyllRoot, "to:", targetDir)
|
|
||||||
|
|
||||||
if strings.HasPrefix(filepath.Dir(targetDir), jekyllRoot) {
|
|
||||||
return newUserError("abort: target path should not be inside the Jekyll root")
|
|
||||||
}
|
|
||||||
|
|
||||||
forceImport, _ := cmd.Flags().GetBool("force")
|
|
||||||
|
|
||||||
fs := afero.NewOsFs()
|
|
||||||
jekyllPostDirs, hasAnyPost := i.getJekyllDirInfo(fs, jekyllRoot)
|
|
||||||
if !hasAnyPost {
|
|
||||||
return errors.New("abort: jekyll root contains neither posts nor drafts")
|
|
||||||
}
|
|
||||||
|
|
||||||
err = i.createSiteFromJekyll(jekyllRoot, targetDir, jekyllPostDirs, forceImport)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
return newUserError(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.FEEDBACK.Println("Importing...")
|
|
||||||
|
|
||||||
fileCount := 0
|
|
||||||
callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if fi.IsDir() {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
relPath, err := filepath.Rel(jekyllRoot, path)
|
|
||||||
if err != nil {
|
|
||||||
return newUserError("get rel path error:", path)
|
|
||||||
}
|
|
||||||
|
|
||||||
relPath = filepath.ToSlash(relPath)
|
|
||||||
draft := false
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case strings.Contains(relPath, "_posts/"):
|
|
||||||
relPath = filepath.Join("content/post", strings.Replace(relPath, "_posts/", "", -1))
|
|
||||||
case strings.Contains(relPath, "_drafts/"):
|
|
||||||
relPath = filepath.Join("content/draft", strings.Replace(relPath, "_drafts/", "", -1))
|
|
||||||
draft = true
|
|
||||||
default:
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
fileCount++
|
|
||||||
return convertJekyllPost(path, relPath, targetDir, draft)
|
|
||||||
}
|
|
||||||
|
|
||||||
for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
|
|
||||||
if hasAnyPostInDir {
|
|
||||||
if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.FEEDBACK.Println("Congratulations!", fileCount, "post(s) imported!")
|
|
||||||
jww.FEEDBACK.Println("Now, start Hugo by yourself:\n" +
|
|
||||||
"$ git clone https://github.com/spf13/herring-cove.git " + args[1] + "/themes/herring-cove")
|
|
||||||
jww.FEEDBACK.Println("$ cd " + args[1] + "\n$ hugo server --theme=herring-cove")
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *importCmd) getJekyllDirInfo(fs afero.Fs, jekyllRoot string) (map[string]bool, bool) {
|
|
||||||
postDirs := make(map[string]bool)
|
|
||||||
hasAnyPost := false
|
|
||||||
if entries, err := os.ReadDir(jekyllRoot); err == nil {
|
|
||||||
for _, entry := range entries {
|
|
||||||
if entry.IsDir() {
|
|
||||||
subDir := filepath.Join(jekyllRoot, entry.Name())
|
|
||||||
if isPostDir, hasAnyPostInDir := i.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
|
||||||
postDirs[entry.Name()] = hasAnyPostInDir
|
|
||||||
if hasAnyPostInDir {
|
|
||||||
hasAnyPost = true
|
|
||||||
}
|
}
|
||||||
}
|
return c.importFromJekyll(args)
|
||||||
}
|
},
|
||||||
}
|
withc: func(cmd *cobra.Command) {
|
||||||
}
|
cmd.Flags().BoolVar(&c.force, "force", false, "allow import into non-empty target directory")
|
||||||
return postDirs, hasAnyPost
|
},
|
||||||
}
|
},
|
||||||
|
},
|
||||||
func (i *importCmd) retrieveJekyllPostDir(fs afero.Fs, dir string) (bool, bool) {
|
|
||||||
if strings.HasSuffix(dir, "_posts") || strings.HasSuffix(dir, "_drafts") {
|
|
||||||
isEmpty, _ := helpers.IsEmpty(dir, fs)
|
|
||||||
return true, !isEmpty
|
|
||||||
}
|
|
||||||
|
|
||||||
if entries, err := os.ReadDir(dir); err == nil {
|
|
||||||
for _, entry := range entries {
|
|
||||||
if entry.IsDir() {
|
|
||||||
subDir := filepath.Join(dir, entry.Name())
|
|
||||||
if isPostDir, hasAnyPost := i.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
|
||||||
return isPostDir, hasAnyPost
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return false, true
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *importCmd) createSiteFromJekyll(jekyllRoot, targetDir string, jekyllPostDirs map[string]bool, force bool) error {
|
|
||||||
fs := &afero.OsFs{}
|
|
||||||
if exists, _ := helpers.Exists(targetDir, fs); exists {
|
|
||||||
if isDir, _ := helpers.IsDir(targetDir, fs); !isDir {
|
|
||||||
return errors.New("target path \"" + targetDir + "\" exists but is not a directory")
|
|
||||||
}
|
|
||||||
|
|
||||||
isEmpty, _ := helpers.IsEmpty(targetDir, fs)
|
|
||||||
|
|
||||||
if !isEmpty && !force {
|
|
||||||
return errors.New("target path \"" + targetDir + "\" exists and is not empty")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
jekyllConfig := i.loadJekyllConfig(fs, jekyllRoot)
|
|
||||||
|
|
||||||
mkdir(targetDir, "layouts")
|
|
||||||
mkdir(targetDir, "content")
|
|
||||||
mkdir(targetDir, "archetypes")
|
|
||||||
mkdir(targetDir, "static")
|
|
||||||
mkdir(targetDir, "data")
|
|
||||||
mkdir(targetDir, "themes")
|
|
||||||
|
|
||||||
i.createConfigFromJekyll(fs, targetDir, "yaml", jekyllConfig)
|
|
||||||
|
|
||||||
i.copyJekyllFilesAndFolders(jekyllRoot, filepath.Join(targetDir, "static"), jekyllPostDirs)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (i *importCmd) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]any {
|
|
||||||
path := filepath.Join(jekyllRoot, "_config.yml")
|
|
||||||
|
|
||||||
exists, err := helpers.Exists(path, fs)
|
|
||||||
|
|
||||||
if err != nil || !exists {
|
|
||||||
jww.WARN.Println("_config.yaml not found: Is the specified Jekyll root correct?")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
f, err := fs.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
b, err := io.ReadAll(f)
|
|
||||||
if err != nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
c, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
|
|
||||||
if err != nil {
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return c
|
return c
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *importCmd) createConfigFromJekyll(fs afero.Fs, inpath string, kind metadecoders.Format, jekyllConfig map[string]any) (err error) {
|
type importCommand struct {
|
||||||
|
r *rootCommand
|
||||||
|
|
||||||
|
force bool
|
||||||
|
|
||||||
|
commands []simplecobra.Commander
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) Commands() []simplecobra.Commander {
|
||||||
|
return c.commands
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) Name() string {
|
||||||
|
return "import"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
|
cmd.Short = "Import your site from others."
|
||||||
|
cmd.Long = `Import your site from other web site generators like Jekyll.
|
||||||
|
|
||||||
|
Import requires a subcommand, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`."
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
|
c.r = cd.Root.Command.(*rootCommand)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (i *importCommand) createConfigFromJekyll(fs afero.Fs, inpath string, kind metadecoders.Format, jekyllConfig map[string]any) (err error) {
|
||||||
title := "My New Hugo Site"
|
title := "My New Hugo Site"
|
||||||
baseURL := "http://example.org/"
|
baseURL := "http://example.org/"
|
||||||
|
|
||||||
|
@ -293,10 +137,209 @@ func (i *importCmd) createConfigFromJekyll(fs afero.Fs, inpath string, kind meta
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
return helpers.WriteToDisk(filepath.Join(inpath, "config."+string(kind)), &buf, fs)
|
return helpers.WriteToDisk(filepath.Join(inpath, "hugo."+string(kind)), &buf, fs)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
|
func (c *importCommand) getJekyllDirInfo(fs afero.Fs, jekyllRoot string) (map[string]bool, bool) {
|
||||||
|
postDirs := make(map[string]bool)
|
||||||
|
hasAnyPost := false
|
||||||
|
if entries, err := os.ReadDir(jekyllRoot); err == nil {
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
subDir := filepath.Join(jekyllRoot, entry.Name())
|
||||||
|
if isPostDir, hasAnyPostInDir := c.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||||
|
postDirs[entry.Name()] = hasAnyPostInDir
|
||||||
|
if hasAnyPostInDir {
|
||||||
|
hasAnyPost = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return postDirs, hasAnyPost
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) createSiteFromJekyll(jekyllRoot, targetDir string, jekyllPostDirs map[string]bool) error {
|
||||||
|
fs := &afero.OsFs{}
|
||||||
|
if exists, _ := helpers.Exists(targetDir, fs); exists {
|
||||||
|
if isDir, _ := helpers.IsDir(targetDir, fs); !isDir {
|
||||||
|
return errors.New("target path \"" + targetDir + "\" exists but is not a directory")
|
||||||
|
}
|
||||||
|
|
||||||
|
isEmpty, _ := helpers.IsEmpty(targetDir, fs)
|
||||||
|
|
||||||
|
if !isEmpty && !c.force {
|
||||||
|
return errors.New("target path \"" + targetDir + "\" exists and is not empty")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
jekyllConfig := c.loadJekyllConfig(fs, jekyllRoot)
|
||||||
|
|
||||||
|
mkdir(targetDir, "layouts")
|
||||||
|
mkdir(targetDir, "content")
|
||||||
|
mkdir(targetDir, "archetypes")
|
||||||
|
mkdir(targetDir, "static")
|
||||||
|
mkdir(targetDir, "data")
|
||||||
|
mkdir(targetDir, "themes")
|
||||||
|
|
||||||
|
c.createConfigFromJekyll(fs, targetDir, "yaml", jekyllConfig)
|
||||||
|
|
||||||
|
c.copyJekyllFilesAndFolders(jekyllRoot, filepath.Join(targetDir, "static"), jekyllPostDirs)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) convertJekyllContent(m any, content string) (string, error) {
|
||||||
|
metadata, _ := maps.ToStringMapE(m)
|
||||||
|
|
||||||
|
lines := strings.Split(content, "\n")
|
||||||
|
var resultLines []string
|
||||||
|
for _, line := range lines {
|
||||||
|
resultLines = append(resultLines, strings.Trim(line, "\r\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
content = strings.Join(resultLines, "\n")
|
||||||
|
|
||||||
|
excerptSep := "<!--more-->"
|
||||||
|
if value, ok := metadata["excerpt_separator"]; ok {
|
||||||
|
if str, strOk := value.(string); strOk {
|
||||||
|
content = strings.Replace(content, strings.TrimSpace(str), excerptSep, -1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
replaceList := []struct {
|
||||||
|
re *regexp.Regexp
|
||||||
|
replace string
|
||||||
|
}{
|
||||||
|
{regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
|
||||||
|
{regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
|
||||||
|
{regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, replace := range replaceList {
|
||||||
|
content = replace.re.ReplaceAllString(content, replace.replace)
|
||||||
|
}
|
||||||
|
|
||||||
|
replaceListFunc := []struct {
|
||||||
|
re *regexp.Regexp
|
||||||
|
replace func(string) string
|
||||||
|
}{
|
||||||
|
// Octopress image tag: http://octopress.org/docs/plugins/image-tag/
|
||||||
|
{regexp.MustCompile(`{%\s+img\s*(.*?)\s*%}`), c.replaceImageTag},
|
||||||
|
{regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`), c.replaceHighlightTag},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, replace := range replaceListFunc {
|
||||||
|
content = replace.re.ReplaceAllStringFunc(content, replace.replace)
|
||||||
|
}
|
||||||
|
|
||||||
|
var buf bytes.Buffer
|
||||||
|
if len(metadata) != 0 {
|
||||||
|
err := parser.InterfaceToFrontMatter(m, metadecoders.YAML, &buf)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
buf.WriteString(content)
|
||||||
|
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) convertJekyllMetaData(m any, postName string, postDate time.Time, draft bool) (any, error) {
|
||||||
|
metadata, err := maps.ToStringMapE(m)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if draft {
|
||||||
|
metadata["draft"] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, value := range metadata {
|
||||||
|
lowerKey := strings.ToLower(key)
|
||||||
|
|
||||||
|
switch lowerKey {
|
||||||
|
case "layout":
|
||||||
|
delete(metadata, key)
|
||||||
|
case "permalink":
|
||||||
|
if str, ok := value.(string); ok {
|
||||||
|
metadata["url"] = str
|
||||||
|
}
|
||||||
|
delete(metadata, key)
|
||||||
|
case "category":
|
||||||
|
if str, ok := value.(string); ok {
|
||||||
|
metadata["categories"] = []string{str}
|
||||||
|
}
|
||||||
|
delete(metadata, key)
|
||||||
|
case "excerpt_separator":
|
||||||
|
if key != lowerKey {
|
||||||
|
delete(metadata, key)
|
||||||
|
metadata[lowerKey] = value
|
||||||
|
}
|
||||||
|
case "date":
|
||||||
|
if str, ok := value.(string); ok {
|
||||||
|
re := regexp.MustCompile(`(\d+):(\d+):(\d+)`)
|
||||||
|
r := re.FindAllStringSubmatch(str, -1)
|
||||||
|
if len(r) > 0 {
|
||||||
|
hour, _ := strconv.Atoi(r[0][1])
|
||||||
|
minute, _ := strconv.Atoi(r[0][2])
|
||||||
|
second, _ := strconv.Atoi(r[0][3])
|
||||||
|
postDate = time.Date(postDate.Year(), postDate.Month(), postDate.Day(), hour, minute, second, 0, time.UTC)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
delete(metadata, key)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
metadata["date"] = postDate.Format(time.RFC3339)
|
||||||
|
|
||||||
|
return metadata, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) convertJekyllPost(path, relPath, targetDir string, draft bool) error {
|
||||||
|
jww.TRACE.Println("Converting", path)
|
||||||
|
|
||||||
|
filename := filepath.Base(path)
|
||||||
|
postDate, postName, err := c.parseJekyllFilename(filename)
|
||||||
|
if err != nil {
|
||||||
|
c.r.Printf("Failed to parse filename '%s': %s. Skipping.", filename, err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
jww.TRACE.Println(filename, postDate, postName)
|
||||||
|
|
||||||
|
targetFile := filepath.Join(targetDir, relPath)
|
||||||
|
targetParentDir := filepath.Dir(targetFile)
|
||||||
|
os.MkdirAll(targetParentDir, 0777)
|
||||||
|
|
||||||
|
contentBytes, err := os.ReadFile(path)
|
||||||
|
if err != nil {
|
||||||
|
c.r.logger.Errorln("Read file error:", path)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
pf, err := pageparser.ParseFrontMatterAndContent(bytes.NewReader(contentBytes))
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to parse file %q: %s", filename, err)
|
||||||
|
}
|
||||||
|
newmetadata, err := c.convertJekyllMetaData(pf.FrontMatter, postName, postDate, draft)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to convert metadata for file %q: %s", filename, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
content, err := c.convertJekyllContent(newmetadata, string(pf.Content))
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to convert content for file %q: %s", filename, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fs := hugofs.Os
|
||||||
|
if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
|
||||||
|
return fmt.Errorf("failed to save file %q: %s", filename, err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
|
||||||
fs := hugofs.Os
|
fs := hugofs.Os
|
||||||
|
|
||||||
fi, err := fs.Stat(jekyllRoot)
|
fi, err := fs.Stat(jekyllRoot)
|
||||||
|
@ -353,7 +396,116 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func parseJekyllFilename(filename string) (time.Time, string, error) {
|
func (c *importCommand) importFromJekyll(args []string) error {
|
||||||
|
|
||||||
|
jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
|
||||||
|
if err != nil {
|
||||||
|
return newUserError("path error:", args[0])
|
||||||
|
}
|
||||||
|
|
||||||
|
targetDir, err := filepath.Abs(filepath.Clean(args[1]))
|
||||||
|
if err != nil {
|
||||||
|
return newUserError("path error:", args[1])
|
||||||
|
}
|
||||||
|
|
||||||
|
c.r.Println("Import Jekyll from:", jekyllRoot, "to:", targetDir)
|
||||||
|
|
||||||
|
if strings.HasPrefix(filepath.Dir(targetDir), jekyllRoot) {
|
||||||
|
return newUserError("abort: target path should not be inside the Jekyll root")
|
||||||
|
}
|
||||||
|
|
||||||
|
fs := afero.NewOsFs()
|
||||||
|
jekyllPostDirs, hasAnyPost := c.getJekyllDirInfo(fs, jekyllRoot)
|
||||||
|
if !hasAnyPost {
|
||||||
|
return errors.New("abort: jekyll root contains neither posts nor drafts")
|
||||||
|
}
|
||||||
|
|
||||||
|
err = c.createSiteFromJekyll(jekyllRoot, targetDir, jekyllPostDirs)
|
||||||
|
if err != nil {
|
||||||
|
return newUserError(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.r.Println("Importing...")
|
||||||
|
|
||||||
|
fileCount := 0
|
||||||
|
callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if fi.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
relPath, err := filepath.Rel(jekyllRoot, path)
|
||||||
|
if err != nil {
|
||||||
|
return newUserError("get rel path error:", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
relPath = filepath.ToSlash(relPath)
|
||||||
|
draft := false
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case strings.Contains(relPath, "_posts/"):
|
||||||
|
relPath = filepath.Join("content/post", strings.Replace(relPath, "_posts/", "", -1))
|
||||||
|
case strings.Contains(relPath, "_drafts/"):
|
||||||
|
relPath = filepath.Join("content/draft", strings.Replace(relPath, "_drafts/", "", -1))
|
||||||
|
draft = true
|
||||||
|
default:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
fileCount++
|
||||||
|
return c.convertJekyllPost(path, relPath, targetDir, draft)
|
||||||
|
}
|
||||||
|
|
||||||
|
for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
|
||||||
|
if hasAnyPostInDir {
|
||||||
|
if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.r.Println("Congratulations!", fileCount, "post(s) imported!")
|
||||||
|
c.r.Println("Now, start Hugo by yourself:\n" +
|
||||||
|
"$ git clone https://github.com/spf13/herring-cove.git " + args[1] + "/themes/herring-cove")
|
||||||
|
c.r.Println("$ cd " + args[1] + "\n$ hugo server --theme=herring-cove")
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]any {
|
||||||
|
path := filepath.Join(jekyllRoot, "_config.yml")
|
||||||
|
|
||||||
|
exists, err := helpers.Exists(path, fs)
|
||||||
|
|
||||||
|
if err != nil || !exists {
|
||||||
|
c.r.Println("_config.yaml not found: Is the specified Jekyll root correct?")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
f, err := fs.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
b, err := io.ReadAll(f)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
m, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) parseJekyllFilename(filename string) (time.Time, string, error) {
|
||||||
re := regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)
|
re := regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)
|
||||||
r := re.FindAllStringSubmatch(filename, -1)
|
r := re.FindAllStringSubmatch(filename, -1)
|
||||||
if len(r) == 0 {
|
if len(r) == 0 {
|
||||||
|
@ -370,163 +522,7 @@ func parseJekyllFilename(filename string) (time.Time, string, error) {
|
||||||
return postDate, postName, nil
|
return postDate, postName, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func convertJekyllPost(path, relPath, targetDir string, draft bool) error {
|
func (c *importCommand) replaceHighlightTag(match string) string {
|
||||||
jww.TRACE.Println("Converting", path)
|
|
||||||
|
|
||||||
filename := filepath.Base(path)
|
|
||||||
postDate, postName, err := parseJekyllFilename(filename)
|
|
||||||
if err != nil {
|
|
||||||
jww.WARN.Printf("Failed to parse filename '%s': %s. Skipping.", filename, err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.TRACE.Println(filename, postDate, postName)
|
|
||||||
|
|
||||||
targetFile := filepath.Join(targetDir, relPath)
|
|
||||||
targetParentDir := filepath.Dir(targetFile)
|
|
||||||
os.MkdirAll(targetParentDir, 0777)
|
|
||||||
|
|
||||||
contentBytes, err := os.ReadFile(path)
|
|
||||||
if err != nil {
|
|
||||||
jww.ERROR.Println("Read file error:", path)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
pf, err := pageparser.ParseFrontMatterAndContent(bytes.NewReader(contentBytes))
|
|
||||||
if err != nil {
|
|
||||||
jww.ERROR.Println("Parse file error:", path)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
newmetadata, err := convertJekyllMetaData(pf.FrontMatter, postName, postDate, draft)
|
|
||||||
if err != nil {
|
|
||||||
jww.ERROR.Println("Convert metadata error:", path)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
content, err := convertJekyllContent(newmetadata, string(pf.Content))
|
|
||||||
if err != nil {
|
|
||||||
jww.ERROR.Println("Converting Jekyll error:", path)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
fs := hugofs.Os
|
|
||||||
if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
|
|
||||||
return fmt.Errorf("failed to save file %q: %s", filename, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func convertJekyllMetaData(m any, postName string, postDate time.Time, draft bool) (any, error) {
|
|
||||||
metadata, err := maps.ToStringMapE(m)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if draft {
|
|
||||||
metadata["draft"] = true
|
|
||||||
}
|
|
||||||
|
|
||||||
for key, value := range metadata {
|
|
||||||
lowerKey := strings.ToLower(key)
|
|
||||||
|
|
||||||
switch lowerKey {
|
|
||||||
case "layout":
|
|
||||||
delete(metadata, key)
|
|
||||||
case "permalink":
|
|
||||||
if str, ok := value.(string); ok {
|
|
||||||
metadata["url"] = str
|
|
||||||
}
|
|
||||||
delete(metadata, key)
|
|
||||||
case "category":
|
|
||||||
if str, ok := value.(string); ok {
|
|
||||||
metadata["categories"] = []string{str}
|
|
||||||
}
|
|
||||||
delete(metadata, key)
|
|
||||||
case "excerpt_separator":
|
|
||||||
if key != lowerKey {
|
|
||||||
delete(metadata, key)
|
|
||||||
metadata[lowerKey] = value
|
|
||||||
}
|
|
||||||
case "date":
|
|
||||||
if str, ok := value.(string); ok {
|
|
||||||
re := regexp.MustCompile(`(\d+):(\d+):(\d+)`)
|
|
||||||
r := re.FindAllStringSubmatch(str, -1)
|
|
||||||
if len(r) > 0 {
|
|
||||||
hour, _ := strconv.Atoi(r[0][1])
|
|
||||||
minute, _ := strconv.Atoi(r[0][2])
|
|
||||||
second, _ := strconv.Atoi(r[0][3])
|
|
||||||
postDate = time.Date(postDate.Year(), postDate.Month(), postDate.Day(), hour, minute, second, 0, time.UTC)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
delete(metadata, key)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
metadata["date"] = postDate.Format(time.RFC3339)
|
|
||||||
|
|
||||||
return metadata, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func convertJekyllContent(m any, content string) (string, error) {
|
|
||||||
metadata, _ := maps.ToStringMapE(m)
|
|
||||||
|
|
||||||
lines := strings.Split(content, "\n")
|
|
||||||
var resultLines []string
|
|
||||||
for _, line := range lines {
|
|
||||||
resultLines = append(resultLines, strings.Trim(line, "\r\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
content = strings.Join(resultLines, "\n")
|
|
||||||
|
|
||||||
excerptSep := "<!--more-->"
|
|
||||||
if value, ok := metadata["excerpt_separator"]; ok {
|
|
||||||
if str, strOk := value.(string); strOk {
|
|
||||||
content = strings.Replace(content, strings.TrimSpace(str), excerptSep, -1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
replaceList := []struct {
|
|
||||||
re *regexp.Regexp
|
|
||||||
replace string
|
|
||||||
}{
|
|
||||||
{regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
|
|
||||||
{regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
|
|
||||||
{regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, replace := range replaceList {
|
|
||||||
content = replace.re.ReplaceAllString(content, replace.replace)
|
|
||||||
}
|
|
||||||
|
|
||||||
replaceListFunc := []struct {
|
|
||||||
re *regexp.Regexp
|
|
||||||
replace func(string) string
|
|
||||||
}{
|
|
||||||
// Octopress image tag: http://octopress.org/docs/plugins/image-tag/
|
|
||||||
{regexp.MustCompile(`{%\s+img\s*(.*?)\s*%}`), replaceImageTag},
|
|
||||||
{regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`), replaceHighlightTag},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, replace := range replaceListFunc {
|
|
||||||
content = replace.re.ReplaceAllStringFunc(content, replace.replace)
|
|
||||||
}
|
|
||||||
|
|
||||||
var buf bytes.Buffer
|
|
||||||
if len(metadata) != 0 {
|
|
||||||
err := parser.InterfaceToFrontMatter(m, metadecoders.YAML, &buf)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
buf.WriteString(content)
|
|
||||||
|
|
||||||
return buf.String(), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func replaceHighlightTag(match string) string {
|
|
||||||
r := regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`)
|
r := regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`)
|
||||||
parts := r.FindStringSubmatch(match)
|
parts := r.FindStringSubmatch(match)
|
||||||
lastQuote := rune(0)
|
lastQuote := rune(0)
|
||||||
|
@ -570,35 +566,55 @@ func replaceHighlightTag(match string) string {
|
||||||
return result.String()
|
return result.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
func replaceImageTag(match string) string {
|
func (c *importCommand) replaceImageTag(match string) string {
|
||||||
r := regexp.MustCompile(`{%\s+img\s*(\p{L}*)\s+([\S]*/[\S]+)\s+(\d*)\s*(\d*)\s*(.*?)\s*%}`)
|
r := regexp.MustCompile(`{%\s+img\s*(\p{L}*)\s+([\S]*/[\S]+)\s+(\d*)\s*(\d*)\s*(.*?)\s*%}`)
|
||||||
result := bytes.NewBufferString("{{< figure ")
|
result := bytes.NewBufferString("{{< figure ")
|
||||||
parts := r.FindStringSubmatch(match)
|
parts := r.FindStringSubmatch(match)
|
||||||
// Index 0 is the entire string, ignore
|
// Index 0 is the entire string, ignore
|
||||||
replaceOptionalPart(result, "class", parts[1])
|
c.replaceOptionalPart(result, "class", parts[1])
|
||||||
replaceOptionalPart(result, "src", parts[2])
|
c.replaceOptionalPart(result, "src", parts[2])
|
||||||
replaceOptionalPart(result, "width", parts[3])
|
c.replaceOptionalPart(result, "width", parts[3])
|
||||||
replaceOptionalPart(result, "height", parts[4])
|
c.replaceOptionalPart(result, "height", parts[4])
|
||||||
// title + alt
|
// title + alt
|
||||||
part := parts[5]
|
part := parts[5]
|
||||||
if len(part) > 0 {
|
if len(part) > 0 {
|
||||||
splits := strings.Split(part, "'")
|
splits := strings.Split(part, "'")
|
||||||
lenSplits := len(splits)
|
lenSplits := len(splits)
|
||||||
if lenSplits == 1 {
|
if lenSplits == 1 {
|
||||||
replaceOptionalPart(result, "title", splits[0])
|
c.replaceOptionalPart(result, "title", splits[0])
|
||||||
} else if lenSplits == 3 {
|
} else if lenSplits == 3 {
|
||||||
replaceOptionalPart(result, "title", splits[1])
|
c.replaceOptionalPart(result, "title", splits[1])
|
||||||
} else if lenSplits == 5 {
|
} else if lenSplits == 5 {
|
||||||
replaceOptionalPart(result, "title", splits[1])
|
c.replaceOptionalPart(result, "title", splits[1])
|
||||||
replaceOptionalPart(result, "alt", splits[3])
|
c.replaceOptionalPart(result, "alt", splits[3])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
result.WriteString(">}}")
|
result.WriteString(">}}")
|
||||||
return result.String()
|
return result.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
func replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
|
func (c *importCommand) replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
|
||||||
if len(part) > 0 {
|
if len(part) > 0 {
|
||||||
buffer.WriteString(partName + "=\"" + part + "\" ")
|
buffer.WriteString(partName + "=\"" + part + "\" ")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *importCommand) retrieveJekyllPostDir(fs afero.Fs, dir string) (bool, bool) {
|
||||||
|
if strings.HasSuffix(dir, "_posts") || strings.HasSuffix(dir, "_drafts") {
|
||||||
|
isEmpty, _ := helpers.IsEmpty(dir, fs)
|
||||||
|
return true, !isEmpty
|
||||||
|
}
|
||||||
|
|
||||||
|
if entries, err := os.ReadDir(dir); err == nil {
|
||||||
|
for _, entry := range entries {
|
||||||
|
if entry.IsDir() {
|
||||||
|
subDir := filepath.Join(dir, entry.Name())
|
||||||
|
if isPostDir, hasAnyPost := c.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||||
|
return isPostDir, hasAnyPost
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return false, true
|
||||||
|
}
|
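// A standalone sketch (not part of this commit) of how the Jekyll/Octopress
// replacements above behave on a small made-up input. The regular expressions
// are the ones from the replaceList table in convertJekyllContent; only the
// sample string is invented for illustration.
package main

import (
	"fmt"
	"regexp"
)

func main() {
	input := "Intro\n<!-- more -->\n{% raw %}{{ site.title }}{% endraw %}\n{% endhighlight %}"

	replacements := []struct {
		re      *regexp.Regexp
		replace string
	}{
		{regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
		{regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
		{regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
	}

	out := input
	for _, r := range replacements {
		out = r.re.ReplaceAllString(out, r.replace)
	}

	// The excerpt separator is normalized, the raw block is unwrapped and the
	// closing highlight tag becomes a Hugo shortcode; the opening
	// {% highlight ... %} and {% img ... %} tags are handled by the
	// ReplaceAllStringFunc pass above.
	fmt.Println(out)
}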
|
@@ -1,177 +0,0 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestParseJekyllFilename(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
filenameArray := []string{
|
|
||||||
"2015-01-02-test.md",
|
|
||||||
"2012-03-15-中文.markup",
|
|
||||||
}
|
|
||||||
|
|
||||||
expectResult := []struct {
|
|
||||||
postDate time.Time
|
|
||||||
postName string
|
|
||||||
}{
|
|
||||||
{time.Date(2015, time.January, 2, 0, 0, 0, 0, time.UTC), "test"},
|
|
||||||
{time.Date(2012, time.March, 15, 0, 0, 0, 0, time.UTC), "中文"},
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, filename := range filenameArray {
|
|
||||||
postDate, postName, err := parseJekyllFilename(filename)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(expectResult[i].postDate.Format("2006-01-02"), qt.Equals, postDate.Format("2006-01-02"))
|
|
||||||
c.Assert(expectResult[i].postName, qt.Equals, postName)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestConvertJekyllMetadata(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
testDataList := []struct {
|
|
||||||
metadata any
|
|
||||||
postName string
|
|
||||||
postDate time.Time
|
|
||||||
draft bool
|
|
||||||
expect string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z"}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z","draft":true}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"Permalink": "/permalink.html", "layout": "post"},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"permalink": "/permalink.html"},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"category": nil, "permalink": 123},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z"}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"Excerpt_Separator": "sep"},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
|
|
||||||
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
|
|
||||||
`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, data := range testDataList {
|
|
||||||
result, err := convertJekyllMetaData(data.metadata, data.postName, data.postDate, data.draft)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
jsonResult, err := json.Marshal(result)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(string(jsonResult), qt.Equals, data.expect)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestConvertJekyllContent(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
testDataList := []struct {
|
|
||||||
metadata any
|
|
||||||
content string
|
|
||||||
expect string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"excerpt_separator": "<!--sep-->"},
|
|
||||||
"Test content\n<!--sep-->\npart2 content",
|
|
||||||
"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content",
|
|
||||||
},
|
|
||||||
{map[any]any{}, "{% raw %}text{% endraw %}", "text"},
|
|
||||||
{map[any]any{}, "{%raw%} text2 {%endraw %}", "text2"},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% highlight go %}\nvar s int\n{% endhighlight %}",
|
|
||||||
"{{< highlight go >}}\nvar s int\n{{< / highlight >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% highlight go linenos hl_lines=\"1 2\" %}\nvar s string\nvar i int\n{% endhighlight %}",
|
|
||||||
"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}",
|
|
||||||
},
|
|
||||||
|
|
||||||
// Octopress image tag
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img http://placekitten.com/890/280 %}",
|
|
||||||
"{{< figure src=\"http://placekitten.com/890/280\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img left http://placekitten.com/320/250 Place Kitten #2 %}",
|
|
||||||
"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #3' %}",
|
|
||||||
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
|
|
||||||
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
|
|
||||||
"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{},
|
|
||||||
"{% img right /placekitten/300/500 'Place Kitten #4' 'An image of a very cute kitten' %}",
|
|
||||||
"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
map[any]any{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
|
|
||||||
"somecontent",
|
|
||||||
"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, data := range testDataList {
|
|
||||||
result, err := convertJekyllContent(data.metadata, data.content)
|
|
||||||
c.Assert(result, qt.Equals, data.expect)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,84 +0,0 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"syscall"
|
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ cmder = (*limitCmd)(nil)
|
|
||||||
|
|
||||||
type limitCmd struct {
|
|
||||||
*baseCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func newLimitCmd() *limitCmd {
|
|
||||||
ccmd := &cobra.Command{
|
|
||||||
Use: "ulimit",
|
|
||||||
Short: "Check system ulimit settings",
|
|
||||||
Long: `Hugo will inspect the current ulimit settings on the system.
|
|
||||||
This is primarily to ensure that Hugo can watch enough files on some OSs`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
var rLimit syscall.Rlimit
|
|
||||||
err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error Getting rlimit ", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.FEEDBACK.Println("Current rLimit:", rLimit)
|
|
||||||
|
|
||||||
if rLimit.Cur >= newRlimit {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
jww.FEEDBACK.Println("Attempting to increase limit")
|
|
||||||
rLimit.Cur = newRlimit
|
|
||||||
err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error Setting rLimit ", err)
|
|
||||||
}
|
|
||||||
err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error Getting rLimit ", err)
|
|
||||||
}
|
|
||||||
jww.FEEDBACK.Println("rLimit after change:", rLimit)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
return &limitCmd{baseCmd: newBaseCmd(ccmd)}
|
|
||||||
}
|
|
||||||
|
|
||||||
const newRlimit = 10240
|
|
||||||
|
|
||||||
func tweakLimit() {
|
|
||||||
var rLimit syscall.Rlimit
|
|
||||||
err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
|
|
||||||
if err != nil {
|
|
||||||
jww.WARN.Println("Unable to get rlimit:", err)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
if rLimit.Cur < newRlimit {
|
|
||||||
rLimit.Cur = newRlimit
|
|
||||||
err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
|
|
||||||
if err != nil {
|
|
||||||
// This may not succeed, see https://github.com/golang/go/issues/30401
|
|
||||||
jww.INFO.Println("Unable to increase number of open files limit:", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@@ -1,21 +0,0 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//go:build !darwin
// +build !darwin

package commands

func tweakLimit() {
	// nothing to do
}
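// A standalone sketch (unix only, not part of this commit) of the rlimit
// handling that the removed "hugo ulimit"/tweakLimit code above performed:
// read RLIMIT_NOFILE and, if the soft limit is below a target, try to raise
// it. The 10240 target mirrors the deleted newRlimit constant.
package main

import (
	"fmt"
	"syscall"
)

func main() {
	const target = 10240

	var rLimit syscall.Rlimit
	if err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit); err != nil {
		fmt.Println("unable to read rlimit:", err)
		return
	}
	fmt.Println("current open-files limit:", rLimit.Cur)

	if rLimit.Cur >= target {
		return
	}
	rLimit.Cur = target
	if err := syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit); err != nil {
		// Raising the limit may fail; see https://github.com/golang/go/issues/30401.
		fmt.Println("unable to raise open-files limit:", err)
	}
}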
295
commands/list.go
|
@@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@@ -14,197 +14,154 @@
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"encoding/csv"
|
"encoding/csv"
|
||||||
"os"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/hugolib"
|
"github.com/gohugoio/hugo/hugolib"
|
||||||
|
"github.com/gohugoio/hugo/resources/page"
|
||||||
"github.com/gohugoio/hugo/resources/resource"
|
"github.com/gohugoio/hugo/resources/resource"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*listCmd)(nil)
|
// newListCommand creates a new list command and its subcommands.
|
||||||
|
func newListCommand() *listCommand {
|
||||||
|
|
||||||
type listCmd struct {
|
list := func(cd *simplecobra.Commandeer, r *rootCommand, createRecord func(page.Page) []string, opts ...any) error {
|
||||||
*baseBuilderCmd
|
bcfg := hugolib.BuildCfg{SkipRender: true}
|
||||||
}
|
cfg := config.New()
|
||||||
|
for i := 0; i < len(opts); i += 2 {
|
||||||
func (lc *listCmd) buildSites(config map[string]any) (*hugolib.HugoSites, error) {
|
cfg.Set(opts[i].(string), opts[i+1])
|
||||||
cfgInit := func(c *commandeer) error {
|
|
||||||
for key, value := range config {
|
|
||||||
c.Set(key, value)
|
|
||||||
}
|
}
|
||||||
|
h, err := r.Build(cd, bcfg, cfg)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
writer := csv.NewWriter(r.Out)
|
||||||
|
defer writer.Flush()
|
||||||
|
|
||||||
|
for _, p := range h.Pages() {
|
||||||
|
if record := createRecord(p); record != nil {
|
||||||
|
if err := writer.Write(record); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
c, err := initializeConfig(true, true, false, &lc.hugoBuilderCommon, lc, cfgInit)
|
return &listCommand{
|
||||||
if err != nil {
|
commands: []simplecobra.Commander{
|
||||||
return nil, err
|
&simpleCommand{
|
||||||
}
|
name: "drafts",
|
||||||
|
short: "List all drafts",
|
||||||
|
long: `List all of the drafts in your content directory.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
createRecord := func(p page.Page) []string {
|
||||||
|
if !p.Draft() || p.File().IsZero() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return []string{
|
||||||
|
p.File().Path(),
|
||||||
|
p.PublishDate().Format(time.RFC3339)}
|
||||||
|
|
||||||
sites, err := hugolib.NewHugoSites(*c.DepsCfg)
|
|
||||||
if err != nil {
|
|
||||||
return nil, newSystemError("Error creating sites", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := sites.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
|
||||||
return nil, newSystemError("Error Processing Source Content", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return sites, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newListCmd() *listCmd {
|
|
||||||
cc := &listCmd{}
|
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "list",
|
|
||||||
Short: "Listing out various types of content",
|
|
||||||
Long: `Listing out various types of content.
|
|
||||||
|
|
||||||
List requires a subcommand, e.g. ` + "`hugo list drafts`.",
|
|
||||||
RunE: nil,
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.AddCommand(
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "drafts",
|
|
||||||
Short: "List all drafts",
|
|
||||||
Long: `List all of the drafts in your content directory.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
sites, err := cc.buildSites(map[string]any{"buildDrafts": true})
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error building sites", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, p := range sites.Pages() {
|
|
||||||
if p.Draft() {
|
|
||||||
jww.FEEDBACK.Println(strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)))
|
|
||||||
}
|
}
|
||||||
}
|
return list(cd, r, createRecord, "buildDrafts", true)
|
||||||
|
},
|
||||||
return nil
|
|
||||||
},
|
},
|
||||||
},
|
&simpleCommand{
|
||||||
&cobra.Command{
|
name: "future",
|
||||||
Use: "future",
|
short: "List all posts dated in the future",
|
||||||
Short: "List all posts dated in the future",
|
long: `List all of the posts in your content directory which will be posted in the future.`,
|
||||||
Long: `List all of the posts in your content directory which will be posted in the future.`,
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
createRecord := func(p page.Page) []string {
|
||||||
sites, err := cc.buildSites(map[string]any{"buildFuture": true})
|
if !resource.IsFuture(p) || p.File().IsZero() {
|
||||||
if err != nil {
|
return nil
|
||||||
return newSystemError("Error building sites", err)
|
}
|
||||||
}
|
return []string{
|
||||||
|
p.File().Path(),
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error building sites", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
writer := csv.NewWriter(os.Stdout)
|
|
||||||
defer writer.Flush()
|
|
||||||
|
|
||||||
for _, p := range sites.Pages() {
|
|
||||||
if resource.IsFuture(p) {
|
|
||||||
err := writer.Write([]string{
|
|
||||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
|
||||||
p.PublishDate().Format(time.RFC3339),
|
p.PublishDate().Format(time.RFC3339),
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error writing future posts to stdout", err)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
}
|
return list(cd, r, createRecord, "buildFuture", true)
|
||||||
|
},
|
||||||
return nil
|
|
||||||
},
|
},
|
||||||
},
|
&simpleCommand{
|
||||||
&cobra.Command{
|
name: "expired",
|
||||||
Use: "expired",
|
short: "List all posts already expired",
|
||||||
Short: "List all posts already expired",
|
long: `List all of the posts in your content directory which have already expired.`,
|
||||||
Long: `List all of the posts in your content directory which have already expired.`,
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
createRecord := func(p page.Page) []string {
|
||||||
sites, err := cc.buildSites(map[string]any{"buildExpired": true})
|
if !resource.IsExpired(p) || p.File().IsZero() {
|
||||||
if err != nil {
|
return nil
|
||||||
return newSystemError("Error building sites", err)
|
}
|
||||||
}
|
return []string{
|
||||||
|
p.File().Path(),
|
||||||
if err != nil {
|
p.PublishDate().Format(time.RFC3339),
|
||||||
return newSystemError("Error building sites", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
writer := csv.NewWriter(os.Stdout)
|
|
||||||
defer writer.Flush()
|
|
||||||
|
|
||||||
for _, p := range sites.Pages() {
|
|
||||||
if resource.IsExpired(p) {
|
|
||||||
err := writer.Write([]string{
|
|
||||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
|
||||||
p.ExpiryDate().Format(time.RFC3339),
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error writing expired posts to stdout", err)
|
|
||||||
}
|
}
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
}
|
||||||
|
return list(cd, r, createRecord, "buildExpired", true)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "all",
|
||||||
|
short: "List all posts",
|
||||||
|
long: `List all of the posts in your content directory, including drafts, future and expired pages.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
createRecord := func(p page.Page) []string {
|
||||||
|
if p.File().IsZero() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return []string{
|
||||||
|
p.File().Path(),
|
||||||
|
p.PublishDate().Format(time.RFC3339),
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
return list(cd, r, createRecord, "buildDrafts", true, "buildFuture", true, "buildExpired", true)
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
&cobra.Command{
|
}
|
||||||
Use: "all",
|
|
||||||
Short: "List all posts",
|
|
||||||
Long: `List all of the posts in your content directory, including drafts, future and expired pages.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
sites, err := cc.buildSites(map[string]any{
|
|
||||||
"buildExpired": true,
|
|
||||||
"buildDrafts": true,
|
|
||||||
"buildFuture": true,
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error building sites", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
writer := csv.NewWriter(os.Stdout)
|
}
|
||||||
defer writer.Flush()
|
|
||||||
|
type listCommand struct {
|
||||||
writer.Write([]string{
|
commands []simplecobra.Commander
|
||||||
"path",
|
}
|
||||||
"slug",
|
|
||||||
"title",
|
func (c *listCommand) Commands() []simplecobra.Commander {
|
||||||
"date",
|
return c.commands
|
||||||
"expiryDate",
|
}
|
||||||
"publishDate",
|
|
||||||
"draft",
|
func (c *listCommand) Name() string {
|
||||||
"permalink",
|
return "list"
|
||||||
})
|
}
|
||||||
for _, p := range sites.Pages() {
|
|
||||||
if !p.IsPage() {
|
func (c *listCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
continue
|
// Do nothing.
|
||||||
}
|
return nil
|
||||||
err := writer.Write([]string{
|
}
|
||||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
|
||||||
p.Slug(),
|
func (c *listCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
p.Title(),
|
cmd.Short = "Listing out various types of content"
|
||||||
p.Date().Format(time.RFC3339),
|
cmd.Long = `Listing out various types of content.
|
||||||
p.ExpiryDate().Format(time.RFC3339),
|
|
||||||
p.PublishDate().Format(time.RFC3339),
|
List requires a subcommand, e.g. hugo list drafts`
|
||||||
strconv.FormatBool(p.Draft()),
|
|
||||||
p.Permalink(),
|
return nil
|
||||||
})
|
}
|
||||||
if err != nil {
|
|
||||||
return newSystemError("Error writing posts to stdout", err)
|
func (c *listCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
}
|
return nil
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
}
|
||||||
|
|
|
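// A small consumer sketch (not part of this commit): the rewritten list
// subcommands above print plain CSV records, e.g. "path,publishDate" rows for
// `hugo list drafts`. This shows how such output can be parsed with
// encoding/csv; the sample input below is made up for illustration.
package main

import (
	"encoding/csv"
	"fmt"
	"strings"
	"time"
)

func main() {
	sample := "content/posts/draft-1.md,2023-04-01T10:00:00Z\n" +
		"content/posts/draft-2.md,2023-04-02T11:30:00Z\n"

	r := csv.NewReader(strings.NewReader(sample))
	records, err := r.ReadAll()
	if err != nil {
		panic(err)
	}
	for _, rec := range records {
		ts, err := time.Parse(time.RFC3339, rec[1])
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s (publish date %s)\n", rec[0], ts.Format("2006-01-02"))
	}
}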
@@ -1,68 +0,0 @@
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"encoding/csv"
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
)
|
|
||||||
|
|
||||||
func captureStdout(f func() error) (string, error) {
|
|
||||||
old := os.Stdout
|
|
||||||
r, w, _ := os.Pipe()
|
|
||||||
os.Stdout = w
|
|
||||||
|
|
||||||
err := f()
|
|
||||||
|
|
||||||
w.Close()
|
|
||||||
os.Stdout = old
|
|
||||||
|
|
||||||
var buf bytes.Buffer
|
|
||||||
io.Copy(&buf, r)
|
|
||||||
return buf.String(), err
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestListAll(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
dir := createSimpleTestSite(t, testSiteConfig{})
|
|
||||||
|
|
||||||
hugoCmd := newCommandsBuilder().addAll().build()
|
|
||||||
cmd := hugoCmd.getCommand()
|
|
||||||
|
|
||||||
t.Cleanup(func() {
|
|
||||||
os.RemoveAll(dir)
|
|
||||||
})
|
|
||||||
|
|
||||||
cmd.SetArgs([]string{"-s=" + dir, "list", "all"})
|
|
||||||
|
|
||||||
out, err := captureStdout(func() error {
|
|
||||||
_, err := cmd.ExecuteC()
|
|
||||||
return err
|
|
||||||
})
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
|
|
||||||
r := csv.NewReader(strings.NewReader(out))
|
|
||||||
|
|
||||||
header, err := r.Read()
|
|
||||||
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(header, qt.DeepEquals, []string{
|
|
||||||
"path", "slug", "title",
|
|
||||||
"date", "expiryDate", "publishDate",
|
|
||||||
"draft", "permalink",
|
|
||||||
})
|
|
||||||
|
|
||||||
record, err := r.Read()
|
|
||||||
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(record, qt.DeepEquals, []string{
|
|
||||||
filepath.Join("content", "p1.md"), "", "P1",
|
|
||||||
"0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z",
|
|
||||||
"false", "https://example.org/p1/",
|
|
||||||
})
|
|
||||||
}
|
|
447
commands/mod.go
|
@@ -1,4 +1,4 @@
|
||||||
// Copyright 2020 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@@ -14,87 +14,18 @@
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/hugolib"
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/modules"
|
"github.com/gohugoio/hugo/modules/npm"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*modCmd)(nil)
|
const commonUsageMod = `
|
||||||
|
|
||||||
type modCmd struct {
|
|
||||||
*baseBuilderCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *modCmd) newVerifyCmd() *cobra.Command {
|
|
||||||
var clean bool
|
|
||||||
|
|
||||||
verifyCmd := &cobra.Command{
|
|
||||||
Use: "verify",
|
|
||||||
Short: "Verify dependencies.",
|
|
||||||
Long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withModsClient(true, func(c *modules.Client) error {
|
|
||||||
return c.Verify(clean)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
verifyCmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
|
|
||||||
|
|
||||||
return verifyCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
var moduleNotFoundRe = regexp.MustCompile("module.*not found")
|
|
||||||
|
|
||||||
func (c *modCmd) newCleanCmd() *cobra.Command {
|
|
||||||
var pattern string
|
|
||||||
var all bool
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "clean",
|
|
||||||
Short: "Delete the Hugo Module cache for the current project.",
|
|
||||||
Long: `Delete the Hugo Module cache for the current project.
|
|
||||||
|
|
||||||
Note that after you run this command, all of your dependencies will be re-downloaded next time you run "hugo".
|
|
||||||
|
|
||||||
Also note that if you configure a positive maxAge for the "modules" file cache, it will also be cleaned as part of "hugo --gc".
|
|
||||||
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
if all {
|
|
||||||
com, err := c.initConfig(false)
|
|
||||||
|
|
||||||
if err != nil && com == nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
count, err := com.hugo().FileCaches.ModulesCache().Prune(true)
|
|
||||||
com.logger.Printf("Deleted %d files from module cache.", count)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
return c.withModsClient(true, func(c *modules.Client) error {
|
|
||||||
return c.Clean(pattern)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.Flags().StringVarP(&pattern, "pattern", "", "", `pattern matching module paths to clean (all if not set), e.g. "**hugo*"`)
|
|
||||||
cmd.Flags().BoolVarP(&all, "all", "", false, "clean entire module cache")
|
|
||||||
|
|
||||||
return cmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newModCmd() *modCmd {
|
|
||||||
c := &modCmd{}
|
|
||||||
|
|
||||||
const commonUsage = `
|
|
||||||
Note that Hugo will always start out by resolving the components defined in the site
|
Note that Hugo will always start out by resolving the components defined in the site
|
||||||
configuration, provided by a _vendor directory (if no --ignoreVendorPaths flag provided),
|
configuration, provided by a _vendor directory (if no --ignoreVendorPaths flag provided),
|
||||||
Go Modules, or a folder inside the themes directory, in that order.
|
Go Modules, or a folder inside the themes directory, in that order.
|
||||||
|
@@ -103,27 +34,156 @@ See https://gohugo.io/hugo-modules/ for more information.
|
||||||
|
|
||||||
`
|
`
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
// buildConfigCommands creates a new config command and its subcommands.
|
||||||
Use: "mod",
|
func newModCommands() *modCommands {
|
||||||
Short: "Various Hugo Modules helpers.",
|
var (
|
||||||
Long: `Various helpers to manage the modules in your project's dependency graph.
|
clean bool
|
||||||
|
pattern string
|
||||||
|
all bool
|
||||||
|
)
|
||||||
|
|
||||||
Most operations here require a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
|
npmCommand := &simpleCommand{
|
||||||
This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
|
name: "npm",
|
||||||
|
short: "Various npm helpers.",
|
||||||
|
long: `Various npm (Node package manager) helpers.`,
|
||||||
|
commands: []simplecobra.Commander{
|
||||||
|
&simpleCommand{
|
||||||
|
name: "pack",
|
||||||
|
short: "Experimental: Prepares and writes a composite package.json file for your project.",
|
||||||
|
long: `Prepares and writes a composite package.json file for your project.
|
||||||
|
|
||||||
` + commonUsage,
|
On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file
|
||||||
|
with the base dependency set.
|
||||||
|
|
||||||
RunE: nil,
|
This set will be merged with all "package.hugo.json" files found in the dependency tree, picking the version closest to the project.
|
||||||
|
|
||||||
|
This command is marked as 'Experimental'. We think it's a great idea, so it's not likely to be
|
||||||
|
removed from Hugo, but we need to test this out in "real life" to get a feel of it,
|
||||||
|
so this may/will change in future versions of Hugo.
|
||||||
|
`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd.AddCommand(newModNPMCmd(c))
|
return &modCommands{
|
||||||
|
commands: []simplecobra.Commander{
|
||||||
|
&simpleCommand{
|
||||||
|
name: "init",
|
||||||
|
short: "Initialize this project as a Hugo Module.",
|
||||||
|
long: `Initialize this project as a Hugo Module.
|
||||||
|
It will try to guess the module path, but you may help by passing it as an argument, e.g:
|
||||||
|
|
||||||
cmd.AddCommand(
|
hugo mod init github.com/gohugoio/testshortcodes
|
||||||
&cobra.Command{
|
|
||||||
Use: "get",
|
Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
|
||||||
DisableFlagParsing: true,
|
inside a subfolder on GitHub, as one example.
|
||||||
Short: "Resolves dependencies in your current Hugo Project.",
|
`,
|
||||||
Long: `
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var initPath string
|
||||||
|
if len(args) >= 1 {
|
||||||
|
initPath = args[0]
|
||||||
|
}
|
||||||
|
return h.Configs.ModulesClient.Init(initPath)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "verify",
|
||||||
|
short: "Verify dependencies.",
|
||||||
|
long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.`,
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
|
||||||
|
},
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
client := conf.configs.ModulesClient
|
||||||
|
return client.Verify(clean)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "graph",
|
||||||
|
short: "Print a module dependency graph.",
|
||||||
|
long: `Print a module dependency graph with information about module status (disabled, vendored).
|
||||||
|
Note that for vendored modules, that is the version listed and not the one from go.mod.
|
||||||
|
`,
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
|
||||||
|
},
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
client := conf.configs.ModulesClient
|
||||||
|
return client.Graph(os.Stdout)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "clean",
|
||||||
|
short: "Delete the Hugo Module cache for the current project.",
|
||||||
|
long: `Delete the Hugo Module cache for the current project.`,
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().StringVarP(&pattern, "pattern", "", "", `pattern matching module paths to clean (all if not set), e.g. "**hugo*"`)
|
||||||
|
cmd.Flags().BoolVarP(&all, "all", "", false, "clean entire module cache")
|
||||||
|
},
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if all {
|
||||||
|
modCache := h.ResourceSpec.FileCaches.ModulesCache()
|
||||||
|
count, err := modCache.Prune(true)
|
||||||
|
r.Printf("Deleted %d files from module cache.", count)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
return h.Configs.ModulesClient.Clean(pattern)
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "tidy",
|
||||||
|
short: "Remove unused entries in go.mod and go.sum.",
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return h.Configs.ModulesClient.Tidy()
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "vendor",
|
||||||
|
short: "Vendor all module dependencies into the _vendor directory.",
|
||||||
|
long: `Vendor all module dependencies into the _vendor directory.
|
||||||
|
If a module is vendored, that is where Hugo will look for its dependencies.
|
||||||
|
`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return h.Configs.ModulesClient.Vendor()
|
||||||
|
},
|
||||||
|
},
|
||||||
|
|
||||||
|
&simpleCommand{
|
||||||
|
name: "get",
|
||||||
|
short: "Resolves dependencies in your current Hugo Project.",
|
||||||
|
long: `
|
||||||
Resolves dependencies in your current Hugo Project.
|
Resolves dependencies in your current Hugo Project.
|
||||||
|
|
||||||
Some examples:
|
Some examples:
|
||||||
|
@@ -142,152 +202,109 @@ Install the latest versions of all module dependencies:
|
||||||
hugo mod get -u ./... (recursive)
|
hugo mod get -u ./... (recursive)
|
||||||
|
|
||||||
Run "go help get" for more information. All flags available for "go get" is also relevant here.
|
Run "go help get" for more information. All flags available for "go get" is also relevant here.
|
||||||
` + commonUsage,
|
` + commonUsageMod,
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
withc: func(cmd *cobra.Command) {
|
||||||
// We currently just pass on the flags we get to Go and
|
cmd.DisableFlagParsing = true
|
||||||
// need to do the flag handling manually.
|
},
|
||||||
if len(args) == 1 && args[0] == "-h" {
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
return cmd.Help()
|
// We currently just pass on the flags we get to Go and
|
||||||
}
|
// need to do the flag handling manually.
|
||||||
|
if len(args) == 1 && args[0] == "-h" {
|
||||||
var lastArg string
|
return errHelp
|
||||||
if len(args) != 0 {
|
|
||||||
lastArg = args[len(args)-1]
|
|
||||||
}
|
|
||||||
|
|
||||||
if lastArg == "./..." {
|
|
||||||
args = args[:len(args)-1]
|
|
||||||
// Do a recursive update.
|
|
||||||
dirname, err := os.Getwd()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sanity check. We do recursive walking and want to avoid
|
var lastArg string
|
||||||
// accidents.
|
if len(args) != 0 {
|
||||||
if len(dirname) < 5 {
|
lastArg = args[len(args)-1]
|
||||||
return errors.New("must not be run from the file system root")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
|
if lastArg == "./..." {
|
||||||
if info.IsDir() {
|
args = args[:len(args)-1]
|
||||||
return nil
|
// Do a recursive update.
|
||||||
|
dirname, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if info.Name() == "go.mod" {
|
// Sanity check. We do recursive walking and want to avoid
|
||||||
// Found a module.
|
// accidents.
|
||||||
dir := filepath.Dir(path)
|
if len(dirname) < 5 {
|
||||||
fmt.Println("Update module in", dir)
|
return errors.New("must not be run from the file system root")
|
||||||
c.source = dir
|
}
|
||||||
err := c.withModsClient(false, func(c *modules.Client) error {
|
|
||||||
if len(args) == 1 && args[0] == "-h" {
|
filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
|
||||||
return cmd.Help()
|
if info.IsDir() {
|
||||||
}
|
return nil
|
||||||
return c.Get(args...)
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
}
|
||||||
|
if info.Name() == "go.mod" {
|
||||||
|
// Found a module.
|
||||||
|
dir := filepath.Dir(path)
|
||||||
|
r.Println("Update module in", dir)
|
||||||
|
cfg := config.New()
|
||||||
|
cfg.Set("workingDir", dir)
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, cfg))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
client := conf.configs.ModulesClient
|
||||||
|
return client.Get(args...)
|
||||||
|
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
return nil
|
return nil
|
||||||
})
|
} else {
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, nil))
|
||||||
return nil
|
if err != nil {
|
||||||
}
|
return err
|
||||||
|
}
|
||||||
return c.withModsClient(false, func(c *modules.Client) error {
|
client := conf.configs.ModulesClient
|
||||||
return c.Get(args...)
|
return client.Get(args...)
|
||||||
})
|
}
|
||||||
|
},
|
||||||
},
|
},
|
||||||
|
npmCommand,
|
||||||
},
|
},
|
||||||
&cobra.Command{
|
}
|
||||||
Use: "graph",
|
|
||||||
Short: "Print a module dependency graph.",
|
|
||||||
Long: `Print a module dependency graph with information about module status (disabled, vendored).
|
|
||||||
Note that for vendored modules, that is the version listed and not the one from go.mod.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withModsClient(true, func(c *modules.Client) error {
|
|
||||||
return c.Graph(os.Stdout)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
},
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "init",
|
|
||||||
Short: "Initialize this project as a Hugo Module.",
|
|
||||||
Long: `Initialize this project as a Hugo Module.
|
|
||||||
It will try to guess the module path, but you may help by passing it as an argument, e.g:
|
|
||||||
|
|
||||||
hugo mod init github.com/gohugoio/testshortcodes
|
|
||||||
|
|
||||||
Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
|
|
||||||
inside a subfolder on GitHub, as one example.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
var path string
|
|
||||||
if len(args) >= 1 {
|
|
||||||
path = args[0]
|
|
||||||
}
|
|
||||||
return c.withModsClient(false, func(c *modules.Client) error {
|
|
||||||
return c.Init(path)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
},
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "vendor",
|
|
||||||
Short: "Vendor all module dependencies into the _vendor directory.",
|
|
||||||
Long: `Vendor all module dependencies into the _vendor directory.
|
|
||||||
|
|
||||||
If a module is vendored, that is where Hugo will look for its dependencies.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withModsClient(true, func(c *modules.Client) error {
|
|
||||||
return c.Vendor()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
},
|
|
||||||
c.newVerifyCmd(),
|
|
||||||
&cobra.Command{
|
|
||||||
Use: "tidy",
|
|
||||||
Short: "Remove unused entries in go.mod and go.sum.",
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withModsClient(true, func(c *modules.Client) error {
|
|
||||||
return c.Tidy()
|
|
||||||
})
|
|
||||||
},
|
|
||||||
},
|
|
||||||
c.newCleanCmd(),
|
|
||||||
)
|
|
||||||
|
|
||||||
c.baseBuilderCmd = b.newBuilderCmd(cmd)
|
|
||||||
|
|
||||||
return c
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {
|
type modCommands struct {
|
||||||
com, err := c.initConfig(failOnMissingConfig)
|
r *rootCommand
|
||||||
|
|
||||||
|
commands []simplecobra.Commander
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *modCommands) Commands() []simplecobra.Commander {
|
||||||
|
return c.commands
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *modCommands) Name() string {
|
||||||
|
return "mod"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *modCommands) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
_, err := c.r.ConfigFromProvider(c.r.configVersionID.Load(), nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
//config := conf.configs.Base
|
||||||
|
|
||||||
return f(com.hugo().ModulesClient)
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *modCmd) withHugo(f func(*hugolib.HugoSites) error) error {
|
func (c *modCommands) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
com, err := c.initConfig(true)
|
cmd.Short = "Various Hugo Modules helpers."
|
||||||
if err != nil {
|
cmd.Long = `Various helpers to manage the modules in your project's dependency graph.
|
||||||
return err
|
Most operations here require a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
|
||||||
}
|
This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
|
||||||
|
|
||||||
return f(com.hugo())
|
` + commonUsageMod
|
||||||
|
cmd.RunE = nil
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *modCmd) initConfig(failOnNoConfig bool) (*commandeer, error) {
|
func (c *modCommands) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
com, err := initializeConfig(failOnNoConfig, false, false, &c.hugoBuilderCommon, c, nil)
|
c.r = cd.Root.Command.(*rootCommand)
|
||||||
if err != nil {
|
return nil
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return com, nil
|
|
||||||
}
|
}
|
||||||
|
|
|
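// A standalone sketch (not part of this commit) of the recursive "./..."
// handling in "hugo mod get -u ./..." above: walk the working directory and,
// for every go.mod found, report the module directory that would be updated.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	root, err := os.Getwd()
	if err != nil {
		panic(err)
	}
	walkErr := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() {
			return nil
		}
		if info.Name() == "go.mod" {
			// The command above sets workingDir to this directory and runs
			// the modules client's Get(args...) from there.
			fmt.Println("would update module in", filepath.Dir(path))
		}
		return nil
	})
	if walkErr != nil {
		panic(walkErr)
	}
}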
@@ -1,56 +0,0 @@
|
||||||
// Copyright 2020 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/gohugoio/hugo/hugolib"
|
|
||||||
"github.com/gohugoio/hugo/modules/npm"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
func newModNPMCmd(c *modCmd) *cobra.Command {
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "npm",
|
|
||||||
Short: "Various npm helpers.",
|
|
||||||
Long: `Various npm (Node package manager) helpers.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withHugo(func(h *hugolib.HugoSites) error {
|
|
||||||
return nil
|
|
||||||
})
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.AddCommand(&cobra.Command{
|
|
||||||
Use: "pack",
|
|
||||||
Short: "Experimental: Prepares and writes a composite package.json file for your project.",
|
|
||||||
Long: `Prepares and writes a composite package.json file for your project.
|
|
||||||
|
|
||||||
On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file
|
|
||||||
with the base dependency set.
|
|
||||||
|
|
||||||
This set will be merged with all "package.hugo.json" files found in the dependency tree, picking the version closest to the project.
|
|
||||||
|
|
||||||
This command is marked as 'Experimental'. We think it's a great idea, so it's not likely to be
|
|
||||||
removed from Hugo, but we need to test this out in "real life" to get a feel of it,
|
|
||||||
so this may/will change in future versions of Hugo.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return c.withHugo(func(h *hugolib.HugoSites) error {
|
|
||||||
return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
|
|
||||||
})
|
|
||||||
},
|
|
||||||
})
|
|
||||||
|
|
||||||
return cmd
|
|
||||||
}
|
|
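// A hedged sketch of what the simpleCommand helper used throughout this commit
// (for the list, mod and new subcommands) might look like. Its real definition
// lives elsewhere in the change set; the field names are taken from how it is
// constructed above and the method set mirrors the simplecobra.Commander
// implementations shown for listCommand and modCommands. rootCommand is the
// package's root command type from this commit, so this snippet only compiles
// inside the commands package; treat it as an illustration, not the actual type.
package commands

import (
	"context"

	"github.com/bep/simplecobra"
	"github.com/spf13/cobra"
)

type simpleCommandSketch struct {
	name     string
	use      string
	short    string
	long     string
	run      func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error
	withc    func(cmd *cobra.Command)
	commands []simplecobra.Commander

	rootCmd *rootCommand
}

func (c *simpleCommandSketch) Commands() []simplecobra.Commander { return c.commands }

func (c *simpleCommandSketch) Name() string { return c.name }

func (c *simpleCommandSketch) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
	if c.run == nil {
		return nil
	}
	return c.run(ctx, cd, c.rootCmd, args)
}

func (c *simpleCommandSketch) WithCobraCommand(cmd *cobra.Command) error {
	if c.use != "" {
		cmd.Use = c.use
	}
	cmd.Short = c.short
	cmd.Long = c.long
	if c.withc != nil {
		c.withc(cmd)
	}
	return nil
}

func (c *simpleCommandSketch) Init(cd, runner *simplecobra.Commandeer) error {
	c.rootCmd = cd.Root.Command.(*rootCommand)
	return nil
}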
389
commands/new.go
|
@@ -1,4 +1,4 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@@ -15,114 +15,351 @@ package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"os"
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
|
"github.com/gohugoio/hugo/common/htime"
|
||||||
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/create"
|
"github.com/gohugoio/hugo/create"
|
||||||
"github.com/gohugoio/hugo/helpers"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
"github.com/gohugoio/hugo/hugolib"
|
"github.com/gohugoio/hugo/parser"
|
||||||
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*newCmd)(nil)
|
func newNewCommand() *newCommand {
|
||||||
|
var (
|
||||||
|
configFormat string
|
||||||
|
force bool
|
||||||
|
contentType string
|
||||||
|
)
|
||||||
|
|
||||||
type newCmd struct {
|
var c *newCommand
|
||||||
contentEditor string
|
c = &newCommand{
|
||||||
contentType string
|
commands: []simplecobra.Commander{
|
||||||
force bool
|
&simpleCommand{
|
||||||
|
name: "content",
|
||||||
|
use: "content [path]",
|
||||||
|
short: "Create new content for your site",
|
||||||
|
long: `Create a new content file and automatically set the date and title.
|
||||||
|
It will guess which kind of file to create based on the path provided.
|
||||||
|
|
||||||
|
You can also specify the kind with ` + "`-k KIND`" + `.
|
||||||
|
|
||||||
|
If archetypes are provided in your theme or site, they will be used.
|
||||||
|
|
||||||
|
Ensure you run this within the root directory of your site.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
if len(args) < 1 {
|
||||||
|
return errors.New("path needs to be provided")
|
||||||
|
}
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return create.NewContent(h, contentType, args[0], force)
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().StringVarP(&contentType, "kind", "k", "", "content type to create")
|
||||||
|
cmd.Flags().String("editor", "", "edit new content with this editor, if provided")
|
||||||
|
cmd.Flags().BoolVarP(&force, "force", "f", false, "overwrite file if it already exists")
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "site",
|
||||||
|
use: "site [path]",
|
||||||
|
short: "Create a new site (skeleton)",
|
||||||
|
long: `Create a new site in the provided directory.
|
||||||
|
The new site will have the correct structure, but no content or theme yet.
|
||||||
|
Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
if len(args) < 1 {
|
||||||
|
return errors.New("path needs to be provided")
|
||||||
|
}
|
||||||
|
createpath, err := filepath.Abs(filepath.Clean(args[0]))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
cfg := config.New()
|
||||||
|
cfg.Set("workingDir", createpath)
|
||||||
|
cfg.Set("publishDir", "public")
|
||||||
|
|
||||||
|
conf, err := r.ConfigFromProvider(r.configVersionID.Load(), flagsToCfg(cd, cfg))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
sourceFs := conf.fs.Source
|
||||||
|
|
||||||
|
archeTypePath := filepath.Join(createpath, "archetypes")
|
||||||
|
dirs := []string{
|
||||||
|
archeTypePath,
|
||||||
|
filepath.Join(createpath, "assets"),
|
||||||
|
filepath.Join(createpath, "content"),
|
||||||
|
filepath.Join(createpath, "data"),
|
||||||
|
filepath.Join(createpath, "layouts"),
|
||||||
|
filepath.Join(createpath, "static"),
|
||||||
|
filepath.Join(createpath, "themes"),
|
||||||
|
}
|
||||||
|
|
||||||
|
if exists, _ := helpers.Exists(createpath, sourceFs); exists {
|
||||||
|
if isDir, _ := helpers.IsDir(createpath, sourceFs); !isDir {
|
||||||
|
return errors.New(createpath + " already exists but not a directory")
|
||||||
|
}
|
||||||
|
|
||||||
|
isEmpty, _ := helpers.IsEmpty(createpath, sourceFs)
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case !isEmpty && !force:
|
||||||
|
return errors.New(createpath + " already exists and is not empty. See --force.")
|
||||||
|
|
||||||
|
case !isEmpty && force:
|
||||||
|
all := append(dirs, filepath.Join(createpath, "hugo."+configFormat))
|
||||||
|
for _, path := range all {
|
||||||
|
if exists, _ := helpers.Exists(path, sourceFs); exists {
|
||||||
|
return errors.New(path + " already exists")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, dir := range dirs {
|
||||||
|
if err := sourceFs.MkdirAll(dir, 0777); err != nil {
|
||||||
|
return fmt.Errorf("failed to create dir: %w", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.newSiteCreateConfig(sourceFs, createpath, configFormat)
|
||||||
|
|
||||||
|
// Create a default archetype file.
|
||||||
|
helpers.SafeWriteToDisk(filepath.Join(archeTypePath, "default.md"),
|
||||||
|
strings.NewReader(create.DefaultArchetypeTemplateTemplate), sourceFs)
|
||||||
|
|
||||||
|
r.Printf("Congratulations! Your new Hugo site is created in %s.\n\n", createpath)
|
||||||
|
r.Println(c.newSiteNextStepsText())
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
withc: func(cmd *cobra.Command) {
|
||||||
|
cmd.Flags().StringVarP(&configFormat, "format", "f", "toml", "config file format")
|
||||||
|
cmd.Flags().BoolVar(&force, "force", false, "init inside non-empty directory")
|
||||||
|
},
|
||||||
|
},
|
||||||
|
&simpleCommand{
|
||||||
|
name: "theme",
|
||||||
|
use: "theme [path]",
|
||||||
|
short: "Create a new site (skeleton)",
|
||||||
|
long: `Create a new site in the provided directory.
|
||||||
|
The new site will have the correct structure, but no content or theme yet.
|
||||||
|
Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
|
||||||
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
ps := h.PathSpec
|
||||||
|
sourceFs := ps.Fs.Source
|
||||||
|
themesDir := h.Configs.LoadingInfo.BaseConfig.ThemesDir
|
||||||
|
createpath := ps.AbsPathify(filepath.Join(themesDir, args[0]))
|
||||||
|
r.Println("Creating theme at", createpath)
|
||||||
|
|
||||||
|
if x, _ := helpers.Exists(createpath, sourceFs); x {
|
||||||
|
return errors.New(createpath + " already exists")
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, filename := range []string{
|
||||||
|
"index.html",
|
||||||
|
"404.html",
|
||||||
|
"_default/list.html",
|
||||||
|
"_default/single.html",
|
||||||
|
"partials/head.html",
|
||||||
|
"partials/header.html",
|
||||||
|
"partials/footer.html",
|
||||||
|
} {
|
||||||
|
touchFile(sourceFs, filepath.Join(createpath, "layouts", filename))
|
||||||
|
}
|
||||||
|
|
||||||
|
baseofDefault := []byte(`<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
{{- partial "head.html" . -}}
|
||||||
|
<body>
|
||||||
|
{{- partial "header.html" . -}}
|
||||||
|
<div id="content">
|
||||||
|
{{- block "main" . }}{{- end }}
|
||||||
|
</div>
|
||||||
|
{{- partial "footer.html" . -}}
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
`)
|
||||||
|
|
||||||
|
err = helpers.WriteToDisk(filepath.Join(createpath, "layouts", "_default", "baseof.html"), bytes.NewReader(baseofDefault), sourceFs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
mkdir(createpath, "archetypes")
|
||||||
|
|
||||||
|
archDefault := []byte("+++\n+++\n")
|
||||||
|
|
||||||
|
err = helpers.WriteToDisk(filepath.Join(createpath, "archetypes", "default.md"), bytes.NewReader(archDefault), sourceFs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
mkdir(createpath, "static", "js")
|
||||||
|
mkdir(createpath, "static", "css")
|
||||||
|
|
||||||
|
by := []byte(`The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) ` + htime.Now().Format("2006") + ` YOUR_NAME_HERE
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
|
this software and associated documentation files (the "Software"), to deal in
|
||||||
|
the Software without restriction, including without limitation the rights to
|
||||||
|
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||||
|
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||||
|
subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||||
|
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||||
|
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||||
|
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
|
`)
|
||||||
|
|
||||||
|
err = helpers.WriteToDisk(filepath.Join(createpath, "LICENSE"), bytes.NewReader(by), sourceFs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
c.createThemeMD(ps.Fs.Source, createpath)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return c
|
||||||
|
|
||||||
*baseBuilderCmd
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *commandsBuilder) newNewCmd() *newCmd {
|
type newCommand struct {
|
||||||
cmd := &cobra.Command{
|
rootCmd *rootCommand
|
||||||
Use: "new [path]",
|
|
||||||
Short: "Create new content for your site",
|
commands []simplecobra.Commander
|
||||||
Long: `Create a new content file and automatically set the date and title.
|
}
|
||||||
|
|
||||||
|
func (c *newCommand) Commands() []simplecobra.Commander {
|
||||||
|
return c.commands
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *newCommand) Name() string {
|
||||||
|
return "new"
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *newCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *newCommand) WithCobraCommand(cmd *cobra.Command) error {
|
||||||
|
cmd.Short = "Create new content for your site"
|
||||||
|
cmd.Long = `Create a new content file and automatically set the date and title.
|
||||||
It will guess which kind of file to create based on the path provided.
|
It will guess which kind of file to create based on the path provided.
|
||||||
|
|
||||||
You can also specify the kind with ` + "`-k KIND`" + `.
|
You can also specify the kind with ` + "`-k KIND`" + `.
|
||||||
|
|
||||||
If archetypes are provided in your theme or site, they will be used.
|
If archetypes are provided in your theme or site, they will be used.
|
||||||
|
|
||||||
Ensure you run this within the root directory of your site.`,
|
Ensure you run this within the root directory of your site.`
|
||||||
}
|
return nil
|
||||||
|
|
||||||
cc := &newCmd{baseBuilderCmd: b.newBuilderCmd(cmd)}
|
|
||||||
|
|
||||||
cmd.Flags().StringVarP(&cc.contentType, "kind", "k", "", "content type to create")
|
|
||||||
cmd.Flags().StringVar(&cc.contentEditor, "editor", "", "edit new content with this editor, if provided")
|
|
||||||
cmd.Flags().BoolVarP(&cc.force, "force", "f", false, "overwrite file if it already exists")
|
|
||||||
|
|
||||||
cmd.AddCommand(b.newNewSiteCmd().getCommand())
|
|
||||||
cmd.AddCommand(b.newNewThemeCmd().getCommand())
|
|
||||||
|
|
||||||
cmd.RunE = cc.newContent
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (n *newCmd) newContent(cmd *cobra.Command, args []string) error {
|
func (c *newCommand) Init(cd, runner *simplecobra.Commandeer) error {
|
||||||
cfgInit := func(c *commandeer) error {
|
c.rootCmd = cd.Root.Command.(*rootCommand)
|
||||||
if cmd.Flags().Changed("editor") {
|
return nil
|
||||||
c.Set("newContentEditor", n.contentEditor)
|
}
|
||||||
}
|
|
||||||
return nil
|
func (c *newCommand) newSiteCreateConfig(fs afero.Fs, inpath string, kind string) (err error) {
|
||||||
|
in := map[string]string{
|
||||||
|
"baseURL": "http://example.org/",
|
||||||
|
"title": "My New Hugo Site",
|
||||||
|
"languageCode": "en-us",
|
||||||
}
|
}
|
||||||
|
|
||||||
c, err := initializeConfig(true, true, false, &n.hugoBuilderCommon, n, cfgInit)
|
var buf bytes.Buffer
|
||||||
|
err = parser.InterfaceToConfig(in, metadecoders.FormatFromString(kind), &buf)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(args) < 1 {
|
return helpers.WriteToDisk(filepath.Join(inpath, "hugo."+kind), &buf, fs)
|
||||||
return newUserError("path needs to be provided")
|
|
||||||
}
|
|
||||||
|
|
||||||
return create.NewContent(c.hugo(), n.contentType, args[0], n.force)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func mkdir(x ...string) {
|
func (c *newCommand) newSiteNextStepsText() string {
|
||||||
p := filepath.Join(x...)
|
var nextStepsText bytes.Buffer
|
||||||
|
|
||||||
err := os.MkdirAll(p, 0777) // before umask
|
nextStepsText.WriteString(`Just a few more steps and you're ready to go:
|
||||||
|
|
||||||
|
1. Download a theme into the same-named folder.
|
||||||
|
Choose a theme from https://themes.gohugo.io/ or
|
||||||
|
create your own with the "hugo new theme <THEMENAME>" command.
|
||||||
|
2. Perhaps you want to add some content. You can add single files
|
||||||
|
with "hugo new `)
|
||||||
|
|
||||||
|
nextStepsText.WriteString(filepath.Join("<SECTIONNAME>", "<FILENAME>.<FORMAT>"))
|
||||||
|
|
||||||
|
nextStepsText.WriteString(`".
|
||||||
|
3. Start the built-in live server via "hugo server".
|
||||||
|
|
||||||
|
Visit https://gohugo.io/ for quickstart guide and full documentation.`)
|
||||||
|
|
||||||
|
return nextStepsText.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *newCommand) createThemeMD(fs afero.Fs, inpath string) (err error) {
|
||||||
|
|
||||||
|
by := []byte(`# theme.toml template for a Hugo theme
|
||||||
|
# See https://github.com/gohugoio/hugoThemes#themetoml for an example
|
||||||
|
|
||||||
|
name = "` + strings.Title(helpers.MakeTitle(filepath.Base(inpath))) + `"
|
||||||
|
license = "MIT"
|
||||||
|
licenselink = "https://github.com/yourname/yourtheme/blob/master/LICENSE"
|
||||||
|
description = ""
|
||||||
|
homepage = "http://example.com/"
|
||||||
|
tags = []
|
||||||
|
features = []
|
||||||
|
min_version = "0.41.0"
|
||||||
|
|
||||||
|
[author]
|
||||||
|
name = ""
|
||||||
|
homepage = ""
|
||||||
|
|
||||||
|
# If porting an existing theme
|
||||||
|
[original]
|
||||||
|
name = ""
|
||||||
|
homepage = ""
|
||||||
|
repo = ""
|
||||||
|
`)
|
||||||
|
|
||||||
|
err = helpers.WriteToDisk(filepath.Join(inpath, "theme.toml"), bytes.NewReader(by), fs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
jww.FATAL.Fatalln(err)
|
return
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
func touchFile(fs afero.Fs, x ...string) {
|
err = helpers.WriteToDisk(filepath.Join(inpath, "hugo.toml"), strings.NewReader("# Theme config.\n"), fs)
|
||||||
inpath := filepath.Join(x...)
|
|
||||||
mkdir(filepath.Dir(inpath))
|
|
||||||
err := helpers.WriteToDisk(inpath, bytes.NewReader([]byte{}), fs)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
jww.FATAL.Fatalln(err)
|
return
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
return nil
|
||||||
func newContentPathSection(h *hugolib.HugoSites, path string) (string, string) {
|
|
||||||
// Forward slashes is used in all examples. Convert if needed.
|
|
||||||
// Issue #1133
|
|
||||||
createpath := filepath.FromSlash(path)
|
|
||||||
|
|
||||||
if h != nil {
|
|
||||||
for _, dir := range h.BaseFs.Content.Dirs {
|
|
||||||
createpath = strings.TrimPrefix(createpath, dir.Meta().Filename)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var section string
|
|
||||||
// assume the first directory is the section (kind)
|
|
||||||
if strings.Contains(createpath[1:], helpers.FilePathSeparator) {
|
|
||||||
parts := strings.Split(strings.TrimPrefix(createpath, helpers.FilePathSeparator), helpers.FilePathSeparator)
|
|
||||||
if len(parts) > 0 {
|
|
||||||
section = parts[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
return createpath, section
|
|
||||||
}
|
}
|
||||||
|
|
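A sketch of the simpleCommand pattern used by the new site and theme generators above. The greet command, its flag and newGreetCommand are invented for illustration and are not part of this commit; only the field names (name, use, short, run, withc) and the run/withc signatures come from the code above.

```go
// Hypothetical subcommand inside package commands, mirroring the shape above.
func newGreetCommand() simplecobra.Commander {
	var name string
	return &simpleCommand{
		name:  "greet",
		use:   "greet",
		short: "Print a greeting (illustrative only)",
		run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
			// r is the rootCommand, which exposes Printf/Println as used above.
			r.Printf("Hello, %s!\n", name)
			return nil
		},
		withc: func(cmd *cobra.Command) {
			cmd.Flags().StringVar(&name, "name", "world", "who to greet")
		},
	}
}
```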
|
@ -1,29 +0,0 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Issue #1133
|
|
||||||
func TestNewContentPathSectionWithForwardSlashes(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
p, s := newContentPathSection(nil, "/post/new.md")
|
|
||||||
c.Assert(p, qt.Equals, filepath.FromSlash("/post/new.md"))
|
|
||||||
c.Assert(s, qt.Equals, "post")
|
|
||||||
}
|
|
|
@ -1,167 +0,0 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/config"
|
|
||||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/create"
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
"github.com/gohugoio/hugo/parser"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ cmder = (*newSiteCmd)(nil)
|
|
||||||
|
|
||||||
type newSiteCmd struct {
|
|
||||||
configFormat string
|
|
||||||
|
|
||||||
*baseBuilderCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newNewSiteCmd() *newSiteCmd {
|
|
||||||
cc := &newSiteCmd{}
|
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "site [path]",
|
|
||||||
Short: "Create a new site (skeleton)",
|
|
||||||
Long: `Create a new site in the provided directory.
|
|
||||||
The new site will have the correct structure, but no content or theme yet.
|
|
||||||
Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
|
|
||||||
RunE: cc.newSite,
|
|
||||||
}
|
|
||||||
|
|
||||||
cmd.Flags().StringVarP(&cc.configFormat, "format", "f", "toml", "config file format")
|
|
||||||
cmd.Flags().Bool("force", false, "init inside non-empty directory")
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *newSiteCmd) doNewSite(fs *hugofs.Fs, basepath string, force bool) error {
|
|
||||||
archeTypePath := filepath.Join(basepath, "archetypes")
|
|
||||||
dirs := []string{
|
|
||||||
archeTypePath,
|
|
||||||
filepath.Join(basepath, "assets"),
|
|
||||||
filepath.Join(basepath, "content"),
|
|
||||||
filepath.Join(basepath, "data"),
|
|
||||||
filepath.Join(basepath, "layouts"),
|
|
||||||
filepath.Join(basepath, "static"),
|
|
||||||
filepath.Join(basepath, "themes"),
|
|
||||||
}
|
|
||||||
|
|
||||||
if exists, _ := helpers.Exists(basepath, fs.Source); exists {
|
|
||||||
if isDir, _ := helpers.IsDir(basepath, fs.Source); !isDir {
|
|
||||||
return errors.New(basepath + " already exists but not a directory")
|
|
||||||
}
|
|
||||||
|
|
||||||
isEmpty, _ := helpers.IsEmpty(basepath, fs.Source)
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case !isEmpty && !force:
|
|
||||||
return errors.New(basepath + " already exists and is not empty. See --force.")
|
|
||||||
|
|
||||||
case !isEmpty && force:
|
|
||||||
// TODO(bep) eventually rename this to hugo.
|
|
||||||
all := append(dirs, filepath.Join(basepath, "config."+n.configFormat))
|
|
||||||
for _, path := range all {
|
|
||||||
if exists, _ := helpers.Exists(path, fs.Source); exists {
|
|
||||||
return errors.New(path + " already exists")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, dir := range dirs {
|
|
||||||
if err := fs.Source.MkdirAll(dir, 0777); err != nil {
|
|
||||||
return fmt.Errorf("Failed to create dir: %w", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
createConfig(fs, basepath, n.configFormat)
|
|
||||||
|
|
||||||
// Create a default archetype file.
|
|
||||||
helpers.SafeWriteToDisk(filepath.Join(archeTypePath, "default.md"),
|
|
||||||
strings.NewReader(create.DefaultArchetypeTemplateTemplate), fs.Source)
|
|
||||||
|
|
||||||
jww.FEEDBACK.Printf("Congratulations! Your new Hugo site is created in %s.\n\n", basepath)
|
|
||||||
jww.FEEDBACK.Println(nextStepsText())
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// newSite creates a new Hugo site and initializes a structured Hugo directory.
|
|
||||||
func (n *newSiteCmd) newSite(cmd *cobra.Command, args []string) error {
|
|
||||||
if len(args) < 1 {
|
|
||||||
return newUserError("path needs to be provided")
|
|
||||||
}
|
|
||||||
|
|
||||||
createpath, err := filepath.Abs(filepath.Clean(args[0]))
|
|
||||||
if err != nil {
|
|
||||||
return newUserError(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
forceNew, _ := cmd.Flags().GetBool("force")
|
|
||||||
cfg := config.New()
|
|
||||||
cfg.Set("workingDir", createpath)
|
|
||||||
cfg.Set("publishDir", "public")
|
|
||||||
return n.doNewSite(hugofs.NewDefault(cfg), createpath, forceNew)
|
|
||||||
}
|
|
||||||
|
|
||||||
func createConfig(fs *hugofs.Fs, inpath string, kind string) (err error) {
|
|
||||||
in := map[string]string{
|
|
||||||
"baseURL": "http://example.org/",
|
|
||||||
"title": "My New Hugo Site",
|
|
||||||
"languageCode": "en-us",
|
|
||||||
}
|
|
||||||
|
|
||||||
var buf bytes.Buffer
|
|
||||||
err = parser.InterfaceToConfig(in, metadecoders.FormatFromString(kind), &buf)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return helpers.WriteToDisk(filepath.Join(inpath, "config."+kind), &buf, fs.Source)
|
|
||||||
}
|
|
||||||
|
|
||||||
func nextStepsText() string {
|
|
||||||
var nextStepsText bytes.Buffer
|
|
||||||
|
|
||||||
nextStepsText.WriteString(`Just a few more steps and you're ready to go:
|
|
||||||
|
|
||||||
1. Download a theme into the same-named folder.
|
|
||||||
Choose a theme from https://themes.gohugo.io/ or
|
|
||||||
create your own with the "hugo new theme <THEMENAME>" command.
|
|
||||||
2. Perhaps you want to add some content. You can add single files
|
|
||||||
with "hugo new `)
|
|
||||||
|
|
||||||
nextStepsText.WriteString(filepath.Join("<SECTIONNAME>", "<FILENAME>.<FORMAT>"))
|
|
||||||
|
|
||||||
nextStepsText.WriteString(`".
|
|
||||||
3. Start the built-in live server via "hugo server".
|
|
||||||
|
|
||||||
Visit https://gohugo.io/ for quickstart guide and full documentation.`)
|
|
||||||
|
|
||||||
return nextStepsText.String()
|
|
||||||
}
|
|
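Both the removed createConfig helper above and its replacement newSiteCreateConfig in the rewritten commands/new.go produce the seed configuration by serializing a small map with parser.InterfaceToConfig. A minimal standalone sketch of that step; the main wrapper is added for illustration, while the map keys, the format lookup and the two Hugo imports mirror the code in this diff:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/gohugoio/hugo/parser"
	"github.com/gohugoio/hugo/parser/metadecoders"
)

func main() {
	in := map[string]string{
		"baseURL":      "http://example.org/",
		"title":        "My New Hugo Site",
		"languageCode": "en-us",
	}

	var buf bytes.Buffer
	// "toml" corresponds to the --format flag; "yaml" and "json" work the same way.
	if err := parser.InterfaceToConfig(in, metadecoders.FormatFromString("toml"), &buf); err != nil {
		panic(err)
	}

	// buf now holds the contents written to hugo.<format> (formerly config.<format>).
	fmt.Print(buf.String())
}
```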
|
@ -1,176 +0,0 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/htime"
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ cmder = (*newThemeCmd)(nil)
|
|
||||||
|
|
||||||
type newThemeCmd struct {
|
|
||||||
*baseBuilderCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newNewThemeCmd() *newThemeCmd {
|
|
||||||
cc := &newThemeCmd{}
|
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "theme [name]",
|
|
||||||
Short: "Create a new theme",
|
|
||||||
Long: `Create a new theme (skeleton) called [name] in ./themes.
|
|
||||||
New theme is a skeleton. Please add content to the touched files. Add your
|
|
||||||
name to the copyright line in the license and adjust the theme.toml file
|
|
||||||
as you see fit.`,
|
|
||||||
RunE: cc.newTheme,
|
|
||||||
}
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
|
||||||
|
|
||||||
// newTheme creates a new Hugo theme template
|
|
||||||
func (n *newThemeCmd) newTheme(cmd *cobra.Command, args []string) error {
|
|
||||||
c, err := initializeConfig(false, false, false, &n.hugoBuilderCommon, n, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(args) < 1 {
|
|
||||||
return newUserError("theme name needs to be provided")
|
|
||||||
}
|
|
||||||
|
|
||||||
createpath := c.hugo().PathSpec.AbsPathify(filepath.Join(c.Cfg.GetString("themesDir"), args[0]))
|
|
||||||
jww.FEEDBACK.Println("Creating theme at", createpath)
|
|
||||||
|
|
||||||
cfg := c.DepsCfg
|
|
||||||
|
|
||||||
if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x {
|
|
||||||
return errors.New(createpath + " already exists")
|
|
||||||
}
|
|
||||||
|
|
||||||
mkdir(createpath, "layouts", "_default")
|
|
||||||
mkdir(createpath, "layouts", "partials")
|
|
||||||
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "index.html")
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "404.html")
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "list.html")
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "single.html")
|
|
||||||
|
|
||||||
baseofDefault := []byte(`<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
{{- partial "head.html" . -}}
|
|
||||||
<body>
|
|
||||||
{{- partial "header.html" . -}}
|
|
||||||
<div id="content">
|
|
||||||
{{- block "main" . }}{{- end }}
|
|
||||||
</div>
|
|
||||||
{{- partial "footer.html" . -}}
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
`)
|
|
||||||
err = helpers.WriteToDisk(filepath.Join(createpath, "layouts", "_default", "baseof.html"), bytes.NewReader(baseofDefault), cfg.Fs.Source)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "head.html")
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "header.html")
|
|
||||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "footer.html")
|
|
||||||
|
|
||||||
mkdir(createpath, "archetypes")
|
|
||||||
|
|
||||||
archDefault := []byte("+++\n+++\n")
|
|
||||||
|
|
||||||
err = helpers.WriteToDisk(filepath.Join(createpath, "archetypes", "default.md"), bytes.NewReader(archDefault), cfg.Fs.Source)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
mkdir(createpath, "static", "js")
|
|
||||||
mkdir(createpath, "static", "css")
|
|
||||||
|
|
||||||
by := []byte(`The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) ` + htime.Now().Format("2006") + ` YOUR_NAME_HERE
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
||||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
|
||||||
subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
||||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
||||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
||||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
||||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
`)
|
|
||||||
|
|
||||||
err = helpers.WriteToDisk(filepath.Join(createpath, "LICENSE"), bytes.NewReader(by), cfg.Fs.Source)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
n.createThemeMD(cfg.Fs, createpath)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (n *newThemeCmd) createThemeMD(fs *hugofs.Fs, inpath string) (err error) {
|
|
||||||
by := []byte(`# theme.toml template for a Hugo theme
|
|
||||||
# See https://github.com/gohugoio/hugoThemes#themetoml for an example
|
|
||||||
|
|
||||||
name = "` + strings.Title(helpers.MakeTitle(filepath.Base(inpath))) + `"
|
|
||||||
license = "MIT"
|
|
||||||
licenselink = "https://github.com/yourname/yourtheme/blob/master/LICENSE"
|
|
||||||
description = ""
|
|
||||||
homepage = "http://example.com/"
|
|
||||||
tags = []
|
|
||||||
features = []
|
|
||||||
min_version = "0.41.0"
|
|
||||||
|
|
||||||
[author]
|
|
||||||
name = ""
|
|
||||||
homepage = ""
|
|
||||||
|
|
||||||
# If porting an existing theme
|
|
||||||
[original]
|
|
||||||
name = ""
|
|
||||||
homepage = ""
|
|
||||||
repo = ""
|
|
||||||
`)
|
|
||||||
|
|
||||||
err = helpers.WriteToDisk(filepath.Join(inpath, "theme.toml"), bytes.NewReader(by), fs.Source)
|
|
||||||
if err != nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
//go:build nodeploy
|
|
||||||
// +build nodeploy
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ cmder = (*deployCmd)(nil)
|
|
||||||
|
|
||||||
// deployCmd supports deploying sites to Cloud providers.
|
|
||||||
type deployCmd struct {
|
|
||||||
*baseBuilderCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *commandsBuilder) newDeployCmd() *deployCmd {
|
|
||||||
cc := &deployCmd{}
|
|
||||||
|
|
||||||
cmd := &cobra.Command{
|
|
||||||
Use: "deploy",
|
|
||||||
Short: "Deploy your site to a Cloud provider.",
|
|
||||||
Long: `Deploy your site to a Cloud provider.
|
|
||||||
|
|
||||||
See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
|
|
||||||
documentation.
|
|
||||||
`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
return errors.New("build without HUGO_BUILD_TAGS=nodeploy to use this command")
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
|
|
||||||
|
|
||||||
return cc
|
|
||||||
}
|
|
|
@ -1,7 +1,4 @@
|
||||||
//go:build release
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
// +build release
|
|
||||||
|
|
||||||
// Copyright 2017-present The Hugo Authors. All rights reserved.
|
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -17,55 +14,39 @@
|
||||||
package commands
|
package commands
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"github.com/gohugoio/hugo/config"
|
"context"
|
||||||
|
|
||||||
|
"github.com/bep/simplecobra"
|
||||||
"github.com/gohugoio/hugo/releaser"
|
"github.com/gohugoio/hugo/releaser"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
var _ cmder = (*releaseCommandeer)(nil)
|
// Note: This is a command only meant for internal use and must be run
|
||||||
|
// via "go run -tags release main.go release" on the actual code base that is in the release.
|
||||||
|
func newReleaseCommand() simplecobra.Commander {
|
||||||
|
|
||||||
type releaseCommandeer struct {
|
var (
|
||||||
cmd *cobra.Command
|
step int
|
||||||
|
skipPush bool
|
||||||
|
try bool
|
||||||
|
)
|
||||||
|
|
||||||
step int
|
return &simpleCommand{
|
||||||
skipPush bool
|
name: "release",
|
||||||
try bool
|
short: "Release a new version of Hugo.",
|
||||||
}
|
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||||
|
rel, err := releaser.New(skipPush, try, step)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
func createReleaser() cmder {
|
return rel.Run()
|
||||||
// Note: This is a command only meant for internal use and must be run
|
},
|
||||||
// via "go run -tags release main.go release" on the actual code base that is in the release.
|
withc: func(cmd *cobra.Command) {
|
||||||
r := &releaseCommandeer{
|
cmd.Hidden = true
|
||||||
cmd: &cobra.Command{
|
cmd.PersistentFlags().BoolVarP(&skipPush, "skip-push", "", false, "skip pushing to remote")
|
||||||
Use: "release",
|
cmd.PersistentFlags().BoolVarP(&try, "try", "", false, "no changes")
|
||||||
Short: "Release a new version of Hugo.",
|
cmd.PersistentFlags().IntVarP(&step, "step", "", 0, "step to run (1: set new version 2: prepare next dev version)")
|
||||||
Hidden: true,
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
r.cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
|
||||||
return r.release()
|
|
||||||
}
|
|
||||||
|
|
||||||
r.cmd.PersistentFlags().BoolVarP(&r.skipPush, "skip-push", "", false, "skip pushing to remote")
|
|
||||||
r.cmd.PersistentFlags().BoolVarP(&r.try, "try", "", false, "no changes")
|
|
||||||
r.cmd.PersistentFlags().IntVarP(&r.step, "step", "", 0, "step to run (1: set new version 2: prepare next dev version)")
|
|
||||||
|
|
||||||
return r
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *releaseCommandeer) getCommand() *cobra.Command {
|
|
||||||
return c.cmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *releaseCommandeer) flagsToConfig(cfg config.Provider) {
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *releaseCommandeer) release() error {
|
|
||||||
rel, err := releaser.New(r.skipPush, r.try, r.step)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return rel.Run()
|
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,21 +0,0 @@
|
||||||
//go:build !release
|
|
||||||
// +build !release
|
|
||||||
|
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
func createReleaser() cmder {
|
|
||||||
return &nilCommand{}
|
|
||||||
}
|
|
1287
commands/server.go
1287
commands/server.go
File diff suppressed because it is too large
|
@ -1,31 +0,0 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"io"
|
|
||||||
"net/url"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/transform"
|
|
||||||
"github.com/gohugoio/hugo/transform/livereloadinject"
|
|
||||||
)
|
|
||||||
|
|
||||||
func injectLiveReloadScript(src io.Reader, baseURL url.URL) string {
|
|
||||||
var b bytes.Buffer
|
|
||||||
chain := transform.Chain{livereloadinject.New(baseURL)}
|
|
||||||
chain.Apply(&b, src)
|
|
||||||
|
|
||||||
return b.String()
|
|
||||||
}
|
|
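The deleted injectLiveReloadScript helper above wraps a transform.Chain containing livereloadinject.New. A small usage sketch; the sample HTML and the main wrapper are illustrative, and the ignored return value of Apply mirrors the removed code:

```go
package main

import (
	"bytes"
	"fmt"
	"net/url"
	"strings"

	"github.com/gohugoio/hugo/transform"
	"github.com/gohugoio/hugo/transform/livereloadinject"
)

func main() {
	u, _ := url.Parse("http://localhost:1313/")

	var b bytes.Buffer
	chain := transform.Chain{livereloadinject.New(*u)}
	// As in the removed helper, the result of Apply is not checked here.
	chain.Apply(&b, strings.NewReader("<html><head></head><body>Hi</body></html>"))

	fmt.Println(b.String()) // the HTML with the livereload script injected
}
```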
|
@ -1,429 +0,0 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/config"
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
|
||||||
"github.com/gohugoio/hugo/htesting"
|
|
||||||
"golang.org/x/sync/errgroup"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Issue 9518
|
|
||||||
func TestServerPanicOnConfigError(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
config := `
|
|
||||||
[markup]
|
|
||||||
[markup.highlight]
|
|
||||||
linenos='table'
|
|
||||||
`
|
|
||||||
|
|
||||||
r := runServerTest(c,
|
|
||||||
serverTestOptions{
|
|
||||||
config: config,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
c.Assert(r.err, qt.IsNotNil)
|
|
||||||
c.Assert(r.err.Error(), qt.Contains, "cannot parse 'Highlight.LineNos' as bool:")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestServer404(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
r := runServerTest(c,
|
|
||||||
serverTestOptions{
|
|
||||||
pathsToGet: []string{"this/does/not/exist"},
|
|
||||||
getNumHomes: 1,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
pr := r.pathsResults["this/does/not/exist"]
|
|
||||||
c.Assert(pr.statusCode, qt.Equals, http.StatusNotFound)
|
|
||||||
c.Assert(pr.body, qt.Contains, "404: 404 Page not found|Not Found.")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestServerPathEncodingIssues(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
// Issue 10287
|
|
||||||
c.Run("Unicode paths", func(c *qt.C) {
|
|
||||||
r := runServerTest(c,
|
|
||||||
serverTestOptions{
|
|
||||||
pathsToGet: []string{"hügö/"},
|
|
||||||
getNumHomes: 1,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
c.Assert(r.pathsResults["hügö/"].body, qt.Contains, "This is hügö")
|
|
||||||
})
|
|
||||||
|
|
||||||
// Issue 10314
|
|
||||||
c.Run("Windows multilingual 404", func(c *qt.C) {
|
|
||||||
config := `
|
|
||||||
baseURL = 'https://example.org/'
|
|
||||||
title = 'Hugo Forum Topic #40568'
|
|
||||||
|
|
||||||
defaultContentLanguageInSubdir = true
|
|
||||||
|
|
||||||
[languages.en]
|
|
||||||
contentDir = 'content/en'
|
|
||||||
languageCode = 'en-US'
|
|
||||||
languageName = 'English'
|
|
||||||
weight = 1
|
|
||||||
|
|
||||||
[languages.es]
|
|
||||||
contentDir = 'content/es'
|
|
||||||
languageCode = 'es-ES'
|
|
||||||
languageName = 'Espanol'
|
|
||||||
weight = 2
|
|
||||||
|
|
||||||
[server]
|
|
||||||
[[server.redirects]]
|
|
||||||
from = '/en/**'
|
|
||||||
to = '/en/404.html'
|
|
||||||
status = 404
|
|
||||||
|
|
||||||
[[server.redirects]]
|
|
||||||
from = '/es/**'
|
|
||||||
to = '/es/404.html'
|
|
||||||
status = 404
|
|
||||||
`
|
|
||||||
r := runServerTest(c,
|
|
||||||
serverTestOptions{
|
|
||||||
config: config,
|
|
||||||
pathsToGet: []string{"en/this/does/not/exist", "es/this/does/not/exist"},
|
|
||||||
getNumHomes: 1,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
pr1 := r.pathsResults["en/this/does/not/exist"]
|
|
||||||
pr2 := r.pathsResults["es/this/does/not/exist"]
|
|
||||||
c.Assert(pr1.statusCode, qt.Equals, http.StatusNotFound)
|
|
||||||
c.Assert(pr2.statusCode, qt.Equals, http.StatusNotFound)
|
|
||||||
c.Assert(pr1.body, qt.Contains, "404: 404 Page not found|Not Found.")
|
|
||||||
c.Assert(pr2.body, qt.Contains, "404: 404 Page not found|Not Found.")
|
|
||||||
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
func TestServerFlags(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
assertPublic := func(c *qt.C, r serverTestResult, renderStaticToDisk bool) {
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
c.Assert(r.homesContent[0], qt.Contains, "Environment: development")
|
|
||||||
c.Assert(r.publicDirnames["myfile.txt"], qt.Equals, renderStaticToDisk)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range []struct {
|
|
||||||
flag string
|
|
||||||
assert func(c *qt.C, r serverTestResult)
|
|
||||||
}{
|
|
||||||
{"", func(c *qt.C, r serverTestResult) {
|
|
||||||
assertPublic(c, r, false)
|
|
||||||
}},
|
|
||||||
{"--renderToDisk", func(c *qt.C, r serverTestResult) {
|
|
||||||
assertPublic(c, r, true)
|
|
||||||
}},
|
|
||||||
{"--renderStaticToDisk", func(c *qt.C, r serverTestResult) {
|
|
||||||
assertPublic(c, r, true)
|
|
||||||
}},
|
|
||||||
} {
|
|
||||||
c.Run(test.flag, func(c *qt.C) {
|
|
||||||
config := `
|
|
||||||
baseURL="https://example.org"
|
|
||||||
`
|
|
||||||
|
|
||||||
var args []string
|
|
||||||
if test.flag != "" {
|
|
||||||
args = strings.Split(test.flag, "=")
|
|
||||||
}
|
|
||||||
|
|
||||||
opts := serverTestOptions{
|
|
||||||
config: config,
|
|
||||||
args: args,
|
|
||||||
getNumHomes: 1,
|
|
||||||
}
|
|
||||||
|
|
||||||
r := runServerTest(c, opts)
|
|
||||||
|
|
||||||
test.assert(c, r)
|
|
||||||
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestServerBugs(t *testing.T) {
|
|
||||||
// TODO(bep) this is flaky on Windows on GH Actions.
|
|
||||||
if htesting.IsGitHubAction() && runtime.GOOS == "windows" {
|
|
||||||
t.Skip("skipping on windows")
|
|
||||||
}
|
|
||||||
c := qt.New(t)
|
|
||||||
|
|
||||||
for _, test := range []struct {
|
|
||||||
name string
|
|
||||||
config string
|
|
||||||
flag string
|
|
||||||
numservers int
|
|
||||||
assert func(c *qt.C, r serverTestResult)
|
|
||||||
}{
|
|
||||||
{"PostProcess, memory", "", "", 1, func(c *qt.C, r serverTestResult) {
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
c.Assert(r.homesContent[0], qt.Contains, "PostProcess: /foo.min.css")
|
|
||||||
}},
|
|
||||||
// Issue 9788
|
|
||||||
{"PostProcess, memory", "", "", 1, func(c *qt.C, r serverTestResult) {
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
c.Assert(r.homesContent[0], qt.Contains, "PostProcess: /foo.min.css")
|
|
||||||
}},
|
|
||||||
{"PostProcess, disk", "", "--renderToDisk", 1, func(c *qt.C, r serverTestResult) {
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
c.Assert(r.homesContent[0], qt.Contains, "PostProcess: /foo.min.css")
|
|
||||||
}},
|
|
||||||
// Issue 9901
|
|
||||||
{"Multihost", `
|
|
||||||
defaultContentLanguage = 'en'
|
|
||||||
[languages]
|
|
||||||
[languages.en]
|
|
||||||
baseURL = 'https://example.com'
|
|
||||||
title = 'My blog'
|
|
||||||
weight = 1
|
|
||||||
[languages.fr]
|
|
||||||
baseURL = 'https://example.fr'
|
|
||||||
title = 'Mon blogue'
|
|
||||||
weight = 2
|
|
||||||
`, "", 2, func(c *qt.C, r serverTestResult) {
|
|
||||||
c.Assert(r.err, qt.IsNil)
|
|
||||||
for i, s := range []string{"My blog", "Mon blogue"} {
|
|
||||||
c.Assert(r.homesContent[i], qt.Contains, s)
|
|
||||||
}
|
|
||||||
}},
|
|
||||||
} {
|
|
||||||
c.Run(test.name, func(c *qt.C) {
|
|
||||||
if test.config == "" {
|
|
||||||
test.config = `
|
|
||||||
baseURL="https://example.org"
|
|
||||||
`
|
|
||||||
}
|
|
||||||
|
|
||||||
var args []string
|
|
||||||
if test.flag != "" {
|
|
||||||
args = strings.Split(test.flag, "=")
|
|
||||||
}
|
|
||||||
|
|
||||||
opts := serverTestOptions{
|
|
||||||
config: test.config,
|
|
||||||
getNumHomes: test.numservers,
|
|
||||||
pathsToGet: []string{"this/does/not/exist"},
|
|
||||||
args: args,
|
|
||||||
}
|
|
||||||
|
|
||||||
r := runServerTest(c, opts)
|
|
||||||
pr := r.pathsResults["this/does/not/exist"]
|
|
||||||
c.Assert(pr.statusCode, qt.Equals, http.StatusNotFound)
|
|
||||||
c.Assert(pr.body, qt.Contains, "404: 404 Page not found|Not Found.")
|
|
||||||
test.assert(c, r)
|
|
||||||
|
|
||||||
})
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
type serverTestResult struct {
|
|
||||||
err error
|
|
||||||
homesContent []string
|
|
||||||
content404 string
|
|
||||||
publicDirnames map[string]bool
|
|
||||||
pathsResults map[string]pathResult
|
|
||||||
}
|
|
||||||
|
|
||||||
type pathResult struct {
|
|
||||||
statusCode int
|
|
||||||
body string
|
|
||||||
}
|
|
||||||
|
|
||||||
type serverTestOptions struct {
|
|
||||||
getNumHomes int
|
|
||||||
config string
|
|
||||||
pathsToGet []string
|
|
||||||
args []string
|
|
||||||
}
|
|
||||||
|
|
||||||
func runServerTest(c *qt.C, opts serverTestOptions) serverTestResult {
|
|
||||||
dir := createSimpleTestSite(c, testSiteConfig{configTOML: opts.config})
|
|
||||||
result := serverTestResult{
|
|
||||||
publicDirnames: make(map[string]bool),
|
|
||||||
pathsResults: make(map[string]pathResult),
|
|
||||||
}
|
|
||||||
|
|
||||||
sp, err := helpers.FindAvailablePort()
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
port := sp.Port
|
|
||||||
|
|
||||||
defer func() {
|
|
||||||
os.RemoveAll(dir)
|
|
||||||
}()
|
|
||||||
|
|
||||||
stop := make(chan bool)
|
|
||||||
|
|
||||||
b := newCommandsBuilder()
|
|
||||||
scmd := b.newServerCmdSignaled(stop)
|
|
||||||
|
|
||||||
cmd := scmd.getCommand()
|
|
||||||
args := append([]string{"-s=" + dir, fmt.Sprintf("-p=%d", port)}, opts.args...)
|
|
||||||
cmd.SetArgs(args)
|
|
||||||
|
|
||||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
|
||||||
defer cancel()
|
|
||||||
wg, ctx := errgroup.WithContext(ctx)
|
|
||||||
|
|
||||||
wg.Go(func() error {
|
|
||||||
_, err := cmd.ExecuteC()
|
|
||||||
return err
|
|
||||||
})
|
|
||||||
|
|
||||||
if opts.getNumHomes > 0 {
|
|
||||||
// Esp. on slow CI machines, we need to wait a little before the web
|
|
||||||
// server is ready.
|
|
||||||
wait := 567 * time.Millisecond
|
|
||||||
if os.Getenv("CI") != "" {
|
|
||||||
wait = 2 * time.Second
|
|
||||||
}
|
|
||||||
time.Sleep(wait)
|
|
||||||
result.homesContent = make([]string, opts.getNumHomes)
|
|
||||||
for i := 0; i < opts.getNumHomes; i++ {
|
|
||||||
func() {
|
|
||||||
resp, err := http.Get(fmt.Sprintf("http://localhost:%d/", port+i))
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(resp.StatusCode, qt.Equals, http.StatusOK)
|
|
||||||
if err == nil {
|
|
||||||
defer resp.Body.Close()
|
|
||||||
result.homesContent[i] = helpers.ReaderToString(resp.Body)
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, path := range opts.pathsToGet {
|
|
||||||
func() {
|
|
||||||
resp, err := http.Get(fmt.Sprintf("http://localhost:%d/%s", port, path))
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
pr := pathResult{
|
|
||||||
statusCode: resp.StatusCode,
|
|
||||||
}
|
|
||||||
|
|
||||||
if err == nil {
|
|
||||||
defer resp.Body.Close()
|
|
||||||
pr.body = helpers.ReaderToString(resp.Body)
|
|
||||||
}
|
|
||||||
result.pathsResults[path] = pr
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
|
|
||||||
time.Sleep(1 * time.Second)
|
|
||||||
|
|
||||||
select {
|
|
||||||
case <-stop:
|
|
||||||
case stop <- true:
|
|
||||||
}
|
|
||||||
|
|
||||||
pubFiles, err := os.ReadDir(filepath.Join(dir, "public"))
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
for _, f := range pubFiles {
|
|
||||||
result.publicDirnames[f.Name()] = true
|
|
||||||
}
|
|
||||||
|
|
||||||
result.err = wg.Wait()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestFixURL(t *testing.T) {
|
|
||||||
type data struct {
|
|
||||||
TestName string
|
|
||||||
CLIBaseURL string
|
|
||||||
CfgBaseURL string
|
|
||||||
AppendPort bool
|
|
||||||
Port int
|
|
||||||
Result string
|
|
||||||
}
|
|
||||||
tests := []data{
|
|
||||||
{"Basic http localhost", "", "http://foo.com", true, 1313, "http://localhost:1313/"},
|
|
||||||
{"Basic https production, http localhost", "", "https://foo.com", true, 1313, "http://localhost:1313/"},
|
|
||||||
{"Basic subdir", "", "http://foo.com/bar", true, 1313, "http://localhost:1313/bar/"},
|
|
||||||
{"Basic production", "http://foo.com", "http://foo.com", false, 80, "http://foo.com/"},
|
|
||||||
{"Production subdir", "http://foo.com/bar", "http://foo.com/bar", false, 80, "http://foo.com/bar/"},
|
|
||||||
{"No http", "", "foo.com", true, 1313, "//localhost:1313/"},
|
|
||||||
{"Override configured port", "", "foo.com:2020", true, 1313, "//localhost:1313/"},
|
|
||||||
{"No http production", "foo.com", "foo.com", false, 80, "//foo.com/"},
|
|
||||||
{"No http production with port", "foo.com", "foo.com", true, 2020, "//foo.com:2020/"},
|
|
||||||
{"No config", "", "", true, 1313, "//localhost:1313/"},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.TestName, func(t *testing.T) {
|
|
||||||
b := newCommandsBuilder()
|
|
||||||
s := b.newServerCmd()
|
|
||||||
v := config.NewWithTestDefaults()
|
|
||||||
baseURL := test.CLIBaseURL
|
|
||||||
v.Set("baseURL", test.CfgBaseURL)
|
|
||||||
s.serverAppend = test.AppendPort
|
|
||||||
s.serverPort = test.Port
|
|
||||||
result, err := s.fixURL(v, baseURL, s.serverPort)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Unexpected error %s", err)
|
|
||||||
}
|
|
||||||
if result != test.Result {
|
|
||||||
t.Errorf("Expected %q, got %q", test.Result, result)
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRemoveErrorPrefixFromLog(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
content := `ERROR 2018/10/07 13:11:12 Error while rendering "home": template: _default/baseof.html:4:3: executing "main" at <partial "logo" .>: error calling partial: template: partials/logo.html:5:84: executing "partials/logo.html" at <$resized.AHeight>: can't evaluate field AHeight in type *resource.Image
|
|
||||||
ERROR 2018/10/07 13:11:12 Rebuild failed: logged 1 error(s)
|
|
||||||
`
|
|
||||||
|
|
||||||
withoutError := removeErrorPrefixFromLog(content)
|
|
||||||
|
|
||||||
c.Assert(strings.Contains(withoutError, "ERROR"), qt.Equals, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
func isWindowsCI() bool {
|
|
||||||
return runtime.GOOS == "windows" && os.Getenv("CI") != ""
|
|
||||||
}
|
|
|
@ -1,129 +0,0 @@
|
||||||
// Copyright 2017 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"path/filepath"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/herrors"
|
|
||||||
"github.com/gohugoio/hugo/hugolib/filesystems"
|
|
||||||
|
|
||||||
"github.com/fsnotify/fsnotify"
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
|
||||||
"github.com/spf13/fsync"
|
|
||||||
)
|
|
||||||
|
|
||||||
type staticSyncer struct {
|
|
||||||
c *commandeer
|
|
||||||
}
|
|
||||||
|
|
||||||
func newStaticSyncer(c *commandeer) (*staticSyncer, error) {
|
|
||||||
return &staticSyncer{c: c}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *staticSyncer) isStatic(filename string) bool {
|
|
||||||
return s.c.hugo().BaseFs.SourceFilesystems.IsStatic(filename)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
|
|
||||||
c := s.c
|
|
||||||
|
|
||||||
syncFn := func(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
|
|
||||||
publishDir := helpers.FilePathSeparator
|
|
||||||
|
|
||||||
if sourceFs.PublishFolder != "" {
|
|
||||||
publishDir = filepath.Join(publishDir, sourceFs.PublishFolder)
|
|
||||||
}
|
|
||||||
|
|
||||||
syncer := fsync.NewSyncer()
|
|
||||||
syncer.NoTimes = c.Cfg.GetBool("noTimes")
|
|
||||||
syncer.NoChmod = c.Cfg.GetBool("noChmod")
|
|
||||||
syncer.ChmodFilter = chmodFilter
|
|
||||||
syncer.SrcFs = sourceFs.Fs
|
|
||||||
syncer.DestFs = c.Fs.PublishDir
|
|
||||||
if c.renderStaticToDisk {
|
|
||||||
syncer.DestFs = c.Fs.PublishDirStatic
|
|
||||||
}
|
|
||||||
|
|
||||||
// prevent spamming the log on changes
|
|
||||||
logger := helpers.NewDistinctErrorLogger()
|
|
||||||
|
|
||||||
for _, ev := range staticEvents {
|
|
||||||
// Due to our approach of layering both directories and the content's rendered output
|
|
||||||
// into one we can't accurately remove a file not in one of the source directories.
|
|
||||||
// If a file is in the local static dir and also in the theme static dir and we remove
|
|
||||||
// it from one of those locations we expect it to still exist in the destination
|
|
||||||
//
|
|
||||||
// If Hugo generates a file (from the content dir) over a static file
|
|
||||||
// the content generated file should take precedence.
|
|
||||||
//
|
|
||||||
// Because we are now watching and handling individual events it is possible that a static
|
|
||||||
// event that occupies the same path as a content generated file will take precedence
|
|
||||||
// until a regeneration of the content takes place.
|
|
||||||
//
|
|
||||||
// Hugo assumes that these cases are very rare and will permit this bad behavior
|
|
||||||
// The alternative is to track every single file and which pipeline rendered it
|
|
||||||
// and then to handle conflict resolution on every event.
|
|
||||||
|
|
||||||
fromPath := ev.Name
|
|
||||||
|
|
||||||
relPath, found := sourceFs.MakePathRelative(fromPath)
|
|
||||||
|
|
||||||
if !found {
|
|
||||||
// Not member of this virtual host.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remove || rename is harder and will require an assumption.
|
|
||||||
// Hugo takes the following approach:
|
|
||||||
// If the static file exists in any of the static source directories after this event
|
|
||||||
// Hugo will re-sync it.
|
|
||||||
// If it does not exist in all of the static directories Hugo will remove it.
|
|
||||||
//
|
|
||||||
// This assumes that Hugo has not generated content on top of a static file and then removed
|
|
||||||
// the source of that static file. In this case Hugo will incorrectly remove that file
|
|
||||||
// from the published directory.
|
|
||||||
if ev.Op&fsnotify.Rename == fsnotify.Rename || ev.Op&fsnotify.Remove == fsnotify.Remove {
|
|
||||||
if _, err := sourceFs.Fs.Stat(relPath); herrors.IsNotExist(err) {
|
|
||||||
// If file doesn't exist in any static dir, remove it
|
|
||||||
logger.Println("File no longer exists in static dir, removing", relPath)
|
|
||||||
_ = c.Fs.PublishDirStatic.RemoveAll(relPath)
|
|
||||||
|
|
||||||
} else if err == nil {
|
|
||||||
// If file still exists, sync it
|
|
||||||
logger.Println("Syncing", relPath, "to", publishDir)
|
|
||||||
|
|
||||||
if err := syncer.Sync(relPath, relPath); err != nil {
|
|
||||||
c.logger.Errorln(err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
c.logger.Errorln(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// For all other event operations Hugo will sync static.
|
|
||||||
logger.Println("Syncing", relPath, "to", publishDir)
|
|
||||||
if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
|
|
||||||
c.logger.Errorln(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := c.doWithPublishDirs(syncFn)
|
|
||||||
return err
|
|
||||||
}
|
|
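The removed static syncer above drives spf13/fsync with afero filesystems: it configures SrcFs/DestFs, NoTimes and NoChmod, then calls Sync(dst, src) per changed path. A minimal sketch of that API under stated assumptions; the in-memory filesystems and the /static and /public paths are made up for illustration:

```go
package main

import (
	"log"

	"github.com/spf13/afero"
	"github.com/spf13/fsync"
)

func main() {
	src := afero.NewMemMapFs()
	dst := afero.NewMemMapFs()
	if err := afero.WriteFile(src, "/static/site.css", []byte("body{}"), 0o644); err != nil {
		log.Fatal(err)
	}

	syncer := fsync.NewSyncer()
	syncer.NoTimes = true // corresponds to the noTimes option read above
	syncer.SrcFs = src
	syncer.DestFs = dst

	// Sync(dst, src): make /public on DestFs mirror /static on SrcFs.
	if err := syncer.Sync("/public", "/static"); err != nil {
		log.Fatal(err)
	}
}
```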
|
@ -1,44 +0,0 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package commands
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/gohugoio/hugo/common/hugo"
|
|
||||||
"github.com/spf13/cobra"
|
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
|
||||||
|
|
||||||
var _ cmder = (*versionCmd)(nil)
|
|
||||||
|
|
||||||
type versionCmd struct {
|
|
||||||
*baseCmd
|
|
||||||
}
|
|
||||||
|
|
||||||
func newVersionCmd() *versionCmd {
|
|
||||||
return &versionCmd{
|
|
||||||
newBaseCmd(&cobra.Command{
|
|
||||||
Use: "version",
|
|
||||||
Short: "Print the version number of Hugo",
|
|
||||||
Long: `All software has versions. This is Hugo's.`,
|
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
|
||||||
printHugoVersion()
|
|
||||||
return nil
|
|
||||||
},
|
|
||||||
}),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func printHugoVersion() {
|
|
||||||
jww.FEEDBACK.Println(hugo.BuildVersionString())
|
|
||||||
}
78
commands/xcommand_template.go
Normal file
@@ -0,0 +1,78 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"context"
	"fmt"

	"github.com/bep/simplecobra"
	"github.com/spf13/cobra"
)

func newSimpleTemplateCommand() simplecobra.Commander {
	return &simpleCommand{
		name: "template",
		run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
			return nil
		},
		withc: func(cmd *cobra.Command) {
		},
	}
}

func newTemplateCommand() *templateCommand {
	return &templateCommand{
		commands: []simplecobra.Commander{},
	}
}

type templateCommand struct {
	r *rootCommand

	commands []simplecobra.Commander
}

func (c *templateCommand) Commands() []simplecobra.Commander {
	return c.commands
}

func (c *templateCommand) Name() string {
	return "template"
}

func (c *templateCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
	conf, err := c.r.ConfigFromProvider(c.r.configVersionID.Load(), flagsToCfg(cd, nil))
	if err != nil {
		return err
	}
	fmt.Println("templateCommand.Run", conf)

	return nil
}

func (c *templateCommand) WithCobraCommand(cmd *cobra.Command) error {
	cmd.Short = "Print the site configuration"
	cmd.Long = `Print the site configuration, both default and custom settings.`
	return nil
}

func (c *templateCommand) Init(cd, runner *simplecobra.Commandeer) error {
	c.r = cd.Root.Command.(*rootCommand)
	return nil
}
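The simpleCommand form above is the lightweight way to declare a subcommand after the simplecobra migration. A minimal sketch of another command written in the same style (the "envinfo" name and its output are illustrative only, not part of this commit):

func newSimpleEnvInfoCommand() simplecobra.Commander {
	return &simpleCommand{
		name: "envinfo",
		run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
			// Illustrative body: print something cheap and exit.
			fmt.Println("hugo", hugo.CurrentVersion)
			return nil
		},
		withc: func(cmd *cobra.Command) {
			cmd.Short = "Print basic environment information"
		},
	}
}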
57
common/hstrings/strings.go
Normal file
@@ -0,0 +1,57 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hstrings

import (
	"fmt"
	"strings"

	"github.com/gohugoio/hugo/compare"
)

var _ compare.Eqer = StringEqualFold("")

// StringEqualFold is a string that implements the compare.Eqer interface and considers
// two strings equal if they are equal when folded to lower case.
// The compare.Eqer interface is used in Hugo to compare values in templates (e.g. using the eq template function).
type StringEqualFold string

func (s StringEqualFold) EqualFold(s2 string) bool {
	return strings.EqualFold(string(s), s2)
}

func (s StringEqualFold) String() string {
	return string(s)
}

func (s StringEqualFold) Eq(s2 any) bool {
	switch ss := s2.(type) {
	case string:
		return s.EqualFold(ss)
	case fmt.Stringer:
		return s.EqualFold(ss.String())
	}

	return false
}

// EqualAny returns whether a string is equal to any of the given strings.
func EqualAny(a string, b ...string) bool {
	for _, s := range b {
		if a == s {
			return true
		}
	}
	return false
}
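A minimal usage sketch of the new hstrings helpers shown above (the example values are illustrative only):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hstrings"
)

func main() {
	// StringEqualFold compares case-insensitively, both directly and via Eq (compare.Eqer).
	format := hstrings.StringEqualFold("HTML")
	fmt.Println(format.EqualFold("html")) // true
	fmt.Println(format.Eq("hTmL"))        // true

	// EqualAny reports whether the first string equals any of the rest (case-sensitive).
	fmt.Println(hstrings.EqualAny("rss", "html", "rss", "json")) // true
}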
@@ -1,4 +1,4 @@
-// Copyright 2021 The Hugo Authors. All rights reserved.
+// Copyright 2023 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package config
+package hstrings
 
 import (
 	"testing"
@@ -19,22 +19,18 @@ import (
 	qt "github.com/frankban/quicktest"
 )
 
-func TestCompositeConfig(t *testing.T) {
+func TestStringEqualFold(t *testing.T) {
 	c := qt.New(t)
 
-	c.Run("Set and get", func(c *qt.C) {
-		base, layer := New(), New()
-		cfg := NewCompositeConfig(base, layer)
-
-		layer.Set("a1", "av")
-		base.Set("b1", "bv")
-		cfg.Set("c1", "cv")
-
-		c.Assert(cfg.Get("a1"), qt.Equals, "av")
-		c.Assert(cfg.Get("b1"), qt.Equals, "bv")
-		c.Assert(cfg.Get("c1"), qt.Equals, "cv")
-		c.Assert(cfg.IsSet("c1"), qt.IsTrue)
-		c.Assert(layer.IsSet("c1"), qt.IsTrue)
-		c.Assert(base.IsSet("c1"), qt.IsFalse)
-	})
+	s1 := "A"
+	s2 := "a"
+
+	c.Assert(StringEqualFold(s1).EqualFold(s2), qt.Equals, true)
+	c.Assert(StringEqualFold(s1).EqualFold(s1), qt.Equals, true)
+	c.Assert(StringEqualFold(s2).EqualFold(s1), qt.Equals, true)
+	c.Assert(StringEqualFold(s2).EqualFold(s2), qt.Equals, true)
+	c.Assert(StringEqualFold(s1).EqualFold("b"), qt.Equals, false)
+	c.Assert(StringEqualFold(s1).Eq(s2), qt.Equals, true)
+	c.Assert(StringEqualFold(s1).Eq("b"), qt.Equals, false)
+
 }
@@ -14,6 +14,7 @@
 package htime
 
 import (
+	"log"
 	"strings"
 	"time"
 
@@ -163,3 +164,11 @@ func Since(t time.Time) time.Duration {
 type AsTimeProvider interface {
 	AsTime(zone *time.Location) time.Time
 }
+
+// StopWatch is a simple helper to measure time during development.
+func StopWatch(name string) func() {
+	start := time.Now()
+	return func() {
+		log.Printf("StopWatch %q took %s", name, time.Since(start))
+	}
+}
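A sketch of how the new htime.StopWatch helper could be used during development (the sleep stands in for real work and is illustrative only):

package main

import (
	"time"

	"github.com/gohugoio/hugo/common/htime"
)

func main() {
	// StopWatch returns a func that logs the elapsed time when called,
	// so the usual pattern is to defer the returned func.
	defer htime.StopWatch("build section")()

	time.Sleep(200 * time.Millisecond) // stand-in for real work
}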
@@ -46,8 +46,8 @@ var (
 	vendorInfo string
 )
 
-// Info contains information about the current Hugo environment
-type Info struct {
+// HugoInfo contains information about the current Hugo environment
+type HugoInfo struct {
 	CommitHash string
 	BuildDate  string
@@ -64,30 +64,30 @@ type Info struct {
 }
 
 // Version returns the current version as a comparable version string.
-func (i Info) Version() VersionString {
+func (i HugoInfo) Version() VersionString {
 	return CurrentVersion.Version()
 }
 
 // Generator a Hugo meta generator HTML tag.
-func (i Info) Generator() template.HTML {
+func (i HugoInfo) Generator() template.HTML {
 	return template.HTML(fmt.Sprintf(`<meta name="generator" content="Hugo %s">`, CurrentVersion.String()))
 }
 
-func (i Info) IsProduction() bool {
+func (i HugoInfo) IsProduction() bool {
 	return i.Environment == EnvironmentProduction
 }
 
-func (i Info) IsExtended() bool {
+func (i HugoInfo) IsExtended() bool {
 	return IsExtended
 }
 
 // Deps gets a list of dependencies for this Hugo build.
-func (i Info) Deps() []*Dependency {
+func (i HugoInfo) Deps() []*Dependency {
 	return i.deps
 }
 
 // NewInfo creates a new Hugo Info object.
-func NewInfo(environment string, deps []*Dependency) Info {
+func NewInfo(environment string, deps []*Dependency) HugoInfo {
 	if environment == "" {
 		environment = EnvironmentProduction
 	}
@@ -104,7 +104,7 @@ func NewInfo(environment string, deps []*Dependency) Info {
 		goVersion = bi.GoVersion
 	}
 
-	return Info{
+	return HugoInfo{
 		CommitHash:  commitHash,
 		BuildDate:   buildDate,
 		Environment: environment,
@@ -115,7 +115,7 @@ func NewInfo(environment string, deps []*Dependency) Info {
 
 // GetExecEnviron creates and gets the common os/exec environment used in the
 // external programs we interact with via os/exec, e.g. postcss.
-func GetExecEnviron(workDir string, cfg config.Provider, fs afero.Fs) []string {
+func GetExecEnviron(workDir string, cfg config.AllProvider, fs afero.Fs) []string {
 	var env []string
 	nodepath := filepath.Join(workDir, "node_modules")
 	if np := os.Getenv("NODE_PATH"); np != "" {
@@ -123,10 +123,9 @@ func GetExecEnviron(workDir string, cfg config.Provider, fs afero.Fs) []string {
 	}
 	config.SetEnvVars(&env, "NODE_PATH", nodepath)
 	config.SetEnvVars(&env, "PWD", workDir)
-	config.SetEnvVars(&env, "HUGO_ENVIRONMENT", cfg.GetString("environment"))
-	config.SetEnvVars(&env, "HUGO_ENV", cfg.GetString("environment"))
-	config.SetEnvVars(&env, "HUGO_PUBLISHDIR", filepath.Join(workDir, cfg.GetString("publishDirOrig")))
+	config.SetEnvVars(&env, "HUGO_ENVIRONMENT", cfg.Environment())
+	config.SetEnvVars(&env, "HUGO_ENV", cfg.Environment())
+	config.SetEnvVars(&env, "HUGO_PUBLISHDIR", filepath.Join(workDir, cfg.BaseConfig().PublishDir))
 
 	if fs != nil {
 		fis, err := afero.ReadDir(fs, files.FolderJSConfig)
|
|
|
@ -15,7 +15,6 @@ package loggers
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// IgnorableLogger is a logger that ignores certain log statements.
|
// IgnorableLogger is a logger that ignores certain log statements.
|
||||||
|
@ -31,14 +30,13 @@ type ignorableLogger struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewIgnorableLogger wraps the given logger and ignores the log statement IDs given.
|
// NewIgnorableLogger wraps the given logger and ignores the log statement IDs given.
|
||||||
func NewIgnorableLogger(logger Logger, statements ...string) IgnorableLogger {
|
func NewIgnorableLogger(logger Logger, statements map[string]bool) IgnorableLogger {
|
||||||
statementsSet := make(map[string]bool)
|
if statements == nil {
|
||||||
for _, s := range statements {
|
statements = make(map[string]bool)
|
||||||
statementsSet[strings.ToLower(s)] = true
|
|
||||||
}
|
}
|
||||||
return ignorableLogger{
|
return ignorableLogger{
|
||||||
Logger: logger,
|
Logger: logger,
|
||||||
statements: statementsSet,
|
statements: statements,
|
||||||
}
|
}
|
||||||
}
|
}
|
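A sketch of the changed NewIgnorableLogger signature, which now takes a pre-built map of statement IDs instead of a variadic list; the base logger argument and the "error-remote-getjson" ID are illustrative assumptions, and since the constructor no longer lower-cases the IDs itself, the caller is presumably expected to provide them lower-cased:

// quietLogger wraps base so that log statements with the given IDs are ignored.
func quietLogger(base loggers.Logger) loggers.IgnorableLogger {
	ignored := map[string]bool{
		"error-remote-getjson": true, // illustrative ID, already lower case
	}
	return loggers.NewIgnorableLogger(base, ignored)
}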
@@ -43,25 +43,25 @@ func ToStringMapE(in any) (map[string]any, error) {
 // ToParamsAndPrepare converts in to Params and prepares it for use.
 // If in is nil, an empty map is returned.
 // See PrepareParams.
-func ToParamsAndPrepare(in any) (Params, bool) {
+func ToParamsAndPrepare(in any) (Params, error) {
 	if types.IsNil(in) {
-		return Params{}, true
+		return Params{}, nil
 	}
 	m, err := ToStringMapE(in)
 	if err != nil {
-		return nil, false
+		return nil, err
 	}
 	PrepareParams(m)
-	return m, true
+	return m, nil
 }
 
 // MustToParamsAndPrepare calls ToParamsAndPrepare and panics if it fails.
 func MustToParamsAndPrepare(in any) Params {
-	if p, ok := ToParamsAndPrepare(in); ok {
-		return p
-	} else {
-		panic(fmt.Sprintf("cannot convert %T to maps.Params", in))
+	p, err := ToParamsAndPrepare(in)
+	if err != nil {
+		panic(fmt.Sprintf("cannot convert %T to maps.Params: %s", in, err))
 	}
+	return p
 }
 
 // ToStringMap converts in to map[string]interface{}.
@@ -96,6 +96,8 @@ func ToSliceStringMap(in any) ([]map[string]any, error) {
 	switch v := in.(type) {
 	case []map[string]any:
 		return v, nil
+	case Params:
+		return []map[string]any{v}, nil
 	case []any:
 		var s []map[string]any
 		for _, entry := range v {
@@ -123,6 +125,23 @@ func LookupEqualFold[T any | string](m map[string]T, key string) (T, bool) {
 	return s, false
 }
 
+// MergeShallow merges src into dst, but only if the key does not already exist in dst.
+// The keys are compared case insensitively.
+func MergeShallow(dst, src map[string]any) {
+	for k, v := range src {
+		found := false
+		for dk := range dst {
+			if strings.EqualFold(dk, k) {
+				found = true
+				break
+			}
+		}
+		if !found {
+			dst[k] = v
+		}
+	}
+}
+
 type keyRename struct {
 	pattern glob.Glob
 	newKey  string
@@ -116,11 +116,11 @@ func TestToSliceStringMap(t *testing.T) {
 
 func TestToParamsAndPrepare(t *testing.T) {
 	c := qt.New(t)
-	_, ok := ToParamsAndPrepare(map[string]any{"A": "av"})
-	c.Assert(ok, qt.IsTrue)
+	_, err := ToParamsAndPrepare(map[string]any{"A": "av"})
+	c.Assert(err, qt.IsNil)
 
-	params, ok := ToParamsAndPrepare(nil)
-	c.Assert(ok, qt.IsTrue)
+	params, err := ToParamsAndPrepare(nil)
+	c.Assert(err, qt.IsNil)
 	c.Assert(params, qt.DeepEquals, Params{})
 }
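A sketch combining the changed ToParamsAndPrepare signature (it now returns an error) with the new case-insensitive MergeShallow; the map values are illustrative only:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	// ToParamsAndPrepare lower-cases the keys and now reports failures as an error.
	params, err := maps.ToParamsAndPrepare(map[string]any{"Title": "Hello"})
	if err != nil {
		panic(err)
	}

	defaults := map[string]any{"title": "Default", "author": "Jane"}
	// MergeShallow only copies keys that are not already present, compared case-insensitively,
	// so "title" is kept as "Hello" and only "author" is added.
	maps.MergeShallow(params, defaults)
	fmt.Println(params) // map[author:Jane title:Hello]
}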
|
|
@ -23,30 +23,37 @@ import (
|
||||||
// Params is a map where all keys are lower case.
|
// Params is a map where all keys are lower case.
|
||||||
type Params map[string]any
|
type Params map[string]any
|
||||||
|
|
||||||
// Get does a lower case and nested search in this map.
|
// KeyParams is an utility struct for the WalkParams method.
|
||||||
|
type KeyParams struct {
|
||||||
|
Key string
|
||||||
|
Params Params
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetNested does a lower case and nested search in this map.
|
||||||
// It will return nil if none found.
|
// It will return nil if none found.
|
||||||
func (p Params) Get(indices ...string) any {
|
// Make all of these methods internal somehow.
|
||||||
|
func (p Params) GetNested(indices ...string) any {
|
||||||
v, _, _ := getNested(p, indices)
|
v, _, _ := getNested(p, indices)
|
||||||
return v
|
return v
|
||||||
}
|
}
|
||||||
|
|
||||||
// Set overwrites values in p with values in pp for common or new keys.
|
// Set overwrites values in dst with values in src for common or new keys.
|
||||||
// This is done recursively.
|
// This is done recursively.
|
||||||
func (p Params) Set(pp Params) {
|
func SetParams(dst, src Params) {
|
||||||
for k, v := range pp {
|
for k, v := range src {
|
||||||
vv, found := p[k]
|
vv, found := dst[k]
|
||||||
if !found {
|
if !found {
|
||||||
p[k] = v
|
dst[k] = v
|
||||||
} else {
|
} else {
|
||||||
switch vvv := vv.(type) {
|
switch vvv := vv.(type) {
|
||||||
case Params:
|
case Params:
|
||||||
if pv, ok := v.(Params); ok {
|
if pv, ok := v.(Params); ok {
|
||||||
vvv.Set(pv)
|
SetParams(vvv, pv)
|
||||||
} else {
|
} else {
|
||||||
p[k] = v
|
dst[k] = v
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
p[k] = v
|
dst[k] = v
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -70,18 +77,17 @@ func (p Params) IsZero() bool {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge transfers values from pp to p for new keys.
|
// MergeParamsWithStrategy transfers values from src to dst for new keys using the merge strategy given.
|
||||||
// This is done recursively.
|
// This is done recursively.
|
||||||
func (p Params) Merge(pp Params) {
|
func MergeParamsWithStrategy(strategy string, dst, src Params) {
|
||||||
p.merge("", pp)
|
dst.merge(ParamsMergeStrategy(strategy), src)
|
||||||
}
|
}
|
||||||
|
|
||||||
// MergeRoot transfers values from pp to p for new keys where p is the
|
// MergeParamsWithStrategy transfers values from src to dst for new keys using the merge encoded in dst.
|
||||||
// root of the tree.
|
|
||||||
// This is done recursively.
|
// This is done recursively.
|
||||||
func (p Params) MergeRoot(pp Params) {
|
func MergeParams(dst, src Params) {
|
||||||
ms, _ := p.GetMergeStrategy()
|
ms, _ := dst.GetMergeStrategy()
|
||||||
p.merge(ms, pp)
|
dst.merge(ms, src)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p Params) merge(ps ParamsMergeStrategy, pp Params) {
|
func (p Params) merge(ps ParamsMergeStrategy, pp Params) {
|
||||||
|
@ -116,6 +122,7 @@ func (p Params) merge(ps ParamsMergeStrategy, pp Params) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For internal use.
|
||||||
func (p Params) GetMergeStrategy() (ParamsMergeStrategy, bool) {
|
func (p Params) GetMergeStrategy() (ParamsMergeStrategy, bool) {
|
||||||
if v, found := p[mergeStrategyKey]; found {
|
if v, found := p[mergeStrategyKey]; found {
|
||||||
if s, ok := v.(ParamsMergeStrategy); ok {
|
if s, ok := v.(ParamsMergeStrategy); ok {
|
||||||
|
@ -125,6 +132,7 @@ func (p Params) GetMergeStrategy() (ParamsMergeStrategy, bool) {
|
||||||
return ParamsMergeStrategyShallow, false
|
return ParamsMergeStrategyShallow, false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For internal use.
|
||||||
func (p Params) DeleteMergeStrategy() bool {
|
func (p Params) DeleteMergeStrategy() bool {
|
||||||
if _, found := p[mergeStrategyKey]; found {
|
if _, found := p[mergeStrategyKey]; found {
|
||||||
delete(p, mergeStrategyKey)
|
delete(p, mergeStrategyKey)
|
||||||
|
@ -133,7 +141,8 @@ func (p Params) DeleteMergeStrategy() bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p Params) SetDefaultMergeStrategy(s ParamsMergeStrategy) {
|
// For internal use.
|
||||||
|
func (p Params) SetMergeStrategy(s ParamsMergeStrategy) {
|
||||||
switch s {
|
switch s {
|
||||||
case ParamsMergeStrategyDeep, ParamsMergeStrategyNone, ParamsMergeStrategyShallow:
|
case ParamsMergeStrategyDeep, ParamsMergeStrategyNone, ParamsMergeStrategyShallow:
|
||||||
default:
|
default:
|
||||||
|
@ -187,7 +196,7 @@ func GetNestedParam(keyStr, separator string, candidates ...Params) (any, error)
|
||||||
|
|
||||||
keySegments := strings.Split(keyStr, separator)
|
keySegments := strings.Split(keyStr, separator)
|
||||||
for _, m := range candidates {
|
for _, m := range candidates {
|
||||||
if v := m.Get(keySegments...); v != nil {
|
if v := m.GetNested(keySegments...); v != nil {
|
||||||
return v, nil
|
return v, nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -236,6 +245,55 @@ const (
|
||||||
mergeStrategyKey = "_merge"
|
mergeStrategyKey = "_merge"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// CleanConfigStringMapString removes any processing instructions from m,
|
||||||
|
// m will never be modified.
|
||||||
|
func CleanConfigStringMapString(m map[string]string) map[string]string {
|
||||||
|
if m == nil || len(m) == 0 {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
if _, found := m[mergeStrategyKey]; !found {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
// Create a new map and copy all the keys except the merge strategy key.
|
||||||
|
m2 := make(map[string]string, len(m)-1)
|
||||||
|
for k, v := range m {
|
||||||
|
if k != mergeStrategyKey {
|
||||||
|
m2[k] = v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return m2
|
||||||
|
}
|
||||||
|
|
||||||
|
// CleanConfigStringMap is the same as CleanConfigStringMapString but for
|
||||||
|
// map[string]any.
|
||||||
|
func CleanConfigStringMap(m map[string]any) map[string]any {
|
||||||
|
if m == nil || len(m) == 0 {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
if _, found := m[mergeStrategyKey]; !found {
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
// Create a new map and copy all the keys except the merge strategy key.
|
||||||
|
m2 := make(map[string]any, len(m)-1)
|
||||||
|
for k, v := range m {
|
||||||
|
if k != mergeStrategyKey {
|
||||||
|
m2[k] = v
|
||||||
|
}
|
||||||
|
switch v2 := v.(type) {
|
||||||
|
case map[string]any:
|
||||||
|
m2[k] = CleanConfigStringMap(v2)
|
||||||
|
case Params:
|
||||||
|
var p Params = CleanConfigStringMap(v2)
|
||||||
|
m2[k] = p
|
||||||
|
case map[string]string:
|
||||||
|
m2[k] = CleanConfigStringMapString(v2)
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
return m2
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
func toMergeStrategy(v any) ParamsMergeStrategy {
|
func toMergeStrategy(v any) ParamsMergeStrategy {
|
||||||
s := ParamsMergeStrategy(cast.ToString(v))
|
s := ParamsMergeStrategy(cast.ToString(v))
|
||||||
switch s {
|
switch s {
|
||||||
|
|
|
@ -81,7 +81,7 @@ func TestParamsSetAndMerge(t *testing.T) {
|
||||||
|
|
||||||
p1, p2 := createParamsPair()
|
p1, p2 := createParamsPair()
|
||||||
|
|
||||||
p1.Set(p2)
|
SetParams(p1, p2)
|
||||||
|
|
||||||
c.Assert(p1, qt.DeepEquals, Params{
|
c.Assert(p1, qt.DeepEquals, Params{
|
||||||
"a": "abv",
|
"a": "abv",
|
||||||
|
@ -97,7 +97,7 @@ func TestParamsSetAndMerge(t *testing.T) {
|
||||||
|
|
||||||
p1, p2 = createParamsPair()
|
p1, p2 = createParamsPair()
|
||||||
|
|
||||||
p1.Merge(p2)
|
MergeParamsWithStrategy("", p1, p2)
|
||||||
|
|
||||||
// Default is to do a shallow merge.
|
// Default is to do a shallow merge.
|
||||||
c.Assert(p1, qt.DeepEquals, Params{
|
c.Assert(p1, qt.DeepEquals, Params{
|
||||||
|
@ -111,8 +111,8 @@ func TestParamsSetAndMerge(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
p1, p2 = createParamsPair()
|
p1, p2 = createParamsPair()
|
||||||
p1.SetDefaultMergeStrategy(ParamsMergeStrategyNone)
|
p1.SetMergeStrategy(ParamsMergeStrategyNone)
|
||||||
p1.Merge(p2)
|
MergeParamsWithStrategy("", p1, p2)
|
||||||
p1.DeleteMergeStrategy()
|
p1.DeleteMergeStrategy()
|
||||||
|
|
||||||
c.Assert(p1, qt.DeepEquals, Params{
|
c.Assert(p1, qt.DeepEquals, Params{
|
||||||
|
@ -125,8 +125,8 @@ func TestParamsSetAndMerge(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
p1, p2 = createParamsPair()
|
p1, p2 = createParamsPair()
|
||||||
p1.SetDefaultMergeStrategy(ParamsMergeStrategyShallow)
|
p1.SetMergeStrategy(ParamsMergeStrategyShallow)
|
||||||
p1.Merge(p2)
|
MergeParamsWithStrategy("", p1, p2)
|
||||||
p1.DeleteMergeStrategy()
|
p1.DeleteMergeStrategy()
|
||||||
|
|
||||||
c.Assert(p1, qt.DeepEquals, Params{
|
c.Assert(p1, qt.DeepEquals, Params{
|
||||||
|
@ -140,8 +140,8 @@ func TestParamsSetAndMerge(t *testing.T) {
|
||||||
})
|
})
|
||||||
|
|
||||||
p1, p2 = createParamsPair()
|
p1, p2 = createParamsPair()
|
||||||
p1.SetDefaultMergeStrategy(ParamsMergeStrategyDeep)
|
p1.SetMergeStrategy(ParamsMergeStrategyDeep)
|
||||||
p1.Merge(p2)
|
MergeParamsWithStrategy("", p1, p2)
|
||||||
p1.DeleteMergeStrategy()
|
p1.DeleteMergeStrategy()
|
||||||
|
|
||||||
c.Assert(p1, qt.DeepEquals, Params{
|
c.Assert(p1, qt.DeepEquals, Params{
|
||||||
|
|
|
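A sketch of the renamed Params helpers above: the Set and Merge methods become the package-level SetParams and MergeParamsWithStrategy functions, and CleanConfigStringMap strips the internal _merge processing instruction; the concrete keys and the exact merge outcome below are illustrative assumptions based on the shallow strategy shown in the diff:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	dst := maps.Params{"color": "blue"}
	src := maps.Params{"color": "red", "size": "large"}

	// SetParams overwrites common keys recursively; MergeParamsWithStrategy only fills in new ones.
	maps.MergeParamsWithStrategy("shallow", dst, src)
	fmt.Println(dst["color"], dst["size"]) // blue large

	// CleanConfigStringMap returns a copy without the _merge key.
	cleaned := maps.CleanConfigStringMap(map[string]any{"_merge": "deep", "title": "Hi"})
	fmt.Println(cleaned) // map[title:Hi]
}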
@ -1,4 +1,4 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -11,32 +11,37 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package paths
|
package urls
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
// A BaseURL in Hugo is normally on the form scheme://path, but the
|
// A BaseURL in Hugo is normally on the form scheme://path, but the
|
||||||
// form scheme: is also valid (mailto:hugo@rules.com).
|
// form scheme: is also valid (mailto:hugo@rules.com).
|
||||||
type BaseURL struct {
|
type BaseURL struct {
|
||||||
url *url.URL
|
url *url.URL
|
||||||
urlStr string
|
WithPath string
|
||||||
|
WithoutPath string
|
||||||
|
BasePath string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b BaseURL) String() string {
|
func (b BaseURL) String() string {
|
||||||
if b.urlStr != "" {
|
return b.WithPath
|
||||||
return b.urlStr
|
|
||||||
}
|
|
||||||
return b.url.String()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b BaseURL) Path() string {
|
func (b BaseURL) Path() string {
|
||||||
return b.url.Path
|
return b.url.Path
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (b BaseURL) Port() int {
|
||||||
|
p, _ := strconv.Atoi(b.url.Port())
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
// HostURL returns the URL to the host root without any path elements.
|
// HostURL returns the URL to the host root without any path elements.
|
||||||
func (b BaseURL) HostURL() string {
|
func (b BaseURL) HostURL() string {
|
||||||
return strings.TrimSuffix(b.String(), b.Path())
|
return strings.TrimSuffix(b.String(), b.Path())
|
||||||
|
@ -44,7 +49,7 @@ func (b BaseURL) HostURL() string {
|
||||||
|
|
||||||
// WithProtocol returns the BaseURL prefixed with the given protocol.
|
// WithProtocol returns the BaseURL prefixed with the given protocol.
|
||||||
// The Protocol is normally of the form "scheme://", i.e. "webcal://".
|
// The Protocol is normally of the form "scheme://", i.e. "webcal://".
|
||||||
func (b BaseURL) WithProtocol(protocol string) (string, error) {
|
func (b BaseURL) WithProtocol(protocol string) (BaseURL, error) {
|
||||||
u := b.URL()
|
u := b.URL()
|
||||||
|
|
||||||
scheme := protocol
|
scheme := protocol
|
||||||
|
@ -62,10 +67,16 @@ func (b BaseURL) WithProtocol(protocol string) (string, error) {
|
||||||
if isFullProtocol && u.Opaque != "" {
|
if isFullProtocol && u.Opaque != "" {
|
||||||
u.Opaque = "//" + u.Opaque
|
u.Opaque = "//" + u.Opaque
|
||||||
} else if isOpaqueProtocol && u.Opaque == "" {
|
} else if isOpaqueProtocol && u.Opaque == "" {
|
||||||
return "", fmt.Errorf("cannot determine BaseURL for protocol %q", protocol)
|
return BaseURL{}, fmt.Errorf("cannot determine BaseURL for protocol %q", protocol)
|
||||||
}
|
}
|
||||||
|
|
||||||
return u.String(), nil
|
return newBaseURLFromURL(u)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b BaseURL) WithPort(port int) (BaseURL, error) {
|
||||||
|
u := b.URL()
|
||||||
|
u.Host = u.Hostname() + ":" + strconv.Itoa(port)
|
||||||
|
return newBaseURLFromURL(u)
|
||||||
}
|
}
|
||||||
|
|
||||||
// URL returns a copy of the internal URL.
|
// URL returns a copy of the internal URL.
|
||||||
|
@ -75,13 +86,25 @@ func (b BaseURL) URL() *url.URL {
|
||||||
return &c
|
return &c
|
||||||
}
|
}
|
||||||
|
|
||||||
func newBaseURLFromString(b string) (BaseURL, error) {
|
func NewBaseURLFromString(b string) (BaseURL, error) {
|
||||||
var result BaseURL
|
u, err := url.Parse(b)
|
||||||
|
|
||||||
base, err := url.Parse(b)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return result, err
|
return BaseURL{}, err
|
||||||
|
}
|
||||||
|
return newBaseURLFromURL(u)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func newBaseURLFromURL(u *url.URL) (BaseURL, error) {
|
||||||
|
baseURL := BaseURL{url: u, WithPath: u.String()}
|
||||||
|
var baseURLNoPath = baseURL.URL()
|
||||||
|
baseURLNoPath.Path = ""
|
||||||
|
baseURL.WithoutPath = baseURLNoPath.String()
|
||||||
|
|
||||||
|
basePath := u.Path
|
||||||
|
if basePath != "" && basePath != "/" {
|
||||||
|
baseURL.BasePath = basePath
|
||||||
}
|
}
|
||||||
|
|
||||||
return BaseURL{url: base, urlStr: base.String()}, nil
|
return baseURL, nil
|
||||||
}
|
}
|
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -11,7 +11,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package paths
|
package urls
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
@ -21,46 +21,46 @@ import (
|
||||||
|
|
||||||
func TestBaseURL(t *testing.T) {
|
func TestBaseURL(t *testing.T) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
b, err := newBaseURLFromString("http://example.com")
|
b, err := NewBaseURLFromString("http://example.com")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(b.String(), qt.Equals, "http://example.com")
|
c.Assert(b.String(), qt.Equals, "http://example.com")
|
||||||
|
|
||||||
p, err := b.WithProtocol("webcal://")
|
p, err := b.WithProtocol("webcal://")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(p, qt.Equals, "webcal://example.com")
|
c.Assert(p.String(), qt.Equals, "webcal://example.com")
|
||||||
|
|
||||||
p, err = b.WithProtocol("webcal")
|
p, err = b.WithProtocol("webcal")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(p, qt.Equals, "webcal://example.com")
|
c.Assert(p.String(), qt.Equals, "webcal://example.com")
|
||||||
|
|
||||||
_, err = b.WithProtocol("mailto:")
|
_, err = b.WithProtocol("mailto:")
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
c.Assert(err, qt.Not(qt.IsNil))
|
||||||
|
|
||||||
b, err = newBaseURLFromString("mailto:hugo@rules.com")
|
b, err = NewBaseURLFromString("mailto:hugo@rules.com")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(b.String(), qt.Equals, "mailto:hugo@rules.com")
|
c.Assert(b.String(), qt.Equals, "mailto:hugo@rules.com")
|
||||||
|
|
||||||
// These are pretty constructed
|
// These are pretty constructed
|
||||||
p, err = b.WithProtocol("webcal")
|
p, err = b.WithProtocol("webcal")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(p, qt.Equals, "webcal:hugo@rules.com")
|
c.Assert(p.String(), qt.Equals, "webcal:hugo@rules.com")
|
||||||
|
|
||||||
p, err = b.WithProtocol("webcal://")
|
p, err = b.WithProtocol("webcal://")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(p, qt.Equals, "webcal://hugo@rules.com")
|
c.Assert(p.String(), qt.Equals, "webcal://hugo@rules.com")
|
||||||
|
|
||||||
// Test with "non-URLs". Some people will try to use these as a way to get
|
// Test with "non-URLs". Some people will try to use these as a way to get
|
||||||
// relative URLs working etc.
|
// relative URLs working etc.
|
||||||
b, err = newBaseURLFromString("/")
|
b, err = NewBaseURLFromString("/")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(b.String(), qt.Equals, "/")
|
c.Assert(b.String(), qt.Equals, "/")
|
||||||
|
|
||||||
b, err = newBaseURLFromString("")
|
b, err = NewBaseURLFromString("")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(b.String(), qt.Equals, "")
|
c.Assert(b.String(), qt.Equals, "")
|
||||||
|
|
||||||
// BaseURL with sub path
|
// BaseURL with sub path
|
||||||
b, err = newBaseURLFromString("http://example.com/sub")
|
b, err = NewBaseURLFromString("http://example.com/sub")
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(b.String(), qt.Equals, "http://example.com/sub")
|
c.Assert(b.String(), qt.Equals, "http://example.com/sub")
|
||||||
c.Assert(b.HostURL(), qt.Equals, "http://example.com")
|
c.Assert(b.HostURL(), qt.Equals, "http://example.com")
|
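A sketch of the reworked urls.BaseURL API, which now exposes the precomputed WithPath, WithoutPath and BasePath fields plus a WithPort helper; the example URL and port are illustrative only:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/urls"
)

func main() {
	b, err := urls.NewBaseURLFromString("https://example.com/docs/")
	if err != nil {
		panic(err)
	}
	fmt.Println(b.WithPath)    // https://example.com/docs/
	fmt.Println(b.WithoutPath) // https://example.com
	fmt.Println(b.BasePath)    // /docs/

	withPort, err := b.WithPort(1313)
	if err != nil {
		panic(err)
	}
	fmt.Println(withPort.String()) // https://example.com:1313/docs/
}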
813
config/allconfig/allconfig.go
Normal file
@@ -0,0 +1,813 @@
|
||||||
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
// Package allconfig contains the full configuration for Hugo.
|
||||||
|
// <docsmeta>{ "name": "Configuration", "description": "This section holds all configiration options in Hugo." }</docsmeta>
|
||||||
|
package allconfig
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"regexp"
|
||||||
|
"sort"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/gohugoio/hugo/cache/filecache"
|
||||||
|
"github.com/gohugoio/hugo/common/maps"
|
||||||
|
"github.com/gohugoio/hugo/common/urls"
|
||||||
|
"github.com/gohugoio/hugo/config"
|
||||||
|
"github.com/gohugoio/hugo/config/privacy"
|
||||||
|
"github.com/gohugoio/hugo/config/security"
|
||||||
|
"github.com/gohugoio/hugo/config/services"
|
||||||
|
"github.com/gohugoio/hugo/deploy"
|
||||||
|
"github.com/gohugoio/hugo/helpers"
|
||||||
|
"github.com/gohugoio/hugo/langs"
|
||||||
|
"github.com/gohugoio/hugo/markup/markup_config"
|
||||||
|
"github.com/gohugoio/hugo/media"
|
||||||
|
"github.com/gohugoio/hugo/minifiers"
|
||||||
|
"github.com/gohugoio/hugo/modules"
|
||||||
|
"github.com/gohugoio/hugo/navigation"
|
||||||
|
"github.com/gohugoio/hugo/output"
|
||||||
|
"github.com/gohugoio/hugo/related"
|
||||||
|
"github.com/gohugoio/hugo/resources/images"
|
||||||
|
"github.com/gohugoio/hugo/resources/page"
|
||||||
|
"github.com/gohugoio/hugo/resources/page/pagemeta"
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
|
||||||
|
xmaps "golang.org/x/exp/maps"
|
||||||
|
)
|
||||||
|
|
||||||
|
// InternalConfig is the internal configuration for Hugo, not read from any user provided config file.
|
||||||
|
type InternalConfig struct {
|
||||||
|
// Server mode?
|
||||||
|
Running bool
|
||||||
|
|
||||||
|
Quiet bool
|
||||||
|
Verbose bool
|
||||||
|
Clock string
|
||||||
|
Watch bool
|
||||||
|
DisableLiveReload bool
|
||||||
|
LiveReloadPort int
|
||||||
|
}
|
||||||
|
|
||||||
|
type Config struct {
|
||||||
|
// For internal use only.
|
||||||
|
Internal InternalConfig `mapstructure:"-" json:"-"`
|
||||||
|
// For internal use only.
|
||||||
|
C ConfigCompiled `mapstructure:"-" json:"-"`
|
||||||
|
|
||||||
|
RootConfig
|
||||||
|
|
||||||
|
// Author information.
|
||||||
|
Author map[string]any
|
||||||
|
|
||||||
|
// Social links.
|
||||||
|
Social map[string]string
|
||||||
|
|
||||||
|
// The build configuration section contains build-related configuration options.
|
||||||
|
// <docsmeta>{"identifiers": ["build"] }</docsmeta>
|
||||||
|
Build config.BuildConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The caches configuration section contains cache-related configuration options.
|
||||||
|
// <docsmeta>{"identifiers": ["caches"] }</docsmeta>
|
||||||
|
Caches filecache.Configs `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The markup configuration section contains markup-related configuration options.
|
||||||
|
// <docsmeta>{"identifiers": ["markup"] }</docsmeta>
|
||||||
|
Markup markup_config.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The mediatypes configuration section maps the MIME type (a string) to a configuration object for that type.
|
||||||
|
// <docsmeta>{"identifiers": ["mediatypes"], "refs": ["types:media:type"] }</docsmeta>
|
||||||
|
MediaTypes *config.ConfigNamespace[map[string]media.MediaTypeConfig, media.Types] `mapstructure:"-"`
|
||||||
|
|
||||||
|
Imaging *config.ConfigNamespace[images.ImagingConfig, images.ImagingConfigInternal] `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The outputformats configuration sections maps a format name (a string) to a configuration object for that format.
|
||||||
|
OutputFormats *config.ConfigNamespace[map[string]output.OutputFormatConfig, output.Formats] `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The outputs configuration section maps a Page Kind (a string) to a slice of output formats.
|
||||||
|
// This can be overridden in the front matter.
|
||||||
|
Outputs map[string][]string `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The cascade configuration section contains the top level front matter cascade configuration options,
|
||||||
|
// a slice of page matcher and params to apply to those pages.
|
||||||
|
Cascade *config.ConfigNamespace[[]page.PageMatcherParamsConfig, map[page.PageMatcher]maps.Params] `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Menu configuration.
|
||||||
|
// <docsmeta>{"refs": ["config:languages:menus"] }</docsmeta>
|
||||||
|
Menus *config.ConfigNamespace[map[string]navigation.MenuConfig, navigation.Menus] `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The deployment configuration section contains for hugo deploy.
|
||||||
|
Deployment deploy.DeployConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Module configuration.
|
||||||
|
Module modules.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Front matter configuration.
|
||||||
|
Frontmatter pagemeta.FrontmatterConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Minification configuration.
|
||||||
|
Minify minifiers.MinifyConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Permalink configuration.
|
||||||
|
Permalinks map[string]string `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Taxonomy configuration.
|
||||||
|
Taxonomies map[string]string `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Sitemap configuration.
|
||||||
|
Sitemap config.SitemapConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Related content configuration.
|
||||||
|
Related related.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Server configuration.
|
||||||
|
Server config.Server `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Privacy configuration.
|
||||||
|
Privacy privacy.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Security configuration.
|
||||||
|
Security security.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// Services configuration.
|
||||||
|
Services services.Config `mapstructure:"-"`
|
||||||
|
|
||||||
|
// User provided parameters.
|
||||||
|
// <docsmeta>{"refs": ["config:languages:params"] }</docsmeta>
|
||||||
|
Params maps.Params `mapstructure:"-"`
|
||||||
|
|
||||||
|
// The languages configuration sections maps a language code (a string) to a configuration object for that language.
|
||||||
|
Languages map[string]langs.LanguageConfig `mapstructure:"-"`
|
||||||
|
|
||||||
|
// UglyURLs configuration. Either a boolean or a sections map.
|
||||||
|
UglyURLs any `mapstructure:"-"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type configCompiler interface {
|
||||||
|
CompileConfig() error
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Config) cloneForLang() *Config {
|
||||||
|
x := c
|
||||||
|
// Collapse all static dirs to one.
|
||||||
|
x.StaticDir = x.staticDirs()
|
||||||
|
// These will go away soon ...
|
||||||
|
x.StaticDir0 = nil
|
||||||
|
x.StaticDir1 = nil
|
||||||
|
x.StaticDir2 = nil
|
||||||
|
x.StaticDir3 = nil
|
||||||
|
x.StaticDir4 = nil
|
||||||
|
x.StaticDir5 = nil
|
||||||
|
x.StaticDir6 = nil
|
||||||
|
x.StaticDir7 = nil
|
||||||
|
x.StaticDir8 = nil
|
||||||
|
x.StaticDir9 = nil
|
||||||
|
x.StaticDir10 = nil
|
||||||
|
|
||||||
|
return &x
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *Config) CompileConfig() error {
|
||||||
|
s := c.Timeout
|
||||||
|
if _, err := strconv.Atoi(s); err == nil {
|
||||||
|
// A number, assume seconds.
|
||||||
|
s = s + "s"
|
||||||
|
}
|
||||||
|
timeout, err := time.ParseDuration(s)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to parse timeout: %s", err)
|
||||||
|
}
|
||||||
|
disabledKinds := make(map[string]bool)
|
||||||
|
for _, kind := range c.DisableKinds {
|
||||||
|
disabledKinds[strings.ToLower(kind)] = true
|
||||||
|
}
|
||||||
|
kindOutputFormats := make(map[string]output.Formats)
|
||||||
|
isRssDisabled := disabledKinds["rss"]
|
||||||
|
outputFormats := c.OutputFormats.Config
|
||||||
|
for kind, formats := range c.Outputs {
|
||||||
|
if disabledKinds[kind] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, format := range formats {
|
||||||
|
if isRssDisabled && format == "rss" {
|
||||||
|
// Legacy config.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
f, found := outputFormats.GetByName(format)
|
||||||
|
if !found {
|
||||||
|
return fmt.Errorf("unknown output format %q for kind %q", format, kind)
|
||||||
|
}
|
||||||
|
kindOutputFormats[kind] = append(kindOutputFormats[kind], f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
disabledLangs := make(map[string]bool)
|
||||||
|
for _, lang := range c.DisableLanguages {
|
||||||
|
if lang == c.DefaultContentLanguage {
|
||||||
|
return fmt.Errorf("cannot disable default content language %q", lang)
|
||||||
|
}
|
||||||
|
disabledLangs[lang] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
ignoredErrors := make(map[string]bool)
|
||||||
|
for _, err := range c.IgnoreErrors {
|
||||||
|
ignoredErrors[strings.ToLower(err)] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
baseURL, err := urls.NewBaseURLFromString(c.BaseURL)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
isUglyURL := func(section string) bool {
|
||||||
|
switch v := c.UglyURLs.(type) {
|
||||||
|
case bool:
|
||||||
|
return v
|
||||||
|
case map[string]bool:
|
||||||
|
return v[section]
|
||||||
|
default:
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ignoreFile := func(s string) bool {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if len(c.IgnoreFiles) > 0 {
|
||||||
|
regexps := make([]*regexp.Regexp, len(c.IgnoreFiles))
|
||||||
|
for i, pattern := range c.IgnoreFiles {
|
||||||
|
var err error
|
||||||
|
regexps[i], err = regexp.Compile(pattern)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to compile ignoreFiles pattern %q: %s", pattern, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ignoreFile = func(s string) bool {
|
||||||
|
for _, r := range regexps {
|
||||||
|
if r.MatchString(s) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var clock time.Time
|
||||||
|
if c.Internal.Clock != "" {
|
||||||
|
var err error
|
||||||
|
clock, err = time.Parse(time.RFC3339, c.Internal.Clock)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to parse clock: %s", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c.C = ConfigCompiled{
|
||||||
|
Timeout: timeout,
|
||||||
|
BaseURL: baseURL,
|
||||||
|
BaseURLLiveReload: baseURL,
|
||||||
|
DisabledKinds: disabledKinds,
|
||||||
|
DisabledLanguages: disabledLangs,
|
||||||
|
IgnoredErrors: ignoredErrors,
|
||||||
|
KindOutputFormats: kindOutputFormats,
|
||||||
|
CreateTitle: helpers.GetTitleFunc(c.TitleCaseStyle),
|
||||||
|
IsUglyURLSection: isUglyURL,
|
||||||
|
IgnoreFile: ignoreFile,
|
||||||
|
MainSections: c.MainSections,
|
||||||
|
Clock: clock,
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, s := range allDecoderSetups {
|
||||||
|
if getCompiler := s.getCompiler; getCompiler != nil {
|
||||||
|
if err := getCompiler(c).CompileConfig(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Config) IsKindEnabled(kind string) bool {
|
||||||
|
return !c.C.DisabledKinds[kind]
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c Config) IsLangDisabled(lang string) bool {
|
||||||
|
return c.C.DisabledLanguages[lang]
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigCompiled holds values and functions that are derived from the config.
|
||||||
|
type ConfigCompiled struct {
|
||||||
|
Timeout time.Duration
|
||||||
|
BaseURL urls.BaseURL
|
||||||
|
BaseURLLiveReload urls.BaseURL
|
||||||
|
KindOutputFormats map[string]output.Formats
|
||||||
|
DisabledKinds map[string]bool
|
||||||
|
DisabledLanguages map[string]bool
|
||||||
|
IgnoredErrors map[string]bool
|
||||||
|
CreateTitle func(s string) string
|
||||||
|
IsUglyURLSection func(section string) bool
|
||||||
|
IgnoreFile func(filename string) bool
|
||||||
|
MainSections []string
|
||||||
|
Clock time.Time
|
||||||
|
}
|
||||||
|
|
||||||
|
// This may be set after the config is compiled.
|
||||||
|
func (c *ConfigCompiled) SetMainSections(sections []string) {
|
||||||
|
c.MainSections = sections
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is set after the config is compiled by the server command.
|
||||||
|
func (c *ConfigCompiled) SetBaseURL(baseURL, baseURLLiveReload urls.BaseURL) {
|
||||||
|
c.BaseURL = baseURL
|
||||||
|
c.BaseURLLiveReload = baseURLLiveReload
|
||||||
|
}
|
||||||
|
|
||||||
|
// RootConfig holds all the top-level configuration options in Hugo
|
||||||
|
type RootConfig struct {
|
||||||
|
|
||||||
|
// The base URL of the site.
|
||||||
|
// Note that the default value is empty, but Hugo requires a valid URL (e.g. "https://example.com/") to work properly.
|
||||||
|
// <docsmeta>{"identifiers": ["URL"] }</docsmeta>
|
||||||
|
BaseURL string
|
||||||
|
|
||||||
|
// Whether to build content marked as draft.X
|
||||||
|
// <docsmeta>{"identifiers": ["draft"] }</docsmeta>
|
||||||
|
BuildDrafts bool
|
||||||
|
|
||||||
|
// Whether to build content with expiryDate in the past.
|
||||||
|
// <docsmeta>{"identifiers": ["expiryDate"] }</docsmeta>
|
||||||
|
BuildExpired bool
|
||||||
|
|
||||||
|
// Whether to build content with publishDate in the future.
|
||||||
|
// <docsmeta>{"identifiers": ["publishDate"] }</docsmeta>
|
||||||
|
BuildFuture bool
|
||||||
|
|
||||||
|
// Copyright information.
|
||||||
|
Copyright string
|
||||||
|
|
||||||
|
// The language to apply to content without any Clolanguage indicator.
|
||||||
|
DefaultContentLanguage string
|
||||||
|
|
||||||
|
// By defefault, we put the default content language in the root and the others below their language ID, e.g. /no/.
|
||||||
|
// Set this to true to put all languages below their language ID.
|
||||||
|
DefaultContentLanguageInSubdir bool
|
||||||
|
|
||||||
|
// Disable creation of alias redirect pages.
|
||||||
|
DisableAliases bool
|
||||||
|
|
||||||
|
// Disable lower casing of path segments.
|
||||||
|
DisablePathToLower bool
|
||||||
|
|
||||||
|
// Disable page kinds from build.
|
||||||
|
DisableKinds []string
|
||||||
|
|
||||||
|
// A list of languages to disable.
|
||||||
|
DisableLanguages []string
|
||||||
|
|
||||||
|
// Disable the injection of the Hugo generator tag on the home page.
|
||||||
|
DisableHugoGeneratorInject bool
|
||||||
|
|
||||||
|
// Enable replacement in Pages' Content of Emoji shortcodes with their equivalent Unicode characters.
|
||||||
|
// <docsmeta>{"identifiers": ["Content", "Unicode"] }</docsmeta>
|
||||||
|
EnableEmoji bool
|
||||||
|
|
||||||
|
// THe main section(s) of the site.
|
||||||
|
// If not set, Hugo will try to guess this from the content.
|
||||||
|
MainSections []string
|
||||||
|
|
||||||
|
// Enable robots.txt generation.
|
||||||
|
EnableRobotsTXT bool
|
||||||
|
|
||||||
|
// When enabled, Hugo will apply Git version information to each Page if possible, which
|
||||||
|
// can be used to keep lastUpdated in synch and to print version information.
|
||||||
|
// <docsmeta>{"identifiers": ["Page"] }</docsmeta>
|
||||||
|
EnableGitInfo bool
|
||||||
|
|
||||||
|
// Enable to track, calculate and print metrics.
|
||||||
|
TemplateMetrics bool
|
||||||
|
|
||||||
|
// Enable to track, print and calculate metric hints.
|
||||||
|
TemplateMetricsHints bool
|
||||||
|
|
||||||
|
// Enable to disable the build lock file.
|
||||||
|
NoBuildLock bool
|
||||||
|
|
||||||
|
// A list of error IDs to ignore.
|
||||||
|
IgnoreErrors []string
|
||||||
|
|
||||||
|
// A list of regexps that match paths to ignore.
|
||||||
|
// Deprecated: Use the settings on module imports.
|
||||||
|
IgnoreFiles []string
|
||||||
|
|
||||||
|
// Ignore cache.
|
||||||
|
IgnoreCache bool
|
||||||
|
|
||||||
|
// Enable to print greppable placeholders (on the form "[i18n] TRANSLATIONID") for missing translation strings.
|
||||||
|
EnableMissingTranslationPlaceholders bool
|
||||||
|
|
||||||
|
// Enable to print warnings for missing translation strings.
|
||||||
|
LogI18nWarnings bool
|
||||||
|
|
||||||
|
// ENable to print warnings for multiple files published to the same destination.
|
||||||
|
LogPathWarnings bool
|
||||||
|
|
||||||
|
// The configured environment. Default is "development" for server and "production" for build.
|
||||||
|
Environment string
|
||||||
|
|
||||||
|
// The default language code.
|
||||||
|
LanguageCode string
|
||||||
|
|
||||||
|
// Enable if the site content has CJK language (Chinese, Japanese, or Korean). This affects how Hugo counts words.
|
||||||
|
HasCJKLanguage bool
|
||||||
|
|
||||||
|
// The default number of pages per page when paginating.
|
||||||
|
Paginate int
|
||||||
|
|
||||||
|
// The path to use when creating pagination URLs, e.g. "page" in /page/2/.
|
||||||
|
PaginatePath string
|
||||||
|
|
||||||
|
// Whether to pluralize default list titles.
|
||||||
|
// Note that this currently only works for English, but you can provide your own title in the content file's front matter.
|
||||||
|
PluralizeListTitles bool
|
||||||
|
|
||||||
|
// Make all relative URLs absolute using the baseURL.
|
||||||
|
// <docsmeta>{"identifiers": ["baseURL"] }</docsmeta>
|
||||||
|
CanonifyURLs bool
|
||||||
|
|
||||||
|
// Enable this to make all relative URLs relative to content root. Note that this does not affect absolute URLs.
|
||||||
|
RelativeURLs bool
|
||||||
|
|
||||||
|
// Removes non-spacing marks from composite characters in content paths.
|
||||||
|
RemovePathAccents bool
|
||||||
|
|
||||||
|
// Whether to track and print unused templates during the build.
|
||||||
|
PrintUnusedTemplates bool
|
||||||
|
|
||||||
|
// URL to be used as a placeholder when a page reference cannot be found in ref or relref. Is used as-is.
|
||||||
|
RefLinksNotFoundURL string
|
||||||
|
|
||||||
|
// When using ref or relref to resolve page links and a link cannot be resolved, it will be logged with this log level.
|
||||||
|
// Valid values are ERROR (default) or WARNING. Any ERROR will fail the build (exit -1).
|
||||||
|
RefLinksErrorLevel string
|
||||||
|
|
||||||
|
// This will create a menu with all the sections as menu items and all the sections’ pages as “shadow-members”.
|
||||||
|
SectionPagesMenu string
|
||||||
|
|
||||||
|
// The length of text in words to show in a .Summary.
|
||||||
|
SummaryLength int
|
||||||
|
|
||||||
|
// The site title.
|
||||||
|
Title string
|
||||||
|
|
||||||
|
// The theme(s) to use.
|
||||||
|
// See Modules for more a more flexible way to load themes.
|
||||||
|
Theme []string
|
||||||
|
|
||||||
|
// Timeout for generating page contents, specified as a duration or in milliseconds.
|
||||||
|
Timeout string
|
||||||
|
|
||||||
|
// The time zone (or location), e.g. Europe/Oslo, used to parse front matter dates without such information and in the time function.
|
||||||
|
TimeZone string
|
||||||
|
|
||||||
|
// Set titleCaseStyle to specify the title style used by the title template function and the automatic section titles in Hugo.
|
	// It defaults to AP Stylebook for title casing, but you can also set it to Chicago or Go (every word starts with a capital letter).
	TitleCaseStyle string

	// The editor used for opening up new content.
	NewContentEditor string

	// Don't sync modification time of files for the static mounts.
	NoTimes bool

	// Don't sync permission mode of files for the static mounts.
	NoChmod bool

	// Clean the destination folder before a new build.
	// This currently only handles static files.
	CleanDestinationDir bool

	// A Glob pattern of module paths to ignore in the _vendor folder.
	IgnoreVendorPaths string

	config.CommonDirs `mapstructure:",squash"`

	// The odd constructs below are kept for backwards compatibility.
	// Deprecated: Use module mount config instead.
	StaticDir []string
	// Deprecated: Use module mount config instead.
	StaticDir0 []string
	// Deprecated: Use module mount config instead.
	StaticDir1 []string
	// Deprecated: Use module mount config instead.
	StaticDir2 []string
	// Deprecated: Use module mount config instead.
	StaticDir3 []string
	// Deprecated: Use module mount config instead.
	StaticDir4 []string
	// Deprecated: Use module mount config instead.
	StaticDir5 []string
	// Deprecated: Use module mount config instead.
	StaticDir6 []string
	// Deprecated: Use module mount config instead.
	StaticDir7 []string
	// Deprecated: Use module mount config instead.
	StaticDir8 []string
	// Deprecated: Use module mount config instead.
	StaticDir9 []string
	// Deprecated: Use module mount config instead.
	StaticDir10 []string
}

func (c RootConfig) staticDirs() []string {
	var dirs []string
	dirs = append(dirs, c.StaticDir...)
	dirs = append(dirs, c.StaticDir0...)
	dirs = append(dirs, c.StaticDir1...)
	dirs = append(dirs, c.StaticDir2...)
	dirs = append(dirs, c.StaticDir3...)
	dirs = append(dirs, c.StaticDir4...)
	dirs = append(dirs, c.StaticDir5...)
	dirs = append(dirs, c.StaticDir6...)
	dirs = append(dirs, c.StaticDir7...)
	dirs = append(dirs, c.StaticDir8...)
	dirs = append(dirs, c.StaticDir9...)
	dirs = append(dirs, c.StaticDir10...)
	return helpers.UniqueStringsReuse(dirs)
}

type Configs struct {
	Base                *Config
	LoadingInfo         config.LoadConfigResult
	LanguageConfigMap   map[string]*Config
	LanguageConfigSlice []*Config

	IsMultihost           bool
	Languages             langs.Languages
	LanguagesDefaultFirst langs.Languages

	Modules       modules.Modules
	ModulesClient *modules.Client

	configLangs []config.AllProvider
}

func (c *Configs) IsZero() bool {
	// A config always has at least one language.
	return c == nil || len(c.Languages) == 0
}

func (c *Configs) Init() error {
	c.configLangs = make([]config.AllProvider, len(c.Languages))
	for i, l := range c.LanguagesDefaultFirst {
		c.configLangs[i] = ConfigLanguage{
			m:          c,
			config:     c.LanguageConfigMap[l.Lang],
			baseConfig: c.LoadingInfo.BaseConfig,
			language:   l,
		}
	}

	if len(c.Modules) == 0 {
		return errors.New("no modules loaded (need at least the main module)")
	}

	// Apply default project mounts.
	if err := modules.ApplyProjectConfigDefaults(c.Modules[0], c.configLangs...); err != nil {
		return err
	}

	return nil
}

func (c Configs) ConfigLangs() []config.AllProvider {
	return c.configLangs
}

func (c Configs) GetFirstLanguageConfig() config.AllProvider {
	return c.configLangs[0]
}

func (c Configs) GetByLang(lang string) config.AllProvider {
	for _, l := range c.configLangs {
		if l.Language().Lang == lang {
			return l
		}
	}
	return nil
}

// FromLoadConfigResult creates a new Config from res.
func FromLoadConfigResult(fs afero.Fs, res config.LoadConfigResult) (*Configs, error) {
	if !res.Cfg.IsSet("languages") {
		// We need at least one.
		lang := res.Cfg.GetString("defaultContentLanguage")
		res.Cfg.Set("languages", maps.Params{lang: maps.Params{}})
	}
	bcfg := res.BaseConfig
	cfg := res.Cfg

	all := &Config{}
	err := decodeConfigFromParams(fs, bcfg, cfg, all, nil)
	if err != nil {
		return nil, err
	}

	langConfigMap := make(map[string]*Config)
	var langConfigs []*Config

	languagesConfig := cfg.GetStringMap("languages")
	var isMultiHost bool

	if err := all.CompileConfig(); err != nil {
		return nil, err
	}

	for k, v := range languagesConfig {
		mergedConfig := config.New()
		var differentRootKeys []string
		switch x := v.(type) {
		case maps.Params:
			for kk, vv := range x {
				if kk == "baseurl" {
					// baseURL configured on the language level means a multihost setup.
					isMultiHost = true
				}
				mergedConfig.Set(kk, vv)
				if cfg.IsSet(kk) {
					rootv := cfg.Get(kk)
					// This overrides a root key and potentially needs a merge.
					if !reflect.DeepEqual(rootv, vv) {
						switch vvv := vv.(type) {
						case maps.Params:
							differentRootKeys = append(differentRootKeys, kk)

							// Use the language value as base.
							mergedConfigEntry := xmaps.Clone(vvv)
							// Merge in the root value.
							maps.MergeParams(mergedConfigEntry, rootv.(maps.Params))

							mergedConfig.Set(kk, mergedConfigEntry)
						default:
							// Apply new values to the root.
							differentRootKeys = append(differentRootKeys, "")
						}
					}
				} else {
					// Apply new values to the root.
					differentRootKeys = append(differentRootKeys, "")
				}
			}
			differentRootKeys = helpers.UniqueStringsSorted(differentRootKeys)

			if len(differentRootKeys) == 0 {
				langConfigMap[k] = all
				continue
			}

			// Create a copy of the complete config and replace the root keys with the language specific ones.
			clone := all.cloneForLang()
			if err := decodeConfigFromParams(fs, bcfg, mergedConfig, clone, differentRootKeys); err != nil {
				return nil, fmt.Errorf("failed to decode config for language %q: %w", k, err)
			}
			if err := clone.CompileConfig(); err != nil {
				return nil, err
			}
			langConfigMap[k] = clone
		case maps.ParamsMergeStrategy:
		default:
			panic(fmt.Sprintf("unknown type in languages config: %T", v))
		}
	}

	var languages langs.Languages
	defaultContentLanguage := all.DefaultContentLanguage
	for k, v := range langConfigMap {
		languageConf := v.Languages[k]
		language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf)
		if err != nil {
			return nil, err
		}
		languages = append(languages, language)
	}

	// Sort the sites by language weight (if set) or lang.
	sort.Slice(languages, func(i, j int) bool {
		li := languages[i]
		lj := languages[j]
		if li.Weight != lj.Weight {
			return li.Weight < lj.Weight
		}
		return li.Lang < lj.Lang
	})

	for _, l := range languages {
		langConfigs = append(langConfigs, langConfigMap[l.Lang])
	}

	var languagesDefaultFirst langs.Languages
	for _, l := range languages {
		if l.Lang == defaultContentLanguage {
			languagesDefaultFirst = append(languagesDefaultFirst, l)
		}
	}
	for _, l := range languages {
		if l.Lang != defaultContentLanguage {
			languagesDefaultFirst = append(languagesDefaultFirst, l)
		}
	}

	bcfg.PublishDir = all.PublishDir
	res.BaseConfig = bcfg

	cm := &Configs{
		Base:                  all,
		LanguageConfigMap:     langConfigMap,
		LanguageConfigSlice:   langConfigs,
		LoadingInfo:           res,
		IsMultihost:           isMultiHost,
		Languages:             languages,
		LanguagesDefaultFirst: languagesDefaultFirst,
	}

	return cm, nil
}

func decodeConfigFromParams(fs afero.Fs, bcfg config.BaseConfig, p config.Provider, target *Config, keys []string) error {
	var decoderSetups []decodeWeight

	if len(keys) == 0 {
		for _, v := range allDecoderSetups {
			decoderSetups = append(decoderSetups, v)
		}
	} else {
		for _, key := range keys {
			if v, found := allDecoderSetups[key]; found {
				decoderSetups = append(decoderSetups, v)
			} else {
				return fmt.Errorf("unknown config key %q", key)
			}
		}
	}

	// Sort them to get the dependency order right.
	sort.Slice(decoderSetups, func(i, j int) bool {
		ki, kj := decoderSetups[i], decoderSetups[j]
		if ki.weight == kj.weight {
			return ki.key < kj.key
		}
		return ki.weight < kj.weight
	})

	for _, v := range decoderSetups {
		p := decodeConfig{p: p, c: target, fs: fs, bcfg: bcfg}
		if err := v.decode(v, p); err != nil {
			return fmt.Errorf("failed to decode %q: %w", v.key, err)
		}
	}

	return nil
}

func createDefaultOutputFormats(allFormats output.Formats) map[string][]string {
	if len(allFormats) == 0 {
		panic("no output formats")
	}
	rssOut, rssFound := allFormats.GetByName(output.RSSFormat.Name)
	htmlOut, _ := allFormats.GetByName(output.HTMLFormat.Name)

	defaultListTypes := []string{htmlOut.Name}
	if rssFound {
		defaultListTypes = append(defaultListTypes, rssOut.Name)
	}

	m := map[string][]string{
		page.KindPage:     {htmlOut.Name},
		page.KindHome:     defaultListTypes,
		page.KindSection:  defaultListTypes,
		page.KindTerm:     defaultListTypes,
		page.KindTaxonomy: defaultListTypes,
	}

	// May be disabled
	if rssFound {
		m["rss"] = []string{rssOut.Name}
	}

	return m
}
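A quick sketch (not part of this commit) of the behavior the language merge loop above is after: a root key overridden under [languages.sv] should end up in the Swedish site's config clone, while the base value stays in place for the default language. The test below is an assumption written in the style of the integration test added later in this commit; names like TestLanguageRootKeyOverrideSketch are made up.

package allconfig_test

import (
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/hugolib"
)

func TestLanguageRootKeyOverrideSketch(t *testing.T) {
	files := `
-- hugo.toml --
baseURL = "https://example.com"
title = "Base Title"
[languages]
[languages.en]
weight = 1
[languages.sv]
weight = 2
title = "Svensk titel"
-- layouts/index.html --
{{ site.Title }}
`
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files},
	).Build()

	b.Assert(len(b.H.Sites), qt.Equals, 2)
	// The default language keeps the root value ...
	b.AssertFileContent("public/index.html", "Base Title")
	// ... while "sv" gets a per-language Config clone with the override.
	b.AssertFileContent("public/sv/index.html", "Svensk titel")
}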
325  config/allconfig/alldecoders.go  Normal file
@@ -0,0 +1,325 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package allconfig

import (
	"fmt"
	"strings"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/common/maps"
	"github.com/gohugoio/hugo/common/types"
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/config/privacy"
	"github.com/gohugoio/hugo/config/security"
	"github.com/gohugoio/hugo/config/services"
	"github.com/gohugoio/hugo/deploy"
	"github.com/gohugoio/hugo/langs"
	"github.com/gohugoio/hugo/markup/markup_config"
	"github.com/gohugoio/hugo/media"
	"github.com/gohugoio/hugo/minifiers"
	"github.com/gohugoio/hugo/modules"
	"github.com/gohugoio/hugo/navigation"
	"github.com/gohugoio/hugo/output"
	"github.com/gohugoio/hugo/related"
	"github.com/gohugoio/hugo/resources/images"
	"github.com/gohugoio/hugo/resources/page"
	"github.com/gohugoio/hugo/resources/page/pagemeta"
	"github.com/mitchellh/mapstructure"
	"github.com/spf13/afero"
	"github.com/spf13/cast"
)

type decodeConfig struct {
	p    config.Provider
	c    *Config
	fs   afero.Fs
	bcfg config.BaseConfig
}

type decodeWeight struct {
	key         string
	decode      func(decodeWeight, decodeConfig) error
	getCompiler func(c *Config) configCompiler
	weight      int
}

var allDecoderSetups = map[string]decodeWeight{
	"": {
		key:    "",
		weight: -100, // Always first.
		decode: func(d decodeWeight, p decodeConfig) error {
			return mapstructure.WeakDecode(p.p.Get(""), &p.c.RootConfig)
		},
	},
	"imaging": {
		key: "imaging",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Imaging, err = images.DecodeConfig(p.p.GetStringMap(d.key))
			return err
		},
	},
	"caches": {
		key: "caches",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Caches, err = filecache.DecodeConfig(p.fs, p.bcfg, p.p.GetStringMap(d.key))
			if p.c.IgnoreCache {
				// Set MaxAge in all caches to 0.
				for k, cache := range p.c.Caches {
					cache.MaxAge = 0
					p.c.Caches[k] = cache
				}
			}
			return err
		},
	},
	"build": {
		key: "build",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Build = config.DecodeBuildConfig(p.p)
			return nil
		},
	},
	"frontmatter": {
		key: "frontmatter",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Frontmatter, err = pagemeta.DecodeFrontMatterConfig(p.p)
			return err
		},
	},
	"markup": {
		key: "markup",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Markup, err = markup_config.Decode(p.p)
			return err
		},
	},
	"server": {
		key: "server",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Server, err = config.DecodeServer(p.p)
			return err
		},
		getCompiler: func(c *Config) configCompiler {
			return &c.Server
		},
	},
	"minify": {
		key: "minify",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Minify, err = minifiers.DecodeConfig(p.p.Get(d.key))
			return err
		},
	},
	"mediaTypes": {
		key: "mediaTypes",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.MediaTypes, err = media.DecodeTypes(p.p.GetStringMap(d.key))
			return err
		},
	},
	"outputs": {
		key: "outputs",
		decode: func(d decodeWeight, p decodeConfig) error {
			defaults := createDefaultOutputFormats(p.c.OutputFormats.Config)
			m := p.p.GetStringMap("outputs")
			p.c.Outputs = make(map[string][]string)
			for k, v := range m {
				s := types.ToStringSlicePreserveString(v)
				for i, v := range s {
					s[i] = strings.ToLower(v)
				}
				p.c.Outputs[k] = s
			}
			// Apply defaults.
			for k, v := range defaults {
				if _, found := p.c.Outputs[k]; !found {
					p.c.Outputs[k] = v
				}
			}
			return nil
		},
	},
	"outputFormats": {
		key: "outputFormats",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.OutputFormats, err = output.DecodeConfig(p.c.MediaTypes.Config, p.p.Get(d.key))
			return err
		},
	},
	"params": {
		key: "params",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Params = maps.CleanConfigStringMap(p.p.GetStringMap("params"))
			if p.c.Params == nil {
				p.c.Params = make(map[string]any)
			}

			// Before Hugo 0.112.0 this was configured via site Params.
			if mainSections, found := p.c.Params["mainsections"]; found {
				p.c.MainSections = types.ToStringSlicePreserveString(mainSections)
			}

			return nil
		},
	},
	"module": {
		key: "module",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Module, err = modules.DecodeConfig(p.p)
			return err
		},
	},
	"permalinks": {
		key: "permalinks",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Permalinks = maps.CleanConfigStringMapString(p.p.GetStringMapString(d.key))
			return nil
		},
	},
	"sitemap": {
		key: "sitemap",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Sitemap, err = config.DecodeSitemap(config.SitemapConfig{Priority: -1, Filename: "sitemap.xml"}, p.p.GetStringMap(d.key))
			return err
		},
	},
	"taxonomies": {
		key: "taxonomies",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Taxonomies = maps.CleanConfigStringMapString(p.p.GetStringMapString(d.key))
			return nil
		},
	},
	"related": {
		key:    "related",
		weight: 100, // This needs to be decoded after taxonomies.
		decode: func(d decodeWeight, p decodeConfig) error {
			if p.p.IsSet(d.key) {
				var err error
				p.c.Related, err = related.DecodeConfig(p.p.GetParams(d.key))
				if err != nil {
					return fmt.Errorf("failed to decode related config: %w", err)
				}
			} else {
				p.c.Related = related.DefaultConfig
				if _, found := p.c.Taxonomies["tag"]; found {
					p.c.Related.Add(related.IndexConfig{Name: "tags", Weight: 80})
				}
			}
			return nil
		},
	},
	"languages": {
		key: "languages",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Languages, err = langs.DecodeConfig(p.p.GetStringMap(d.key))
			return err
		},
	},
	"cascade": {
		key: "cascade",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Cascade, err = page.DecodeCascadeConfig(p.p.Get(d.key))
			return err
		},
	},
	"menus": {
		key: "menus",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Menus, err = navigation.DecodeConfig(p.p.Get(d.key))
			return err
		},
	},
	"privacy": {
		key: "privacy",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Privacy, err = privacy.DecodeConfig(p.p)
			return err
		},
	},
	"security": {
		key: "security",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Security, err = security.DecodeConfig(p.p)
			return err
		},
	},
	"services": {
		key: "services",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Services, err = services.DecodeConfig(p.p)
			return err
		},
	},
	"deployment": {
		key: "deployment",
		decode: func(d decodeWeight, p decodeConfig) error {
			var err error
			p.c.Deployment, err = deploy.DecodeConfig(p.p)
			return err
		},
	},
	"author": {
		key: "author",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Author = p.p.GetStringMap(d.key)
			return nil
		},
	},
	"social": {
		key: "social",
		decode: func(d decodeWeight, p decodeConfig) error {
			p.c.Social = p.p.GetStringMapString(d.key)
			return nil
		},
	},
	"uglyurls": {
		key: "uglyurls",
		decode: func(d decodeWeight, p decodeConfig) error {
			v := p.p.Get(d.key)
			switch vv := v.(type) {
			case bool:
				p.c.UglyURLs = vv
			case string:
				p.c.UglyURLs = vv == "true"
			default:
				p.c.UglyURLs = cast.ToStringMapBool(v)
			}
			return nil
		},
	},
	"internal": {
		key: "internal",
		decode: func(d decodeWeight, p decodeConfig) error {
			return mapstructure.WeakDecode(p.p.GetStringMap(d.key), &p.c.Internal)
		},
	},
}
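One practical consequence of the decoder table above is that the "outputs" entry lower-cases every configured format name and then backfills the defaults from createDefaultOutputFormats. The sketch below (an assumption, not part of the commit; the test name is made up) shows that behavior through the public config:

package allconfig_test

import (
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/config/allconfig"
	"github.com/gohugoio/hugo/hugolib"
)

func TestOutputsDefaultsSketch(t *testing.T) {
	files := `
-- hugo.toml --
baseURL = "https://example.com"
[outputs]
home = ["HTML", "JSON"]
-- layouts/index.html --
Home.
`
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files},
	).Build()

	conf := b.H.Sites[0].Conf.GetConfig().(*allconfig.Config)
	// Configured values are lower-cased ...
	b.Assert(conf.Outputs["home"], qt.DeepEquals, []string{"html", "json"})
	// ... and kinds not listed fall back to the defaults.
	b.Assert(conf.Outputs["page"], qt.DeepEquals, []string{"html"})
}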
216  config/allconfig/configlanguage.go  Normal file
@@ -0,0 +1,216 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package allconfig

import (
	"time"

	"github.com/gohugoio/hugo/common/urls"
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/langs"
)

type ConfigLanguage struct {
	config     *Config
	baseConfig config.BaseConfig

	m        *Configs
	language *langs.Language
}

func (c ConfigLanguage) Language() *langs.Language {
	return c.language
}

func (c ConfigLanguage) Languages() langs.Languages {
	return c.m.Languages
}

func (c ConfigLanguage) LanguagesDefaultFirst() langs.Languages {
	return c.m.LanguagesDefaultFirst
}

func (c ConfigLanguage) BaseURL() urls.BaseURL {
	return c.config.C.BaseURL
}

func (c ConfigLanguage) BaseURLLiveReload() urls.BaseURL {
	return c.config.C.BaseURLLiveReload
}

func (c ConfigLanguage) Environment() string {
	return c.config.Environment
}

func (c ConfigLanguage) IsMultihost() bool {
	return c.m.IsMultihost
}

func (c ConfigLanguage) IsMultiLingual() bool {
	return len(c.m.Languages) > 1
}

func (c ConfigLanguage) TemplateMetrics() bool {
	return c.config.TemplateMetrics
}

func (c ConfigLanguage) TemplateMetricsHints() bool {
	return c.config.TemplateMetricsHints
}

func (c ConfigLanguage) IsLangDisabled(lang string) bool {
	return c.config.C.DisabledLanguages[lang]
}

func (c ConfigLanguage) IgnoredErrors() map[string]bool {
	return c.config.C.IgnoredErrors
}

func (c ConfigLanguage) NoBuildLock() bool {
	return c.config.NoBuildLock
}

func (c ConfigLanguage) NewContentEditor() string {
	return c.config.NewContentEditor
}

func (c ConfigLanguage) Timeout() time.Duration {
	return c.config.C.Timeout
}

func (c ConfigLanguage) BaseConfig() config.BaseConfig {
	return c.baseConfig
}

func (c ConfigLanguage) Dirs() config.CommonDirs {
	return c.config.CommonDirs
}

func (c ConfigLanguage) DirsBase() config.CommonDirs {
	return c.m.Base.CommonDirs
}

func (c ConfigLanguage) Quiet() bool {
	return c.m.Base.Internal.Quiet
}

// GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use.
func (c ConfigLanguage) GetConfigSection(s string) any {
	switch s {
	case "security":
		return c.config.Security
	case "build":
		return c.config.Build
	case "frontmatter":
		return c.config.Frontmatter
	case "caches":
		return c.config.Caches
	case "markup":
		return c.config.Markup
	case "mediaTypes":
		return c.config.MediaTypes.Config
	case "outputFormats":
		return c.config.OutputFormats.Config
	case "permalinks":
		return c.config.Permalinks
	case "minify":
		return c.config.Minify
	case "activeModules":
		return c.m.Modules
	case "deployment":
		return c.config.Deployment
	default:
		panic("not implemented: " + s)
	}
}

func (c ConfigLanguage) GetConfig() any {
	return c.config
}

func (c ConfigLanguage) CanonifyURLs() bool {
	return c.config.CanonifyURLs
}

func (c ConfigLanguage) IsUglyURLs(section string) bool {
	return c.config.C.IsUglyURLSection(section)
}

func (c ConfigLanguage) IgnoreFile(s string) bool {
	return c.config.C.IgnoreFile(s)
}

func (c ConfigLanguage) DisablePathToLower() bool {
	return c.config.DisablePathToLower
}

func (c ConfigLanguage) RemovePathAccents() bool {
	return c.config.RemovePathAccents
}

func (c ConfigLanguage) DefaultContentLanguage() string {
	return c.config.DefaultContentLanguage
}

func (c ConfigLanguage) DefaultContentLanguageInSubdir() bool {
	return c.config.DefaultContentLanguageInSubdir
}

func (c ConfigLanguage) SummaryLength() int {
	return c.config.SummaryLength
}

func (c ConfigLanguage) BuildExpired() bool {
	return c.config.BuildExpired
}

func (c ConfigLanguage) BuildFuture() bool {
	return c.config.BuildFuture
}

func (c ConfigLanguage) BuildDrafts() bool {
	return c.config.BuildDrafts
}

func (c ConfigLanguage) Running() bool {
	return c.config.Internal.Running
}

func (c ConfigLanguage) PrintUnusedTemplates() bool {
	return c.config.PrintUnusedTemplates
}

func (c ConfigLanguage) EnableMissingTranslationPlaceholders() bool {
	return c.config.EnableMissingTranslationPlaceholders
}

func (c ConfigLanguage) LogI18nWarnings() bool {
	return c.config.LogI18nWarnings
}

func (c ConfigLanguage) CreateTitle(s string) string {
	return c.config.C.CreateTitle(s)
}

func (c ConfigLanguage) Paginate() int {
	return c.config.Paginate
}

func (c ConfigLanguage) PaginatePath() string {
	return c.config.PaginatePath
}

func (c ConfigLanguage) StaticDirs() []string {
	return c.config.staticDirs()
}
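ConfigLanguage is what Configs.ConfigLangs() hands out as config.AllProvider, so callers ask language-scoped questions through this adapter rather than reading the raw Config struct. A minimal consumption sketch (an assumption, not code from the commit; the helper name is made up), e.g. placed in a _test.go file:

package allconfig_test

import (
	"fmt"

	"github.com/gohugoio/hugo/config/allconfig"
)

// printLanguageSettings dumps a few per-language settings via the AllProvider view.
func printLanguageSettings(configs *allconfig.Configs) {
	for _, p := range configs.ConfigLangs() {
		fmt.Printf("%s: baseURL=%s drafts=%v\n", p.Language().Lang, p.BaseURL().String(), p.BuildDrafts())
	}
}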
71  config/allconfig/integration_test.go  Normal file
@@ -0,0 +1,71 @@
package allconfig_test

import (
	"path/filepath"
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/config/allconfig"
	"github.com/gohugoio/hugo/hugolib"
)

func TestDirsMount(t *testing.T) {

	files := `
-- hugo.toml --
baseURL = "https://example.com"
disableKinds = ["taxonomy", "term"]
[languages]
[languages.en]
weight = 1
[languages.sv]
weight = 2
[[module.mounts]]
source = 'content/en'
target = 'content'
lang = 'en'
[[module.mounts]]
source = 'content/sv'
target = 'content'
lang = 'sv'
-- content/en/p1.md --
---
title: "p1"
---
-- content/sv/p1.md --
---
title: "p1"
---
-- layouts/_default/single.html --
Title: {{ .Title }}
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files},
	).Build()

	//b.AssertFileContent("public/p1/index.html", "Title: p1")

	sites := b.H.Sites
	b.Assert(len(sites), qt.Equals, 2)

	configs := b.H.Configs
	mods := configs.Modules
	b.Assert(len(mods), qt.Equals, 1)
	mod := mods[0]
	b.Assert(mod.Mounts(), qt.HasLen, 8)

	enConcp := sites[0].Conf
	enConf := enConcp.GetConfig().(*allconfig.Config)

	b.Assert(enConcp.BaseURL().String(), qt.Equals, "https://example.com")
	modConf := enConf.Module
	b.Assert(modConf.Mounts, qt.HasLen, 2)
	b.Assert(modConf.Mounts[0].Source, qt.Equals, filepath.FromSlash("content/en"))
	b.Assert(modConf.Mounts[0].Target, qt.Equals, "content")
	b.Assert(modConf.Mounts[0].Lang, qt.Equals, "en")
	b.Assert(modConf.Mounts[1].Source, qt.Equals, filepath.FromSlash("content/sv"))
	b.Assert(modConf.Mounts[1].Target, qt.Equals, "content")
	b.Assert(modConf.Mounts[1].Lang, qt.Equals, "sv")

}
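A companion sketch (an assumption, not part of the commit; the test name is made up): setting baseURL on the language level should flip the multihost flag that FromLoadConfigResult derives further up in this commit, which is then exposed via Configs.IsMultihost.

func TestMultihostDetectionSketch(t *testing.T) {
	files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "page", "section"]
[languages]
[languages.en]
baseURL = "https://example.com"
weight = 1
[languages.sv]
baseURL = "https://example.se"
weight = 2
-- layouts/index.html --
Home.
`
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files},
	).Build()

	// baseURL per language means every site publishes to its own host.
	b.Assert(b.H.Configs.IsMultihost, qt.Equals, true)
}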
559  config/allconfig/load.go  Normal file
@@ -0,0 +1,559 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package allconfig contains the full configuration for Hugo.
package allconfig

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/gobwas/glob"
	"github.com/gohugoio/hugo/common/herrors"
	"github.com/gohugoio/hugo/common/hexec"
	"github.com/gohugoio/hugo/common/hugo"
	"github.com/gohugoio/hugo/common/loggers"
	"github.com/gohugoio/hugo/common/maps"
	"github.com/gohugoio/hugo/common/paths"
	"github.com/gohugoio/hugo/common/types"
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/helpers"
	hglob "github.com/gohugoio/hugo/hugofs/glob"
	"github.com/gohugoio/hugo/modules"
	"github.com/gohugoio/hugo/parser/metadecoders"
	"github.com/spf13/afero"
)

var ErrNoConfigFile = errors.New("Unable to locate config file or config directory. Perhaps you need to create a new site.\n       Run `hugo help new` for details.\n")

func LoadConfig(d ConfigSourceDescriptor) (*Configs, error) {
	if len(d.Environ) == 0 && !hugo.IsRunningAsTest() {
		d.Environ = os.Environ()
	}

	l := &configLoader{ConfigSourceDescriptor: d, cfg: config.New()}
	// Make sure we always do this, even in error situations,
	// as we have commands (e.g. "hugo mod init") that will
	// use a partial configuration to do its job.
	defer l.deleteMergeStrategies()
	res, _, err := l.loadConfigMain(d)
	if err != nil {
		return nil, fmt.Errorf("failed to load config: %w", err)
	}

	configs, err := FromLoadConfigResult(d.Fs, res)
	if err != nil {
		return nil, fmt.Errorf("failed to create config from result: %w", err)
	}

	moduleConfig, modulesClient, err := l.loadModules(configs)
	if err != nil {
		return nil, fmt.Errorf("failed to load modules: %w", err)
	}
	if len(l.ModulesConfigFiles) > 0 {
		// Config merged in from modules.
		// Re-read the config.
		configs, err = FromLoadConfigResult(d.Fs, res)
		if err != nil {
			return nil, fmt.Errorf("failed to create config: %w", err)
		}
	}

	configs.Modules = moduleConfig.ActiveModules
	configs.ModulesClient = modulesClient

	if err := configs.Init(); err != nil {
		return nil, fmt.Errorf("failed to init config: %w", err)
	}

	return configs, nil
}

// ConfigSourceDescriptor describes where to find the config (e.g. config.toml etc.).
type ConfigSourceDescriptor struct {
	Fs     afero.Fs
	Logger loggers.Logger

	// Config received from the command line.
	// These will override any config file settings.
	Flags config.Provider

	// Path to the config file to use, e.g. /my/project/config.toml
	Filename string

	// The (optional) directory for additional configuration files.
	ConfigDir string

	// production, development
	Environment string

	// Defaults to os.Environ if not set.
	Environ []string
}

func (d ConfigSourceDescriptor) configFilenames() []string {
	if d.Filename == "" {
		return nil
	}
	return strings.Split(d.Filename, ",")
}

type configLoader struct {
	cfg        config.Provider
	BaseConfig config.BaseConfig
	ConfigSourceDescriptor

	// collected
	ModulesConfig      modules.ModulesConfig
	ModulesConfigFiles []string
}

// Handle some legacy values.
func (l configLoader) applyConfigAliases() error {
	aliases := []types.KeyValueStr{{Key: "taxonomies", Value: "indexes"}}

	for _, alias := range aliases {
		if l.cfg.IsSet(alias.Key) {
			vv := l.cfg.Get(alias.Key)
			l.cfg.Set(alias.Value, vv)
		}
	}

	return nil
}

func (l configLoader) applyDefaultConfig() error {
	defaultSettings := maps.Params{
		"baseURL":                              "",
		"cleanDestinationDir":                  false,
		"watch":                                false,
		"contentDir":                           "content",
		"resourceDir":                          "resources",
		"publishDir":                           "public",
		"publishDirOrig":                       "public",
		"themesDir":                            "themes",
		"assetDir":                             "assets",
		"layoutDir":                            "layouts",
		"i18nDir":                              "i18n",
		"dataDir":                              "data",
		"archetypeDir":                         "archetypes",
		"configDir":                            "config",
		"staticDir":                            "static",
		"buildDrafts":                          false,
		"buildFuture":                          false,
		"buildExpired":                         false,
		"params":                               maps.Params{},
		"environment":                          hugo.EnvironmentProduction,
		"uglyURLs":                             false,
		"verbose":                              false,
		"ignoreCache":                          false,
		"canonifyURLs":                         false,
		"relativeURLs":                         false,
		"removePathAccents":                    false,
		"titleCaseStyle":                       "AP",
		"taxonomies":                           maps.Params{"tag": "tags", "category": "categories"},
		"permalinks":                           maps.Params{},
		"sitemap":                              maps.Params{"priority": -1, "filename": "sitemap.xml"},
		"menus":                                maps.Params{},
		"disableLiveReload":                    false,
		"pluralizeListTitles":                  true,
		"forceSyncStatic":                      false,
		"footnoteAnchorPrefix":                 "",
		"footnoteReturnLinkContents":           "",
		"newContentEditor":                     "",
		"paginate":                             10,
		"paginatePath":                         "page",
		"summaryLength":                        70,
		"rssLimit":                             -1,
		"sectionPagesMenu":                     "",
		"disablePathToLower":                   false,
		"hasCJKLanguage":                       false,
		"enableEmoji":                          false,
		"defaultContentLanguage":               "en",
		"defaultContentLanguageInSubdir":       false,
		"enableMissingTranslationPlaceholders": false,
		"enableGitInfo":                        false,
		"ignoreFiles":                          make([]string, 0),
		"disableAliases":                       false,
		"debug":                                false,
		"disableFastRender":                    false,
		"timeout":                              "30s",
		"timeZone":                             "",
		"enableInlineShortcodes":               false,
	}

	l.cfg.SetDefaults(defaultSettings)

	return nil
}

func (l configLoader) normalizeCfg(cfg config.Provider) error {
	minify := cfg.Get("minify")
	if b, ok := minify.(bool); ok && b {
		cfg.Set("minify", maps.Params{"minifyOutput": true})
	}

	// Simplify later merge.
	languages := cfg.GetStringMap("languages")
	for _, v := range languages {
		switch m := v.(type) {
		case maps.Params:
			// params have merge strategy deep by default.
			// The languages config key has strategy none by default.
			// This means that if these two sections do not exist on the left side,
			// they will not get merged in, so just create some empty maps.
			if _, ok := m["params"]; !ok {
				m["params"] = maps.Params{}
			}
		}
	}

	return nil
}

func (l configLoader) cleanExternalConfig(cfg config.Provider) error {
	if cfg.IsSet("internal") {
		cfg.Set("internal", nil)
	}
	return nil
}

func (l configLoader) applyFlagsOverrides(cfg config.Provider) error {
	for _, k := range cfg.Keys() {
		l.cfg.Set(k, cfg.Get(k))
	}
	return nil
}

func (l configLoader) applyOsEnvOverrides(environ []string) error {
	if len(environ) == 0 {
		return nil
	}

	const delim = "__env__delim"

	// Extract all that start with the HUGO prefix.
	// The delimiter is the following rune, usually "_".
	const hugoEnvPrefix = "HUGO"
	var hugoEnv []types.KeyValueStr
	for _, v := range environ {
		key, val := config.SplitEnvVar(v)
		if strings.HasPrefix(key, hugoEnvPrefix) {
			delimiterAndKey := strings.TrimPrefix(key, hugoEnvPrefix)
			if len(delimiterAndKey) < 2 {
				continue
			}
			// Allow delimiters to be case sensitive.
			// It turns out there aren't that many allowed special
			// chars in environment variables when used in Bash and similar,
			// so variables of the form HUGOxPARAMSxFOO=bar are one option.
			key := strings.ReplaceAll(delimiterAndKey[1:], delimiterAndKey[:1], delim)
			key = strings.ToLower(key)
			hugoEnv = append(hugoEnv, types.KeyValueStr{
				Key:   key,
				Value: val,
			})
		}
	}

	for _, env := range hugoEnv {
		existing, nestedKey, owner, err := maps.GetNestedParamFn(env.Key, delim, l.cfg.Get)
		if err != nil {
			return err
		}

		if existing != nil {
			val, err := metadecoders.Default.UnmarshalStringTo(env.Value, existing)
			if err != nil {
				continue
			}

			if owner != nil {
				owner[nestedKey] = val
			} else {
				l.cfg.Set(env.Key, val)
			}
		} else if nestedKey != "" {
			owner[nestedKey] = env.Value
		} else {
			// The container does not exist yet.
			l.cfg.Set(strings.ReplaceAll(env.Key, delim, "."), env.Value)
		}
	}

	return nil
}

func (l *configLoader) loadConfigMain(d ConfigSourceDescriptor) (config.LoadConfigResult, modules.ModulesConfig, error) {
	var res config.LoadConfigResult

	if d.Flags != nil {
		if err := l.normalizeCfg(d.Flags); err != nil {
			return res, l.ModulesConfig, err
		}
	}

	if d.Fs == nil {
		return res, l.ModulesConfig, errors.New("no filesystem provided")
	}

	if d.Flags != nil {
		if err := l.applyFlagsOverrides(d.Flags); err != nil {
			return res, l.ModulesConfig, err
		}
		workingDir := filepath.Clean(l.cfg.GetString("workingDir"))

		l.BaseConfig = config.BaseConfig{
			WorkingDir: workingDir,
			ThemesDir:  paths.AbsPathify(workingDir, l.cfg.GetString("themesDir")),
		}
	}

	names := d.configFilenames()

	if names != nil {
		for _, name := range names {
			var filename string
			filename, err := l.loadConfig(name)
			if err == nil {
				res.ConfigFiles = append(res.ConfigFiles, filename)
			} else if err != ErrNoConfigFile {
				return res, l.ModulesConfig, l.wrapFileError(err, filename)
			}
		}
	} else {
		for _, name := range config.DefaultConfigNames {
			var filename string
			filename, err := l.loadConfig(name)
			if err == nil {
				res.ConfigFiles = append(res.ConfigFiles, filename)
				break
			} else if err != ErrNoConfigFile {
				return res, l.ModulesConfig, l.wrapFileError(err, filename)
			}
		}
	}

	if d.ConfigDir != "" {
		absConfigDir := paths.AbsPathify(l.BaseConfig.WorkingDir, d.ConfigDir)
		dcfg, dirnames, err := config.LoadConfigFromDir(l.Fs, absConfigDir, l.Environment)
		if err == nil {
			if len(dirnames) > 0 {
				if err := l.normalizeCfg(dcfg); err != nil {
					return res, l.ModulesConfig, err
				}
				if err := l.cleanExternalConfig(dcfg); err != nil {
					return res, l.ModulesConfig, err
				}
				l.cfg.Set("", dcfg.Get(""))
				res.ConfigFiles = append(res.ConfigFiles, dirnames...)
			}
		} else if err != ErrNoConfigFile {
			if len(dirnames) > 0 {
				return res, l.ModulesConfig, l.wrapFileError(err, dirnames[0])
			}
			return res, l.ModulesConfig, err
		}
	}

	res.Cfg = l.cfg

	if err := l.applyDefaultConfig(); err != nil {
		return res, l.ModulesConfig, err
	}

	// Some settings are used before we're done collecting all settings,
	// so apply OS environment both before and after.
	if err := l.applyOsEnvOverrides(d.Environ); err != nil {
		return res, l.ModulesConfig, err
	}

	workingDir := filepath.Clean(l.cfg.GetString("workingDir"))

	l.BaseConfig = config.BaseConfig{
		WorkingDir: workingDir,
		CacheDir:   l.cfg.GetString("cacheDir"),
		ThemesDir:  paths.AbsPathify(workingDir, l.cfg.GetString("themesDir")),
	}

	var err error
	l.BaseConfig.CacheDir, err = helpers.GetCacheDir(l.Fs, l.BaseConfig.CacheDir)
	if err != nil {
		return res, l.ModulesConfig, err
	}

	res.BaseConfig = l.BaseConfig

	l.cfg.SetDefaultMergeStrategy()

	res.ConfigFiles = append(res.ConfigFiles, l.ModulesConfigFiles...)

	if d.Flags != nil {
		if err := l.applyFlagsOverrides(d.Flags); err != nil {
			return res, l.ModulesConfig, err
		}
	}

	if err := l.applyOsEnvOverrides(d.Environ); err != nil {
		return res, l.ModulesConfig, err
	}

	if err = l.applyConfigAliases(); err != nil {
		return res, l.ModulesConfig, err
	}

	return res, l.ModulesConfig, err
}

func (l *configLoader) loadModules(configs *Configs) (modules.ModulesConfig, *modules.Client, error) {
	bcfg := configs.LoadingInfo.BaseConfig
	conf := configs.Base
	workingDir := bcfg.WorkingDir
	themesDir := bcfg.ThemesDir

	cfg := configs.LoadingInfo.Cfg

	var ignoreVendor glob.Glob
	if s := conf.IgnoreVendorPaths; s != "" {
		ignoreVendor, _ = hglob.GetGlob(hglob.NormalizePath(s))
	}

	ex := hexec.New(conf.Security)

	hook := func(m *modules.ModulesConfig) error {
		for _, tc := range m.ActiveModules {
			if len(tc.ConfigFilenames()) > 0 {
				if tc.Watch() {
					l.ModulesConfigFiles = append(l.ModulesConfigFiles, tc.ConfigFilenames()...)
				}

				// Merge in the theme config using the configured
				// merge strategy.
				cfg.Merge("", tc.Cfg().Get(""))
			}
		}

		return nil
	}

	modulesClient := modules.NewClient(modules.ClientConfig{
		Fs:                 l.Fs,
		Logger:             l.Logger,
		Exec:               ex,
		HookBeforeFinalize: hook,
		WorkingDir:         workingDir,
		ThemesDir:          themesDir,
		Environment:        l.Environment,
		CacheDir:           conf.Caches.CacheDirModules(),
		ModuleConfig:       conf.Module,
		IgnoreVendor:       ignoreVendor,
	})

	moduleConfig, err := modulesClient.Collect()

	// We want to watch these for changes and trigger rebuild on version
	// changes etc.
	if moduleConfig.GoModulesFilename != "" {
		l.ModulesConfigFiles = append(l.ModulesConfigFiles, moduleConfig.GoModulesFilename)
	}

	if moduleConfig.GoWorkspaceFilename != "" {
		l.ModulesConfigFiles = append(l.ModulesConfigFiles, moduleConfig.GoWorkspaceFilename)
	}

	return moduleConfig, modulesClient, err
}

func (l configLoader) loadConfig(configName string) (string, error) {
	baseDir := l.BaseConfig.WorkingDir
	var baseFilename string
	if filepath.IsAbs(configName) {
		baseFilename = configName
	} else {
		baseFilename = filepath.Join(baseDir, configName)
	}

	var filename string
	if paths.ExtNoDelimiter(configName) != "" {
		exists, _ := helpers.Exists(baseFilename, l.Fs)
		if exists {
			filename = baseFilename
		}
	} else {
		for _, ext := range config.ValidConfigFileExtensions {
			filenameToCheck := baseFilename + "." + ext
			exists, _ := helpers.Exists(filenameToCheck, l.Fs)
			if exists {
				filename = filenameToCheck
				break
			}
		}
	}

	if filename == "" {
		return "", ErrNoConfigFile
	}

	m, err := config.FromFileToMap(l.Fs, filename)
	if err != nil {
		return filename, err
	}

	// Set overwrites keys of the same name, recursively.
	l.cfg.Set("", m)

	if err := l.normalizeCfg(l.cfg); err != nil {
		return filename, err
	}

	if err := l.cleanExternalConfig(l.cfg); err != nil {
		return filename, err
	}

	return filename, nil
}

func (l configLoader) deleteMergeStrategies() {
	l.cfg.WalkParams(func(params ...maps.KeyParams) bool {
		params[len(params)-1].Params.DeleteMergeStrategy()
		return false
	})
}

func (l configLoader) loadModulesConfig() (modules.Config, error) {
	modConfig, err := modules.DecodeConfig(l.cfg)
	if err != nil {
		return modules.Config{}, err
	}

	return modConfig, nil
}

func (l configLoader) wrapFileError(err error, filename string) error {
	fe := herrors.UnwrapFileError(err)
	if fe != nil {
		pos := fe.Position()
		pos.Filename = filename
		fe.UpdatePosition(pos)
		return err
	}
	return herrors.NewFileErrorFromFile(err, filename, l.Fs, nil)
}
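A rough illustration (an assumption, not code from this commit) of the environment override path in applyOsEnvOverrides above: keys are matched after the "HUGO" prefix, using whatever single character follows the prefix as the delimiter, so HUGO_TITLE addresses a root key and HUGO_PARAMS_FOO a nested params key. The test name is made up, and it assumes Config exposes a Title field on its RootConfig.

package allconfig_test

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/gohugoio/hugo/config/allconfig"
	"github.com/spf13/afero"
)

func TestOsEnvOverridesSketch(t *testing.T) {
	tempDir := t.TempDir()
	configFilename := filepath.Join(tempDir, "hugo.toml")
	if err := os.WriteFile(configFilename, []byte(`title = "From file"`), 0o666); err != nil {
		t.Fatal(err)
	}

	configs, err := allconfig.LoadConfig(allconfig.ConfigSourceDescriptor{
		Fs:       afero.NewOsFs(),
		Filename: configFilename,
		// Environ is applied on top of the file values.
		Environ: []string{"HUGO_TITLE=From env", "HUGO_PARAMS_FOO=bar"},
	})
	if err != nil {
		t.Fatal(err)
	}
	if configs.Base.Title != "From env" {
		t.Fatalf("unexpected title: %q", configs.Base.Title)
	}
}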
67  config/allconfig/load_test.go  Normal file
@@ -0,0 +1,67 @@
package allconfig

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/spf13/afero"
)

func BenchmarkLoad(b *testing.B) {
	tempDir := b.TempDir()
	configFilename := filepath.Join(tempDir, "hugo.toml")
	config := `
baseURL = "https://example.com"
defaultContentLanguage = 'en'

[module]
[[module.mounts]]
source = 'content/en'
target = 'content/en'
lang = 'en'
[[module.mounts]]
source = 'content/nn'
target = 'content/nn'
lang = 'nn'
[[module.mounts]]
source = 'content/no'
target = 'content/no'
lang = 'no'
[[module.mounts]]
source = 'content/sv'
target = 'content/sv'
lang = 'sv'
[[module.mounts]]
source = 'layouts'
target = 'layouts'

[languages]
[languages.en]
title = "English"
weight = 1
[languages.nn]
title = "Nynorsk"
weight = 2
[languages.no]
title = "Norsk"
weight = 3
[languages.sv]
title = "Svenska"
weight = 4
`
	if err := os.WriteFile(configFilename, []byte(config), 0666); err != nil {
		b.Fatal(err)
	}
	d := ConfigSourceDescriptor{
		Fs:       afero.NewOsFs(),
		Filename: configFilename,
	}

	for i := 0; i < b.N; i++ {
		_, err := LoadConfig(d)
		if err != nil {
			b.Fatal(err)
		}
	}
}
|
@ -17,7 +17,6 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/types"
|
"github.com/gohugoio/hugo/common/types"
|
||||||
|
|
||||||
|
@ -25,16 +24,66 @@ import (
|
||||||
"github.com/gohugoio/hugo/common/herrors"
|
"github.com/gohugoio/hugo/common/herrors"
|
||||||
"github.com/mitchellh/mapstructure"
|
"github.com/mitchellh/mapstructure"
|
||||||
"github.com/spf13/cast"
|
"github.com/spf13/cast"
|
||||||
jww "github.com/spf13/jwalterweatherman"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var DefaultBuild = Build{
|
type BaseConfig struct {
|
||||||
|
WorkingDir string
|
||||||
|
CacheDir string
|
||||||
|
ThemesDir string
|
||||||
|
PublishDir string
|
||||||
|
}
|
||||||
|
|
||||||
|
type CommonDirs struct {
|
||||||
|
// The directory where Hugo will look for themes.
|
||||||
|
ThemesDir string
|
||||||
|
|
||||||
|
// Where to put the generated files.
|
||||||
|
PublishDir string
|
||||||
|
|
||||||
|
// The directory to put the generated resources files. This directory should in most situations be considered temporary
|
||||||
|
// and not be committed to version control. But there may be cached content in here that you want to keep,
|
||||||
|
// e.g. resources/_gen/images for performance reasons or CSS built from SASS when your CI server doesn't have the full setup.
|
||||||
|
ResourceDir string
|
||||||
|
|
||||||
|
// The project root directory.
|
||||||
|
WorkingDir string
|
||||||
|
|
||||||
|
// The root directory for all cache files.
|
||||||
|
CacheDir string
|
||||||
|
|
||||||
|
// The content source directory.
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
ContentDir string
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
// The data source directory.
|
||||||
|
DataDir string
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
// The layout source directory.
|
||||||
|
LayoutDir string
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
// The i18n source directory.
|
||||||
|
I18nDir string
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
// The archetypes source directory.
|
||||||
|
ArcheTypeDir string
|
||||||
|
// Deprecated: Use module mounts.
|
||||||
|
// The assets source directory.
|
||||||
|
AssetDir string
|
||||||
|
}
|
||||||
|
|
||||||
|
type LoadConfigResult struct {
|
||||||
|
Cfg Provider
|
||||||
|
ConfigFiles []string
|
||||||
|
BaseConfig BaseConfig
|
||||||
|
}
|
||||||
|
|
||||||
|
var DefaultBuild = BuildConfig{
|
||||||
UseResourceCacheWhen: "fallback",
|
UseResourceCacheWhen: "fallback",
|
||||||
WriteStats: false,
|
WriteStats: false,
|
||||||
}
|
}
|
||||||
|
|
||||||
// Build holds some build related configuration.
|
// BuildConfig holds some build related configuration.
|
||||||
type Build struct {
|
type BuildConfig struct {
|
||||||
UseResourceCacheWhen string // never, fallback, always. Default is fallback
|
UseResourceCacheWhen string // never, fallback, always. Default is fallback
|
||||||
|
|
||||||
// When enabled, will collect and write a hugo_stats.json with some build
|
// When enabled, will collect and write a hugo_stats.json with some build
|
||||||
|
@ -46,7 +95,7 @@ type Build struct {
|
||||||
NoJSConfigInAssets bool
|
NoJSConfigInAssets bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b Build) UseResourceCache(err error) bool {
|
func (b BuildConfig) UseResourceCache(err error) bool {
|
||||||
if b.UseResourceCacheWhen == "never" {
|
if b.UseResourceCacheWhen == "never" {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
@ -58,7 +107,7 @@ func (b Build) UseResourceCache(err error) bool {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func DecodeBuild(cfg Provider) Build {
|
func DecodeBuildConfig(cfg Provider) BuildConfig {
|
||||||
m := cfg.GetStringMap("build")
|
m := cfg.GetStringMap("build")
|
||||||
b := DefaultBuild
|
b := DefaultBuild
|
||||||
if m == nil {
|
if m == nil {
|
||||||
|
@ -79,28 +128,19 @@ func DecodeBuild(cfg Provider) Build {
|
||||||
return b
|
return b
|
||||||
}
|
}
|
||||||
|
|
||||||
// Sitemap configures the sitemap to be generated.
|
// SitemapConfig configures the sitemap to be generated.
|
||||||
type Sitemap struct {
|
type SitemapConfig struct {
|
||||||
|
// The page change frequency.
|
||||||
ChangeFreq string
|
ChangeFreq string
|
||||||
Priority float64
|
// The priority of the page.
|
||||||
Filename string
|
Priority float64
|
||||||
|
// The sitemap filename.
|
||||||
|
Filename string
|
||||||
}
|
}
|
||||||
|
|
||||||
func DecodeSitemap(prototype Sitemap, input map[string]any) Sitemap {
|
func DecodeSitemap(prototype SitemapConfig, input map[string]any) (SitemapConfig, error) {
|
||||||
for key, value := range input {
|
err := mapstructure.WeakDecode(input, &prototype)
|
||||||
switch key {
|
return prototype, err
|
||||||
case "changefreq":
|
|
||||||
prototype.ChangeFreq = cast.ToString(value)
|
|
||||||
case "priority":
|
|
||||||
prototype.Priority = cast.ToFloat64(value)
|
|
||||||
case "filename":
|
|
||||||
prototype.Filename = cast.ToString(value)
|
|
||||||
default:
|
|
||||||
jww.WARN.Printf("Unknown Sitemap field: %s\n", key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return prototype
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Config for the dev server.
@@ -108,25 +148,24 @@ type Server struct {
	Headers   []Headers
	Redirects []Redirect

	compiledInit      sync.Once
	compiledHeaders   []glob.Glob
	compiledRedirects []glob.Glob
}

func (s *Server) init() {
	s.compiledInit.Do(func() {
		for _, h := range s.Headers {
			s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))
		}
		for _, r := range s.Redirects {
			s.compiledRedirects = append(s.compiledRedirects, glob.MustCompile(r.From))
		}
	})
}

func (s *Server) CompileConfig() error {
	if s.compiledHeaders != nil {
		return nil
	}
	for _, h := range s.Headers {
		s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))
	}
	for _, r := range s.Redirects {
		s.compiledRedirects = append(s.compiledRedirects, glob.MustCompile(r.From))
	}
	return nil
}
func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
	s.init()

	if s.compiledHeaders == nil {
		return nil
	}
@@ -150,8 +189,6 @@ func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
}

func (s *Server) MatchRedirect(pattern string) Redirect {
	s.init()

	if s.compiledRedirects == nil {
		return Redirect{}
	}
@@ -195,14 +232,10 @@ func (r Redirect) IsZero() bool {
	return r.From == ""
}
|
|
||||||
func DecodeServer(cfg Provider) (*Server, error) {
|
func DecodeServer(cfg Provider) (Server, error) {
|
||||||
m := cfg.GetStringMap("server")
|
|
||||||
s := &Server{}
|
s := &Server{}
|
||||||
if m == nil {
|
|
||||||
return s, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
_ = mapstructure.WeakDecode(m, s)
|
_ = mapstructure.WeakDecode(cfg.GetStringMap("server"), s)
|
||||||
|
|
||||||
for i, redir := range s.Redirects {
|
for i, redir := range s.Redirects {
|
||||||
// Get it in line with the Hugo server for OK responses.
|
// Get it in line with the Hugo server for OK responses.
|
||||||
|
@ -213,7 +246,7 @@ func DecodeServer(cfg Provider) (*Server, error) {
|
||||||
// There are some tricky infinite loop situations when dealing
|
// There are some tricky infinite loop situations when dealing
|
||||||
// when the target does not have a trailing slash.
|
// when the target does not have a trailing slash.
|
||||||
// This can certainly be handled better, but not time for that now.
|
// This can certainly be handled better, but not time for that now.
|
||||||
return nil, fmt.Errorf("unsupported redirect to value %q in server config; currently this must be either a remote destination or a local folder, e.g. \"/blog/\" or \"/blog/index.html\"", redir.To)
|
return Server{}, fmt.Errorf("unsupported redirect to value %q in server config; currently this must be either a remote destination or a local folder, e.g. \"/blog/\" or \"/blog/index.html\"", redir.To)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
s.Redirects[i] = redir
|
s.Redirects[i] = redir
|
||||||
|
@ -231,5 +264,5 @@ func DecodeServer(cfg Provider) (*Server, error) {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return s, nil
|
return *s, nil
|
||||||
}
|
}
|
||||||
|
|
|
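Because the sync.Once-based init is gone, callers are now expected to call CompileConfig themselves before matching, exactly as the updated test just below does; a hedged sketch (the TOML and patterns are illustrative):

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/config"
)

func main() {
	cfg := config.FromTOMLConfigString(`
[server]
[[server.headers]]
for = "/**.jpg"
[server.headers.values]
X-Content-Type-Options = "nosniff"
`)
	s, err := config.DecodeServer(cfg)
	if err != nil {
		panic(err)
	}
	if err := s.CompileConfig(); err != nil { // was implicit via s.init()
		panic(err)
	}
	fmt.Println(s.MatchHeaders("/images/foo.jpg"))
}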
@@ -31,7 +31,7 @@ func TestBuild(t *testing.T) {
		"useResourceCacheWhen": "always",
	})

	b := DecodeBuild(v)
	b := DecodeBuildConfig(v)

	c.Assert(b.UseResourceCacheWhen, qt.Equals, "always")

@@ -39,7 +39,7 @@ func TestBuild(t *testing.T) {
		"useResourceCacheWhen": "foo",
	})

	b = DecodeBuild(v)
	b = DecodeBuildConfig(v)

	c.Assert(b.UseResourceCacheWhen, qt.Equals, "fallback")

@@ -91,6 +91,7 @@ status = 301

	s, err := DecodeServer(cfg)
	c.Assert(err, qt.IsNil)
	c.Assert(s.CompileConfig(), qt.IsNil)

	c.Assert(s.MatchHeaders("/foo.jpg"), qt.DeepEquals, []types.KeyValueStr{
		{Key: "X-Content-Type-Options", Value: "nosniff"},
@@ -1,117 +0,0 @@
// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package config

import (
	"github.com/gohugoio/hugo/common/maps"
)

// NewCompositeConfig creates a new composite Provider with a read-only base
// and a writeable layer.
func NewCompositeConfig(base, layer Provider) Provider {
	return &compositeConfig{
		base:  base,
		layer: layer,
	}
}

// compositeConfig contains a read only config base with
// a possibly writeable config layer on top.
type compositeConfig struct {
	base  Provider
	layer Provider
}

func (c *compositeConfig) GetBool(key string) bool {
	if c.layer.IsSet(key) {
		return c.layer.GetBool(key)
	}
	return c.base.GetBool(key)
}

func (c *compositeConfig) GetInt(key string) int {
	if c.layer.IsSet(key) {
		return c.layer.GetInt(key)
	}
	return c.base.GetInt(key)
}

func (c *compositeConfig) Merge(key string, value any) {
	c.layer.Merge(key, value)
}

func (c *compositeConfig) GetParams(key string) maps.Params {
	if c.layer.IsSet(key) {
		return c.layer.GetParams(key)
	}
	return c.base.GetParams(key)
}

func (c *compositeConfig) GetStringMap(key string) map[string]any {
	if c.layer.IsSet(key) {
		return c.layer.GetStringMap(key)
	}
	return c.base.GetStringMap(key)
}

func (c *compositeConfig) GetStringMapString(key string) map[string]string {
	if c.layer.IsSet(key) {
		return c.layer.GetStringMapString(key)
	}
	return c.base.GetStringMapString(key)
}

func (c *compositeConfig) GetStringSlice(key string) []string {
	if c.layer.IsSet(key) {
		return c.layer.GetStringSlice(key)
	}
	return c.base.GetStringSlice(key)
}

func (c *compositeConfig) Get(key string) any {
	if c.layer.IsSet(key) {
		return c.layer.Get(key)
	}
	return c.base.Get(key)
}

func (c *compositeConfig) IsSet(key string) bool {
	if c.layer.IsSet(key) {
		return true
	}
	return c.base.IsSet(key)
}

func (c *compositeConfig) GetString(key string) string {
	if c.layer.IsSet(key) {
		return c.layer.GetString(key)
	}
	return c.base.GetString(key)
}

func (c *compositeConfig) Set(key string, value any) {
	c.layer.Set(key, value)
}

func (c *compositeConfig) SetDefaults(params maps.Params) {
	c.layer.SetDefaults(params)
}

func (c *compositeConfig) WalkParams(walkFn func(params ...KeyParams) bool) {
	panic("not supported")
}

func (c *compositeConfig) SetDefaultMergeStrategy() {
	panic("not supported")
}
@@ -57,6 +57,14 @@ func IsValidConfigFilename(filename string) bool {
	return validConfigFileExtensionsMap[ext]
}

func FromTOMLConfigString(config string) Provider {
	cfg, err := FromConfigString(config, "toml")
	if err != nil {
		panic(err)
	}
	return cfg
}

// FromConfigString creates a config from the given YAML, JSON or TOML config. This is useful in tests.
func FromConfigString(config, configType string) (Provider, error) {
	m, err := readConfig(metadecoders.FormatFromString(configType), []byte(config))
@@ -14,10 +14,58 @@
package config

import (
	"time"

	"github.com/gohugoio/hugo/common/maps"
	"github.com/gohugoio/hugo/common/types"
	"github.com/gohugoio/hugo/common/urls"
	"github.com/gohugoio/hugo/langs"
)

// AllProvider is a sub set of all config settings.
type AllProvider interface {
	Language() *langs.Language
	Languages() langs.Languages
	LanguagesDefaultFirst() langs.Languages
	BaseURL() urls.BaseURL
	BaseURLLiveReload() urls.BaseURL
	Environment() string
	IsMultihost() bool
	IsMultiLingual() bool
	NoBuildLock() bool
	BaseConfig() BaseConfig
	Dirs() CommonDirs
	Quiet() bool
	DirsBase() CommonDirs
	GetConfigSection(string) any
	GetConfig() any
	CanonifyURLs() bool
	DisablePathToLower() bool
	RemovePathAccents() bool
	IsUglyURLs(section string) bool
	DefaultContentLanguage() string
	DefaultContentLanguageInSubdir() bool
	IsLangDisabled(string) bool
	SummaryLength() int
	Paginate() int
	PaginatePath() string
	BuildExpired() bool
	BuildFuture() bool
	BuildDrafts() bool
	Running() bool
	PrintUnusedTemplates() bool
	EnableMissingTranslationPlaceholders() bool
	TemplateMetrics() bool
	TemplateMetricsHints() bool
	LogI18nWarnings() bool
	CreateTitle(s string) string
	IgnoreFile(s string) bool
	NewContentEditor() string
	Timeout() time.Duration
	StaticDirs() []string
	IgnoredErrors() map[string]bool
}
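AllProvider deliberately exposes whole configuration sections as `any`; consumers type-assert the section they own. A minimal sketch of that pattern, mirroring the call the deploy package makes later in this commit (the helper name is invented):

package example

import (
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/deploy"
)

// deployConfigFrom is an illustrative helper, not part of this commit.
// GetConfigSection returns the already-decoded section; the caller asserts its concrete type.
func deployConfigFrom(conf config.AllProvider) deploy.DeployConfig {
	return conf.GetConfigSection("deployment").(deploy.DeployConfig)
}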
// Provider provides the configuration settings for Hugo.
type Provider interface {
	GetString(key string) string
@@ -29,10 +77,11 @@ type Provider interface {
	GetStringSlice(key string) []string
	Get(key string) any
	Set(key string, value any)
	Keys() []string
	Merge(key string, value any)
	SetDefaults(params maps.Params)
	SetDefaultMergeStrategy()
	WalkParams(walkFn func(params ...KeyParams) bool)
	WalkParams(walkFn func(params ...maps.KeyParams) bool)
	IsSet(key string) bool
}

@@ -44,22 +93,6 @@ func GetStringSlicePreserveString(cfg Provider, key string) []string {
	return types.ToStringSlicePreserveString(sd)
}
// SetBaseTestDefaults provides some common config defaults used in tests.
func SetBaseTestDefaults(cfg Provider) Provider {
	setIfNotSet(cfg, "baseURL", "https://example.org")
	setIfNotSet(cfg, "resourceDir", "resources")
	setIfNotSet(cfg, "contentDir", "content")
	setIfNotSet(cfg, "dataDir", "data")
	setIfNotSet(cfg, "i18nDir", "i18n")
	setIfNotSet(cfg, "layoutDir", "layouts")
	setIfNotSet(cfg, "assetDir", "assets")
	setIfNotSet(cfg, "archetypeDir", "archetypes")
	setIfNotSet(cfg, "publishDir", "public")
	setIfNotSet(cfg, "workingDir", "")
	setIfNotSet(cfg, "defaultContentLanguage", "en")
	return cfg
}

func setIfNotSet(cfg Provider, key string, value any) {
	if !cfg.IsSet(key) {
		cfg.Set(key, value)
@@ -19,6 +19,8 @@ import (
	"strings"
	"sync"

	xmaps "golang.org/x/exp/maps"

	"github.com/spf13/cast"

	"github.com/gohugoio/hugo/common/maps"
@@ -75,11 +77,6 @@ func NewFrom(params maps.Params) Provider {
	}
}

// NewWithTestDefaults is used in tests only.
func NewWithTestDefaults() Provider {
	return SetBaseTestDefaults(New())
}

// defaultConfigProvider is a Provider backed by a map where all keys are lower case.
// All methods are thread safe.
type defaultConfigProvider struct {
@@ -160,9 +157,9 @@ func (c *defaultConfigProvider) Set(k string, v any) {
	k = strings.ToLower(k)

	if k == "" {
		if p, ok := maps.ToParamsAndPrepare(v); ok {
		if p, err := maps.ToParamsAndPrepare(v); err == nil {
			// Set the values directly in root.
			c.root.Set(p)
			maps.SetParams(c.root, p)
		} else {
			c.root[k] = v
		}
@@ -184,7 +181,7 @@ func (c *defaultConfigProvider) Set(k string, v any) {
	if existing, found := m[key]; found {
		if p1, ok := existing.(maps.Params); ok {
			if p2, ok := v.(maps.Params); ok {
				p1.Set(p2)
				maps.SetParams(p1, p2)
				return
			}
		}
@@ -208,12 +205,6 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
	defer c.mu.Unlock()
	k = strings.ToLower(k)

	const (
		languagesKey = "languages"
		paramsKey    = "params"
		menusKey     = "menus"
	)

	if k == "" {
		rs, f := c.root.GetMergeStrategy()
		if f && rs == maps.ParamsMergeStrategyNone {
@@ -222,7 +213,7 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
			return
		}

		if p, ok := maps.ToParamsAndPrepare(v); ok {
		if p, err := maps.ToParamsAndPrepare(v); err == nil {
			// As there may be keys in p not in root, we need to handle
			// those as a special case.
			var keysToDelete []string
@@ -230,49 +221,14 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
				if pp, ok := vv.(maps.Params); ok {
					if pppi, ok := c.root[kk]; ok {
						ppp := pppi.(maps.Params)
						if kk == languagesKey {
							// Languages is currently a special case.
							// We may have languages with menus or params in the
							// right map that is not present in the left map.
							// With the default merge strategy those items will not
							// be passed over.
							var hasParams, hasMenus bool
							for _, rv := range pp {
								if lkp, ok := rv.(maps.Params); ok {
									_, hasMenus = lkp[menusKey]
									_, hasParams = lkp[paramsKey]
								}
							}

							if hasMenus || hasParams {
								for _, lv := range ppp {
									if lkp, ok := lv.(maps.Params); ok {
										if hasMenus {
											if _, ok := lkp[menusKey]; !ok {
												p := maps.Params{}
												p.SetDefaultMergeStrategy(maps.ParamsMergeStrategyShallow)
												lkp[menusKey] = p
											}
										}
										if hasParams {
											if _, ok := lkp[paramsKey]; !ok {
												p := maps.Params{}
												p.SetDefaultMergeStrategy(maps.ParamsMergeStrategyShallow)
												lkp[paramsKey] = p
											}
										}
									}
								}
							}
						}
						ppp.Merge(pp)
						maps.MergeParamsWithStrategy("", ppp, pp)
					} else {
						// We need to use the default merge strategy for
						// this key.
						np := make(maps.Params)
						strategy := c.determineMergeStrategy(KeyParams{Key: "", Params: c.root}, KeyParams{Key: kk, Params: np})
						strategy := c.determineMergeStrategy(maps.KeyParams{Key: "", Params: c.root}, maps.KeyParams{Key: kk, Params: np})
						np.SetDefaultMergeStrategy(strategy)
						np.SetMergeStrategy(strategy)
						np.Merge(pp)
						maps.MergeParamsWithStrategy("", np, pp)
						c.root[kk] = np
						if np.IsZero() {
							// Just keep it until merge is done.
@@ -282,7 +238,7 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
						}
					}
			// Merge the rest.
			c.root.MergeRoot(p)
			maps.MergeParams(c.root, p)
			for _, k := range keysToDelete {
				delete(c.root, k)
			}
@@ -307,7 +263,7 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
	if existing, found := m[key]; found {
		if p1, ok := existing.(maps.Params); ok {
			if p2, ok := v.(maps.Params); ok {
				p1.Merge(p2)
				maps.MergeParamsWithStrategy("", p1, p2)
			}
		}
	} else {
@@ -315,9 +271,15 @@ func (c *defaultConfigProvider) Merge(k string, v any) {
	}
}

func (c *defaultConfigProvider) WalkParams(walkFn func(params ...KeyParams) bool) {
	var walk func(params ...KeyParams)
	walk = func(params ...KeyParams) {

func (c *defaultConfigProvider) Keys() []string {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return xmaps.Keys(c.root)
}

func (c *defaultConfigProvider) WalkParams(walkFn func(params ...maps.KeyParams) bool) {
	var walk func(params ...maps.KeyParams)
	walk = func(params ...maps.KeyParams) {
		if walkFn(params...) {
			return
		}
@@ -325,17 +287,17 @@ func (c *defaultConfigProvider) WalkParams(walkFn func(params ...KeyParams) bool
		i := len(params)
		for k, v := range p1.Params {
			if p2, ok := v.(maps.Params); ok {
				paramsplus1 := make([]KeyParams, i+1)
				paramsplus1 := make([]maps.KeyParams, i+1)
				copy(paramsplus1, params)
				paramsplus1[i] = KeyParams{Key: k, Params: p2}
				paramsplus1[i] = maps.KeyParams{Key: k, Params: p2}
				walk(paramsplus1...)
			}
		}
	}
	walk(KeyParams{Key: "", Params: c.root})
	walk(maps.KeyParams{Key: "", Params: c.root})
}

func (c *defaultConfigProvider) determineMergeStrategy(params ...KeyParams) maps.ParamsMergeStrategy {
func (c *defaultConfigProvider) determineMergeStrategy(params ...maps.KeyParams) maps.ParamsMergeStrategy {
	if len(params) == 0 {
		return maps.ParamsMergeStrategyNone
	}
@@ -391,13 +353,8 @@ func (c *defaultConfigProvider) determineMergeStrategy(params ...KeyParams) maps
	return strategy
}

type KeyParams struct {
	Key    string
	Params maps.Params
}

func (c *defaultConfigProvider) SetDefaultMergeStrategy() {
	c.WalkParams(func(params ...KeyParams) bool {
	c.WalkParams(func(params ...maps.KeyParams) bool {
		if len(params) == 0 {
			return false
		}
@@ -409,7 +366,7 @@ func (c *defaultConfigProvider) SetDefaultMergeStrategy() {
		}
		strategy := c.determineMergeStrategy(params...)
		if strategy != "" {
			p.SetDefaultMergeStrategy(strategy)
			p.SetMergeStrategy(strategy)
		}
		return false
	})
76 config/namespace.go Normal file
@@ -0,0 +1,76 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package config

import (
	"encoding/json"

	"github.com/gohugoio/hugo/identity"
)

func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) {
	// Calculate the hash of the input (not including any defaults applied later).
	// This allows us to introduce new config options without breaking the hash.
	h := identity.HashString(configSource)

	// Build the config
	c, ext, err := buildConfig(configSource)
	if err != nil {
		return nil, err
	}

	if ext == nil {
		ext = configSource
	}

	if ext == nil {
		panic("ext is nil")
	}

	ns := &ConfigNamespace[S, C]{
		SourceStructure: ext,
		SourceHash:      h,
		Config:          c,
	}

	return ns, nil
}

// ConfigNamespace holds a Hugo configuration namespace.
// The construct looks a little odd, but it's built to make the configuration elements
// both self-documenting and contained in a common structure.
type ConfigNamespace[S, C any] struct {
	// SourceStructure represents the source configuration with any defaults applied.
	// This is used for documentation and printing of the configuration setup to the user.
	SourceStructure any

	// SourceHash is a hash of the source configuration before any defaults gets applied.
	SourceHash string

	// Config is the final configuration as used by Hugo.
	Config C
}

// MarshalJSON marshals the source structure.
func (ns *ConfigNamespace[S, C]) MarshalJSON() ([]byte, error) {
	return json.Marshal(ns.SourceStructure)
}

// Signature returns the signature of the source structure.
// Note that this is for documentation purposes only and SourceStructure may not always be cast to S (it's usually just a map).
func (ns *ConfigNamespace[S, C]) Signature() S {
	var s S
	return s
}
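The test file that follows exercises DecodeNamespace directly; as an additional sketch, this is roughly how a config section could wrap itself in a namespace (the option struct and its default are invented for illustration):

package config

import "github.com/mitchellh/mapstructure"

// exampleOptions is an invented option struct, used only to illustrate DecodeNamespace.
type exampleOptions struct {
	Enabled bool
	Limit   int
}

func decodeExampleNamespace(in any) (*ConfigNamespace[exampleOptions, exampleOptions], error) {
	return DecodeNamespace[exampleOptions](in, func(v any) (exampleOptions, any, error) {
		opts := exampleOptions{Limit: 10} // defaults are applied after the raw input is hashed
		err := mapstructure.WeakDecode(v, &opts)
		return opts, nil, err
	})
}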
68 config/namespace_test.go Normal file
@@ -0,0 +1,68 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package config

import (
	"strings"
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/common/maps"
	"github.com/mitchellh/mapstructure"
)

func TestNamespace(t *testing.T) {
	c := qt.New(t)
	c.Assert(true, qt.Equals, true)

	//ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig)

	ns, err := DecodeNamespace[[]*tstNsExt](
		map[string]interface{}{"foo": "bar"},
		func(v any) (*tstNsExt, any, error) {
			t := &tstNsExt{}
			m, err := maps.ToStringMapE(v)
			if err != nil {
				return nil, nil, err
			}
			return t, nil, mapstructure.WeakDecode(m, t)
		},
	)

	c.Assert(err, qt.IsNil)
	c.Assert(ns, qt.Not(qt.IsNil))
	c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]interface{}{"foo": "bar"})
	c.Assert(ns.SourceHash, qt.Equals, "14368731254619220105")
	c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"})
	c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil))

}

type (
	tstNsExt struct {
		Foo string
	}
	tstNsInt struct {
		Foo string
	}
)

func (t *tstNsExt) Init() error {
	t.Foo = strings.ToUpper(t.Foo)
	return nil
}
func (t *tstNsInt) Compile(ext *tstNsExt) error {
	t.Foo = ext.Foo + " qux"
	return nil
}
|
@ -54,14 +54,16 @@ var DefaultConfig = Config{
|
||||||
}
|
}
|
||||||
|
|
||||||
// Config is the top level security config.
|
// Config is the top level security config.
|
||||||
|
// <docsmeta>{"name": "security", "description": "This section holds the top level security config.", "newIn": "0.91.0" }</docsmeta>
|
||||||
type Config struct {
|
type Config struct {
|
||||||
// Restricts access to os.Exec.
|
// Restricts access to os.Exec....
|
||||||
|
// <docsmeta>{ "newIn": "0.91.0" }</docsmeta>
|
||||||
Exec Exec `json:"exec"`
|
Exec Exec `json:"exec"`
|
||||||
|
|
||||||
// Restricts access to certain template funcs.
|
// Restricts access to certain template funcs.
|
||||||
Funcs Funcs `json:"funcs"`
|
Funcs Funcs `json:"funcs"`
|
||||||
|
|
||||||
// Restricts access to resources.Get, getJSON, getCSV.
|
// Restricts access to resources.GetRemote, getJSON, getCSV.
|
||||||
HTTP HTTP `json:"http"`
|
HTTP HTTP `json:"http"`
|
||||||
|
|
||||||
// Allow inline shortcodes
|
// Allow inline shortcodes
|
||||||
|
|
|
@@ -54,7 +54,7 @@ disableInlineCSS = true
func TestUseSettingsFromRootIfSet(t *testing.T) {
	c := qt.New(t)

	cfg := config.NewWithTestDefaults()
	cfg := config.New()
	cfg.Set("disqusShortname", "root_short")
	cfg.Set("googleAnalytics", "ga_root")
84 config/testconfig/testconfig.go Normal file
@@ -0,0 +1,84 @@
// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// This package should only be used for testing.
package testconfig

import (
	_ "unsafe"

	"github.com/gohugoio/hugo/common/maps"
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/config/allconfig"
	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/hugofs"
	toml "github.com/pelletier/go-toml/v2"
	"github.com/spf13/afero"
)

func GetTestConfigs(fs afero.Fs, cfg config.Provider) *allconfig.Configs {
	if fs == nil {
		fs = afero.NewMemMapFs()
	}
	if cfg == nil {
		cfg = config.New()
	}
	// Make sure that the workingDir exists.
	workingDir := cfg.GetString("workingDir")
	if workingDir != "" {
		if err := fs.MkdirAll(workingDir, 0777); err != nil {
			panic(err)
		}
	}

	configs, err := allconfig.LoadConfig(allconfig.ConfigSourceDescriptor{Fs: fs, Flags: cfg})
	if err != nil {
		panic(err)
	}
	return configs

}

func GetTestConfig(fs afero.Fs, cfg config.Provider) config.AllProvider {
	return GetTestConfigs(fs, cfg).GetFirstLanguageConfig()
}

func GetTestDeps(fs afero.Fs, cfg config.Provider, beforeInit ...func(*deps.Deps)) *deps.Deps {
	if fs == nil {
		fs = afero.NewMemMapFs()
	}
	conf := GetTestConfig(fs, cfg)
	d := &deps.Deps{
		Conf: conf,
		Fs:   hugofs.NewFrom(fs, conf.BaseConfig()),
	}
	for _, f := range beforeInit {
		f(d)
	}
	if err := d.Init(); err != nil {
		panic(err)
	}
	return d
}

func GetTestConfigSectionFromStruct(section string, v any) config.AllProvider {
	data, err := toml.Marshal(v)
	if err != nil {
		panic(err)
	}
	p := maps.Params{
		section: config.FromTOMLConfigString(string(data)).Get(""),
	}
	cfg := config.NewFrom(p)
	return GetTestConfig(nil, cfg)
}
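These helpers replace the removed NewWithTestDefaults/SetBaseTestDefaults pair; a sketch of how a test might use them (the test name is illustrative):

package somepackage_test

import (
	"testing"

	"github.com/gohugoio/hugo/config/testconfig"
)

func TestWithDefaultConfig(t *testing.T) {
	// nil fs and nil cfg fall back to an in-memory fs and an empty config,
	// which LoadConfig then fills with Hugo's defaults.
	d := testconfig.GetTestDeps(nil, nil)
	if d == nil {
		t.Fatal("expected deps to be built")
	}
}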
@@ -340,7 +340,7 @@ func (b *contentBuilder) mapArcheTypeDir() error {
}

func (b *contentBuilder) openInEditorIfConfigured(filename string) error {
	editor := b.h.Cfg.GetString("newContentEditor")
	editor := b.h.Conf.NewContentEditor()
	if editor == "" {
		return nil
	}
@@ -21,6 +21,8 @@ import (
	"testing"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/config/allconfig"
	"github.com/gohugoio/hugo/config/testconfig"

	"github.com/gohugoio/hugo/deps"

@@ -80,7 +82,8 @@ func TestNewContentFromFile(t *testing.T) {
	mm := afero.NewMemMapFs()
	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	conf := testconfig.GetTestConfigs(fs.Source, cfg)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs})
	c.Assert(err, qt.IsNil)
	err = create.NewContent(h, cas.kind, cas.path, false)

@@ -141,7 +144,8 @@ i18n: {{ T "hugo" }}
	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	conf := testconfig.GetTestConfigs(fs.Source, cfg)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

@@ -183,7 +187,8 @@ site RegularPages: {{ len site.RegularPages }}
	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	conf := testconfig.GetTestConfigs(fs.Source, cfg)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

@@ -232,8 +237,8 @@ i18n: {{ T "hugo" }}

	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)
	conf := testconfig.GetTestConfigs(fs.Source, cfg)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

@@ -264,7 +269,8 @@ func TestNewContentForce(t *testing.T) {
	c.Assert(initFs(mm), qt.IsNil)
	cfg, fs := newTestCfg(c, mm)

	h, err := hugolib.NewHugoSites(deps.DepsCfg{Cfg: cfg, Fs: fs})
	conf := testconfig.GetTestConfigs(fs.Source, cfg)
	h, err := hugolib.NewHugoSites(deps.DepsCfg{Configs: conf, Fs: fs})
	c.Assert(err, qt.IsNil)
	c.Assert(len(h.Sites), qt.Equals, 2)

@@ -461,8 +467,8 @@ other = "Hugo Rokkar!"`), 0o755), qt.IsNil)

	c.Assert(afero.WriteFile(mm, "config.toml", []byte(cfg), 0o755), qt.IsNil)

	v, _, err := hugolib.LoadConfig(hugolib.ConfigSourceDescriptor{Fs: mm, Filename: "config.toml"})
	res, err := allconfig.LoadConfig(allconfig.ConfigSourceDescriptor{Fs: mm, Filename: "config.toml"})
	c.Assert(err, qt.IsNil)

	return v, hugofs.NewFrom(mm, v)
	return res.LoadingInfo.Cfg, hugofs.NewFrom(mm, res.LoadingInfo.BaseConfig)
}
@@ -55,17 +55,12 @@ type Deployer struct {
	localFs afero.Fs
	bucket  *blob.Bucket

	target        *target          // the target to deploy to
	matchers      []*matcher       // matchers to apply to uploaded files
	mediaTypes    media.Types      // Hugo's MediaType to guess ContentType
	ordering      []*regexp.Regexp // orders uploads
	quiet         bool             // true reduces STDOUT
	confirm       bool             // true enables confirmation before making changes
	dryRun        bool             // true skips conformations and prints changes instead of applying them
	force         bool             // true forces upload of all files
	invalidateCDN bool             // true enables invalidate CDN cache (if possible)
	maxDeletes    int              // caps the # of files to delete; -1 to disable
	workers       int              // The number of workers to transfer files

	mediaTypes media.Types // Hugo's MediaType to guess ContentType
	quiet      bool        // true reduces STDOUT

	cfg DeployConfig

	target *Target // the target to deploy to

	// For tests...
	summary deploySummary // summary of latest Deploy results
@@ -78,21 +73,18 @@ type deploySummary struct {
const metaMD5Hash = "md5chksum" // the meta key to store md5hash in

// New constructs a new *Deployer.
func New(cfg config.Provider, localFs afero.Fs) (*Deployer, error) {
func New(cfg config.AllProvider, localFs afero.Fs) (*Deployer, error) {
	targetName := cfg.GetString("target")

	// Load the [deployment] section of the config.
	dcfg, err := decodeConfig(cfg)
	if err != nil {
		return nil, err
	}

	dcfg := cfg.GetConfigSection(deploymentConfigKey).(DeployConfig)
	targetName := dcfg.Target

	if len(dcfg.Targets) == 0 {
		return nil, errors.New("no deployment targets found")
	}
	mediaTypes := cfg.GetConfigSection("mediaTypes").(media.Types)

	// Find the target to deploy to.
	var tgt *target
	var tgt *Target
	if targetName == "" {
		// Default to the first target.
		tgt = dcfg.Targets[0]
@@ -108,18 +100,11 @@ func New(cfg config.Provider, localFs afero.Fs) (*Deployer, error) {
	}

	return &Deployer{
		localFs: localFs,
		target:  tgt,

		matchers:      dcfg.Matchers,
		ordering:      dcfg.ordering,
		mediaTypes:    dcfg.mediaTypes,
		quiet:         cfg.GetBool("quiet"),
		confirm:       cfg.GetBool("confirm"),
		dryRun:        cfg.GetBool("dryRun"),
		force:         cfg.GetBool("force"),
		invalidateCDN: cfg.GetBool("invalidateCDN"),
		maxDeletes:    cfg.GetInt("maxDeletes"),
		workers:       cfg.GetInt("workers"),

		quiet:      cfg.BuildExpired(),
		mediaTypes: mediaTypes,
		cfg:        dcfg,
	}, nil
}
@@ -138,12 +123,16 @@ func (d *Deployer) Deploy(ctx context.Context) error {
		return err
	}

	if d.cfg.Workers <= 0 {
		d.cfg.Workers = 10
	}

	// Load local files from the source directory.
	var include, exclude glob.Glob
	if d.target != nil {
		include, exclude = d.target.includeGlob, d.target.excludeGlob
	}
	local, err := walkLocal(d.localFs, d.matchers, include, exclude, d.mediaTypes)
	local, err := walkLocal(d.localFs, d.cfg.Matchers, include, exclude, d.mediaTypes)
	if err != nil {
		return err
	}
@@ -159,7 +148,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
	d.summary.NumRemote = len(remote)

	// Diff local vs remote to see what changes need to be applied.
	uploads, deletes := findDiffs(local, remote, d.force)
	uploads, deletes := findDiffs(local, remote, d.cfg.Force)
	d.summary.NumUploads = len(uploads)
	d.summary.NumDeletes = len(deletes)
	if len(uploads)+len(deletes) == 0 {
@@ -173,7 +162,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
	}

	// Ask for confirmation before proceeding.
	if d.confirm && !d.dryRun {
	if d.cfg.Confirm && !d.cfg.DryRun {
		fmt.Printf("Continue? (Y/n) ")
		var confirm string
		if _, err := fmt.Scanln(&confirm); err != nil {
@@ -186,15 +175,9 @@ func (d *Deployer) Deploy(ctx context.Context) error {

	// Order the uploads. They are organized in groups; all uploads in a group
	// must be complete before moving on to the next group.
	uploadGroups := applyOrdering(d.ordering, uploads)
	uploadGroups := applyOrdering(d.cfg.ordering, uploads)

	// Apply the changes in parallel, using an inverted worker
	// pool (https://www.youtube.com/watch?v=5zXAHh5tJqQ&t=26m58s).
	// sem prevents more than nParallel concurrent goroutines.
	if d.workers <= 0 {
		d.workers = 10
	}
	nParallel := d.workers
	nParallel := d.cfg.Workers
	var errs []error
	var errMu sync.Mutex // protects errs

@@ -207,7 +190,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
		// Within the group, apply uploads in parallel.
		sem := make(chan struct{}, nParallel)
		for _, upload := range uploads {
			if d.dryRun {
			if d.cfg.DryRun {
				if !d.quiet {
					jww.FEEDBACK.Printf("[DRY RUN] Would upload: %v\n", upload)
				}
@@ -230,15 +213,15 @@ func (d *Deployer) Deploy(ctx context.Context) error {
		}
	}

	if d.maxDeletes != -1 && len(deletes) > d.maxDeletes {
	if d.cfg.MaxDeletes != -1 && len(deletes) > d.cfg.MaxDeletes {
		jww.WARN.Printf("Skipping %d deletes because it is more than --maxDeletes (%d). If this is expected, set --maxDeletes to a larger number, or -1 to disable this check.\n", len(deletes), d.maxDeletes)
		jww.WARN.Printf("Skipping %d deletes because it is more than --maxDeletes (%d). If this is expected, set --maxDeletes to a larger number, or -1 to disable this check.\n", len(deletes), d.cfg.MaxDeletes)
		d.summary.NumDeletes = 0
	} else {
		// Apply deletes in parallel.
		sort.Slice(deletes, func(i, j int) bool { return deletes[i] < deletes[j] })
		sem := make(chan struct{}, nParallel)
		for _, del := range deletes {
			if d.dryRun {
			if d.cfg.DryRun {
				if !d.quiet {
					jww.FEEDBACK.Printf("[DRY RUN] Would delete %s\n", del)
				}
@@ -264,6 +247,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
			sem <- struct{}{}
		}
	}

	if len(errs) > 0 {
		if !d.quiet {
			jww.FEEDBACK.Printf("Encountered %d errors.\n", len(errs))
@@ -274,9 +258,9 @@ func (d *Deployer) Deploy(ctx context.Context) error {
		jww.FEEDBACK.Println("Success!")
	}

	if d.invalidateCDN {
	if d.cfg.InvalidateCDN {
		if d.target.CloudFrontDistributionID != "" {
			if d.dryRun {
			if d.cfg.DryRun {
				if !d.quiet {
					jww.FEEDBACK.Printf("[DRY RUN] Would invalidate CloudFront CDN with ID %s\n", d.target.CloudFrontDistributionID)
				}
@@ -289,7 +273,7 @@ func (d *Deployer) Deploy(ctx context.Context) error {
			}
		}
		if d.target.GoogleCloudCDNOrigin != "" {
			if d.dryRun {
			if d.cfg.DryRun {
				if !d.quiet {
					jww.FEEDBACK.Printf("[DRY RUN] Would invalidate Google Cloud CDN with origin %s\n", d.target.GoogleCloudCDNOrigin)
				}
@@ -356,14 +340,14 @@ type localFile struct {
	UploadSize int64

	fs         afero.Fs
	matcher    *matcher
	matcher    *Matcher
	md5        []byte       // cache
	gzipped    bytes.Buffer // cached of gzipped contents if gzipping
	mediaTypes media.Types
}

// newLocalFile initializes a *localFile.
func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *matcher, mt media.Types) (*localFile, error) {
func newLocalFile(fs afero.Fs, nativePath, slashpath string, m *Matcher, mt media.Types) (*localFile, error) {
	f, err := fs.Open(nativePath)
	if err != nil {
		return nil, err
@@ -448,7 +432,7 @@ func (lf *localFile) ContentType() string {

	ext := filepath.Ext(lf.NativePath)
	if mimeType, _, found := lf.mediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, ".")); found {
		return mimeType.Type()
		return mimeType.Type
	}

	return mime.TypeByExtension(ext)
@@ -495,7 +479,7 @@ func knownHiddenDirectory(name string) bool {

// walkLocal walks the source directory and returns a flat list of files,
// using localFile.SlashPath as the map keys.
func walkLocal(fs afero.Fs, matchers []*matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
func walkLocal(fs afero.Fs, matchers []*Matcher, include, exclude glob.Glob, mediaTypes media.Types) (map[string]*localFile, error) {
	retval := map[string]*localFile{}
	err := afero.Walk(fs, "", func(path string, info os.FileInfo, err error) error {
		if err != nil {
@@ -534,7 +518,7 @@ func walkLocal(fs afero.Fs, matchers []*matcher, include, exclude glob.Glob, med
		}

		// Find the first matching matcher (if any).
		var m *matcher
		var m *Matcher
		for _, cur := range matchers {
			if cur.Matches(slashpath) {
				m = cur
@ -25,23 +25,37 @@ import (
|
||||||
"github.com/gobwas/glob"
|
"github.com/gobwas/glob"
|
||||||
"github.com/gohugoio/hugo/config"
|
"github.com/gohugoio/hugo/config"
|
||||||
hglob "github.com/gohugoio/hugo/hugofs/glob"
|
hglob "github.com/gohugoio/hugo/hugofs/glob"
|
||||||
"github.com/gohugoio/hugo/media"
|
|
||||||
"github.com/mitchellh/mapstructure"
|
"github.com/mitchellh/mapstructure"
|
||||||
)
|
)
|
||||||
|
|
||||||
const deploymentConfigKey = "deployment"
|
const deploymentConfigKey = "deployment"
|
||||||
|
|
||||||
// deployConfig is the complete configuration for deployment.
|
// DeployConfig is the complete configuration for deployment.
|
||||||
type deployConfig struct {
|
type DeployConfig struct {
|
||||||
Targets []*target
|
Targets []*Target
|
||||||
Matchers []*matcher
|
Matchers []*Matcher
|
||||||
Order []string
|
Order []string
|
||||||
|
|
||||||
ordering []*regexp.Regexp // compiled Order
|
// Usually set via flags.
|
||||||
mediaTypes media.Types
|
// Target deployment Name; defaults to the first one.
|
||||||
|
Target string
|
||||||
|
// Show a confirm prompt before deploying.
|
||||||
|
Confirm bool
|
||||||
|
// DryRun will try the deployment without any remote changes.
|
||||||
|
DryRun bool
|
||||||
|
// Force will re-upload all files.
|
||||||
|
Force bool
|
||||||
|
// Invalidate the CDN cache listed in the deployment target.
|
||||||
|
InvalidateCDN bool
|
||||||
|
// MaxDeletes is the maximum number of files to delete.
|
||||||
|
MaxDeletes int
|
||||||
|
// Number of concurrent workers to use when uploading files.
|
||||||
|
Workers int
|
||||||
|
|
||||||
|
ordering []*regexp.Regexp // compiled Order
|
||||||
}
|
}
|
||||||
|
|
||||||
type target struct {
|
type Target struct {
|
||||||
Name string
|
Name string
|
||||||
URL string
|
URL string
|
||||||
|
|
||||||
|
@ -61,7 +75,7 @@ type target struct {
|
||||||
excludeGlob glob.Glob
|
excludeGlob glob.Glob
|
||||||
}
|
}
|
||||||
|
|
||||||
func (tgt *target) parseIncludeExclude() error {
|
func (tgt *Target) parseIncludeExclude() error {
|
||||||
var err error
|
var err error
|
||||||
if tgt.Include != "" {
tgt.includeGlob, err = hglob.GetGlob(tgt.Include)
@@ -78,9 +92,9 @@ func (tgt *target) parseIncludeExclude() error {
return nil
}

-// matcher represents configuration to be applied to files whose paths match
+// Matcher represents configuration to be applied to files whose paths match
// a specified pattern.
-type matcher struct {
+type Matcher struct {
// Pattern is the string pattern to match against paths.
// Matching is done against paths converted to use / as the path separator.
Pattern string
@@ -109,15 +123,14 @@ type matcher struct {
re *regexp.Regexp
}

-func (m *matcher) Matches(path string) bool {
+func (m *Matcher) Matches(path string) bool {
return m.re.MatchString(path)
}

-// decode creates a config from a given Hugo configuration.
+// DecodeConfig creates a config from a given Hugo configuration.
-func decodeConfig(cfg config.Provider) (deployConfig, error) {
+func DecodeConfig(cfg config.Provider) (DeployConfig, error) {
var (
-mediaTypesConfig []map[string]any
-dcfg deployConfig
+dcfg DeployConfig
)

if !cfg.IsSet(deploymentConfigKey) {
@@ -126,8 +139,13 @@ func decodeConfig(cfg config.Provider) (deployConfig, error) {
if err := mapstructure.WeakDecode(cfg.GetStringMap(deploymentConfigKey), &dcfg); err != nil {
return dcfg, err
}

+if dcfg.Workers <= 0 {
+dcfg.Workers = 10
+}

for _, tgt := range dcfg.Targets {
-if *tgt == (target{}) {
+if *tgt == (Target{}) {
return dcfg, errors.New("empty deployment target")
}
if err := tgt.parseIncludeExclude(); err != nil {
@@ -136,7 +154,7 @@ func decodeConfig(cfg config.Provider) (deployConfig, error) {
}
var err error
for _, m := range dcfg.Matchers {
-if *m == (matcher{}) {
+if *m == (Matcher{}) {
return dcfg, errors.New("empty deployment matcher")
}
m.re, err = regexp.Compile(m.Pattern)
@@ -152,13 +170,5 @@ func decodeConfig(cfg config.Provider) (deployConfig, error) {
dcfg.ordering = append(dcfg.ordering, re)
}

-if cfg.IsSet("mediaTypes") {
-mediaTypesConfig = append(mediaTypesConfig, cfg.GetStringMap("mediaTypes"))
-}
-
-dcfg.mediaTypes, err = media.DecodeTypes(mediaTypesConfig...)
-if err != nil {
-return dcfg, err
-}
return dcfg, nil
}
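
For orientation: the change above exports the deployment configuration types (DeployConfig, Target, Matcher) and DecodeConfig, and gives Workers a default of 10. A minimal sketch of decoding a deployment section from outside the package; the TOML values are illustrative only, and the import paths are assumed from the repository layout:

package main

import (
	"fmt"
	"log"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/deploy"
)

func main() {
	// Hypothetical config string; the keys mirror the deployment tests below.
	const toml = `
[deployment]
order = ["^.jpg$", "^.gif$"]

[[deployment.targets]]
name = "mybucket"
url = "mem://"

[[deployment.matchers]]
pattern = "^.+\\.(js|css|svg|ttf)$"
cacheControl = "max-age=31536000, no-transform, public"
gzip = true
`
	cfg, err := config.FromConfigString(toml, "toml")
	if err != nil {
		log.Fatal(err)
	}

	// DecodeConfig is exported by this commit and compiles the matcher regexps.
	dcfg, err := deploy.DecodeConfig(cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Workers now defaults to 10 when left unset.
	fmt.Println(len(dcfg.Targets), len(dcfg.Matchers), dcfg.Workers)
}
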
@@ -84,7 +84,7 @@ force = true
cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil)

-dcfg, err := decodeConfig(cfg)
+dcfg, err := DecodeConfig(cfg)
c.Assert(err, qt.IsNil)

// Order.
@@ -139,7 +139,7 @@ order = ["["] # invalid regular expression
cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil)

-_, err = decodeConfig(cfg)
+_, err = DecodeConfig(cfg)
c.Assert(err, qt.Not(qt.IsNil))
}

@@ -157,14 +157,14 @@ Pattern = "[" # invalid regular expression
cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil)

-_, err = decodeConfig(cfg)
+_, err = DecodeConfig(cfg)
c.Assert(err, qt.Not(qt.IsNil))
}

func TestDecodeConfigDefault(t *testing.T) {
c := qt.New(t)

-dcfg, err := decodeConfig(config.New())
+dcfg, err := DecodeConfig(config.New())
c.Assert(err, qt.IsNil)
c.Assert(len(dcfg.Targets), qt.Equals, 0)
c.Assert(len(dcfg.Matchers), qt.Equals, 0)
@@ -180,7 +180,7 @@ func TestEmptyTarget(t *testing.T) {
cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil)

-_, err = decodeConfig(cfg)
+_, err = DecodeConfig(cfg)
c.Assert(err, qt.Not(qt.IsNil))
}

@@ -194,6 +194,6 @@ func TestEmptyMatcher(t *testing.T) {
cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil)

-_, err = decodeConfig(cfg)
+_, err = DecodeConfig(cfg)
c.Assert(err, qt.Not(qt.IsNil))
}
@@ -108,7 +108,7 @@ func TestFindDiffs(t *testing.T) {
{
Description: "local == remote with route.Force true -> diffs",
Local: []*localFile{
-{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &matcher{Force: true}, md5: hash1},
+{NativePath: "aaa", SlashPath: "aaa", UploadSize: 1, matcher: &Matcher{Force: true}, md5: hash1},
makeLocal("bbb", 2, hash1),
},
Remote: []*blob.ListObject{
@@ -289,8 +289,8 @@ func TestLocalFile(t *testing.T) {
tests := []struct {
Description string
Path string
-Matcher *matcher
+Matcher *Matcher
-MediaTypesConfig []map[string]any
+MediaTypesConfig map[string]any
WantContent []byte
WantSize int64
WantMD5 []byte
@@ -315,7 +315,7 @@ func TestLocalFile(t *testing.T) {
{
Description: "CacheControl from matcher",
Path: "foo.txt",
-Matcher: &matcher{CacheControl: "max-age=630720000"},
+Matcher: &Matcher{CacheControl: "max-age=630720000"},
WantContent: contentBytes,
WantSize: contentLen,
WantMD5: contentMD5[:],
@@ -324,7 +324,7 @@ func TestLocalFile(t *testing.T) {
{
Description: "ContentEncoding from matcher",
Path: "foo.txt",
-Matcher: &matcher{ContentEncoding: "foobar"},
+Matcher: &Matcher{ContentEncoding: "foobar"},
WantContent: contentBytes,
WantSize: contentLen,
WantMD5: contentMD5[:],
@@ -333,7 +333,7 @@ func TestLocalFile(t *testing.T) {
{
Description: "ContentType from matcher",
Path: "foo.txt",
-Matcher: &matcher{ContentType: "foo/bar"},
+Matcher: &Matcher{ContentType: "foo/bar"},
WantContent: contentBytes,
WantSize: contentLen,
WantMD5: contentMD5[:],
@@ -342,7 +342,7 @@ func TestLocalFile(t *testing.T) {
{
Description: "gzipped content",
Path: "foo.txt",
-Matcher: &matcher{Gzip: true},
+Matcher: &Matcher{Gzip: true},
WantContent: gzBytes,
WantSize: gzLen,
WantMD5: gzMD5[:],
@@ -351,11 +351,9 @@ func TestLocalFile(t *testing.T) {
{
Description: "Custom MediaType",
Path: "foo.hugo",
-MediaTypesConfig: []map[string]any{
-{
-"hugo/custom": map[string]any{
-"suffixes": []string{"hugo"},
-},
-},
+MediaTypesConfig: map[string]any{
+"hugo/custom": map[string]any{
+"suffixes": []string{"hugo"},
+},
},
WantContent: contentBytes,
@@ -373,11 +371,11 @@ func TestLocalFile(t *testing.T) {
}
mediaTypes := media.DefaultTypes
if len(tc.MediaTypesConfig) > 0 {
-mt, err := media.DecodeTypes(tc.MediaTypesConfig...)
+mt, err := media.DecodeTypes(tc.MediaTypesConfig)
if err != nil {
t.Fatal(err)
}
-mediaTypes = mt
+mediaTypes = mt.Config
}
lf, err := newLocalFile(fs, tc.Path, filepath.ToSlash(tc.Path), tc.Matcher, mediaTypes)
if err != nil {
@@ -556,9 +554,9 @@ func TestEndToEndSync(t *testing.T) {
}
deployer := &Deployer{
localFs: test.fs,
-maxDeletes: -1,
bucket: test.bucket,
mediaTypes: media.DefaultTypes,
+cfg: DeployConfig{MaxDeletes: -1},
}

// Initial deployment should sync remote with local.
@@ -639,9 +637,9 @@ func TestMaxDeletes(t *testing.T) {
}
deployer := &Deployer{
localFs: test.fs,
-maxDeletes: -1,
bucket: test.bucket,
mediaTypes: media.DefaultTypes,
+cfg: DeployConfig{MaxDeletes: -1},
}

// Sync remote with local.
@@ -662,7 +660,7 @@ func TestMaxDeletes(t *testing.T) {
}

// A deployment with maxDeletes=0 shouldn't change anything.
-deployer.maxDeletes = 0
+deployer.cfg.MaxDeletes = 0
if err := deployer.Deploy(ctx); err != nil {
t.Errorf("deploy failed: %v", err)
}
@@ -672,7 +670,7 @@ func TestMaxDeletes(t *testing.T) {
}

// A deployment with maxDeletes=1 shouldn't change anything either.
-deployer.maxDeletes = 1
+deployer.cfg.MaxDeletes = 1
if err := deployer.Deploy(ctx); err != nil {
t.Errorf("deploy failed: %v", err)
}
@@ -682,7 +680,7 @@ func TestMaxDeletes(t *testing.T) {
}

// A deployment with maxDeletes=2 should make the changes.
-deployer.maxDeletes = 2
+deployer.cfg.MaxDeletes = 2
if err := deployer.Deploy(ctx); err != nil {
t.Errorf("deploy failed: %v", err)
}
@@ -700,7 +698,7 @@ func TestMaxDeletes(t *testing.T) {
}

// A deployment with maxDeletes=-1 should make the changes.
-deployer.maxDeletes = -1
+deployer.cfg.MaxDeletes = -1
if err := deployer.Deploy(ctx); err != nil {
t.Errorf("deploy failed: %v", err)
}
@@ -762,7 +760,7 @@ func TestIncludeExclude(t *testing.T) {
if err != nil {
t.Fatal(err)
}
-tgt := &target{
+tgt := &Target{
Include: test.Include,
Exclude: test.Exclude,
}
@@ -770,9 +768,8 @@ func TestIncludeExclude(t *testing.T) {
t.Error(err)
}
deployer := &Deployer{
localFs: fsTest.fs,
-maxDeletes: -1,
-bucket: fsTest.bucket,
+cfg: DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
target: tgt,
mediaTypes: media.DefaultTypes,
}
@@ -828,9 +825,8 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
t.Fatal(err)
}
deployer := &Deployer{
localFs: fsTest.fs,
-maxDeletes: -1,
-bucket: fsTest.bucket,
+cfg: DeployConfig{MaxDeletes: -1}, bucket: fsTest.bucket,
mediaTypes: media.DefaultTypes,
}

@@ -848,7 +844,7 @@ func TestIncludeExcludeRemoteDelete(t *testing.T) {
}

// Second sync
-tgt := &target{
+tgt := &Target{
Include: test.Include,
Exclude: test.Exclude,
}
@@ -882,7 +878,7 @@ func TestCompression(t *testing.T) {
deployer := &Deployer{
localFs: test.fs,
bucket: test.bucket,
-matchers: []*matcher{{Pattern: ".*", Gzip: true, re: regexp.MustCompile(".*")}},
+cfg: DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: ".*", Gzip: true, re: regexp.MustCompile(".*")}}},
mediaTypes: media.DefaultTypes,
}

@@ -937,7 +933,7 @@ func TestMatching(t *testing.T) {
deployer := &Deployer{
localFs: test.fs,
bucket: test.bucket,
-matchers: []*matcher{{Pattern: "^subdir/aaa$", Force: true, re: regexp.MustCompile("^subdir/aaa$")}},
+cfg: DeployConfig{MaxDeletes: -1, Matchers: []*Matcher{{Pattern: "^subdir/aaa$", Force: true, re: regexp.MustCompile("^subdir/aaa$")}}},
mediaTypes: media.DefaultTypes,
}

@@ -962,7 +958,7 @@ func TestMatching(t *testing.T) {
}

// Repeat with a matcher that should now match 3 files.
-deployer.matchers = []*matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}}
+deployer.cfg.Matchers = []*Matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}}
if err := deployer.Deploy(ctx); err != nil {
t.Errorf("no-op deploy with triple force matcher: %v", err)
}
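
The test changes above all follow one pattern: Deployer no longer carries loose maxDeletes/matchers fields, so tests set them through the cfg DeployConfig field. A condensed, hypothetical sketch of that wiring, package-internal like the tests above; the test name and the in-memory bucket via gocloud.dev's memblob are assumptions made for illustration:

package deploy

import (
	"context"
	"regexp"
	"testing"

	"github.com/gohugoio/hugo/media"
	"github.com/spf13/afero"
	"gocloud.dev/blob/memblob"
)

func TestDeployConfigWiringSketch(t *testing.T) {
	fs := afero.NewMemMapFs()
	afero.WriteFile(fs, "aaa", []byte("content"), 0644)

	bucket := memblob.OpenBucket(nil)
	defer bucket.Close()

	deployer := &Deployer{
		localFs:    fs,
		bucket:     bucket,
		mediaTypes: media.DefaultTypes,
		// MaxDeletes and Matchers now live on DeployConfig.
		cfg: DeployConfig{
			MaxDeletes: -1,
			Matchers:   []*Matcher{{Pattern: "aaa", Force: true, re: regexp.MustCompile("aaa")}},
		},
	}
	if err := deployer.Deploy(context.Background()); err != nil {
		t.Fatal(err)
	}
}
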
450
deps/deps.go
vendored
@@ -4,30 +4,27 @@ import (
"context"
"fmt"
"path/filepath"
+"sort"
"strings"
"sync"
"sync/atomic"
-"time"

-"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/common/loggers"
-"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/config"
+"github.com/gohugoio/hugo/config/allconfig"
"github.com/gohugoio/hugo/config/security"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
-"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/postpub"

"github.com/gohugoio/hugo/metrics"
-"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/tpl"
-"github.com/spf13/cast"
+"github.com/spf13/afero"
jww "github.com/spf13/jwalterweatherman"
)

@@ -45,10 +42,7 @@ type Deps struct {
ExecHelper *hexec.Exec

// The templates to use. This will usually implement the full tpl.TemplateManager.
-tmpl tpl.TemplateHandler
+tmplHandlers *tpl.TemplateHandlers

-// We use this to parse and execute ad-hoc text templates.
-textTmpl tpl.TemplateParseFinder

// The file systems to use.
Fs *hugofs.Fs `json:"-"`
@@ -66,56 +60,170 @@ type Deps struct {
ResourceSpec *resources.Spec

// The configuration to use
-Cfg config.Provider `json:"-"`
+Conf config.AllProvider `json:"-"`

-// The file cache to use.
-FileCaches filecache.Caches

// The translation func to use
Translate func(ctx context.Context, translationID string, templateData any) string `json:"-"`

-// The language in use. TODO(bep) consolidate with site
-Language *langs.Language

// The site building.
Site page.Site

-// All the output formats available for the current site.
-OutputFormatsConfig output.Formats
+TemplateProvider ResourceProvider

-// FilenameHasPostProcessPrefix is a set of filenames in /public that
-// contains a post-processing prefix.
-FilenameHasPostProcessPrefix []string

-templateProvider ResourceProvider
-WithTemplate func(templ tpl.TemplateManager) error `json:"-"`

// Used in tests
OverloadedTemplateFuncs map[string]any

-translationProvider ResourceProvider
+TranslationProvider ResourceProvider

Metrics metrics.Provider

-// Timeout is configurable in site config.
-Timeout time.Duration

// BuildStartListeners will be notified before a build starts.
BuildStartListeners *Listeners

// Resources that gets closed when the build is done or the server shuts down.
BuildClosers *Closers

-// Atomic values set during a build.
// This is common/global for all sites.
BuildState *BuildState

-// Whether we are in running (server) mode
-Running bool

*globalErrHandler
}

+func (d Deps) Clone(s page.Site, conf config.AllProvider) (*Deps, error) {
+d.Conf = conf
+d.Site = s
+d.ExecHelper = nil
+d.ContentSpec = nil
+
+if err := d.Init(); err != nil {
+return nil, err
+}
+
+return &d, nil
+
+}
+
+func (d *Deps) SetTempl(t *tpl.TemplateHandlers) {
+d.tmplHandlers = t
+}
+
+func (d *Deps) Init() error {
+if d.Conf == nil {
+panic("conf is nil")
+}
+
+if d.Fs == nil {
+// For tests.
+d.Fs = hugofs.NewFrom(afero.NewMemMapFs(), d.Conf.BaseConfig())
+}
+
+if d.Log == nil {
+d.Log = loggers.NewErrorLogger()
+}
+
+if d.LogDistinct == nil {
+d.LogDistinct = helpers.NewDistinctLogger(d.Log)
+}
+
+if d.globalErrHandler == nil {
+d.globalErrHandler = &globalErrHandler{}
+}
+
+if d.BuildState == nil {
+d.BuildState = &BuildState{}
+}
+
+if d.BuildStartListeners == nil {
+d.BuildStartListeners = &Listeners{}
+}
+
+if d.BuildClosers == nil {
+d.BuildClosers = &Closers{}
+}
+
+if d.Metrics == nil && d.Conf.TemplateMetrics() {
+d.Metrics = metrics.NewProvider(d.Conf.TemplateMetricsHints())
+}
+
+if d.ExecHelper == nil {
+d.ExecHelper = hexec.New(d.Conf.GetConfigSection("security").(security.Config))
+}
+
+if d.PathSpec == nil {
+hashBytesReceiverFunc := func(name string, match bool) {
+if !match {
+return
+}
+d.BuildState.AddFilenameWithPostPrefix(name)
+}
+
+// Skip binary files.
+mediaTypes := d.Conf.GetConfigSection("mediaTypes").(media.Types)
+hashBytesSHouldCheck := func(name string) bool {
+ext := strings.TrimPrefix(filepath.Ext(name), ".")
+return mediaTypes.IsTextSuffix(ext)
+}
+d.Fs.PublishDir = hugofs.NewHasBytesReceiver(d.Fs.PublishDir, hashBytesSHouldCheck, hashBytesReceiverFunc, []byte(postpub.PostProcessPrefix))
+pathSpec, err := helpers.NewPathSpec(d.Fs, d.Conf, d.Log)
+if err != nil {
+return err
+}
+d.PathSpec = pathSpec
+} else {
+var err error
+d.PathSpec, err = helpers.NewPathSpecWithBaseBaseFsProvided(d.Fs, d.Conf, d.Log, d.PathSpec.BaseFs)
+if err != nil {
+return err
+}
+}
+
+if d.ContentSpec == nil {
+contentSpec, err := helpers.NewContentSpec(d.Conf, d.Log, d.Content.Fs, d.ExecHelper)
+if err != nil {
+return err
+}
+d.ContentSpec = contentSpec
+}
+
+if d.SourceSpec == nil {
+d.SourceSpec = source.NewSourceSpec(d.PathSpec, nil, d.Fs.Source)
+}
+
+var common *resources.SpecCommon
+if d.ResourceSpec != nil {
+common = d.ResourceSpec.SpecCommon
+}
+resourceSpec, err := resources.NewSpec(d.PathSpec, common, d.BuildState, d.Log, d, d.ExecHelper)
+if err != nil {
+return fmt.Errorf("failed to create resource spec: %w", err)
+}
+d.ResourceSpec = resourceSpec
+
+return nil
+}
+
+func (d *Deps) Compile(prototype *Deps) error {
+var err error
+if prototype == nil {
+if err = d.TemplateProvider.NewResource(d); err != nil {
+return err
+}
+if err = d.TranslationProvider.NewResource(d); err != nil {
+return err
+}
+return nil
+}
+
+if err = d.TemplateProvider.CloneResource(d, prototype); err != nil {
+return err
+}
+
+if err = d.TranslationProvider.CloneResource(d, prototype); err != nil {
+return err
+}
+
+return nil
+}
+
type globalErrHandler struct {
// Channel for some "hard to get to" build errors
buildErrors chan error
@@ -181,236 +289,22 @@ func (b *Listeners) Notify() {

// ResourceProvider is used to create and refresh, and clone resources needed.
type ResourceProvider interface {
-Update(deps *Deps) error
+NewResource(dst *Deps) error
-Clone(deps *Deps) error
+CloneResource(dst, src *Deps) error
}

func (d *Deps) Tmpl() tpl.TemplateHandler {
-return d.tmpl
+return d.tmplHandlers.Tmpl
}

func (d *Deps) TextTmpl() tpl.TemplateParseFinder {
-return d.textTmpl
+return d.tmplHandlers.TxtTmpl
-}
-
-func (d *Deps) SetTmpl(tmpl tpl.TemplateHandler) {
-d.tmpl = tmpl
-}
-
-func (d *Deps) SetTextTmpl(tmpl tpl.TemplateParseFinder) {
-d.textTmpl = tmpl
-}
-
-// LoadResources loads translations and templates.
-func (d *Deps) LoadResources() error {
-// Note that the translations need to be loaded before the templates.
-if err := d.translationProvider.Update(d); err != nil {
-return fmt.Errorf("loading translations: %w", err)
-}
-
-if err := d.templateProvider.Update(d); err != nil {
-return fmt.Errorf("loading templates: %w", err)
-}
-
-return nil
-}
-
-// New initializes a Dep struct.
-// Defaults are set for nil values,
-// but TemplateProvider, TranslationProvider and Language are always required.
-func New(cfg DepsCfg) (*Deps, error) {
-var (
-logger = cfg.Logger
-fs = cfg.Fs
-d *Deps
-)
-
-if cfg.TemplateProvider == nil {
-panic("Must have a TemplateProvider")
-}
-
-if cfg.TranslationProvider == nil {
-panic("Must have a TranslationProvider")
-}
-
-if cfg.Language == nil {
-panic("Must have a Language")
-}
-
-if logger == nil {
-logger = loggers.NewErrorLogger()
-}
-
-if fs == nil {
-// Default to the production file system.
-fs = hugofs.NewDefault(cfg.Language)
-}
-
-if cfg.MediaTypes == nil {
-cfg.MediaTypes = media.DefaultTypes
-}
-
-if cfg.OutputFormats == nil {
-cfg.OutputFormats = output.DefaultFormats
-}
-
-securityConfig, err := security.DecodeConfig(cfg.Cfg)
-if err != nil {
-return nil, fmt.Errorf("failed to create security config from configuration: %w", err)
-}
-execHelper := hexec.New(securityConfig)
-
-var filenameHasPostProcessPrefixMu sync.Mutex
-hashBytesReceiverFunc := func(name string, match bool) {
-if !match {
-return
-}
-filenameHasPostProcessPrefixMu.Lock()
-d.FilenameHasPostProcessPrefix = append(d.FilenameHasPostProcessPrefix, name)
-filenameHasPostProcessPrefixMu.Unlock()
-}
-
-// Skip binary files.
-hashBytesSHouldCheck := func(name string) bool {
-ext := strings.TrimPrefix(filepath.Ext(name), ".")
-mime, _, found := cfg.MediaTypes.GetBySuffix(ext)
-if !found {
-return false
-}
-switch mime.MainType {
-case "text", "application":
-return true
-default:
-return false
-}
-}
-fs.PublishDir = hugofs.NewHasBytesReceiver(fs.PublishDir, hashBytesSHouldCheck, hashBytesReceiverFunc, []byte(postpub.PostProcessPrefix))
-
-ps, err := helpers.NewPathSpec(fs, cfg.Language, logger)
-if err != nil {
-return nil, fmt.Errorf("create PathSpec: %w", err)
-}
-
-fileCaches, err := filecache.NewCaches(ps)
-if err != nil {
-return nil, fmt.Errorf("failed to create file caches from configuration: %w", err)
-}
-
-errorHandler := &globalErrHandler{}
-buildState := &BuildState{}
-
-resourceSpec, err := resources.NewSpec(ps, fileCaches, buildState, logger, errorHandler, execHelper, cfg.OutputFormats, cfg.MediaTypes)
-if err != nil {
-return nil, err
-}
-
-contentSpec, err := helpers.NewContentSpec(cfg.Language, logger, ps.BaseFs.Content.Fs, execHelper)
-if err != nil {
-return nil, err
-}
-
-sp := source.NewSourceSpec(ps, nil, fs.Source)
-
-timeout := 30 * time.Second
-if cfg.Cfg.IsSet("timeout") {
-v := cfg.Cfg.Get("timeout")
-d, err := types.ToDurationE(v)
-if err == nil {
-timeout = d
-}
-}
-ignoreErrors := cast.ToStringSlice(cfg.Cfg.Get("ignoreErrors"))
-ignorableLogger := loggers.NewIgnorableLogger(logger, ignoreErrors...)
-
-logDistinct := helpers.NewDistinctLogger(logger)
-
-d = &Deps{
-Fs: fs,
-Log: ignorableLogger,
-LogDistinct: logDistinct,
-ExecHelper: execHelper,
-templateProvider: cfg.TemplateProvider,
-translationProvider: cfg.TranslationProvider,
-WithTemplate: cfg.WithTemplate,
-OverloadedTemplateFuncs: cfg.OverloadedTemplateFuncs,
-PathSpec: ps,
-ContentSpec: contentSpec,
-SourceSpec: sp,
-ResourceSpec: resourceSpec,
-Cfg: cfg.Language,
-Language: cfg.Language,
-Site: cfg.Site,
-FileCaches: fileCaches,
-BuildStartListeners: &Listeners{},
-BuildClosers: &Closers{},
-BuildState: buildState,
-Running: cfg.Running,
-Timeout: timeout,
-globalErrHandler: errorHandler,
-}
-
-if cfg.Cfg.GetBool("templateMetrics") {
-d.Metrics = metrics.NewProvider(cfg.Cfg.GetBool("templateMetricsHints"))
-}
-
-return d, nil
}

func (d *Deps) Close() error {
return d.BuildClosers.Close()
}

-// ForLanguage creates a copy of the Deps with the language dependent
-// parts switched out.
-func (d Deps) ForLanguage(cfg DepsCfg, onCreated func(d *Deps) error) (*Deps, error) {
-l := cfg.Language
-var err error
-
-d.PathSpec, err = helpers.NewPathSpecWithBaseBaseFsProvided(d.Fs, l, d.Log, d.BaseFs)
-if err != nil {
-return nil, err
-}
-
-d.ContentSpec, err = helpers.NewContentSpec(l, d.Log, d.BaseFs.Content.Fs, d.ExecHelper)
-if err != nil {
-return nil, err
-}
-
-d.Site = cfg.Site
-
-// These are common for all sites, so reuse.
-// TODO(bep) clean up these inits.
-resourceCache := d.ResourceSpec.ResourceCache
-postBuildAssets := d.ResourceSpec.PostBuildAssets
-d.ResourceSpec, err = resources.NewSpec(d.PathSpec, d.ResourceSpec.FileCaches, d.BuildState, d.Log, d.globalErrHandler, d.ExecHelper, cfg.OutputFormats, cfg.MediaTypes)
-if err != nil {
-return nil, err
-}
-d.ResourceSpec.ResourceCache = resourceCache
-d.ResourceSpec.PostBuildAssets = postBuildAssets
-
-d.Cfg = l
-d.Language = l
-
-if onCreated != nil {
-if err = onCreated(&d); err != nil {
-return nil, err
-}
-}
-
-if err := d.translationProvider.Clone(&d); err != nil {
-return nil, err
-}
-
-if err := d.templateProvider.Clone(&d); err != nil {
-return nil, err
-}
-
-d.BuildStartListeners = &Listeners{}
-
-return &d, nil
-}
-
// DepsCfg contains configuration options that can be used to configure Hugo
// on a global level, i.e. logging etc.
// Nil values will be given default values.
@@ -422,47 +316,53 @@ type DepsCfg struct {
// The file systems to use
Fs *hugofs.Fs

-// The language to use.
-Language *langs.Language

// The Site in use
Site page.Site

-// The configuration to use.
-Cfg config.Provider
+Configs *allconfig.Configs

-// The media types configured.
-MediaTypes media.Types

-// The output formats configured.
-OutputFormats output.Formats

// Template handling.
TemplateProvider ResourceProvider
-WithTemplate func(templ tpl.TemplateManager) error
-// Used in tests
-OverloadedTemplateFuncs map[string]any

// i18n handling.
TranslationProvider ResourceProvider

-// Whether we are in running (server) mode
-Running bool
}

-// BuildState are flags that may be turned on during a build.
+// BuildState are state used during a build.
type BuildState struct {
counter uint64

+mu sync.Mutex // protects state below.
+
+// A set of filenames in /public that
+// contains a post-processing prefix.
+filenamesWithPostPrefix map[string]bool
+}
+
+func (b *BuildState) AddFilenameWithPostPrefix(filename string) {
+b.mu.Lock()
+defer b.mu.Unlock()
+if b.filenamesWithPostPrefix == nil {
+b.filenamesWithPostPrefix = make(map[string]bool)
+}
+b.filenamesWithPostPrefix[filename] = true
+}
+
+func (b *BuildState) GetFilenamesWithPostPrefix() []string {
+b.mu.Lock()
+defer b.mu.Unlock()
+var filenames []string
+for filename := range b.filenamesWithPostPrefix {
+filenames = append(filenames, filename)
+}
+sort.Strings(filenames)
+return filenames
}

func (b *BuildState) Incr() int {
return int(atomic.AddUint64(&b.counter, uint64(1)))
}

-func NewBuildState() BuildState {
-return BuildState{}
-}
-
type Closer interface {
Close() error
}
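
The removed deps.New/LoadResources/ForLanguage flow is replaced by exported fields plus Init and Compile. A rough sketch of how a caller might wire this now; buildDeps and its parameters are placeholders, and the provider and config values are assumed to come from elsewhere:

package example

import (
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/deps"
	"github.com/gohugoio/hugo/resources/page"
)

// buildDeps is a sketch, not Hugo's actual internal wiring.
func buildDeps(conf config.AllProvider, s page.Site, tmpl, i18n deps.ResourceProvider) (*deps.Deps, error) {
	d := &deps.Deps{
		Conf:                conf, // the new aggregated configuration
		Site:                s,
		TemplateProvider:    tmpl,
		TranslationProvider: i18n,
		// Fs, Log, BuildState etc. get defaults in Init when left nil.
	}
	if err := d.Init(); err != nil {
		return nil, err
	}
	// A nil prototype means: create template and translation resources from scratch.
	if err := d.Compile(nil); err != nil {
		return nil, err
	}
	return d, nil
}
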
5
deps/deps_test.go
vendored
@@ -11,17 +11,18 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package deps
+package deps_test

import (
"testing"

qt "github.com/frankban/quicktest"
+"github.com/gohugoio/hugo/deps"
)

func TestBuildFlags(t *testing.T) {
c := qt.New(t)
-var bf BuildState
+var bf deps.BuildState
bf.Incr()
bf.Incr()
bf.Incr()
9
go.mod
@@ -47,12 +47,12 @@ require (
github.com/niklasfasching/go-org v1.6.6
github.com/olekukonko/tablewriter v0.0.5
github.com/pelletier/go-toml/v2 v2.0.6
-github.com/rogpeppe/go-internal v1.9.0
+github.com/rogpeppe/go-internal v1.10.1-0.20230508101108-a4f6fabd84c5
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd
github.com/sanity-io/litter v1.5.5
github.com/spf13/afero v1.9.3
github.com/spf13/cast v1.5.1
-github.com/spf13/cobra v1.6.1
+github.com/spf13/cobra v1.7.0
github.com/spf13/fsync v0.9.0
github.com/spf13/jwalterweatherman v1.1.0
github.com/spf13/pflag v1.0.5
@@ -94,6 +94,8 @@ require (
github.com/aws/aws-sdk-go-v2/service/sso v1.4.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.7.0 // indirect
github.com/aws/smithy-go v1.8.0 // indirect
+github.com/bep/helpers v0.4.0 // indirect
+github.com/bep/simplecobra v0.2.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/dlclark/regexp2 v1.7.0 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
@@ -106,7 +108,7 @@ require (
github.com/googleapis/gax-go/v2 v2.3.0 // indirect
github.com/googleapis/go-type-adapters v1.0.0 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.1 // indirect
-github.com/inconshreveable/mousetrap v1.0.1 // indirect
+github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/invopop/yaml v0.1.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/kr/pretty v0.3.1 // indirect
@@ -119,6 +121,7 @@ require (
github.com/russross/blackfriday/v2 v2.1.0 // indirect
go.opencensus.io v0.24.0 // indirect
golang.org/x/crypto v0.3.0 // indirect
+golang.org/x/mod v0.9.0 // indirect
golang.org/x/oauth2 v0.2.0 // indirect
golang.org/x/sys v0.5.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
12
go.sum
@@ -179,10 +179,14 @@ github.com/bep/golibsass v1.1.0 h1:pjtXr00IJZZaOdfryNa9wARTB3Q0BmxC3/V1KNcgyTw=
github.com/bep/golibsass v1.1.0/go.mod h1:DL87K8Un/+pWUS75ggYv41bliGiolxzDKWJAq3eJ1MA=
github.com/bep/gowebp v0.2.0 h1:ZVfK8i9PpZqKHEmthQSt3qCnnHycbLzBPEsVtk2ch2Q=
github.com/bep/gowebp v0.2.0/go.mod h1:ZhFodwdiFp8ehGJpF4LdPl6unxZm9lLFjxD3z2h2AgI=
+github.com/bep/helpers v0.4.0 h1:ab9veaAiWY4ST48Oxp5usaqivDmYdB744fz+tcZ3Ifs=
+github.com/bep/helpers v0.4.0/go.mod h1:/QpHdmcPagDw7+RjkLFCvnlUc8lQ5kg4KDrEkb2Yyco=
github.com/bep/lazycache v0.2.0 h1:HKrlZTrDxHIrNKqmnurH42ryxkngCMYLfBpyu40VcwY=
github.com/bep/lazycache v0.2.0/go.mod h1:xUIsoRD824Vx0Q/n57+ZO7kmbEhMBOnTjM/iPixNGbg=
github.com/bep/overlayfs v0.6.0 h1:sgLcq/qtIzbaQNl2TldGXOkHvqeZB025sPvHOQL+DYo=
github.com/bep/overlayfs v0.6.0/go.mod h1:NFjSmn3kCqG7KX2Lmz8qT8VhPPCwZap3UNogXawoQHM=
+github.com/bep/simplecobra v0.2.0 h1:gfdZZ8QlPBMC9R9DRzUsxExR3FyuNtRkqMJqK98SBno=
+github.com/bep/simplecobra v0.2.0/go.mod h1:EOp6bCKuuHmwA9bQcRC8LcDB60co2Cmht5X4xMIOwf0=
github.com/bep/tmc v0.5.1 h1:CsQnSC6MsomH64gw0cT5f+EwQDcvZz4AazKunFwTpuI=
github.com/bep/tmc v0.5.1/go.mod h1:tGYHN8fS85aJPhDLgXETVKp+PR382OvFi2+q2GkGsq0=
github.com/bep/workers v1.0.0 h1:U+H8YmEaBCEaFZBst7GcRVEoqeRC9dzH2dWOwGmOchg=
@@ -408,6 +412,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/invopop/yaml v0.1.0 h1:YW3WGUoJEXYfzWBjn00zIlrw7brGVD0fUKRYDPAPhrc=
github.com/invopop/yaml v0.1.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q=
github.com/jdkato/prose v1.2.1 h1:Fp3UnJmLVISmlc57BgKUzdjr0lOtjqTZicL3PaYy6cU=
@@ -493,6 +499,8 @@ github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFR
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/rogpeppe/go-internal v1.10.1-0.20230508101108-a4f6fabd84c5 h1:Tb1D114RozKzV2dDfarvSZn8lVYvjcGSCDaMQ+b4I+E=
+github.com/rogpeppe/go-internal v1.10.1-0.20230508101108-a4f6fabd84c5/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=
@@ -510,6 +518,8 @@ github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
+github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
+github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY=
github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
@@ -628,6 +638,8 @@ golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/mod v0.9.0 h1:KENHtAZL2y3NLMYZeHY9DW8HW8V+kQyJsY/V9JlKvCs=
+golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -50,30 +50,18 @@ type ContentSpec struct {
anchorNameSanitizer converter.AnchorNameSanitizer
getRenderer func(t hooks.RendererType, id any) any

-// SummaryLength is the length of the summary that Hugo extracts from a content.
-summaryLength int
-
-BuildFuture bool
-BuildExpired bool
-BuildDrafts bool
-
-Cfg config.Provider
+Cfg config.AllProvider
}

// NewContentSpec returns a ContentSpec initialized
// with the appropriate fields from the given config.Provider.
-func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.Fs, ex *hexec.Exec) (*ContentSpec, error) {
+func NewContentSpec(cfg config.AllProvider, logger loggers.Logger, contentFs afero.Fs, ex *hexec.Exec) (*ContentSpec, error) {
spec := &ContentSpec{
-summaryLength: cfg.GetInt("summaryLength"),
-BuildFuture: cfg.GetBool("buildFuture"),
-BuildExpired: cfg.GetBool("buildExpired"),
-BuildDrafts: cfg.GetBool("buildDrafts"),
-
Cfg: cfg,
}

converterProvider, err := markup.NewConverterProvider(converter.ProviderConfig{
-Cfg: cfg,
+Conf: cfg,
ContentFs: contentFs,
Logger: logger,
Exec: ex,
@@ -157,6 +145,9 @@ func (c *ContentSpec) SanitizeAnchorName(s string) string {
}

func (c *ContentSpec) ResolveMarkup(in string) string {
+if c == nil {
+panic("nil ContentSpec")
+}
in = strings.ToLower(in)
switch in {
case "md", "markdown", "mdown":
@@ -194,17 +185,17 @@ func (c *ContentSpec) TruncateWordsByRune(in []string) (string, bool) {

count := 0
for index, word := range words {
-if count >= c.summaryLength {
+if count >= c.Cfg.SummaryLength() {
return strings.Join(words[:index], " "), true
}
runeCount := utf8.RuneCountInString(word)
if len(word) == runeCount {
count++
-} else if count+runeCount < c.summaryLength {
+} else if count+runeCount < c.Cfg.SummaryLength() {
count += runeCount
} else {
for ri := range word {
-if count >= c.summaryLength {
+if count >= c.Cfg.SummaryLength() {
truncatedWords := append(words[:index], word[:ri])
return strings.Join(truncatedWords, " "), true
}
@@ -229,7 +220,7 @@ func (c *ContentSpec) TruncateWordsToWholeSentence(s string) (string, bool) {
wordCount++
lastWordIndex = i

-if wordCount >= c.summaryLength {
+if wordCount >= c.Cfg.SummaryLength() {
break
}

@@ -283,19 +274,19 @@ func isEndOfSentence(r rune) bool {
func (c *ContentSpec) truncateWordsToWholeSentenceOld(content string) (string, bool) {
words := strings.Fields(content)

-if c.summaryLength >= len(words) {
+if c.Cfg.SummaryLength() >= len(words) {
return strings.Join(words, " "), false
}

-for counter, word := range words[c.summaryLength:] {
+for counter, word := range words[c.Cfg.SummaryLength():] {
if strings.HasSuffix(word, ".") ||
strings.HasSuffix(word, "?") ||
strings.HasSuffix(word, ".\"") ||
strings.HasSuffix(word, "!") {
-upper := c.summaryLength + counter + 1
+upper := c.Cfg.SummaryLength() + counter + 1
return strings.Join(words[:upper], " "), (upper < len(words))
}
}

-return strings.Join(words[:c.summaryLength], " "), true
+return strings.Join(words[:c.Cfg.SummaryLength()], " "), true
}
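
With summaryLength, buildFuture, buildExpired and buildDrafts gone from ContentSpec, the summary length is now read on demand via Cfg.SummaryLength(). A sketch in the style of the test changes below, assuming the package's newTestContentSpec helper; the test name and input text are illustrative:

package helpers_test

import (
	"testing"

	"github.com/gohugoio/hugo/config"
)

// Sketch only: exercises the config-backed summary length.
func TestSummaryLengthFromConfigSketch(t *testing.T) {
	cfg := config.New()
	cfg.Set("summaryLength", 5)
	c := newTestContentSpec(cfg) // package test helper, see the tests below

	out, truncated := c.TruncateWordsToWholeSentence("One two three four five six. Seven eight nine ten eleven twelve.")
	if !truncated {
		t.Fatal("expected the text to be truncated to the first sentence")
	}
	if want := "One two three four five six."; out != want {
		t.Fatalf("got %q, want %q", out, want)
	}
}
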
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2023 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-package helpers
+package helpers_test

import (
"bytes"
@@ -19,12 +19,9 @@ import (
"strings"
"testing"

-"github.com/spf13/afero"
-
-"github.com/gohugoio/hugo/common/loggers"
-"github.com/gohugoio/hugo/config"

qt "github.com/frankban/quicktest"
+"github.com/gohugoio/hugo/config"
+"github.com/gohugoio/hugo/helpers"
)

const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
@@ -43,7 +40,7 @@ func TestTrimShortHTML(t *testing.T) {
{[]byte("<p>Hello</p>\n<ul>\n<li>list1</li>\n<li>list2</li>\n</ul>"), []byte("<p>Hello</p>\n<ul>\n<li>list1</li>\n<li>list2</li>\n</ul>")},
}

-c := newTestContentSpec()
+c := newTestContentSpec(nil)
for i, test := range tests {
output := c.TrimShortHTML(test.input)
if !bytes.Equal(test.output, output) {
@@ -52,55 +49,23 @@ func TestTrimShortHTML(t *testing.T) {
}
}

-func TestStripEmptyNav(t *testing.T) {
-c := qt.New(t)
-cleaned := stripEmptyNav([]byte("do<nav>\n</nav>\n\nbedobedo"))
-c.Assert(cleaned, qt.DeepEquals, []byte("dobedobedo"))
-}
-
func TestBytesToHTML(t *testing.T) {
c := qt.New(t)
-c.Assert(BytesToHTML([]byte("dobedobedo")), qt.Equals, template.HTML("dobedobedo"))
+c.Assert(helpers.BytesToHTML([]byte("dobedobedo")), qt.Equals, template.HTML("dobedobedo"))
-}
-
-func TestNewContentSpec(t *testing.T) {
-cfg := config.NewWithTestDefaults()
-c := qt.New(t)
-
-cfg.Set("summaryLength", 32)
-cfg.Set("buildFuture", true)
-cfg.Set("buildExpired", true)
-cfg.Set("buildDrafts", true)
-
-spec, err := NewContentSpec(cfg, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
-
-c.Assert(err, qt.IsNil)
-c.Assert(spec.summaryLength, qt.Equals, 32)
-c.Assert(spec.BuildFuture, qt.Equals, true)
-c.Assert(spec.BuildExpired, qt.Equals, true)
-c.Assert(spec.BuildDrafts, qt.Equals, true)
}

var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20)

func BenchmarkTestTruncateWordsToWholeSentence(b *testing.B) {
-c := newTestContentSpec()
+c := newTestContentSpec(nil)
b.ResetTimer()
for i := 0; i < b.N; i++ {
c.TruncateWordsToWholeSentence(benchmarkTruncateString)
}
}

-func BenchmarkTestTruncateWordsToWholeSentenceOld(b *testing.B) {
-c := newTestContentSpec()
-b.ResetTimer()
-for i := 0; i < b.N; i++ {
-c.truncateWordsToWholeSentenceOld(benchmarkTruncateString)
-}
-}
-
func TestTruncateWordsToWholeSentence(t *testing.T) {
-c := newTestContentSpec()
type test struct {
input, expected string
max int
@@ -118,7 +83,9 @@ func TestTruncateWordsToWholeSentence(t *testing.T) {
{"This... is a more difficult test?", "This... is a more difficult test?", 1, false},
}
for i, d := range data {
-c.summaryLength = d.max
+cfg := config.New()
+cfg.Set("summaryLength", d.max)
+c := newTestContentSpec(cfg)
output, truncated := c.TruncateWordsToWholeSentence(d.input)
if d.expected != output {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
@@ -131,7 +98,7 @@ func TestTruncateWordsToWholeSentence(t *testing.T) {
}

func TestTruncateWordsByRune(t *testing.T) {
-c := newTestContentSpec()
type test struct {
input, expected string
max int
@@ -153,7 +120,9 @@ func TestTruncateWordsByRune(t *testing.T) {
{" \nThis is not a sentence\n ", "This is not", 3, true},
}
for i, d := range data {
-c.summaryLength = d.max
+cfg := config.New()
+cfg.Set("summaryLength", d.max)
+c := newTestContentSpec(cfg)
output, truncated := c.TruncateWordsByRune(strings.Fields(d.input))
if d.expected != output {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
@@ -168,7 +137,7 @@ func TestTruncateWordsByRune(t *testing.T) {
func TestExtractTOCNormalContent(t *testing.T) {
content := []byte("<nav>\n<ul>\nTOC<li><a href=\"#")

-actualTocLessContent, actualToc := ExtractTOC(content)
+actualTocLessContent, actualToc := helpers.ExtractTOC(content)
expectedTocLess := []byte("TOC<li><a href=\"#")
expectedToc := []byte("<nav id=\"TableOfContents\">\n<ul>\n")

@@ -184,7 +153,7 @@ func TestExtractTOCNormalContent(t *testing.T) {
|
||||||
func TestExtractTOCGreaterThanSeventy(t *testing.T) {
|
func TestExtractTOCGreaterThanSeventy(t *testing.T) {
|
||||||
content := []byte("<nav>\n<ul>\nTOC This is a very long content which will definitely be greater than seventy, I promise you that.<li><a href=\"#")
|
content := []byte("<nav>\n<ul>\nTOC This is a very long content which will definitely be greater than seventy, I promise you that.<li><a href=\"#")
|
||||||
|
|
||||||
actualTocLessContent, actualToc := ExtractTOC(content)
|
actualTocLessContent, actualToc := helpers.ExtractTOC(content)
|
||||||
// Because the start of Toc is greater than 70+startpoint of <li> content and empty TOC will be returned
|
// Because the start of Toc is greater than 70+startpoint of <li> content and empty TOC will be returned
|
||||||
expectedToc := []byte("")
|
expectedToc := []byte("")
|
||||||
|
|
||||||
|
@ -200,7 +169,7 @@ func TestExtractTOCGreaterThanSeventy(t *testing.T) {
|
||||||
func TestExtractNoTOC(t *testing.T) {
|
func TestExtractNoTOC(t *testing.T) {
|
||||||
content := []byte("TOC")
|
content := []byte("TOC")
|
||||||
|
|
||||||
actualTocLessContent, actualToc := ExtractTOC(content)
|
actualTocLessContent, actualToc := helpers.ExtractTOC(content)
|
||||||
expectedToc := []byte("")
|
expectedToc := []byte("")
|
||||||
|
|
||||||
if !bytes.Equal(actualTocLessContent, content) {
|
if !bytes.Equal(actualTocLessContent, content) {
|
||||||
|
@ -225,7 +194,7 @@ func TestTotalWords(t *testing.T) {
|
||||||
{"One, Two, Three", 3},
|
{"One, Two, Three", 3},
|
||||||
{totalWordsBenchmarkString, 400},
|
{totalWordsBenchmarkString, 400},
|
||||||
} {
|
} {
|
||||||
actualWordCount := TotalWords(this.s)
|
actualWordCount := helpers.TotalWords(this.s)
|
||||||
|
|
||||||
if actualWordCount != this.words {
|
if actualWordCount != this.words {
|
||||||
t.Errorf("[%d] Actual word count (%d) for test string (%s) did not match %d", i, actualWordCount, this.s, this.words)
|
t.Errorf("[%d] Actual word count (%d) for test string (%s) did not match %d", i, actualWordCount, this.s, this.words)
|
||||||
|
@ -236,7 +205,7 @@ func TestTotalWords(t *testing.T) {
|
||||||
func BenchmarkTotalWords(b *testing.B) {
|
func BenchmarkTotalWords(b *testing.B) {
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
wordCount := TotalWords(totalWordsBenchmarkString)
|
wordCount := helpers.TotalWords(totalWordsBenchmarkString)
|
||||||
if wordCount != 400 {
|
if wordCount != 400 {
|
||||||
b.Fatal("Wordcount error")
|
b.Fatal("Wordcount error")
|
||||||
}
|
}
|
||||||
|
|
|
@ -43,20 +43,6 @@ import (
|
||||||
// FilePathSeparator as defined by os.Separator.
|
// FilePathSeparator as defined by os.Separator.
|
||||||
const FilePathSeparator = string(filepath.Separator)
|
const FilePathSeparator = string(filepath.Separator)
|
||||||
|
|
||||||
// FindAvailablePort returns an available and valid TCP port.
|
|
||||||
func FindAvailablePort() (*net.TCPAddr, error) {
|
|
||||||
l, err := net.Listen("tcp", ":0")
|
|
||||||
if err == nil {
|
|
||||||
defer l.Close()
|
|
||||||
addr := l.Addr()
|
|
||||||
if a, ok := addr.(*net.TCPAddr); ok {
|
|
||||||
return a, nil
|
|
||||||
}
|
|
||||||
return nil, fmt.Errorf("unable to obtain a valid tcp port: %v", addr)
|
|
||||||
}
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// TCPListen starts listening on a valid TCP port.
|
// TCPListen starts listening on a valid TCP port.
|
||||||
func TCPListen() (net.Listener, *net.TCPAddr, error) {
|
func TCPListen() (net.Listener, *net.TCPAddr, error) {
|
||||||
l, err := net.Listen("tcp", ":0")
|
l, err := net.Listen("tcp", ":0")
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -11,7 +11,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package helpers
|
package helpers_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
@ -21,17 +21,14 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/loggers"
|
"github.com/gohugoio/hugo/common/loggers"
|
||||||
"github.com/gohugoio/hugo/config"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
qt "github.com/frankban/quicktest"
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestResolveMarkup(t *testing.T) {
|
func TestResolveMarkup(t *testing.T) {
|
||||||
c := qt.New(t)
|
spec := newTestContentSpec(nil)
|
||||||
cfg := config.NewWithTestDefaults()
|
|
||||||
spec, err := NewContentSpec(cfg, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
|
|
||||||
for i, this := range []struct {
|
for i, this := range []struct {
|
||||||
in string
|
in string
|
||||||
|
@ -61,7 +58,7 @@ func TestResolveMarkup(t *testing.T) {
|
||||||
func TestDistinctLoggerDoesNotLockOnWarningPanic(t *testing.T) {
|
func TestDistinctLoggerDoesNotLockOnWarningPanic(t *testing.T) {
|
||||||
// Testing to make sure logger mutex doesn't lock if warnings cause panics.
|
// Testing to make sure logger mutex doesn't lock if warnings cause panics.
|
||||||
// func Warnf() of DistinctLogger is defined in general.go
|
// func Warnf() of DistinctLogger is defined in general.go
|
||||||
l := NewDistinctLogger(loggers.NewWarningLogger())
|
l := helpers.NewDistinctLogger(loggers.NewWarningLogger())
|
||||||
|
|
||||||
// Set PanicOnWarning to true to reproduce issue 9380
|
// Set PanicOnWarning to true to reproduce issue 9380
|
||||||
// Ensure global variable loggers.PanicOnWarning is reset to old value after test
|
// Ensure global variable loggers.PanicOnWarning is reset to old value after test
|
||||||
|
@ -123,7 +120,7 @@ func TestFirstUpper(t *testing.T) {
|
||||||
{"", ""},
|
{"", ""},
|
||||||
{"å", "Å"},
|
{"å", "Å"},
|
||||||
} {
|
} {
|
||||||
result := FirstUpper(this.in)
|
result := helpers.FirstUpper(this.in)
|
||||||
if result != this.expect {
|
if result != this.expect {
|
||||||
t.Errorf("[%d] got %s but expected %s", i, result, this.expect)
|
t.Errorf("[%d] got %s but expected %s", i, result, this.expect)
|
||||||
}
|
}
|
||||||
|
@ -143,7 +140,7 @@ func TestHasStringsPrefix(t *testing.T) {
|
||||||
{[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, true},
|
{[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, true},
|
||||||
{[]string{"abra", "ca"}, []string{"abra", "ca", "dabra"}, false},
|
{[]string{"abra", "ca"}, []string{"abra", "ca", "dabra"}, false},
|
||||||
} {
|
} {
|
||||||
result := HasStringsPrefix(this.s, this.prefix)
|
result := helpers.HasStringsPrefix(this.s, this.prefix)
|
||||||
if result != this.expect {
|
if result != this.expect {
|
||||||
t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
|
t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
|
||||||
}
|
}
|
||||||
|
@ -162,7 +159,7 @@ func TestHasStringsSuffix(t *testing.T) {
|
||||||
{[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, false},
|
{[]string{"abra", "ca", "dabra"}, []string{"abra", "ca"}, false},
|
||||||
{[]string{"abra", "ca", "dabra"}, []string{"ca", "dabra"}, true},
|
{[]string{"abra", "ca", "dabra"}, []string{"ca", "dabra"}, true},
|
||||||
} {
|
} {
|
||||||
result := HasStringsSuffix(this.s, this.suffix)
|
result := helpers.HasStringsSuffix(this.s, this.suffix)
|
||||||
if result != this.expect {
|
if result != this.expect {
|
||||||
t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
|
t.Fatalf("[%d] got %t but expected %t", i, result, this.expect)
|
||||||
}
|
}
|
||||||
|
@ -239,7 +236,7 @@ func TestSliceToLower(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
res := SliceToLower(test.value)
|
res := helpers.SliceToLower(test.value)
|
||||||
for i, val := range res {
|
for i, val := range res {
|
||||||
if val != test.expected[i] {
|
if val != test.expected[i] {
|
||||||
t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
|
t.Errorf("Case mismatch. Expected %s, got %s", test.expected[i], res[i])
|
||||||
|
@ -251,34 +248,34 @@ func TestSliceToLower(t *testing.T) {
|
||||||
func TestReaderContains(t *testing.T) {
|
func TestReaderContains(t *testing.T) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
|
for i, this := range append(containsBenchTestData, containsAdditionalTestData...) {
|
||||||
result := ReaderContains(strings.NewReader(this.v1), this.v2)
|
result := helpers.ReaderContains(strings.NewReader(this.v1), this.v2)
|
||||||
if result != this.expect {
|
if result != this.expect {
|
||||||
t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
|
t.Errorf("[%d] got %t but expected %t", i, result, this.expect)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
c.Assert(ReaderContains(nil, []byte("a")), qt.Equals, false)
|
c.Assert(helpers.ReaderContains(nil, []byte("a")), qt.Equals, false)
|
||||||
c.Assert(ReaderContains(nil, nil), qt.Equals, false)
|
c.Assert(helpers.ReaderContains(nil, nil), qt.Equals, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestGetTitleFunc(t *testing.T) {
|
func TestGetTitleFunc(t *testing.T) {
|
||||||
title := "somewhere over the rainbow"
|
title := "somewhere over the rainbow"
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
|
|
||||||
c.Assert(GetTitleFunc("go")(title), qt.Equals, "Somewhere Over The Rainbow")
|
c.Assert(helpers.GetTitleFunc("go")(title), qt.Equals, "Somewhere Over The Rainbow")
|
||||||
c.Assert(GetTitleFunc("chicago")(title), qt.Equals, "Somewhere over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("chicago")(title), qt.Equals, "Somewhere over the Rainbow")
|
||||||
c.Assert(GetTitleFunc("Chicago")(title), qt.Equals, "Somewhere over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("Chicago")(title), qt.Equals, "Somewhere over the Rainbow")
|
||||||
c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
|
||||||
c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
|
||||||
c.Assert(GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow")
|
||||||
c.Assert(GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow")
|
c.Assert(helpers.GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow")
|
||||||
}
|
}
|
||||||
|
|
||||||
func BenchmarkReaderContains(b *testing.B) {
|
func BenchmarkReaderContains(b *testing.B) {
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
for i, this := range containsBenchTestData {
|
for i, this := range containsBenchTestData {
|
||||||
result := ReaderContains(strings.NewReader(this.v1), this.v2)
|
result := helpers.ReaderContains(strings.NewReader(this.v1), this.v2)
|
||||||
if result != this.expect {
|
if result != this.expect {
|
||||||
b.Errorf("[%d] got %t but expected %t", i, result, this.expect)
|
b.Errorf("[%d] got %t but expected %t", i, result, this.expect)
|
||||||
}
|
}
|
||||||
|
@ -288,7 +285,7 @@ func BenchmarkReaderContains(b *testing.B) {
|
||||||
|
|
||||||
func TestUniqueStrings(t *testing.T) {
|
func TestUniqueStrings(t *testing.T) {
|
||||||
in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
|
in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
|
||||||
output := UniqueStrings(in)
|
output := helpers.UniqueStrings(in)
|
||||||
expected := []string{"a", "b", "c", "", "d"}
|
expected := []string{"a", "b", "c", "", "d"}
|
||||||
if !reflect.DeepEqual(output, expected) {
|
if !reflect.DeepEqual(output, expected) {
|
||||||
t.Errorf("Expected %#v, got %#v\n", expected, output)
|
t.Errorf("Expected %#v, got %#v\n", expected, output)
|
||||||
|
@ -297,7 +294,7 @@ func TestUniqueStrings(t *testing.T) {
|
||||||
|
|
||||||
func TestUniqueStringsReuse(t *testing.T) {
|
func TestUniqueStringsReuse(t *testing.T) {
|
||||||
in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
|
in := []string{"a", "b", "a", "b", "c", "", "a", "", "d"}
|
||||||
output := UniqueStringsReuse(in)
|
output := helpers.UniqueStringsReuse(in)
|
||||||
expected := []string{"a", "b", "c", "", "d"}
|
expected := []string{"a", "b", "c", "", "d"}
|
||||||
if !reflect.DeepEqual(output, expected) {
|
if !reflect.DeepEqual(output, expected) {
|
||||||
t.Errorf("Expected %#v, got %#v\n", expected, output)
|
t.Errorf("Expected %#v, got %#v\n", expected, output)
|
||||||
|
@ -307,18 +304,10 @@ func TestUniqueStringsReuse(t *testing.T) {
|
||||||
func TestUniqueStringsSorted(t *testing.T) {
|
func TestUniqueStringsSorted(t *testing.T) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
in := []string{"a", "a", "b", "c", "b", "", "a", "", "d"}
|
in := []string{"a", "a", "b", "c", "b", "", "a", "", "d"}
|
||||||
output := UniqueStringsSorted(in)
|
output := helpers.UniqueStringsSorted(in)
|
||||||
expected := []string{"", "a", "b", "c", "d"}
|
expected := []string{"", "a", "b", "c", "d"}
|
||||||
c.Assert(output, qt.DeepEquals, expected)
|
c.Assert(output, qt.DeepEquals, expected)
|
||||||
c.Assert(UniqueStringsSorted(nil), qt.IsNil)
|
c.Assert(helpers.UniqueStringsSorted(nil), qt.IsNil)
|
||||||
}
|
|
||||||
|
|
||||||
func TestFindAvailablePort(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
addr, err := FindAvailablePort()
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(addr, qt.Not(qt.IsNil))
|
|
||||||
c.Assert(addr.Port > 0, qt.Equals, true)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestFastMD5FromFile(t *testing.T) {
|
func TestFastMD5FromFile(t *testing.T) {
|
||||||
|
@ -357,23 +346,23 @@ func TestFastMD5FromFile(t *testing.T) {
|
||||||
defer bf1.Close()
|
defer bf1.Close()
|
||||||
defer bf2.Close()
|
defer bf2.Close()
|
||||||
|
|
||||||
m1, err := MD5FromFileFast(sf1)
|
m1, err := helpers.MD5FromFileFast(sf1)
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96")
|
c.Assert(m1, qt.Equals, "e9c8989b64b71a88b4efb66ad05eea96")
|
||||||
|
|
||||||
m2, err := MD5FromFileFast(sf2)
|
m2, err := helpers.MD5FromFileFast(sf2)
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(m2, qt.Not(qt.Equals), m1)
|
c.Assert(m2, qt.Not(qt.Equals), m1)
|
||||||
|
|
||||||
m3, err := MD5FromFileFast(bf1)
|
m3, err := helpers.MD5FromFileFast(bf1)
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(m3, qt.Not(qt.Equals), m2)
|
c.Assert(m3, qt.Not(qt.Equals), m2)
|
||||||
|
|
||||||
m4, err := MD5FromFileFast(bf2)
|
m4, err := helpers.MD5FromFileFast(bf2)
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(m4, qt.Not(qt.Equals), m3)
|
c.Assert(m4, qt.Not(qt.Equals), m3)
|
||||||
|
|
||||||
m5, err := MD5FromReader(bf2)
|
m5, err := helpers.MD5FromReader(bf2)
|
||||||
c.Assert(err, qt.IsNil)
|
c.Assert(err, qt.IsNil)
|
||||||
c.Assert(m5, qt.Not(qt.Equals), m4)
|
c.Assert(m5, qt.Not(qt.Equals), m4)
|
||||||
}
|
}
|
||||||
|
@ -394,11 +383,11 @@ func BenchmarkMD5FromFileFast(b *testing.B) {
|
||||||
}
|
}
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
if full {
|
if full {
|
||||||
if _, err := MD5FromReader(f); err != nil {
|
if _, err := helpers.MD5FromReader(f); err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
if _, err := MD5FromFileFast(f); err != nil {
|
if _, err := helpers.MD5FromFileFast(f); err != nil {
|
||||||
b.Fatal(err)
|
b.Fatal(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -413,7 +402,7 @@ func BenchmarkUniqueStrings(b *testing.B) {
|
||||||
|
|
||||||
b.Run("Safe", func(b *testing.B) {
|
b.Run("Safe", func(b *testing.B) {
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
result := UniqueStrings(input)
|
result := helpers.UniqueStrings(input)
|
||||||
if len(result) != 6 {
|
if len(result) != 6 {
|
||||||
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
||||||
}
|
}
|
||||||
|
@ -432,7 +421,7 @@ func BenchmarkUniqueStrings(b *testing.B) {
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
inputc := inputs[i]
|
inputc := inputs[i]
|
||||||
|
|
||||||
result := UniqueStringsReuse(inputc)
|
result := helpers.UniqueStringsReuse(inputc)
|
||||||
if len(result) != 6 {
|
if len(result) != 6 {
|
||||||
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
||||||
}
|
}
|
||||||
|
@ -451,7 +440,7 @@ func BenchmarkUniqueStrings(b *testing.B) {
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
inputc := inputs[i]
|
inputc := inputs[i]
|
||||||
|
|
||||||
result := UniqueStringsSorted(inputc)
|
result := helpers.UniqueStringsSorted(inputc)
|
||||||
if len(result) != 6 {
|
if len(result) != 6 {
|
||||||
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
b.Fatal(fmt.Sprintf("invalid count: %d", len(result)))
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -28,8 +28,6 @@ import (
|
||||||
"github.com/gohugoio/hugo/common/herrors"
|
"github.com/gohugoio/hugo/common/herrors"
|
||||||
"github.com/gohugoio/hugo/common/text"
|
"github.com/gohugoio/hugo/common/text"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/config"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
"github.com/gohugoio/hugo/hugofs"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/common/hugio"
|
"github.com/gohugoio/hugo/common/hugio"
|
||||||
|
@ -54,7 +52,7 @@ func (p *PathSpec) MakePathsSanitized(paths []string) {
|
||||||
|
|
||||||
// MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
|
// MakePathSanitized creates a Unicode-sanitized string, with the spaces replaced
|
||||||
func (p *PathSpec) MakePathSanitized(s string) string {
|
func (p *PathSpec) MakePathSanitized(s string) string {
|
||||||
if p.DisablePathToLower {
|
if p.Cfg.DisablePathToLower() {
|
||||||
return p.MakePath(s)
|
return p.MakePath(s)
|
||||||
}
|
}
|
||||||
return strings.ToLower(p.MakePath(s))
|
return strings.ToLower(p.MakePath(s))
|
||||||
|
@ -91,7 +89,7 @@ func ishex(c rune) bool {
|
||||||
// Hyphens in the original input are maintained.
|
// Hyphens in the original input are maintained.
|
||||||
// Spaces will be replaced with a single hyphen, and sequential replacement hyphens will be reduced to one.
|
// Spaces will be replaced with a single hyphen, and sequential replacement hyphens will be reduced to one.
|
||||||
func (p *PathSpec) UnicodeSanitize(s string) string {
|
func (p *PathSpec) UnicodeSanitize(s string) string {
|
||||||
if p.RemovePathAccents {
|
if p.Cfg.RemovePathAccents() {
|
||||||
s = text.RemoveAccentsString(s)
|
s = text.RemoveAccentsString(s)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -128,7 +126,7 @@ func (p *PathSpec) UnicodeSanitize(s string) string {
|
||||||
return string(target)
|
return string(target)
|
||||||
}
|
}
|
||||||
|
|
||||||
func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
|
func MakePathRelative(inPath string, possibleDirectories ...string) (string, error) {
|
||||||
for _, currentPath := range possibleDirectories {
|
for _, currentPath := range possibleDirectories {
|
||||||
if strings.HasPrefix(inPath, currentPath) {
|
if strings.HasPrefix(inPath, currentPath) {
|
||||||
return strings.TrimPrefix(inPath, currentPath), nil
|
return strings.TrimPrefix(inPath, currentPath), nil
|
||||||
|
@@ -394,8 +392,8 @@ func OpenFileForWriting(fs afero.Fs, filename string) (afero.File, error) {

 // GetCacheDir returns a cache dir from the given filesystem and config.
 // The dir will be created if it does not exist.
-func GetCacheDir(fs afero.Fs, cfg config.Provider) (string, error) {
-	cacheDir := getCacheDir(cfg)
+func GetCacheDir(fs afero.Fs, cacheDir string) (string, error) {
+	cacheDir = cacheDirDefault(cacheDir)
 	if cacheDir != "" {
 		exists, err := DirExists(cacheDir, fs)
 		if err != nil {
@@ -414,9 +412,8 @@ func GetCacheDir(fs afero.Fs, cfg config.Provider) (string, error) {
 	return GetTempDir("hugo_cache", fs), nil
 }

-func getCacheDir(cfg config.Provider) string {
+func cacheDirDefault(cacheDir string) string {
 	// Always use the cacheDir config if set.
-	cacheDir := cfg.GetString("cacheDir")
 	if len(cacheDir) > 1 {
 		return addTrailingFileSeparator(cacheDir)
 	}
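With the new signature, GetCacheDir no longer reads the configuration itself: the caller resolves the cacheDir setting and passes it in, and an empty string still falls back to a hugo_cache temp dir. A minimal sketch of a caller; the memory filesystem and the empty value are assumptions for illustration.

func cacheDirSketch() (string, error) {
	fs := afero.NewMemMapFs()
	// Pass the already-resolved cacheDir value; "" falls back to a hugo_cache temp dir.
	return helpers.GetCacheDir(fs, "")
}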
@ -1,4 +1,4 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -11,7 +11,7 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package helpers
|
package helpers_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
@ -24,16 +24,12 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/langs"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
qt "github.com/frankban/quicktest"
|
||||||
|
"github.com/gohugoio/hugo/helpers"
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
"github.com/spf13/afero"
|
"github.com/spf13/afero"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestMakePath(t *testing.T) {
|
func TestMakePath(t *testing.T) {
|
||||||
c := qt.New(t)
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
expected string
|
expected string
|
||||||
|
@ -60,13 +56,7 @@ func TestMakePath(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
v := newTestCfg()
|
p := newTestPathSpec("removePathAccents", test.removeAccents)
|
||||||
v.Set("removePathAccents", test.removeAccents)
|
|
||||||
|
|
||||||
l := langs.NewDefaultLanguage(v)
|
|
||||||
p, err := NewPathSpec(hugofs.NewMem(v), l, nil)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
|
|
||||||
output := p.MakePath(test.input)
|
output := p.MakePath(test.input)
|
||||||
if output != test.expected {
|
if output != test.expected {
|
||||||
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
||||||
|
@ -75,9 +65,7 @@ func TestMakePath(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMakePathSanitized(t *testing.T) {
|
func TestMakePathSanitized(t *testing.T) {
|
||||||
v := newTestCfg()
|
p := newTestPathSpec()
|
||||||
|
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), v, nil)
|
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
|
@ -100,12 +88,7 @@ func TestMakePathSanitized(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
|
func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
|
||||||
v := newTestCfg()
|
p := newTestPathSpec("disablePathToLower", true)
|
||||||
|
|
||||||
v.Set("disablePathToLower", true)
|
|
||||||
|
|
||||||
l := langs.NewDefaultLanguage(v)
|
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
|
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
|
@ -138,12 +121,12 @@ func TestMakePathRelative(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
output, _ := makePathRelative(d.inPath, d.path1, d.path2)
|
output, _ := helpers.MakePathRelative(d.inPath, d.path1, d.path2)
|
||||||
if d.output != output {
|
if d.output != output {
|
||||||
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
|
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_, error := makePathRelative("a/b/c.ss", "/a/c", "/d/c", "/e/f")
|
_, error := helpers.MakePathRelative("a/b/c.ss", "/a/c", "/d/c", "/e/f")
|
||||||
|
|
||||||
if error == nil {
|
if error == nil {
|
||||||
t.Errorf("Test failed, expected error")
|
t.Errorf("Test failed, expected error")
|
||||||
|
@ -181,7 +164,7 @@ func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
|
||||||
{"/404.html", "./"},
|
{"/404.html", "./"},
|
||||||
}
|
}
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
output := GetDottedRelativePath(d.input)
|
output := helpers.GetDottedRelativePath(d.input)
|
||||||
if d.expected != output {
|
if d.expected != output {
|
||||||
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
|
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
|
||||||
}
|
}
|
||||||
|
@ -198,7 +181,7 @@ func TestMakeTitle(t *testing.T) {
|
||||||
{"make_title", "make_title"},
|
{"make_title", "make_title"},
|
||||||
}
|
}
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
output := MakeTitle(d.input)
|
output := helpers.MakeTitle(d.input)
|
||||||
if d.expected != output {
|
if d.expected != output {
|
||||||
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
|
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
|
||||||
}
|
}
|
||||||
|
@ -219,7 +202,7 @@ func TestDirExists(t *testing.T) {
|
||||||
{"./..", true},
|
{"./..", true},
|
||||||
{"./../", true},
|
{"./../", true},
|
||||||
{os.TempDir(), true},
|
{os.TempDir(), true},
|
||||||
{os.TempDir() + FilePathSeparator, true},
|
{os.TempDir() + helpers.FilePathSeparator, true},
|
||||||
{"/", true},
|
{"/", true},
|
||||||
{"/some-really-random-directory-name", false},
|
{"/some-really-random-directory-name", false},
|
||||||
{"/some/really/random/directory/name", false},
|
{"/some/really/random/directory/name", false},
|
||||||
|
@ -228,7 +211,7 @@ func TestDirExists(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
exists, _ := DirExists(filepath.FromSlash(d.input), new(afero.OsFs))
|
exists, _ := helpers.DirExists(filepath.FromSlash(d.input), new(afero.OsFs))
|
||||||
if d.expected != exists {
|
if d.expected != exists {
|
||||||
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
|
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
|
||||||
}
|
}
|
||||||
|
@ -249,7 +232,7 @@ func TestIsDir(t *testing.T) {
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
|
|
||||||
exists, _ := IsDir(d.input, new(afero.OsFs))
|
exists, _ := helpers.IsDir(d.input, new(afero.OsFs))
|
||||||
if d.expected != exists {
|
if d.expected != exists {
|
||||||
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
|
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
|
||||||
}
|
}
|
||||||
|
@ -310,7 +293,7 @@ func TestExists(t *testing.T) {
|
||||||
{nonExistentDir, false, nil},
|
{nonExistentDir, false, nil},
|
||||||
}
|
}
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
exists, err := Exists(d.input, new(afero.OsFs))
|
exists, err := helpers.Exists(d.input, new(afero.OsFs))
|
||||||
if d.expectedResult != exists {
|
if d.expectedResult != exists {
|
||||||
t.Errorf("Test %d failed. Expected result %t got %t", i, d.expectedResult, exists)
|
t.Errorf("Test %d failed. Expected result %t got %t", i, d.expectedResult, exists)
|
||||||
}
|
}
|
||||||
|
@ -341,7 +324,7 @@ func TestAbsPathify(t *testing.T) {
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
// todo see comment in AbsPathify
|
// todo see comment in AbsPathify
|
||||||
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
|
ps := newTestPathSpec("workingDir", d.workingDir)
|
||||||
|
|
||||||
expected := ps.AbsPathify(d.inPath)
|
expected := ps.AbsPathify(d.inPath)
|
||||||
if d.expected != expected {
|
if d.expected != expected {
|
||||||
|
@ -351,7 +334,7 @@ func TestAbsPathify(t *testing.T) {
|
||||||
t.Logf("Running platform specific path tests for %s", runtime.GOOS)
|
t.Logf("Running platform specific path tests for %s", runtime.GOOS)
|
||||||
if runtime.GOOS == "windows" {
|
if runtime.GOOS == "windows" {
|
||||||
for i, d := range windowsData {
|
for i, d := range windowsData {
|
||||||
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
|
ps := newTestPathSpec("workingDir", d.workingDir)
|
||||||
|
|
||||||
expected := ps.AbsPathify(d.inPath)
|
expected := ps.AbsPathify(d.inPath)
|
||||||
if d.expected != expected {
|
if d.expected != expected {
|
||||||
|
@ -360,7 +343,7 @@ func TestAbsPathify(t *testing.T) {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
for i, d := range unixData {
|
for i, d := range unixData {
|
||||||
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
|
ps := newTestPathSpec("workingDir", d.workingDir)
|
||||||
|
|
||||||
expected := ps.AbsPathify(d.inPath)
|
expected := ps.AbsPathify(d.inPath)
|
||||||
if d.expected != expected {
|
if d.expected != expected {
|
||||||
|
@ -383,7 +366,7 @@ func TestExtractAndGroupRootPaths(t *testing.T) {
|
||||||
inCopy := make([]string, len(in))
|
inCopy := make([]string, len(in))
|
||||||
copy(inCopy, in)
|
copy(inCopy, in)
|
||||||
|
|
||||||
result := ExtractAndGroupRootPaths(in)
|
result := helpers.ExtractAndGroupRootPaths(in)
|
||||||
|
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
c.Assert(fmt.Sprint(result), qt.Equals, filepath.FromSlash("[/a/b/{c,e} /c/d/e]"))
|
c.Assert(fmt.Sprint(result), qt.Equals, filepath.FromSlash("[/a/b/{c,e} /c/d/e]"))
|
||||||
|
@ -405,7 +388,7 @@ func TestExtractRootPaths(t *testing.T) {
|
||||||
}}
|
}}
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
output := ExtractRootPaths(test.input)
|
output := helpers.ExtractRootPaths(test.input)
|
||||||
if !reflect.DeepEqual(output, test.expected) {
|
if !reflect.DeepEqual(output, test.expected) {
|
||||||
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
||||||
}
|
}
|
||||||
|
@ -426,7 +409,7 @@ func TestFindCWD(t *testing.T) {
|
||||||
// I really don't know a better way to test this function. - SPF 2014.11.04
|
// I really don't know a better way to test this function. - SPF 2014.11.04
|
||||||
}
|
}
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
dir, err := FindCWD()
|
dir, err := helpers.FindCWD()
|
||||||
if d.expectedDir != dir {
|
if d.expectedDir != dir {
|
||||||
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedDir, dir)
|
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedDir, dir)
|
||||||
}
|
}
|
||||||
|
@ -459,7 +442,7 @@ func TestSafeWriteToDisk(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
e := SafeWriteToDisk(d.filename, reader, new(afero.OsFs))
|
e := helpers.SafeWriteToDisk(d.filename, reader, new(afero.OsFs))
|
||||||
if d.expectedErr != nil {
|
if d.expectedErr != nil {
|
||||||
if d.expectedErr.Error() != e.Error() {
|
if d.expectedErr.Error() != e.Error() {
|
||||||
t.Errorf("Test %d failed. Expected error %q but got %q", i, d.expectedErr.Error(), e.Error())
|
t.Errorf("Test %d failed. Expected error %q but got %q", i, d.expectedErr.Error(), e.Error())
|
||||||
|
@ -498,7 +481,7 @@ func TestWriteToDisk(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
e := WriteToDisk(d.filename, reader, new(afero.OsFs))
|
e := helpers.WriteToDisk(d.filename, reader, new(afero.OsFs))
|
||||||
if d.expectedErr != e {
|
if d.expectedErr != e {
|
||||||
t.Errorf("Test %d failed. WriteToDisk Error Expected %q but got %q", i, d.expectedErr, e)
|
t.Errorf("Test %d failed. WriteToDisk Error Expected %q but got %q", i, d.expectedErr, e)
|
||||||
}
|
}
|
||||||
|
@ -515,27 +498,27 @@ func TestWriteToDisk(t *testing.T) {
|
||||||
|
|
||||||
func TestGetTempDir(t *testing.T) {
|
func TestGetTempDir(t *testing.T) {
|
||||||
dir := os.TempDir()
|
dir := os.TempDir()
|
||||||
if FilePathSeparator != dir[len(dir)-1:] {
|
if helpers.FilePathSeparator != dir[len(dir)-1:] {
|
||||||
dir = dir + FilePathSeparator
|
dir = dir + helpers.FilePathSeparator
|
||||||
}
|
}
|
||||||
testDir := "hugoTestFolder" + FilePathSeparator
|
testDir := "hugoTestFolder" + helpers.FilePathSeparator
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
expected string
|
expected string
|
||||||
}{
|
}{
|
||||||
{"", dir},
|
{"", dir},
|
||||||
{testDir + " Foo bar ", dir + testDir + " Foo bar " + FilePathSeparator},
|
{testDir + " Foo bar ", dir + testDir + " Foo bar " + helpers.FilePathSeparator},
|
||||||
{testDir + "Foo.Bar/foo_Bar-Foo", dir + testDir + "Foo.Bar/foo_Bar-Foo" + FilePathSeparator},
|
{testDir + "Foo.Bar/foo_Bar-Foo", dir + testDir + "Foo.Bar/foo_Bar-Foo" + helpers.FilePathSeparator},
|
||||||
{testDir + "fOO,bar:foo%bAR", dir + testDir + "fOObarfoo%bAR" + FilePathSeparator},
|
{testDir + "fOO,bar:foo%bAR", dir + testDir + "fOObarfoo%bAR" + helpers.FilePathSeparator},
|
||||||
{testDir + "fOO,bar:foobAR", dir + testDir + "fOObarfoobAR" + FilePathSeparator},
|
{testDir + "fOO,bar:foobAR", dir + testDir + "fOObarfoobAR" + helpers.FilePathSeparator},
|
||||||
{testDir + "FOo/BaR.html", dir + testDir + "FOo/BaR.html" + FilePathSeparator},
|
{testDir + "FOo/BaR.html", dir + testDir + "FOo/BaR.html" + helpers.FilePathSeparator},
|
||||||
{testDir + "трям/трям", dir + testDir + "трям/трям" + FilePathSeparator},
|
{testDir + "трям/трям", dir + testDir + "трям/трям" + helpers.FilePathSeparator},
|
||||||
{testDir + "은행", dir + testDir + "은행" + FilePathSeparator},
|
{testDir + "은행", dir + testDir + "은행" + helpers.FilePathSeparator},
|
||||||
{testDir + "Банковский кассир", dir + testDir + "Банковский кассир" + FilePathSeparator},
|
{testDir + "Банковский кассир", dir + testDir + "Банковский кассир" + helpers.FilePathSeparator},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
output := GetTempDir(test.input, new(afero.MemMapFs))
|
output := helpers.GetTempDir(test.input, new(afero.MemMapFs))
|
||||||
if output != test.expected {
|
if output != test.expected {
|
||||||
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
|
||||||
}
|
}
|
||||||
|
|
|
@@ -34,17 +34,17 @@ type PathSpec struct {
 	Fs *hugofs.Fs

 	// The config provider to use
-	Cfg config.Provider
+	Cfg config.AllProvider
 }

 // NewPathSpec creates a new PathSpec from the given filesystems and language.
-func NewPathSpec(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger) (*PathSpec, error) {
+func NewPathSpec(fs *hugofs.Fs, cfg config.AllProvider, logger loggers.Logger) (*PathSpec, error) {
 	return NewPathSpecWithBaseBaseFsProvided(fs, cfg, logger, nil)
 }

 // NewPathSpecWithBaseBaseFsProvided creates a new PathSpec from the given filesystems and language.
 // If an existing BaseFs is provided, parts of that is reused.
-func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) {
+func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.AllProvider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) {
 	p, err := paths.New(fs, cfg)
 	if err != nil {
 		return nil, err
@@ -69,11 +69,6 @@ func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logge
 		ProcessingStats: NewProcessingStats(p.Lang()),
 	}

-	basePath := ps.BaseURL.Path()
-	if basePath != "" && basePath != "/" {
-		ps.BasePath = basePath
-	}
-
 	return ps, nil
 }
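After this change the PathSpec reads settings through config.AllProvider methods instead of fields copied onto the struct, as the MakePathSanitized and UnicodeSanitize hunks earlier in this diff show. A sketch of what a caller-side check now looks like; the wrapper function is illustrative only.

func sanitizedPath(p *helpers.PathSpec, s string) string {
	// DisablePathToLower and RemovePathAccents are now methods on p.Cfg.
	if p.Cfg.DisablePathToLower() {
		return p.MakePath(s)
	}
	return strings.ToLower(p.MakePath(s))
}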
@ -1,62 +0,0 @@
|
||||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package helpers
|
|
||||||
|
|
||||||
import (
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/langs"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestNewPathSpecFromConfig(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
v := newTestCfg()
|
|
||||||
l := langs.NewLanguage("no", v)
|
|
||||||
v.Set("disablePathToLower", true)
|
|
||||||
v.Set("removePathAccents", true)
|
|
||||||
v.Set("uglyURLs", true)
|
|
||||||
v.Set("canonifyURLs", true)
|
|
||||||
v.Set("paginatePath", "side")
|
|
||||||
v.Set("baseURL", "http://base.com/foo")
|
|
||||||
v.Set("themesDir", "thethemes")
|
|
||||||
v.Set("layoutDir", "thelayouts")
|
|
||||||
v.Set("workingDir", "thework")
|
|
||||||
v.Set("staticDir", "thestatic")
|
|
||||||
v.Set("theme", "thetheme")
|
|
||||||
langs.LoadLanguageSettings(v, nil)
|
|
||||||
|
|
||||||
fs := hugofs.NewMem(v)
|
|
||||||
fs.Source.MkdirAll(filepath.FromSlash("thework/thethemes/thetheme"), 0777)
|
|
||||||
|
|
||||||
p, err := NewPathSpec(fs, l, nil)
|
|
||||||
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
c.Assert(p.CanonifyURLs, qt.Equals, true)
|
|
||||||
c.Assert(p.DisablePathToLower, qt.Equals, true)
|
|
||||||
c.Assert(p.RemovePathAccents, qt.Equals, true)
|
|
||||||
c.Assert(p.UglyURLs, qt.Equals, true)
|
|
||||||
c.Assert(p.Language.Lang, qt.Equals, "no")
|
|
||||||
c.Assert(p.PaginatePath, qt.Equals, "side")
|
|
||||||
|
|
||||||
c.Assert(p.BaseURL.String(), qt.Equals, "http://base.com/foo")
|
|
||||||
c.Assert(p.BaseURLString, qt.Equals, "http://base.com/foo")
|
|
||||||
c.Assert(p.BaseURLNoPathString, qt.Equals, "http://base.com")
|
|
||||||
|
|
||||||
c.Assert(p.ThemesDir, qt.Equals, "thethemes")
|
|
||||||
c.Assert(p.WorkingDir, qt.Equals, "thework")
|
|
||||||
}
|
|
|
@@ -1,47 +1,47 @@
-package helpers
+package helpers_test

 import (
 	"github.com/gohugoio/hugo/common/loggers"
 	"github.com/gohugoio/hugo/config"
-	"github.com/spf13/afero"
+	"github.com/gohugoio/hugo/config/testconfig"
+	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/hugofs"
-	"github.com/gohugoio/hugo/langs"
-	"github.com/gohugoio/hugo/modules"
+	"github.com/spf13/afero"
 )

-func newTestPathSpec(fs *hugofs.Fs, v config.Provider) *PathSpec {
-	l := langs.NewDefaultLanguage(v)
-	ps, _ := NewPathSpec(fs, l, nil)
-	return ps
-}
-
-func newTestDefaultPathSpec(configKeyValues ...any) *PathSpec {
-	cfg := newTestCfg()
-	fs := hugofs.NewMem(cfg)
-
-	for i := 0; i < len(configKeyValues); i += 2 {
-		cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
-	}
-	return newTestPathSpec(fs, cfg)
-}
-
-func newTestCfg() config.Provider {
-	v := config.NewWithTestDefaults()
-	langs.LoadLanguageSettings(v, nil)
-	mod, err := modules.CreateProjectModule(v)
-	if err != nil {
-		panic(err)
-	}
-	v.Set("allModules", modules.Modules{mod})
-
-	return v
-}
-
-func newTestContentSpec() *ContentSpec {
-	v := config.NewWithTestDefaults()
-	spec, err := NewContentSpec(v, loggers.NewErrorLogger(), afero.NewMemMapFs(), nil)
+func newTestPathSpecFromCfgAndLang(cfg config.Provider, lang string) *helpers.PathSpec {
+	mfs := afero.NewMemMapFs()
+
+	configs := testconfig.GetTestConfigs(mfs, cfg)
+	var conf config.AllProvider
+	if lang == "" {
+		conf = configs.GetFirstLanguageConfig()
+	} else {
+		conf = configs.GetByLang(lang)
+		if conf == nil {
+			panic("no config for lang " + lang)
+		}
+	}
+	fs := hugofs.NewFrom(mfs, conf.BaseConfig())
+	ps, err := helpers.NewPathSpec(fs, conf, loggers.NewErrorLogger())
+	if err != nil {
+		panic(err)
+	}
+	return ps
+}
+
+func newTestPathSpec(configKeyValues ...any) *helpers.PathSpec {
+	cfg := config.New()
+	for i := 0; i < len(configKeyValues); i += 2 {
+		cfg.Set(configKeyValues[i].(string), configKeyValues[i+1])
+	}
+	return newTestPathSpecFromCfgAndLang(cfg, "")
+}
+
+func newTestContentSpec(cfg config.Provider) *helpers.ContentSpec {
+	fs := afero.NewMemMapFs()
+	conf := testconfig.GetTestConfig(fs, cfg)
+	spec, err := helpers.NewContentSpec(conf, loggers.NewErrorLogger(), fs, nil)
 	if err != nil {
 		panic(err)
 	}
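The new helpers above let a test spin up a fully wired PathSpec from a handful of key/value pairs, which is how the rewritten path and URL tests use them. A hypothetical example of that usage follows; the test name and expectation are assumptions.

func TestPathSpecSketch(t *testing.T) {
	// Keys mirror the ones the rewritten tests pass, e.g. "workingDir" or "removePathAccents".
	p := newTestPathSpec("removePathAccents", true, "disablePathToLower", true)
	if got := p.MakePath("Hugo Rocks"); got == "" {
		t.Fatal("expected a non-empty sanitized path")
	}
}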
@@ -71,8 +71,9 @@ func SanitizeURLKeepTrailingSlash(in string) string {

 // URLize is similar to MakePath, but with Unicode handling
 // Example:
-// uri: Vim (text editor)
-// urlize: vim-text-editor
+//
+//	uri: Vim (text editor)
+//	urlize: vim-text-editor
 func (p *PathSpec) URLize(uri string) string {
 	return p.URLEscape(p.MakePathSanitized(uri))
 }
@@ -141,16 +142,16 @@ func (p *PathSpec) AbsURL(in string, addLanguage bool) string {
 func (p *PathSpec) getBaseURLRoot(path string) string {
 	if strings.HasPrefix(path, "/") {
 		// Treat it as relative to the server root.
-		return p.BaseURLNoPathString
+		return p.Cfg.BaseURL().WithoutPath
 	} else {
 		// Treat it as relative to the baseURL.
-		return p.BaseURLString
+		return p.Cfg.BaseURL().WithPath
 	}
 }

 func (p *PathSpec) RelURL(in string, addLanguage bool) string {
 	baseURL := p.getBaseURLRoot(in)
-	canonifyURLs := p.CanonifyURLs
+	canonifyURLs := p.Cfg.CanonifyURLs()
 	if (!strings.HasPrefix(in, baseURL) && strings.HasPrefix(in, "http")) || strings.HasPrefix(in, "//") {
 		return in
 	}
@@ -217,25 +218,3 @@ func (p *PathSpec) PrependBasePath(rel string, isAbs bool) string {
 	}
 	return rel
 }
-
-// URLizeAndPrep applies misc sanitation to the given URL to get it in line
-// with the Hugo standard.
-func (p *PathSpec) URLizeAndPrep(in string) string {
-	return p.URLPrep(p.URLize(in))
-}
-
-// URLPrep applies misc sanitation to the given URL.
-func (p *PathSpec) URLPrep(in string) string {
-	if p.UglyURLs {
-		return paths.Uglify(SanitizeURL(in))
-	}
-	pretty := paths.PrettifyURL(SanitizeURL(in))
-	if path.Ext(pretty) == ".xml" {
-		return pretty
-	}
-	url, err := purell.NormalizeURLString(pretty, purell.FlagAddTrailingSlash)
-	if err != nil {
-		return pretty
-	}
-	return url
-}
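getBaseURLRoot now reads the two precomputed forms of the base URL from the resolved config. A sketch of the distinction; the example values in the comments are assumptions, not output of this commit.

func baseURLForms(p *helpers.PathSpec) (withPath, withoutPath string) {
	// For an assumed baseURL of "https://example.org/docs/":
	//   WithPath    would keep the /docs/ path component,
	//   WithoutPath would be the bare "https://example.org".
	return p.Cfg.BaseURL().WithPath, p.Cfg.BaseURL().WithoutPath
}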
@ -1,4 +1,4 @@
|
||||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
// Copyright 2023 The Hugo Authors. All rights reserved.
|
||||||
//
|
//
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
// you may not use this file except in compliance with the License.
|
// you may not use this file except in compliance with the License.
|
||||||
|
@ -11,21 +11,20 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package helpers
|
package helpers_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
qt "github.com/frankban/quicktest"
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/langs"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestURLize(t *testing.T) {
|
func TestURLize(t *testing.T) {
|
||||||
v := newTestCfg()
|
p := newTestPathSpec()
|
||||||
l := langs.NewDefaultLanguage(v)
|
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
|
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
|
@ -61,10 +60,6 @@ func TestAbsURL(t *testing.T) {
|
||||||
|
|
||||||
func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
|
func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
v := newTestCfg()
|
|
||||||
v.Set("multilingual", multilingual)
|
|
||||||
v.Set("defaultContentLanguage", "en")
|
|
||||||
v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
|
|
||||||
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
input string
|
input string
|
||||||
|
@ -103,24 +98,42 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
v.Set("baseURL", test.baseURL)
|
c.Run(fmt.Sprintf("%v/%t-%t-%t/%s", test, defaultInSubDir, addLanguage, multilingual, lang), func(c *qt.C) {
|
||||||
v.Set("contentDir", "content")
|
v := config.New()
|
||||||
l := langs.NewLanguage(lang, v)
|
if multilingual {
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
|
v.Set("languages", map[string]any{
|
||||||
|
"fr": map[string]interface{}{
|
||||||
output := p.AbsURL(test.input, addLanguage)
|
"weight": 20,
|
||||||
expected := test.expected
|
},
|
||||||
if multilingual && addLanguage {
|
"en": map[string]interface{}{
|
||||||
if !defaultInSubDir && lang == "en" {
|
"weight": 10,
|
||||||
expected = strings.Replace(expected, "MULTI", "", 1)
|
},
|
||||||
} else {
|
})
|
||||||
expected = strings.Replace(expected, "MULTI", lang+"/", 1)
|
|
||||||
}
|
}
|
||||||
} else {
|
v.Set("defaultContentLanguage", "en")
|
||||||
expected = strings.Replace(expected, "MULTI", "", 1)
|
v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
|
||||||
}
|
v.Set("baseURL", test.baseURL)
|
||||||
|
|
||||||
c.Assert(output, qt.Equals, expected)
|
var configLang string
|
||||||
|
if multilingual {
|
||||||
|
configLang = lang
|
||||||
|
}
|
||||||
|
p := newTestPathSpecFromCfgAndLang(v, configLang)
|
||||||
|
|
||||||
|
output := p.AbsURL(test.input, addLanguage)
|
||||||
|
expected := test.expected
|
||||||
|
if multilingual && addLanguage {
|
||||||
|
if !defaultInSubDir && lang == "en" {
|
||||||
|
expected = strings.Replace(expected, "MULTI", "", 1)
|
||||||
|
} else {
|
||||||
|
expected = strings.Replace(expected, "MULTI", lang+"/", 1)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
expected = strings.Replace(expected, "MULTI", "", 1)
|
||||||
|
}
|
||||||
|
|
||||||
|
c.Assert(output, qt.Equals, expected)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -137,9 +150,19 @@ func TestRelURL(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
|
func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool, lang string) {
|
||||||
|
t.Helper()
|
||||||
c := qt.New(t)
|
c := qt.New(t)
|
||||||
v := newTestCfg()
|
v := config.New()
|
||||||
v.Set("multilingual", multilingual)
|
if multilingual {
|
||||||
|
v.Set("languages", map[string]any{
|
||||||
|
"fr": map[string]interface{}{
|
||||||
|
"weight": 20,
|
||||||
|
},
|
||||||
|
"en": map[string]interface{}{
|
||||||
|
"weight": 10,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
}
|
||||||
v.Set("defaultContentLanguage", "en")
|
v.Set("defaultContentLanguage", "en")
|
||||||
v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
|
v.Set("defaultContentLanguageInSubdir", defaultInSubDir)
|
||||||
|
|
||||||
|
@ -182,25 +205,31 @@ func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
	}
	}

	for i, test := range tests {
	for i, test := range tests {
		v.Set("baseURL", test.baseURL)
		c.Run(fmt.Sprintf("%v/%t%t%t/%s", test, defaultInSubDir, addLanguage, multilingual, lang), func(c *qt.C) {
		v.Set("canonifyURLs", test.canonify)
		l := langs.NewLanguage(lang, v)
		p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)

		output := p.RelURL(test.input, addLanguage)
			v.Set("baseURL", test.baseURL)
			v.Set("canonifyURLs", test.canonify)
		expected := test.expected
			var configLang string
		if multilingual && addLanguage {
			if multilingual {
			if !defaultInSubDir && lang == "en" {
				configLang = lang
				expected = strings.Replace(expected, "MULTI", "", 1)
			} else {
				expected = strings.Replace(expected, "MULTI", "/"+lang, 1)
			}
			}
		} else {
			p := newTestPathSpecFromCfgAndLang(v, configLang)
			expected = strings.Replace(expected, "MULTI", "", 1)
		}

		c.Assert(output, qt.Equals, expected, qt.Commentf("[%d] %s", i, test.input))
			output := p.RelURL(test.input, addLanguage)

			expected := test.expected
			if multilingual && addLanguage {
				if !defaultInSubDir && lang == "en" {
					expected = strings.Replace(expected, "MULTI", "", 1)
				} else {
					expected = strings.Replace(expected, "MULTI", "/"+lang, 1)
				}
			} else {
				expected = strings.Replace(expected, "MULTI", "", 1)
			}

			c.Assert(output, qt.Equals, expected, qt.Commentf("[%d] %s", i, test.input))
		})
	}
	}
}
}
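The RelURL variant differs from the AbsURL one only in how it expands the placeholder ("/"+lang prefix instead of lang+"/") and in the new c.Run wrapper, which turns every table entry into a named subtest. A rough sketch of that subtest pattern, assuming only the quicktest API visible in the diff; the table data and package name are invented:

package example

import (
	"fmt"
	"testing"

	qt "github.com/frankban/quicktest"
)

// TestSubtestNaming shows the table-driven c.Run pattern the refactored
// URL tests adopt: each entry runs as its own named subtest, so a failure
// reports the exact input and flag combination rather than just an index.
func TestSubtestNaming(t *testing.T) {
	c := qt.New(t)
	tests := []struct{ input, expected string }{
		{"/foo", "/foo"},
	}
	for _, test := range tests {
		c.Run(fmt.Sprintf("%v/%t", test, true), func(c *qt.C) {
			c.Assert(test.input, qt.Equals, test.expected)
		})
	}
}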
@ -216,8 +245,8 @@ func TestSanitizeURL(t *testing.T) {
	}
	}

	for i, test := range tests {
	for i, test := range tests {
		o1 := SanitizeURL(test.input)
		o1 := helpers.SanitizeURL(test.input)
		o2 := SanitizeURLKeepTrailingSlash(test.input)
		o2 := helpers.SanitizeURLKeepTrailingSlash(test.input)

		expected2 := test.expected
		expected2 := test.expected
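The hunk above only changes how the sanitize helpers are referenced (the test now lives outside the helpers package, hence the helpers. prefix). A small sketch of calling them; the import path and signatures come from the diff, while the exact sanitization rules are whatever the helpers package implements:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/helpers"
)

func main() {
	// Both helpers take and return a string; judging by the names, the
	// KeepTrailingSlash variant differs only in trailing-slash handling.
	in := "http://example.com/foo/bar/"
	fmt.Println(helpers.SanitizeURL(in))
	fmt.Println(helpers.SanitizeURLKeepTrailingSlash(in))
}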
@ -233,28 +262,3 @@ func TestSanitizeURL(t *testing.T) {
	}
	}
	}
	}
}
}

func TestURLPrep(t *testing.T) {
	type test struct {
		ugly   bool
		input  string
		output string
	}

	data := []test{
		{false, "/section/name.html", "/section/name/"},
		{true, "/section/name/index.html", "/section/name.html"},
	}

	for i, d := range data {
		v := newTestCfg()
		v.Set("uglyURLs", d.ugly)
		l := langs.NewDefaultLanguage(v)
		p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)

		output := p.URLPrep(d.input)
		if d.output != output {
			t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
		}
	}
}
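The deleted TestURLPrep documented the uglyURLs mapping: with pretty URLs, /section/name.html renders at /section/name/; with uglyURLs enabled, /section/name/index.html renders at /section/name.html. A standalone sketch that reproduces just those two rows; prepURL is invented for illustration and is not Hugo's URLPrep:

package main

import (
	"fmt"
	"strings"
)

// prepURL reproduces the two cases asserted by the removed test; it is a
// toy mapping, not Hugo's implementation.
func prepURL(ugly bool, in string) string {
	if ugly {
		// /section/name/index.html -> /section/name.html
		return strings.TrimSuffix(in, "/index.html") + ".html"
	}
	// /section/name.html -> /section/name/
	return strings.TrimSuffix(in, ".html") + "/"
}

func main() {
	fmt.Println(prepURL(false, "/section/name.html"))      // /section/name/
	fmt.Println(prepURL(true, "/section/name/index.html")) // /section/name.html
}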
43
hugofs/fs.go
@ -62,39 +62,55 @@ type Fs struct {

// NewDefault creates a new Fs with the OS file system
// NewDefault creates a new Fs with the OS file system
// as source and destination file systems.
// as source and destination file systems.
func NewDefault(cfg config.Provider) *Fs {
func NewDefault(conf config.BaseConfig) *Fs {
	fs := Os
	fs := Os
	return newFs(fs, fs, cfg)
	return NewFrom(fs, conf)
}
}

// NewMem creates a new Fs with the MemMapFs
func NewDefaultOld(cfg config.Provider) *Fs {
// as source and destination file systems.
	workingDir, publishDir := getWorkingPublishDir(cfg)
// Useful for testing.
	fs := Os
func NewMem(cfg config.Provider) *Fs {
	return newFs(fs, fs, workingDir, publishDir)
	fs := &afero.MemMapFs{}
	return newFs(fs, fs, cfg)
}
}

// NewFrom creates a new Fs based on the provided Afero Fs
// NewFrom creates a new Fs based on the provided Afero Fs
// as source and destination file systems.
// as source and destination file systems.
// Useful for testing.
// Useful for testing.
func NewFrom(fs afero.Fs, cfg config.Provider) *Fs {
func NewFrom(fs afero.Fs, conf config.BaseConfig) *Fs {
	return newFs(fs, fs, cfg)
	return newFs(fs, fs, conf.WorkingDir, conf.PublishDir)
}

func NewFromOld(fs afero.Fs, cfg config.Provider) *Fs {
	workingDir, publishDir := getWorkingPublishDir(cfg)
	return newFs(fs, fs, workingDir, publishDir)
}
}

// NewFrom creates a new Fs based on the provided Afero Fss
// NewFrom creates a new Fs based on the provided Afero Fss
// as the source and destination file systems.
// as the source and destination file systems.
func NewFromSourceAndDestination(source, destination afero.Fs, cfg config.Provider) *Fs {
func NewFromSourceAndDestination(source, destination afero.Fs, cfg config.Provider) *Fs {
	return newFs(source, destination, cfg)
	workingDir, publishDir := getWorkingPublishDir(cfg)
	return newFs(source, destination, workingDir, publishDir)
}
}

func newFs(source, destination afero.Fs, cfg config.Provider) *Fs {
func getWorkingPublishDir(cfg config.Provider) (string, string) {
	workingDir := cfg.GetString("workingDir")
	workingDir := cfg.GetString("workingDir")
	publishDir := cfg.GetString("publishDir")
	publishDir := cfg.GetString("publishDirDynamic")
	if publishDir == "" {
		publishDir = cfg.GetString("publishDir")
	}
	return workingDir, publishDir

}

func newFs(source, destination afero.Fs, workingDir, publishDir string) *Fs {
	if publishDir == "" {
	if publishDir == "" {
		panic("publishDir is empty")
		panic("publishDir is empty")
	}
	}

	if workingDir == "." {
		workingDir = ""
	}

	// Sanity check
	// Sanity check
	if IsOsFs(source) && len(workingDir) < 2 {
	if IsOsFs(source) && len(workingDir) < 2 {
		panic("workingDir is too short")
		panic("workingDir is too short")
@ -158,6 +174,7 @@ func MakeReadableAndRemoveAllModulePkgDir(fs afero.Fs, dir string) (int, error)
		}
		}
		return nil
		return nil
	})
	})

	return counter, fs.RemoveAll(dir)
	return counter, fs.RemoveAll(dir)
}
}
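The fs.go changes above replace the config.Provider lookups with an explicit config.BaseConfig value; the *Old constructors keep the provider-based path via getWorkingPublishDir for callers that have not been migrated yet. A rough sketch of building a memory-backed Fs with the new signature, assuming only what the diff shows (NewFrom(fs, conf) and the WorkingDir/PublishDir fields); the concrete directory values are made up:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/hugofs"
	"github.com/spf13/afero"
)

func main() {
	// WorkingDir and PublishDir are the two values newFs now receives directly.
	conf := config.BaseConfig{
		WorkingDir: "/work",
		PublishDir: "public",
	}

	// NewFrom uses the same afero.Fs for source and destination; a memory
	// file system keeps the sketch free of side effects and avoids the
	// OS-fs working-dir sanity check in newFs.
	fs := hugofs.NewFrom(afero.NewMemMapFs(), conf)
	fmt.Println(fs.Source != nil, fs.PublishDir != nil)
}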
@ -35,9 +35,10 @@ func TestIsOsFs(t *testing.T) {

func TestNewDefault(t *testing.T) {
func TestNewDefault(t *testing.T) {
	c := qt.New(t)
	c := qt.New(t)
	v := config.NewWithTestDefaults()
	v := config.New()
	v.Set("workingDir", t.TempDir())
	v.Set("workingDir", t.TempDir())
	f := NewDefault(v)
	v.Set("publishDir", "public")
	f := NewDefaultOld(v)

	c.Assert(f.Source, qt.IsNotNil)
	c.Assert(f.Source, qt.IsNotNil)
	c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs))
	c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs))
@ -49,20 +50,3 @@ func TestNewDefault(t *testing.T) {
	c.Assert(IsOsFs(f.PublishDir), qt.IsTrue)
	c.Assert(IsOsFs(f.PublishDir), qt.IsTrue)
	c.Assert(IsOsFs(f.Os), qt.IsTrue)
	c.Assert(IsOsFs(f.Os), qt.IsTrue)
}
}

func TestNewMem(t *testing.T) {
	c := qt.New(t)
	v := config.NewWithTestDefaults()
	f := NewMem(v)

	c.Assert(f.Source, qt.Not(qt.IsNil))
	c.Assert(f.Source, hqt.IsSameType, new(afero.MemMapFs))
	c.Assert(f.PublishDir, qt.Not(qt.IsNil))
	c.Assert(f.PublishDir, hqt.IsSameType, new(afero.BasePathFs))
	c.Assert(f.Os, hqt.IsSameType, new(afero.OsFs))
	c.Assert(f.WorkingDirReadOnly, qt.IsNotNil)
	c.Assert(IsOsFs(f.Source), qt.IsFalse)
	c.Assert(IsOsFs(f.WorkingDirReadOnly), qt.IsFalse)
	c.Assert(IsOsFs(f.PublishDir), qt.IsFalse)
	c.Assert(IsOsFs(f.Os), qt.IsTrue)
}
@ -34,7 +34,7 @@ type noOpFs struct {
}
}

func (fs noOpFs) Create(name string) (afero.File, error) {
func (fs noOpFs) Create(name string) (afero.File, error) {
	return nil, errNoOp
	panic(errNoOp)
}
}

func (fs noOpFs) Mkdir(name string, perm os.FileMode) error {
func (fs noOpFs) Mkdir(name string, perm os.FileMode) error {
@ -62,7 +62,7 @@ func (fs noOpFs) RemoveAll(path string) error {
}
}

func (fs noOpFs) Rename(oldname string, newname string) error {
func (fs noOpFs) Rename(oldname string, newname string) error {
	return errNoOp
	panic(errNoOp)
}
}

func (fs noOpFs) Stat(name string) (os.FileInfo, error) {
func (fs noOpFs) Stat(name string) (os.FileInfo, error) {
@ -74,13 +74,13 @@ func (fs noOpFs) Name() string {
}
}

func (fs noOpFs) Chmod(name string, mode os.FileMode) error {
func (fs noOpFs) Chmod(name string, mode os.FileMode) error {
	return errNoOp
	panic(errNoOp)
}
}

func (fs noOpFs) Chtimes(name string, atime time.Time, mtime time.Time) error {
func (fs noOpFs) Chtimes(name string, atime time.Time, mtime time.Time) error {
	return errNoOp
	panic(errNoOp)
}
}

func (fs *noOpFs) Chown(name string, uid int, gid int) error {
func (fs *noOpFs) Chown(name string, uid int, gid int) error {
	return errNoOp
	panic(errNoOp)
}
}
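With the noOpFs methods switching from return errNoOp to panic(errNoOp), accidental writes now fail loudly instead of being silently ignorable. A standalone sketch of that pattern and of catching it with recover; noopWriter and its error value are stand-ins, not Hugo's types:

package main

import (
	"errors"
	"fmt"
)

var errNoOp = errors.New("no-op fs: operation not supported")

// noopWriter stands in for the pattern above: an operation that used to
// return an error now panics with it.
type noopWriter struct{}

func (noopWriter) Create(name string) error { panic(errNoOp) }

func main() {
	defer func() {
		if r := recover(); r != nil {
			fmt.Println("caught:", r)
		}
	}()
	_ = noopWriter{}.Create("foo.txt")
}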
@ -30,7 +30,7 @@ import (

func TestLanguageRootMapping(t *testing.T) {
func TestLanguageRootMapping(t *testing.T) {
	c := qt.New(t)
	c := qt.New(t)
	v := config.NewWithTestDefaults()
	v := config.New()
	v.Set("contentDir", "content")
	v.Set("contentDir", "content")

	fs := NewBaseFileDecorator(afero.NewMemMapFs())
	fs := NewBaseFileDecorator(afero.NewMemMapFs())
Some files were not shown because too many files have changed in this diff.