Handle themes in the new file cache (for images, assets)
In the newly consolidated file cache implementation, we forgot that we also look in the theme(s) for assets (SCSS transformations etc.), which is not good for Netlify and the demo sites. Fixes #5460
parent e82b2dc8c1
commit f9b4eb4f39
11 changed files with 207 additions and 97 deletions
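
The heart of the change is that caches living under resources/_gen (images, assets) are now mounted on a composite resources filesystem that layers the project's resource directory over the themes', instead of on the plain project filesystem. A minimal sketch of that layering idea, using afero's CopyOnWriteFs as a stand-in for Hugo's own BaseFs.Resources.Fs and with made-up paths:

package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	disk := afero.NewMemMapFs()
	// A project and a theme that both ship files under resources/_gen
	// (paths invented for this sketch).
	afero.WriteFile(disk, "/project/resources/_gen/project.css", []byte("from project"), 0777)
	afero.WriteFile(disk, "/project/themes/mytheme/resources/_gen/theme.css", []byte("from theme"), 0777)

	project := afero.NewBasePathFs(disk, "/project/resources")
	theme := afero.NewBasePathFs(disk, "/project/themes/mytheme/resources")

	// Lookups prefer the project layer and fall back to the theme;
	// writes always land in the project layer.
	union := afero.NewCopyOnWriteFs(theme, project)

	for _, name := range []string{"_gen/project.css", "_gen/theme.css"} {
		b, err := afero.ReadFile(union, name)
		fmt.Println(name, string(b), err)
	}
}

This is roughly the behaviour the resource caches need once a theme contributes assets: a lookup for a theme-generated file must not miss just because the file sits in the theme's resources directory.
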

cache/filecache/filecache.go (vendored, 25 lines changed)

@@ -17,6 +17,7 @@ import (
 	"bytes"
 	"io"
 	"io/ioutil"
+	"os"
 	"path/filepath"
 	"strings"
 	"sync"
@@ -26,8 +27,6 @@ import (
 
 	"github.com/gohugoio/hugo/helpers"
 
-	"github.com/gohugoio/hugo/hugolib/paths"
-
 	"github.com/BurntSushi/locker"
 	"github.com/spf13/afero"
 )
@@ -305,22 +304,28 @@ func (f Caches) Get(name string) *Cache {
 	return f[strings.ToLower(name)]
 }
 
-// NewCachesFromPaths creates a new set of file caches from the given
+// NewCaches creates a new set of file caches from the given
 // configuration.
-func NewCachesFromPaths(p *paths.Paths) (Caches, error) {
+func NewCaches(p *helpers.PathSpec) (Caches, error) {
	dcfg, err := decodeConfig(p)
 	if err != nil {
 		return nil, err
 	}
 
-	genDir := filepath.FromSlash("/_gen")
-
 	fs := p.Fs.Source
 
 	m := make(Caches)
 	for k, v := range dcfg {
+		var cfs afero.Fs
+
+		if v.isResourceDir {
+			cfs = p.BaseFs.Resources.Fs
+		} else {
+			cfs = fs
+		}
+
 		var baseDir string
-		if !strings.Contains(v.Dir, genDir) {
+		if !strings.HasPrefix(v.Dir, "_gen") {
 			// We do cache eviction (file removes) and since the user can set
 			// his/hers own cache directory, we really want to make sure
 			// we do not delete any files that do not belong to this cache.
@@ -331,10 +336,12 @@ func NewCachesFromPaths(p *paths.Paths) (Caches, error) {
 		} else {
 			baseDir = filepath.Join(v.Dir, k)
 		}
-		if err = fs.MkdirAll(baseDir, 0777); err != nil {
+
+		if err = cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
 			return nil, err
 		}
-		bfs := afero.NewBasePathFs(fs, baseDir)
+
+		bfs := afero.NewBasePathFs(cfs, baseDir)
 
 		m[k] = NewCache(bfs, v.MaxAge)
 	}
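
Two details in the hunk above are worth calling out: MkdirAll now runs against whichever filesystem backs the cache (cfs), and an "already exists" error is tolerated, since the _gen directory may already be present in the composite resources filesystem. A tiny sketch of that tolerant-create idiom; ensureDir is a made-up helper, not part of the commit:

package main

import (
	"fmt"
	"os"

	"github.com/spf13/afero"
)

// ensureDir creates dir (and any parents) on fs, treating "already exists"
// as success, mirroring the MkdirAll call in the diff above.
func ensureDir(fs afero.Fs, dir string) error {
	if err := fs.MkdirAll(dir, 0777); err != nil && !os.IsExist(err) {
		return err
	}
	return nil
}

func main() {
	fs := afero.NewMemMapFs()
	fmt.Println(ensureDir(fs, "/cache/images/_gen")) // <nil>
	fmt.Println(ensureDir(fs, "/cache/images/_gen")) // still <nil>: the directory already exists
}
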

cache/filecache/filecache_config.go (vendored, 37 lines changed)

@@ -20,7 +20,6 @@ import (
 	"time"
 
 	"github.com/gohugoio/hugo/helpers"
-	"github.com/gohugoio/hugo/hugolib/paths"
 
 	"github.com/bep/mapstructure"
 	"github.com/pkg/errors"
@@ -68,6 +67,10 @@ type cacheConfig struct {
 
 	// The directory where files are stored.
 	Dir string
+
+	// Will resources/_gen will get its own composite filesystem that
+	// also checks any theme.
+	isResourceDir bool
 }
 
 // GetJSONCache gets the file cache for getJSON.
@@ -90,7 +93,7 @@ func (f Caches) AssetsCache() *Cache {
 	return f[cacheKeyAssets]
 }
 
-func decodeConfig(p *paths.Paths) (cachesConfig, error) {
+func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
 	c := make(cachesConfig)
 	valid := make(map[string]bool)
 	// Add defaults
@@ -145,10 +148,13 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
 
 		for i, part := range parts {
 			if strings.HasPrefix(part, ":") {
-				resolved, err := resolveDirPlaceholder(p, part)
+				resolved, isResource, err := resolveDirPlaceholder(p, part)
 				if err != nil {
 					return c, err
 				}
+				if isResource {
+					v.isResourceDir = true
+				}
 				parts[i] = resolved
 			}
 		}
@@ -159,13 +165,15 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
 		}
 		v.Dir = filepath.Clean(filepath.FromSlash(dir))
 
-		if isOsFs && !filepath.IsAbs(v.Dir) {
-			return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
-		}
+		if !v.isResourceDir {
+			if isOsFs && !filepath.IsAbs(v.Dir) {
+				return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
+			}
 
-		// Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
-		if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
-			return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+			// Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
+			if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
+				return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+			}
 		}
 
 		if disabled {
@@ -179,15 +187,16 @@ func decodeConfig(p *paths.Paths) (cachesConfig, error) {
 }
 
 // Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
-func resolveDirPlaceholder(p *paths.Paths, placeholder string) (string, error) {
+func resolveDirPlaceholder(p *helpers.PathSpec, placeholder string) (cacheDir string, isResource bool, err error) {
 	switch strings.ToLower(placeholder) {
 	case ":resourcedir":
-		return p.AbsResourcesDir, nil
+		return "", true, nil
 	case ":cachedir":
-		return helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+		d, err := helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+		return d, false, err
 	case ":project":
-		return filepath.Base(p.WorkingDir), nil
+		return filepath.Base(p.WorkingDir), false, nil
 	}
 
-	return "", errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
+	return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
 }
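
resolveDirPlaceholder now reports whether a placeholder refers to the resources directory instead of expanding it to an absolute path, so ":resourceDir/_gen" collapses to the relative dir "_gen" inside the composite resources filesystem. A simplified, self-contained sketch of that behaviour; resolvePlaceholder and its cacheDir/workingDir parameters are stand-ins for what Hugo reads from its configuration:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// resolvePlaceholder mimics the three-way switch in the diff above:
// ":resourceDir" only flags the cache as resource-backed, it no longer
// produces an absolute directory.
func resolvePlaceholder(placeholder, cacheDir, workingDir string) (dir string, isResource bool, err error) {
	switch strings.ToLower(placeholder) {
	case ":resourcedir":
		return "", true, nil
	case ":cachedir":
		return cacheDir, false, nil
	case ":project":
		return filepath.Base(workingDir), false, nil
	}
	return "", false, fmt.Errorf("%q is not a valid placeholder", placeholder)
}

func main() {
	dir, isResource, _ := resolvePlaceholder(":resourceDir", "/tmp/hugo_cache", "/my/project")
	// ":resourceDir/_gen" therefore ends up as the relative dir "_gen".
	fmt.Println(filepath.Join(dir, "_gen"), isResource) // _gen true
}
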

cache/filecache/filecache_config_test.go (vendored, 61 lines changed)

@@ -20,9 +20,10 @@ import (
 	"testing"
 	"time"
 
+	"github.com/gohugoio/hugo/helpers"
+
 	"github.com/gohugoio/hugo/config"
 	"github.com/gohugoio/hugo/hugofs"
-	"github.com/gohugoio/hugo/hugolib/paths"
 
 	"github.com/spf13/viper"
 	"github.com/stretchr/testify/require"
@@ -35,6 +36,13 @@ func TestDecodeConfig(t *testing.T) {
 
 	configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
 
 [caches]
 [caches.getJSON]
 maxAge = "10m"
@@ -50,7 +58,7 @@ dir = "/path/to/c3"
 	cfg, err := config.FromConfigString(configStr, "toml")
 	assert.NoError(err)
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
 	decoded, err := decodeConfig(p)
@@ -75,6 +83,13 @@ func TestDecodeConfigIgnoreCache(t *testing.T) {
 
 	configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
 
 ignoreCache = true
 [caches]
 [caches.getJSON]
@@ -91,7 +106,7 @@ dir = "/path/to/c3"
 	cfg, err := config.FromConfigString(configStr, "toml")
 	assert.NoError(err)
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
 	decoded, err := decodeConfig(p)
@@ -107,8 +122,7 @@ dir = "/path/to/c3"
 
 func TestDecodeConfigDefault(t *testing.T) {
 	assert := require.New(t)
-	cfg := viper.New()
-	cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+	cfg := newTestConfig()
 
 	if runtime.GOOS == "windows" {
 		cfg.Set("resourceDir", "c:\\cache\\resources")
@@ -120,7 +134,7 @@ func TestDecodeConfigDefault(t *testing.T) {
 	}
 
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
 	decoded, err := decodeConfig(p)
@@ -129,12 +143,18 @@ func TestDecodeConfigDefault(t *testing.T) {
 
 	assert.Equal(4, len(decoded))
 
+	imgConfig := decoded[cacheKeyImages]
+	jsonConfig := decoded[cacheKeyGetJSON]
+
 	if runtime.GOOS == "windows" {
-		assert.Equal("c:\\cache\\resources\\_gen", decoded[cacheKeyImages].Dir)
+		assert.Equal("_gen", imgConfig.Dir)
 	} else {
-		assert.Equal("/cache/resources/_gen", decoded[cacheKeyImages].Dir)
-		assert.Equal("/cache/thecache/hugoproject", decoded[cacheKeyGetJSON].Dir)
+		assert.Equal("_gen", imgConfig.Dir)
+		assert.Equal("/cache/thecache/hugoproject", jsonConfig.Dir)
 	}
+
+	assert.True(imgConfig.isResourceDir)
+	assert.False(jsonConfig.isResourceDir)
 }
 
 func TestDecodeConfigInvalidDir(t *testing.T) {
@@ -144,6 +164,13 @@ func TestDecodeConfigInvalidDir(t *testing.T) {
 
 	configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
 
 [caches]
 [caches.getJSON]
 maxAge = "10m"
@@ -157,10 +184,24 @@ dir = "/"
 	cfg, err := config.FromConfigString(configStr, "toml")
 	assert.NoError(err)
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
 	_, err = decodeConfig(p)
 	assert.Error(err)
 
 }
+
+func newTestConfig() *viper.Viper {
+	cfg := viper.New()
+	cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+	cfg.Set("contentDir", "content")
+	cfg.Set("dataDir", "data")
+	cfg.Set("resourceDir", "resources")
+	cfg.Set("i18nDir", "i18n")
+	cfg.Set("layoutDir", "layouts")
+	cfg.Set("archetypeDir", "archetypes")
+	cfg.Set("assetDir", "assets")
+
+	return cfg
+}

cache/filecache/filecache_pruner_test.go (vendored, 118 lines changed)

@@ -19,8 +19,8 @@ import (
 	"time"
 
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/helpers"
 	"github.com/gohugoio/hugo/hugofs"
-	"github.com/gohugoio/hugo/hugolib/paths"
 
 	"github.com/stretchr/testify/require"
 )
@@ -32,69 +32,87 @@ func TestPrune(t *testing.T) {
 
 	configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
 
 [caches]
 [caches.getjson]
 maxAge = "200ms"
 dir = "/cache/c"
 
 [caches.getcsv]
 maxAge = "200ms"
 dir = "/cache/d"
+[caches.assets]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
+[caches.images]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
 `
 
 	cfg, err := config.FromConfigString(configStr, "toml")
 	assert.NoError(err)
-	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
-	assert.NoError(err)
-
-	caches, err := NewCachesFromPaths(p)
-	assert.NoError(err)
+	for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
+		msg := fmt.Sprintf("cache: %s", name)
+		fs := hugofs.NewMem(cfg)
+		p, err := helpers.NewPathSpec(fs, cfg)
+		assert.NoError(err)
+		caches, err := NewCaches(p)
+		assert.NoError(err)
+		cache := caches[name]
+		for i := 0; i < 10; i++ {
+			id := fmt.Sprintf("i%d", i)
+			cache.GetOrCreateBytes(id, func() ([]byte, error) {
+				return []byte("abc"), nil
+			})
+			if i == 4 {
+				// This will expire the first 5
+				time.Sleep(201 * time.Millisecond)
+			}
+		}
 
-	jsonCache := caches.GetJSONCache()
-	for i := 0; i < 10; i++ {
-		id := fmt.Sprintf("i%d", i)
-		jsonCache.GetOrCreateBytes(id, func() ([]byte, error) {
+		count, err := caches.Prune()
+		assert.NoError(err)
+		assert.Equal(5, count, msg)
+
+		for i := 0; i < 10; i++ {
+			id := fmt.Sprintf("i%d", i)
+			v := cache.getString(id)
+			if i < 5 {
+				assert.Equal("", v, id)
+			} else {
+				assert.Equal("abc", v, id)
+			}
+		}
+
+		caches, err = NewCaches(p)
+		assert.NoError(err)
+		cache = caches[name]
+		// Touch one and then prune.
+		cache.GetOrCreateBytes("i5", func() ([]byte, error) {
 			return []byte("abc"), nil
 		})
-		if i == 4 {
-			// This will expire the first 5
-			time.Sleep(201 * time.Millisecond)
+
+		count, err = caches.Prune()
+		assert.NoError(err)
+		assert.Equal(4, count)
+
+		// Now only the i5 should be left.
+		for i := 0; i < 10; i++ {
+			id := fmt.Sprintf("i%d", i)
+			v := cache.getString(id)
+			if i != 5 {
+				assert.Equal("", v, id)
+			} else {
+				assert.Equal("abc", v, id)
+			}
 		}
 	}
 
-	count, err := caches.Prune()
-	assert.NoError(err)
-	assert.Equal(5, count)
-
-	for i := 0; i < 10; i++ {
-		id := fmt.Sprintf("i%d", i)
-		v := jsonCache.getString(id)
-		if i < 5 {
-			assert.Equal("", v, id)
-		} else {
-			assert.Equal("abc", v, id)
-		}
-	}
-
-	caches, err = NewCachesFromPaths(p)
-	assert.NoError(err)
-	jsonCache = caches.GetJSONCache()
-	// Touch one and then prune.
-	jsonCache.GetOrCreateBytes("i5", func() ([]byte, error) {
-		return []byte("abc"), nil
-	})
-
-	count, err = caches.Prune()
-	assert.NoError(err)
-	assert.Equal(4, count)
-
-	// Now only the i5 should be left.
-	for i := 0; i < 10; i++ {
-		id := fmt.Sprintf("i%d", i)
-		v := jsonCache.getString(id)
-		if i != 5 {
-			assert.Equal("", v, id)
-		} else {
-			assert.Equal("abc", v, id)
-		}
-	}
-
 }
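
The updated test leans on one property: pruning removes every cache entry whose age exceeds maxAge (here 200ms), so the five entries written before the 201ms sleep disappear and the five written after it survive. A rough, self-contained sketch of that kind of age-based pruning over an afero filesystem; pruneOlderThan is a made-up helper, and Hugo's real pruner works per configured cache and differs in the details:

package main

import (
	"fmt"
	"os"
	"time"

	"github.com/spf13/afero"
)

// pruneOlderThan walks root and removes every regular file whose modification
// time is older than maxAge, returning how many files were removed.
func pruneOlderThan(fs afero.Fs, root string, maxAge time.Duration) (int, error) {
	count := 0
	err := afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
		if err != nil || info.IsDir() {
			return err
		}
		if time.Since(info.ModTime()) > maxAge {
			if err := fs.Remove(path); err != nil {
				return err
			}
			count++
		}
		return nil
	})
	return count, err
}

func main() {
	fs := afero.NewMemMapFs()
	afero.WriteFile(fs, "/cache/old.json", []byte("x"), 0777)
	// Backdate one entry so it is older than maxAge.
	fs.Chtimes("/cache/old.json", time.Now(), time.Now().Add(-time.Hour))
	afero.WriteFile(fs, "/cache/new.json", []byte("y"), 0777)

	n, err := pruneOlderThan(fs, "/cache", 200*time.Millisecond)
	fmt.Println(n, err) // 1 <nil>
}
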

cache/filecache/filecache_test.go (vendored, 28 lines changed)

@@ -25,10 +25,10 @@ import (
 	"time"
 
 	"github.com/gohugoio/hugo/common/hugio"
 
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/helpers"
+
 	"github.com/gohugoio/hugo/hugofs"
-	"github.com/gohugoio/hugo/hugolib/paths"
-
 	"github.com/spf13/afero"
 
 	"github.com/stretchr/testify/require"
@@ -44,6 +44,13 @@ func TestFileCache(t *testing.T) {
 workingDir = "/my/work"
+resourceDir = "resources"
 cacheDir = "CACHEDIR"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
 
 [caches]
 [caches.getJSON]
 maxAge = "10h"
@@ -56,10 +63,10 @@ dir = ":cacheDir/c"
 	assert.NoError(err)
 
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
-	caches, err := NewCachesFromPaths(p)
+	caches, err := NewCaches(p)
 	assert.NoError(err)
 
 	c := caches.Get("GetJSON")
@@ -83,7 +90,7 @@ dir = ":cacheDir/c"
 	bfs, ok = c.Fs.(*afero.BasePathFs)
 	assert.True(ok)
 	filename, _ = bfs.RealPath("key")
-	assert.Equal(filepath.FromSlash("/my/work/resources/_gen/images/key"), filename)
+	assert.Equal(filepath.FromSlash("_gen/images/key"), filename)
 
 	rf := func(s string) func() (io.ReadCloser, error) {
 		return func() (io.ReadCloser, error) {
@@ -160,6 +167,13 @@ func TestFileCacheConcurrent(t *testing.T) {
 
 	configStr := `
+resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archeTypedir = "archetypes"
 
 [caches]
 [caches.getjson]
 maxAge = "1s"
@@ -170,10 +184,10 @@ dir = "/cache/c"
 	cfg, err := config.FromConfigString(configStr, "toml")
 	assert.NoError(err)
 	fs := hugofs.NewMem(cfg)
-	p, err := paths.New(fs, cfg)
+	p, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
-	caches, err := NewCachesFromPaths(p)
+	caches, err := NewCaches(p)
 	assert.NoError(err)
 
 	const cacheName = "getjson"

deps/deps.go (vendored, 2 lines changed)

@@ -193,7 +193,7 @@ func New(cfg DepsCfg) (*Deps, error) {
 		return nil, err
 	}
 
-	fileCaches, err := filecache.NewCachesFromPaths(ps.Paths)
+	fileCaches, err := filecache.NewCaches(ps)
 	if err != nil {
 		return nil, errors.WithMessage(err, "failed to create file caches from configuration")
 	}

go.sum (1 line changed)

@@ -143,6 +143,7 @@ github.com/tdewolff/test v1.0.0 h1:jOwzqCXr5ePXEPGJaq2ivoR6HOCi+D5TPfpoyg8yvmU=
 github.com/tdewolff/test v1.0.0/go.mod h1:DiQUlutnqlEvdvhSn2LPGy4TFwRauAaYDsL+683RNX4=
 github.com/wellington/go-libsass v0.0.0-20180624165032-615eaa47ef79 h1:ivqgxj/zO3UZuzX7ZnlcyX8cAbNqLl1oes4zPddAO5Q=
 github.com/wellington/go-libsass v0.0.0-20180624165032-615eaa47ef79/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
+github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701 h1:9vG9vvVNVupO4Y7uwFkRgIMNe9rdaJMCINDe8vhAhLo=
 github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
 github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA=
 github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0=

@@ -81,6 +81,7 @@ type SourceFilesystems struct {
 	Layouts    *SourceFilesystem
 	Archetypes *SourceFilesystem
 	Assets     *SourceFilesystem
+	Resources  *SourceFilesystem
 
 	// This is a unified read-only view of the project's and themes' workdir.
 	Work *SourceFilesystem
@@ -375,6 +376,13 @@ func (b *sourceFilesystemsBuilder) Build() (*SourceFilesystems, error) {
 	}
 	b.result.Assets = sfs
 
+	sfs, err = b.createFs(true, false, "resourceDir", "resources")
+	if err != nil {
+		return nil, err
+	}
+
+	b.result.Resources = sfs
+
 	sfs, err = b.createFs(false, true, "", "")
 	if err != nil {
 		return nil, err

@@ -108,6 +108,7 @@ theme = ["atheme"]
 	checkFileCount(bfs.Data.Fs, "", assert, 9)        // 7 + 2 themes
 	checkFileCount(bfs.Archetypes.Fs, "", assert, 10) // 8 + 2 themes
 	checkFileCount(bfs.Assets.Fs, "", assert, 9)
+	checkFileCount(bfs.Resources.Fs, "", assert, 10)
 	checkFileCount(bfs.Work.Fs, "", assert, 78)
 
 	assert.Equal([]string{filepath.FromSlash("/my/work/mydata"), filepath.FromSlash("/my/work/themes/btheme/data"), filepath.FromSlash("/my/work/themes/atheme/data")}, bfs.Data.Dirnames)
@@ -228,6 +229,8 @@ func TestRealDirs(t *testing.T) {
 	assert.Equal(filepath.Join(root, "myassets/scss"), realDirs[0])
 	assert.Equal(filepath.Join(themesDir, "mytheme/assets/scss"), realDirs[len(realDirs)-1])
 
+	checkFileCount(bfs.Resources.Fs, "", assert, 3)
+
 	assert.NotNil(bfs.themeFs)
 	fi, b, err := bfs.themeFs.(afero.Lstater).LstatIfPossible(filepath.Join("resources", "t1.txt"))
 	assert.NoError(err)

@@ -52,7 +52,7 @@ func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *
 	s, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
-	filecaches, err := filecache.NewCachesFromPaths(s.Paths)
+	filecaches, err := filecache.NewCaches(s)
 	assert.NoError(err)
 
 	spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes)
@@ -88,7 +88,7 @@ func newTestResourceOsFs(assert *require.Assertions) *Spec {
 	s, err := helpers.NewPathSpec(fs, cfg)
 	assert.NoError(err)
 
-	filecaches, err := filecache.NewCachesFromPaths(s.Paths)
+	filecaches, err := filecache.NewCaches(s)
 	assert.NoError(err)
 
 	spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes)
@ -23,13 +23,12 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/hugolib/paths"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
"github.com/gohugoio/hugo/cache/filecache"
|
||||
"github.com/gohugoio/hugo/common/loggers"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/deps"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/langs"
|
||||
"github.com/spf13/afero"
|
||||
|
@ -181,6 +180,12 @@ func TestScpGetRemoteParallel(t *testing.T) {
|
|||
|
||||
func newDeps(cfg config.Provider) *deps.Deps {
|
||||
cfg.Set("resourceDir", "resources")
|
||||
cfg.Set("dataDir", "resources")
|
||||
cfg.Set("i18nDir", "i18n")
|
||||
cfg.Set("assetDir", "assets")
|
||||
cfg.Set("layoutDir", "layouts")
|
||||
cfg.Set("archetypeDir", "archetypes")
|
||||
|
||||
l := langs.NewLanguage("en", cfg)
|
||||
l.Set("i18nDir", "i18n")
|
||||
cs, err := helpers.NewContentSpec(l)
|
||||
|
@ -190,9 +195,13 @@ func newDeps(cfg config.Provider) *deps.Deps {
|
|||
|
||||
fs := hugofs.NewMem(l)
|
||||
logger := loggers.NewErrorLogger()
|
||||
p, _ := paths.New(fs, cfg)
|
||||
|
||||
fileCaches, err := filecache.NewCachesFromPaths(p)
|
||||
p, err := helpers.NewPathSpec(fs, cfg)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
fileCaches, err := filecache.NewCaches(p)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
|