Add support for a content dir set per language
A sample config:

```toml
defaultContentLanguage = "en"
defaultContentLanguageInSubdir = true

[Languages]
[Languages.en]
weight = 10
title = "In English"
languageName = "English"
contentDir = "content/english"

[Languages.nn]
weight = 20
title = "På Norsk"
languageName = "Norsk"
contentDir = "content/norwegian"
```

The value of `contentDir` can be any valid path, even an absolute path reference. The only restriction is that the content dirs cannot overlap.

A content file is assigned a language by:

1. Its placement: `content/norwegian/post/my-post.md` will be read as Norwegian content.
2. Its filename: `content/english/post/my-post.nn.md` will be read as Norwegian even if it lives in the English content folder.

The content directories are merged into one big virtual filesystem with one simple rule: the most specific language file wins. This means that if both `content/norwegian/post/my-post.md` and `content/english/post/my-post.nn.md` exist, they are considered duplicates and the version inside `content/norwegian` wins.

Note that translations are assigned automatically by Hugo based on the content file's relative placement, so `content/norwegian/post/my-post.md` will be a translation of `content/english/post/my-post.md`. If this does not work for you, you can connect the translations by setting a `translationKey` in the content files' front matter.

Fixes #4523
Fixes #4552
Fixes #4553
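For reference, `translationKey` is an ordinary front matter field: files that should be linked as translations simply share the same key value. A minimal sketch for two such files follows; the Norwegian file path and the key value `"my-post"` are illustrative assumptions, not something introduced by this commit:

```toml
# content/english/post/my-post.md — TOML front matter (between +++ markers in the file)
title = "My Post"
translationKey = "my-post"
```

```toml
# content/norwegian/post/en-oversettelse.md (illustrative path) — a different relative
# path, but the shared translationKey links it to the English page above
title = "En oversettelse"
translationKey = "my-post"
```

Without `translationKey`, these two files would only be linked if they shared the same relative path under their respective content dirs, as described above.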
parent f27977809c
commit eb42774e58
66 changed files with 1819 additions and 556 deletions
6
Gopkg.lock
generated
|
@ -293,8 +293,8 @@
|
||||||
".",
|
".",
|
||||||
"mem"
|
"mem"
|
||||||
]
|
]
|
||||||
revision = "bb8f1927f2a9d3ab41c9340aa034f6b803f4359c"
|
revision = "63644898a8da0bc22138abf860edaf5277b6102e"
|
||||||
version = "v1.0.2"
|
version = "v1.1.0"
|
||||||
|
|
||||||
[[projects]]
|
[[projects]]
|
||||||
name = "github.com/spf13/cast"
|
name = "github.com/spf13/cast"
|
||||||
|
@ -424,6 +424,6 @@
|
||||||
[solve-meta]
|
[solve-meta]
|
||||||
analyzer-name = "dep"
|
analyzer-name = "dep"
|
||||||
analyzer-version = 1
|
analyzer-version = 1
|
||||||
inputs-digest = "13ab39f8bfafadc12c05726e565ee3f3d94bf7d6c0e8adf04056de0691bf2dd6"
|
inputs-digest = "edb250b53926de21df1740c379c76351b7e9b110c96a77078a10ba69bf31a2d4"
|
||||||
solver-name = "gps-cdcl"
|
solver-name = "gps-cdcl"
|
||||||
solver-version = 1
|
solver-version = 1
|
||||||
|
|
|
@ -78,7 +78,7 @@
|
||||||
|
|
||||||
[[constraint]]
|
[[constraint]]
|
||||||
name = "github.com/spf13/afero"
|
name = "github.com/spf13/afero"
|
||||||
version = "^1.0.1"
|
version = "^1.1.0"
|
||||||
|
|
||||||
[[constraint]]
|
[[constraint]]
|
||||||
name = "github.com/spf13/cast"
|
name = "github.com/spf13/cast"
|
||||||
|
|
|
@ -705,7 +705,7 @@ func (c *commandeer) getDirList() ([]string, error) {
|
||||||
c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
|
c.Logger.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", path, err)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
linkfi, err := helpers.LstatIfOs(c.Fs.Source, link)
|
linkfi, err := helpers.LstatIfPossible(c.Fs.Source, link)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
c.Logger.ERROR.Printf("Cannot stat %q: %s", link, err)
|
c.Logger.ERROR.Printf("Cannot stat %q: %s", link, err)
|
||||||
return nil
|
return nil
|
||||||
|
@ -743,9 +743,13 @@ func (c *commandeer) getDirList() ([]string, error) {
|
||||||
|
|
||||||
// SymbolicWalk will log anny ERRORs
|
// SymbolicWalk will log anny ERRORs
|
||||||
_ = helpers.SymbolicWalk(c.Fs.Source, dataDir, regularWalker)
|
_ = helpers.SymbolicWalk(c.Fs.Source, dataDir, regularWalker)
|
||||||
_ = helpers.SymbolicWalk(c.Fs.Source, c.PathSpec().AbsPathify(c.Cfg.GetString("contentDir")), symLinkWalker)
|
|
||||||
_ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, regularWalker)
|
_ = helpers.SymbolicWalk(c.Fs.Source, i18nDir, regularWalker)
|
||||||
_ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, regularWalker)
|
_ = helpers.SymbolicWalk(c.Fs.Source, layoutDir, regularWalker)
|
||||||
|
|
||||||
|
for _, contentDir := range c.PathSpec().ContentDirs() {
|
||||||
|
_ = helpers.SymbolicWalk(c.Fs.Source, contentDir.Value, symLinkWalker)
|
||||||
|
}
|
||||||
|
|
||||||
for _, staticDir := range staticDirs {
|
for _, staticDir := range staticDirs {
|
||||||
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
|
_ = helpers.SymbolicWalk(c.Fs.Source, staticDir, regularWalker)
|
||||||
}
|
}
|
||||||
|
|
|
@ -20,6 +20,12 @@ import (
|
||||||
"github.com/spf13/cast"
|
"github.com/spf13/cast"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// KeyValueStr is a string tuple.
|
||||||
|
type KeyValueStr struct {
|
||||||
|
Key string
|
||||||
|
Value string
|
||||||
|
}
|
||||||
|
|
||||||
// KeyValues holds an key and a slice of values.
|
// KeyValues holds an key and a slice of values.
|
||||||
type KeyValues struct {
|
type KeyValues struct {
|
||||||
Key interface{}
|
Key interface{}
|
||||||
|
|
|
@ -63,7 +63,22 @@ func NewContent(
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
contentPath := s.PathSpec.AbsPathify(filepath.Join(s.Cfg.GetString("contentDir"), targetPath))
|
// The site may have multiple content dirs, and we currently do not know which contentDir the
|
||||||
|
// user wants to create this content in. We should improve on this, but we start by testing if the
|
||||||
|
// provided path points to an existing dir. If so, use it as is.
|
||||||
|
var contentPath string
|
||||||
|
var exists bool
|
||||||
|
targetDir := filepath.Dir(targetPath)
|
||||||
|
|
||||||
|
if targetDir != "" && targetDir != "." {
|
||||||
|
exists, _ = helpers.Exists(targetDir, ps.Fs.Source)
|
||||||
|
}
|
||||||
|
|
||||||
|
if exists {
|
||||||
|
contentPath = targetPath
|
||||||
|
} else {
|
||||||
|
contentPath = s.PathSpec.AbsPathify(filepath.Join(s.Cfg.GetString("contentDir"), targetPath))
|
||||||
|
}
|
||||||
|
|
||||||
if err := helpers.SafeWriteToDisk(contentPath, bytes.NewReader(content), s.Fs.Source); err != nil {
|
if err := helpers.SafeWriteToDisk(contentPath, bytes.NewReader(content), s.Fs.Source); err != nil {
|
||||||
return err
|
return err
|
||||||
|
|
|
@ -88,10 +88,15 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, kind, targetPath, archetypeFile
|
||||||
err error
|
err error
|
||||||
)
|
)
|
||||||
|
|
||||||
sp := source.NewSourceSpec(s.Deps.Cfg, s.Deps.Fs)
|
ps, err := helpers.NewPathSpec(s.Deps.Fs, s.Deps.Cfg)
|
||||||
|
sp := source.NewSourceSpec(ps, ps.Fs.Source)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
f := sp.NewFileInfo("", targetPath, false, nil)
|
f := sp.NewFileInfo("", targetPath, false, nil)
|
||||||
|
|
||||||
name := f.TranslationBaseName()
|
name := f.TranslationBaseName()
|
||||||
|
|
||||||
if name == "index" || name == "_index" {
|
if name == "index" || name == "_index" {
|
||||||
// Page bundles; the directory name will hopefully have a better name.
|
// Page bundles; the directory name will hopefully have a better name.
|
||||||
dir := strings.TrimSuffix(f.Dir(), helpers.FilePathSeparator)
|
dir := strings.TrimSuffix(f.Dir(), helpers.FilePathSeparator)
|
||||||
|
|
|
@ -75,7 +75,7 @@ func TestNewContent(t *testing.T) {
|
||||||
for i, v := range c.expected {
|
for i, v := range c.expected {
|
||||||
found := strings.Contains(content, v)
|
found := strings.Contains(content, v)
|
||||||
if !found {
|
if !found {
|
||||||
t.Errorf("[%d] %q missing from output:\n%q", i, v, content)
|
t.Fatalf("[%d] %q missing from output:\n%q", i, v, content)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
2
deps/deps.go
vendored
|
@ -126,7 +126,7 @@ func New(cfg DepsCfg) (*Deps, error) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
sp := source.NewSourceSpec(cfg.Language, fs)
|
sp := source.NewSourceSpec(ps, fs.Source)
|
||||||
|
|
||||||
d := &Deps{
|
d := &Deps{
|
||||||
Fs: fs,
|
Fs: fs,
|
||||||
|
|
|
@ -41,6 +41,14 @@ type Language struct {
|
||||||
Title string
|
Title string
|
||||||
Weight int
|
Weight int
|
||||||
|
|
||||||
|
Disabled bool
|
||||||
|
|
||||||
|
// If set per language, this tells Hugo that all content files without any
|
||||||
|
// language indicator (e.g. my-page.en.md) is in this language.
|
||||||
|
// This is usually a path relative to the working dir, but it can be an
|
||||||
|
// absolute directory referenece. It is what we get.
|
||||||
|
ContentDir string
|
||||||
|
|
||||||
Cfg config.Provider
|
Cfg config.Provider
|
||||||
|
|
||||||
// These are params declared in the [params] section of the language merged with the
|
// These are params declared in the [params] section of the language merged with the
|
||||||
|
@ -66,7 +74,13 @@ func NewLanguage(lang string, cfg config.Provider) *Language {
|
||||||
params[k] = v
|
params[k] = v
|
||||||
}
|
}
|
||||||
ToLowerMap(params)
|
ToLowerMap(params)
|
||||||
l := &Language{Lang: lang, Cfg: cfg, params: params, settings: make(map[string]interface{})}
|
|
||||||
|
defaultContentDir := cfg.GetString("contentDir")
|
||||||
|
if defaultContentDir == "" {
|
||||||
|
panic("contentDir not set")
|
||||||
|
}
|
||||||
|
|
||||||
|
l := &Language{Lang: lang, ContentDir: defaultContentDir, Cfg: cfg, params: params, settings: make(map[string]interface{})}
|
||||||
return l
|
return l
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -22,11 +22,12 @@ import (
|
||||||
|
|
||||||
func TestGetGlobalOnlySetting(t *testing.T) {
|
func TestGetGlobalOnlySetting(t *testing.T) {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
v.Set("defaultContentLanguageInSubdir", true)
|
||||||
|
v.Set("contentDir", "content")
|
||||||
|
v.Set("paginatePath", "page")
|
||||||
lang := NewDefaultLanguage(v)
|
lang := NewDefaultLanguage(v)
|
||||||
lang.Set("defaultContentLanguageInSubdir", false)
|
lang.Set("defaultContentLanguageInSubdir", false)
|
||||||
lang.Set("paginatePath", "side")
|
lang.Set("paginatePath", "side")
|
||||||
v.Set("defaultContentLanguageInSubdir", true)
|
|
||||||
v.Set("paginatePath", "page")
|
|
||||||
|
|
||||||
require.True(t, lang.GetBool("defaultContentLanguageInSubdir"))
|
require.True(t, lang.GetBool("defaultContentLanguageInSubdir"))
|
||||||
require.Equal(t, "side", lang.GetString("paginatePath"))
|
require.Equal(t, "side", lang.GetString("paginatePath"))
|
||||||
|
@ -37,6 +38,7 @@ func TestLanguageParams(t *testing.T) {
|
||||||
|
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
v.Set("p1", "p1cfg")
|
v.Set("p1", "p1cfg")
|
||||||
|
v.Set("contentDir", "content")
|
||||||
|
|
||||||
lang := NewDefaultLanguage(v)
|
lang := NewDefaultLanguage(v)
|
||||||
lang.SetParam("p1", "p1p")
|
lang.SetParam("p1", "p1p")
|
||||||
|
|
|
@ -33,7 +33,7 @@ var (
|
||||||
ErrThemeUndefined = errors.New("no theme set")
|
ErrThemeUndefined = errors.New("no theme set")
|
||||||
|
|
||||||
// ErrWalkRootTooShort is returned when the root specified for a file walk is shorter than 4 characters.
|
// ErrWalkRootTooShort is returned when the root specified for a file walk is shorter than 4 characters.
|
||||||
ErrWalkRootTooShort = errors.New("Path too short. Stop walking.")
|
ErrPathTooShort = errors.New("file path is too short")
|
||||||
)
|
)
|
||||||
|
|
||||||
// filepathPathBridge is a bridge for common functionality in filepath vs path
|
// filepathPathBridge is a bridge for common functionality in filepath vs path
|
||||||
|
@ -446,7 +446,7 @@ func SymbolicWalk(fs afero.Fs, root string, walker filepath.WalkFunc) error {
|
||||||
|
|
||||||
// Sanity check
|
// Sanity check
|
||||||
if len(root) < 4 {
|
if len(root) < 4 {
|
||||||
return ErrWalkRootTooShort
|
return ErrPathTooShort
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle the root first
|
// Handle the root first
|
||||||
|
@ -481,7 +481,7 @@ func SymbolicWalk(fs afero.Fs, root string, walker filepath.WalkFunc) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
|
func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
|
||||||
fileInfo, err := LstatIfOs(fs, path)
|
fileInfo, err := LstatIfPossible(fs, path)
|
||||||
realPath := path
|
realPath := path
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -493,7 +493,7 @@ func getRealFileInfo(fs afero.Fs, path string) (os.FileInfo, string, error) {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, "", fmt.Errorf("Cannot read symbolic link '%s', error was: %s", path, err)
|
return nil, "", fmt.Errorf("Cannot read symbolic link '%s', error was: %s", path, err)
|
||||||
}
|
}
|
||||||
fileInfo, err = LstatIfOs(fs, link)
|
fileInfo, err = LstatIfPossible(fs, link)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, "", fmt.Errorf("Cannot stat '%s', error was: %s", link, err)
|
return nil, "", fmt.Errorf("Cannot stat '%s', error was: %s", link, err)
|
||||||
}
|
}
|
||||||
|
@ -514,16 +514,14 @@ func GetRealPath(fs afero.Fs, path string) (string, error) {
|
||||||
return realPath, nil
|
return realPath, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Code copied from Afero's path.go
|
// LstatIfPossible can be used to call Lstat if possible, else Stat.
|
||||||
// if the filesystem is OsFs use Lstat, else use fs.Stat
|
func LstatIfPossible(fs afero.Fs, path string) (os.FileInfo, error) {
|
||||||
func LstatIfOs(fs afero.Fs, path string) (info os.FileInfo, err error) {
|
if lstater, ok := fs.(afero.Lstater); ok {
|
||||||
_, ok := fs.(*afero.OsFs)
|
fi, _, err := lstater.LstatIfPossible(path)
|
||||||
if ok {
|
return fi, err
|
||||||
info, err = os.Lstat(path)
|
|
||||||
} else {
|
|
||||||
info, err = fs.Stat(path)
|
|
||||||
}
|
}
|
||||||
return
|
|
||||||
|
return fs.Stat(path)
|
||||||
}
|
}
|
||||||
|
|
||||||
// SafeWriteToDisk is the same as WriteToDisk
|
// SafeWriteToDisk is the same as WriteToDisk
|
||||||
|
|
|
@ -57,8 +57,10 @@ func TestMakePath(t *testing.T) {
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
l := NewDefaultLanguage(v)
|
v.Set("contentDir", "content")
|
||||||
v.Set("removePathAccents", test.removeAccents)
|
v.Set("removePathAccents", test.removeAccents)
|
||||||
|
|
||||||
|
l := NewDefaultLanguage(v)
|
||||||
p, err := NewPathSpec(hugofs.NewMem(v), l)
|
p, err := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
@ -71,6 +73,8 @@ func TestMakePath(t *testing.T) {
|
||||||
|
|
||||||
func TestMakePathSanitized(t *testing.T) {
|
func TestMakePathSanitized(t *testing.T) {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
v.Set("contentDir", "content")
|
||||||
|
|
||||||
l := NewDefaultLanguage(v)
|
l := NewDefaultLanguage(v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
||||||
|
@ -98,6 +102,7 @@ func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
|
||||||
v.Set("disablePathToLower", true)
|
v.Set("disablePathToLower", true)
|
||||||
|
v.Set("contentDir", "content")
|
||||||
|
|
||||||
l := NewDefaultLanguage(v)
|
l := NewDefaultLanguage(v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
|
@ -17,6 +17,9 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
|
||||||
|
"github.com/gohugoio/hugo/common/types"
|
||||||
"github.com/gohugoio/hugo/config"
|
"github.com/gohugoio/hugo/config"
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
"github.com/gohugoio/hugo/hugofs"
|
||||||
"github.com/spf13/cast"
|
"github.com/spf13/cast"
|
||||||
|
@ -49,6 +52,8 @@ type PathSpec struct {
|
||||||
layoutDir string
|
layoutDir string
|
||||||
workingDir string
|
workingDir string
|
||||||
staticDirs []string
|
staticDirs []string
|
||||||
|
absContentDirs []types.KeyValueStr
|
||||||
|
|
||||||
PublishDir string
|
PublishDir string
|
||||||
|
|
||||||
// The PathSpec looks up its config settings in both the current language
|
// The PathSpec looks up its config settings in both the current language
|
||||||
|
@ -65,6 +70,9 @@ type PathSpec struct {
|
||||||
// The file systems to use
|
// The file systems to use
|
||||||
Fs *hugofs.Fs
|
Fs *hugofs.Fs
|
||||||
|
|
||||||
|
// The fine grained filesystems in play (resources, content etc.).
|
||||||
|
BaseFs *hugofs.BaseFs
|
||||||
|
|
||||||
// The config provider to use
|
// The config provider to use
|
||||||
Cfg config.Provider
|
Cfg config.Provider
|
||||||
}
|
}
|
||||||
|
@ -105,8 +113,65 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) (*PathSpec, error) {
|
||||||
languages = l
|
languages = l
|
||||||
}
|
}
|
||||||
|
|
||||||
|
defaultContentLanguage := cfg.GetString("defaultContentLanguage")
|
||||||
|
|
||||||
|
// We will eventually pull out this badly placed path logic.
|
||||||
|
contentDir := cfg.GetString("contentDir")
|
||||||
|
workingDir := cfg.GetString("workingDir")
|
||||||
|
resourceDir := cfg.GetString("resourceDir")
|
||||||
|
publishDir := cfg.GetString("publishDir")
|
||||||
|
|
||||||
|
if len(languages) == 0 {
|
||||||
|
// We have some old tests that does not test the entire chain, hence
|
||||||
|
// they have no languages. So create one so we get the proper filesystem.
|
||||||
|
languages = Languages{&Language{Lang: "en", ContentDir: contentDir}}
|
||||||
|
}
|
||||||
|
|
||||||
|
absPuslishDir := AbsPathify(workingDir, publishDir)
|
||||||
|
if !strings.HasSuffix(absPuslishDir, FilePathSeparator) {
|
||||||
|
absPuslishDir += FilePathSeparator
|
||||||
|
}
|
||||||
|
// If root, remove the second '/'
|
||||||
|
if absPuslishDir == "//" {
|
||||||
|
absPuslishDir = FilePathSeparator
|
||||||
|
}
|
||||||
|
absResourcesDir := AbsPathify(workingDir, resourceDir)
|
||||||
|
if !strings.HasSuffix(absResourcesDir, FilePathSeparator) {
|
||||||
|
absResourcesDir += FilePathSeparator
|
||||||
|
}
|
||||||
|
if absResourcesDir == "//" {
|
||||||
|
absResourcesDir = FilePathSeparator
|
||||||
|
}
|
||||||
|
|
||||||
|
contentFs, absContentDirs, err := createContentFs(fs.Source, workingDir, defaultContentLanguage, languages)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure we don't have any overlapping content dirs. That will never work.
|
||||||
|
for i, d1 := range absContentDirs {
|
||||||
|
for j, d2 := range absContentDirs {
|
||||||
|
if i == j {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(d1.Value, d2.Value) || strings.HasPrefix(d2.Value, d1.Value) {
|
||||||
|
return nil, fmt.Errorf("found overlapping content dirs (%q and %q)", d1, d2)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
resourcesFs := afero.NewBasePathFs(fs.Source, absResourcesDir)
|
||||||
|
publishFs := afero.NewBasePathFs(fs.Destination, absPuslishDir)
|
||||||
|
|
||||||
|
baseFs := &hugofs.BaseFs{
|
||||||
|
ContentFs: contentFs,
|
||||||
|
ResourcesFs: resourcesFs,
|
||||||
|
PublishFs: publishFs,
|
||||||
|
}
|
||||||
|
|
||||||
ps := &PathSpec{
|
ps := &PathSpec{
|
||||||
Fs: fs,
|
Fs: fs,
|
||||||
|
BaseFs: baseFs,
|
||||||
Cfg: cfg,
|
Cfg: cfg,
|
||||||
disablePathToLower: cfg.GetBool("disablePathToLower"),
|
disablePathToLower: cfg.GetBool("disablePathToLower"),
|
||||||
removePathAccents: cfg.GetBool("removePathAccents"),
|
removePathAccents: cfg.GetBool("removePathAccents"),
|
||||||
|
@ -116,14 +181,15 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) (*PathSpec, error) {
|
||||||
Language: language,
|
Language: language,
|
||||||
Languages: languages,
|
Languages: languages,
|
||||||
defaultContentLanguageInSubdir: cfg.GetBool("defaultContentLanguageInSubdir"),
|
defaultContentLanguageInSubdir: cfg.GetBool("defaultContentLanguageInSubdir"),
|
||||||
defaultContentLanguage: cfg.GetString("defaultContentLanguage"),
|
defaultContentLanguage: defaultContentLanguage,
|
||||||
paginatePath: cfg.GetString("paginatePath"),
|
paginatePath: cfg.GetString("paginatePath"),
|
||||||
BaseURL: baseURL,
|
BaseURL: baseURL,
|
||||||
contentDir: cfg.GetString("contentDir"),
|
contentDir: contentDir,
|
||||||
themesDir: cfg.GetString("themesDir"),
|
themesDir: cfg.GetString("themesDir"),
|
||||||
layoutDir: cfg.GetString("layoutDir"),
|
layoutDir: cfg.GetString("layoutDir"),
|
||||||
workingDir: cfg.GetString("workingDir"),
|
workingDir: workingDir,
|
||||||
staticDirs: staticDirs,
|
staticDirs: staticDirs,
|
||||||
|
absContentDirs: absContentDirs,
|
||||||
theme: cfg.GetString("theme"),
|
theme: cfg.GetString("theme"),
|
||||||
ProcessingStats: NewProcessingStats(lang),
|
ProcessingStats: NewProcessingStats(lang),
|
||||||
}
|
}
|
||||||
|
@ -135,13 +201,8 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider) (*PathSpec, error) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
publishDir := ps.AbsPathify(cfg.GetString("publishDir")) + FilePathSeparator
|
// TODO(bep) remove this, eventually
|
||||||
// If root, remove the second '/'
|
ps.PublishDir = absPuslishDir
|
||||||
if publishDir == "//" {
|
|
||||||
publishDir = FilePathSeparator
|
|
||||||
}
|
|
||||||
|
|
||||||
ps.PublishDir = publishDir
|
|
||||||
|
|
||||||
return ps, nil
|
return ps, nil
|
||||||
}
|
}
|
||||||
|
@ -165,6 +226,107 @@ func getStringOrStringSlice(cfg config.Provider, key string, id int) []string {
|
||||||
return out
|
return out
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func createContentFs(fs afero.Fs,
|
||||||
|
workingDir,
|
||||||
|
defaultContentLanguage string,
|
||||||
|
languages Languages) (afero.Fs, []types.KeyValueStr, error) {
|
||||||
|
|
||||||
|
var contentLanguages Languages
|
||||||
|
var contentDirSeen = make(map[string]bool)
|
||||||
|
languageSet := make(map[string]bool)
|
||||||
|
|
||||||
|
// The default content language needs to be first.
|
||||||
|
for _, language := range languages {
|
||||||
|
if language.Lang == defaultContentLanguage {
|
||||||
|
contentLanguages = append(contentLanguages, language)
|
||||||
|
contentDirSeen[language.ContentDir] = true
|
||||||
|
}
|
||||||
|
languageSet[language.Lang] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, language := range languages {
|
||||||
|
if contentDirSeen[language.ContentDir] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if language.ContentDir == "" {
|
||||||
|
language.ContentDir = defaultContentLanguage
|
||||||
|
}
|
||||||
|
contentDirSeen[language.ContentDir] = true
|
||||||
|
contentLanguages = append(contentLanguages, language)
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
var absContentDirs []types.KeyValueStr
|
||||||
|
|
||||||
|
fs, err := createContentOverlayFs(fs, workingDir, contentLanguages, languageSet, &absContentDirs)
|
||||||
|
return fs, absContentDirs, err
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
func createContentOverlayFs(source afero.Fs,
|
||||||
|
workingDir string,
|
||||||
|
languages Languages,
|
||||||
|
languageSet map[string]bool,
|
||||||
|
absContentDirs *[]types.KeyValueStr) (afero.Fs, error) {
|
||||||
|
if len(languages) == 0 {
|
||||||
|
return source, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
language := languages[0]
|
||||||
|
|
||||||
|
contentDir := language.ContentDir
|
||||||
|
if contentDir == "" {
|
||||||
|
panic("missing contentDir")
|
||||||
|
}
|
||||||
|
|
||||||
|
absContentDir := AbsPathify(workingDir, language.ContentDir)
|
||||||
|
if !strings.HasSuffix(absContentDir, FilePathSeparator) {
|
||||||
|
absContentDir += FilePathSeparator
|
||||||
|
}
|
||||||
|
|
||||||
|
// If root, remove the second '/'
|
||||||
|
if absContentDir == "//" {
|
||||||
|
absContentDir = FilePathSeparator
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(absContentDir) < 6 {
|
||||||
|
return nil, fmt.Errorf("invalid content dir %q: %s", absContentDir, ErrPathTooShort)
|
||||||
|
}
|
||||||
|
|
||||||
|
*absContentDirs = append(*absContentDirs, types.KeyValueStr{Key: language.Lang, Value: absContentDir})
|
||||||
|
|
||||||
|
overlay := hugofs.NewLanguageFs(language.Lang, languageSet, afero.NewBasePathFs(source, absContentDir))
|
||||||
|
if len(languages) == 1 {
|
||||||
|
return overlay, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
base, err := createContentOverlayFs(source, workingDir, languages[1:], languageSet, absContentDirs)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return hugofs.NewLanguageCompositeFs(base, overlay), nil
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
// RelContentDir tries to create a path relative to the content root from
|
||||||
|
// the given filename. The return value is the path and language code.
|
||||||
|
func (p *PathSpec) RelContentDir(filename string) (string, string) {
|
||||||
|
for _, dir := range p.absContentDirs {
|
||||||
|
if strings.HasPrefix(filename, dir.Value) {
|
||||||
|
rel := strings.TrimPrefix(filename, dir.Value)
|
||||||
|
return strings.TrimPrefix(rel, FilePathSeparator), dir.Key
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Either not a content dir or already relative.
|
||||||
|
return filename, ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// ContentDirs returns all the content dirs (absolute paths).
|
||||||
|
func (p *PathSpec) ContentDirs() []types.KeyValueStr {
|
||||||
|
return p.absContentDirs
|
||||||
|
}
|
||||||
|
|
||||||
// PaginatePath returns the configured root path used for paginator pages.
|
// PaginatePath returns the configured root path used for paginator pages.
|
||||||
func (p *PathSpec) PaginatePath() string {
|
func (p *PathSpec) PaginatePath() string {
|
||||||
return p.paginatePath
|
return p.paginatePath
|
||||||
|
|
|
@ -24,6 +24,7 @@ import (
|
||||||
|
|
||||||
func TestNewPathSpecFromConfig(t *testing.T) {
|
func TestNewPathSpecFromConfig(t *testing.T) {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
v.Set("contentDir", "content")
|
||||||
l := NewLanguage("no", v)
|
l := NewLanguage("no", v)
|
||||||
v.Set("disablePathToLower", true)
|
v.Set("disablePathToLower", true)
|
||||||
v.Set("removePathAccents", true)
|
v.Set("removePathAccents", true)
|
||||||
|
|
|
@ -25,6 +25,7 @@ func newTestDefaultPathSpec(configKeyValues ...interface{}) *PathSpec {
|
||||||
|
|
||||||
func newTestCfg(fs *hugofs.Fs) *viper.Viper {
|
func newTestCfg(fs *hugofs.Fs) *viper.Viper {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
v.Set("contentDir", "content")
|
||||||
|
|
||||||
v.SetFs(fs.Source)
|
v.SetFs(fs.Source)
|
||||||
|
|
||||||
|
|
|
@ -27,6 +27,7 @@ import (
|
||||||
func TestURLize(t *testing.T) {
|
func TestURLize(t *testing.T) {
|
||||||
|
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
|
v.Set("contentDir", "content")
|
||||||
l := NewDefaultLanguage(v)
|
l := NewDefaultLanguage(v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
||||||
|
@ -88,6 +89,7 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
|
||||||
|
|
||||||
for _, test := range tests {
|
for _, test := range tests {
|
||||||
v.Set("baseURL", test.baseURL)
|
v.Set("baseURL", test.baseURL)
|
||||||
|
v.Set("contentDir", "content")
|
||||||
l := NewLanguage(lang, v)
|
l := NewLanguage(lang, v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
||||||
|
@ -166,6 +168,7 @@ func doTestRelURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
|
||||||
for i, test := range tests {
|
for i, test := range tests {
|
||||||
v.Set("baseURL", test.baseURL)
|
v.Set("baseURL", test.baseURL)
|
||||||
v.Set("canonifyURLs", test.canonify)
|
v.Set("canonifyURLs", test.canonify)
|
||||||
|
v.Set("contentDir", "content")
|
||||||
l := NewLanguage(lang, v)
|
l := NewLanguage(lang, v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
||||||
|
@ -254,6 +257,7 @@ func TestURLPrep(t *testing.T) {
|
||||||
for i, d := range data {
|
for i, d := range data {
|
||||||
v := viper.New()
|
v := viper.New()
|
||||||
v.Set("uglyURLs", d.ugly)
|
v.Set("uglyURLs", d.ugly)
|
||||||
|
v.Set("contentDir", "content")
|
||||||
l := NewDefaultLanguage(v)
|
l := NewDefaultLanguage(v)
|
||||||
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
p, _ := NewPathSpec(hugofs.NewMem(v), l)
|
||||||
|
|
||||||
|
|
35
hugofs/base_fs.go
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugofs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
)
|
||||||
|
|
||||||
|
// BaseFs contains the core base filesystems used by Hugo. The name "base" is used
|
||||||
|
// to underline that even if they can be composites, they all have a base path set to a specific
|
||||||
|
// resource folder, e.g "/my-project/content". So, no absolute filenames needed.
|
||||||
|
type BaseFs struct {
|
||||||
|
// The filesystem used to capture content. This can be a composite and
|
||||||
|
// language aware file system.
|
||||||
|
ContentFs afero.Fs
|
||||||
|
|
||||||
|
// The filesystem used to store resources (processed images etc.).
|
||||||
|
// This usually maps to /my-project/resources.
|
||||||
|
ResourcesFs afero.Fs
|
||||||
|
|
||||||
|
// The filesystem used to publish the rendered site.
|
||||||
|
// This usually maps to /my-project/public.
|
||||||
|
PublishFs afero.Fs
|
||||||
|
}
|
51
hugofs/language_composite_fs.go
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugofs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
_ afero.Fs = (*languageCompositeFs)(nil)
|
||||||
|
_ afero.Lstater = (*languageCompositeFs)(nil)
|
||||||
|
)
|
||||||
|
|
||||||
|
type languageCompositeFs struct {
|
||||||
|
*afero.CopyOnWriteFs
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewLanguageCompositeFs creates a composite and language aware filesystem.
|
||||||
|
// This is a hybrid filesystem. To get a specific file in Open, Stat etc., use the full filename
|
||||||
|
// to the target filesystem. This information is available in Readdir, Stat etc. via the
|
||||||
|
// special LanguageFileInfo FileInfo implementation.
|
||||||
|
func NewLanguageCompositeFs(base afero.Fs, overlay *LanguageFs) afero.Fs {
|
||||||
|
return afero.NewReadOnlyFs(&languageCompositeFs{afero.NewCopyOnWriteFs(base, overlay).(*afero.CopyOnWriteFs)})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Open takes the full path to the file in the target filesystem. If it is a directory, it gets merged
|
||||||
|
// using the language as a weight.
|
||||||
|
func (fs *languageCompositeFs) Open(name string) (afero.File, error) {
|
||||||
|
f, err := fs.CopyOnWriteFs.Open(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
fu, ok := f.(*afero.UnionFile)
|
||||||
|
if ok {
|
||||||
|
// This is a directory: Merge it.
|
||||||
|
fu.Merger = LanguageDirsMerger
|
||||||
|
}
|
||||||
|
return f, nil
|
||||||
|
}
|
106
hugofs/language_composite_fs_test.go
Normal file
|
@ -0,0 +1,106 @@
|
||||||
|
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugofs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCompositeLanguagFsTest(t *testing.T) {
|
||||||
|
assert := require.New(t)
|
||||||
|
|
||||||
|
languages := map[string]bool{
|
||||||
|
"sv": true,
|
||||||
|
"en": true,
|
||||||
|
"nn": true,
|
||||||
|
}
|
||||||
|
msv := afero.NewMemMapFs()
|
||||||
|
baseSv := "/content/sv"
|
||||||
|
lfssv := NewLanguageFs("sv", languages, afero.NewBasePathFs(msv, baseSv))
|
||||||
|
mnn := afero.NewMemMapFs()
|
||||||
|
baseNn := "/content/nn"
|
||||||
|
lfsnn := NewLanguageFs("nn", languages, afero.NewBasePathFs(mnn, baseNn))
|
||||||
|
men := afero.NewMemMapFs()
|
||||||
|
baseEn := "/content/en"
|
||||||
|
lfsen := NewLanguageFs("en", languages, afero.NewBasePathFs(men, baseEn))
|
||||||
|
|
||||||
|
// The order will be sv, en, nn
|
||||||
|
composite := NewLanguageCompositeFs(lfsnn, lfsen)
|
||||||
|
composite = NewLanguageCompositeFs(composite, lfssv)
|
||||||
|
|
||||||
|
afero.WriteFile(msv, filepath.Join(baseSv, "f1.txt"), []byte("some sv"), 0755)
|
||||||
|
afero.WriteFile(mnn, filepath.Join(baseNn, "f1.txt"), []byte("some nn"), 0755)
|
||||||
|
afero.WriteFile(men, filepath.Join(baseEn, "f1.txt"), []byte("some en"), 0755)
|
||||||
|
|
||||||
|
// Swedish is the top layer.
|
||||||
|
assertLangFile(t, composite, "f1.txt", "sv")
|
||||||
|
|
||||||
|
afero.WriteFile(msv, filepath.Join(baseSv, "f2.en.txt"), []byte("some sv"), 0755)
|
||||||
|
afero.WriteFile(mnn, filepath.Join(baseNn, "f2.en.txt"), []byte("some nn"), 0755)
|
||||||
|
afero.WriteFile(men, filepath.Join(baseEn, "f2.en.txt"), []byte("some en"), 0755)
|
||||||
|
|
||||||
|
// English is in the middle, but the most specific language match wins.
|
||||||
|
//assertLangFile(t, composite, "f2.en.txt", "en")
|
||||||
|
|
||||||
|
// Fetch some specific language versions
|
||||||
|
assertLangFile(t, composite, filepath.Join(baseNn, "f2.en.txt"), "nn")
|
||||||
|
assertLangFile(t, composite, filepath.Join(baseEn, "f2.en.txt"), "en")
|
||||||
|
assertLangFile(t, composite, filepath.Join(baseSv, "f2.en.txt"), "sv")
|
||||||
|
|
||||||
|
// Read the root
|
||||||
|
f, err := composite.Open("/")
|
||||||
|
assert.NoError(err)
|
||||||
|
defer f.Close()
|
||||||
|
files, err := f.Readdir(-1)
|
||||||
|
assert.Equal(4, len(files))
|
||||||
|
expected := map[string]bool{
|
||||||
|
filepath.FromSlash("/content/en/f1.txt"): true,
|
||||||
|
filepath.FromSlash("/content/nn/f1.txt"): true,
|
||||||
|
filepath.FromSlash("/content/sv/f1.txt"): true,
|
||||||
|
filepath.FromSlash("/content/en/f2.en.txt"): true,
|
||||||
|
}
|
||||||
|
got := make(map[string]bool)
|
||||||
|
|
||||||
|
for _, fi := range files {
|
||||||
|
fil, ok := fi.(*LanguageFileInfo)
|
||||||
|
assert.True(ok)
|
||||||
|
got[fil.Filename()] = true
|
||||||
|
}
|
||||||
|
assert.Equal(expected, got)
|
||||||
|
}
|
||||||
|
|
||||||
|
func assertLangFile(t testing.TB, fs afero.Fs, filename, match string) {
|
||||||
|
f, err := fs.Open(filename)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
b, err := afero.ReadAll(f)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
s := string(b)
|
||||||
|
if !strings.Contains(s, match) {
|
||||||
|
t.Fatalf("got %q expected it to contain %q", s, match)
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
328
hugofs/language_fs.go
Normal file
|
@ -0,0 +1,328 @@
|
||||||
|
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugofs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
)
|
||||||
|
|
||||||
|
const hugoFsMarker = "__hugofs"
|
||||||
|
|
||||||
|
var (
|
||||||
|
_ LanguageAnnouncer = (*LanguageFileInfo)(nil)
|
||||||
|
_ FilePather = (*LanguageFileInfo)(nil)
|
||||||
|
_ afero.Lstater = (*LanguageFs)(nil)
|
||||||
|
)
|
||||||
|
|
||||||
|
// LanguageAnnouncer is aware of its language.
|
||||||
|
type LanguageAnnouncer interface {
|
||||||
|
Lang() string
|
||||||
|
TranslationBaseName() string
|
||||||
|
}
|
||||||
|
|
||||||
|
// FilePather is aware of its file's location.
|
||||||
|
type FilePather interface {
|
||||||
|
// Filename gets the full path and filename to the file.
|
||||||
|
Filename() string
|
||||||
|
|
||||||
|
// Path gets the content relative path including file name and extension.
|
||||||
|
// The directory is relative to the content root where "content" is a broad term.
|
||||||
|
Path() string
|
||||||
|
|
||||||
|
// RealName is FileInfo.Name in its original form.
|
||||||
|
RealName() string
|
||||||
|
|
||||||
|
BaseDir() string
|
||||||
|
}
|
||||||
|
|
||||||
|
var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
|
||||||
|
m := make(map[string]*LanguageFileInfo)
|
||||||
|
|
||||||
|
for _, fi := range lofi {
|
||||||
|
fil, ok := fi.(*LanguageFileInfo)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("received %T, expected *LanguageFileInfo", fi)
|
||||||
|
}
|
||||||
|
m[fil.virtualName] = fil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fi := range bofi {
|
||||||
|
fil, ok := fi.(*LanguageFileInfo)
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("received %T, expected *LanguageFileInfo", fi)
|
||||||
|
}
|
||||||
|
existing, found := m[fil.virtualName]
|
||||||
|
|
||||||
|
if !found || existing.weight < fil.weight {
|
||||||
|
m[fil.virtualName] = fil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
merged := make([]os.FileInfo, len(m))
|
||||||
|
i := 0
|
||||||
|
for _, v := range m {
|
||||||
|
merged[i] = v
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
|
||||||
|
return merged, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type LanguageFileInfo struct {
|
||||||
|
os.FileInfo
|
||||||
|
lang string
|
||||||
|
baseDir string
|
||||||
|
realFilename string
|
||||||
|
relFilename string
|
||||||
|
name string
|
||||||
|
realName string
|
||||||
|
virtualName string
|
||||||
|
translationBaseName string
|
||||||
|
|
||||||
|
// We add some weight to the files in their own language's content directory.
|
||||||
|
weight int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fi *LanguageFileInfo) Filename() string {
|
||||||
|
return fi.realFilename
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fi *LanguageFileInfo) Path() string {
|
||||||
|
return fi.relFilename
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fi *LanguageFileInfo) RealName() string {
|
||||||
|
return fi.realName
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fi *LanguageFileInfo) BaseDir() string {
|
||||||
|
return fi.baseDir
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fi *LanguageFileInfo) Lang() string {
|
||||||
|
return fi.lang
|
||||||
|
}
|
||||||
|
|
||||||
|
// TranslationBaseName returns the base filename without any extension or language
|
||||||
|
// identificator.
|
||||||
|
func (fi *LanguageFileInfo) TranslationBaseName() string {
|
||||||
|
return fi.translationBaseName
|
||||||
|
}
|
||||||
|
|
||||||
|
// Name is the name of the file within this filesystem without any path info.
|
||||||
|
// It will be marked with language information so we can identify it as ours.
|
||||||
|
func (fi *LanguageFileInfo) Name() string {
|
||||||
|
return fi.name
|
||||||
|
}
|
||||||
|
|
||||||
|
type languageFile struct {
|
||||||
|
afero.File
|
||||||
|
fs *LanguageFs
|
||||||
|
}
|
||||||
|
|
||||||
|
// Readdir creates FileInfo entries by calling Lstat if possible.
|
||||||
|
func (l *languageFile) Readdir(c int) (ofi []os.FileInfo, err error) {
|
||||||
|
names, err := l.File.Readdirnames(c)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
fis := make([]os.FileInfo, len(names))
|
||||||
|
|
||||||
|
for i, name := range names {
|
||||||
|
fi, _, err := l.fs.LstatIfPossible(filepath.Join(l.Name(), name))
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
fis[i] = fi
|
||||||
|
}
|
||||||
|
|
||||||
|
return fis, err
|
||||||
|
}
|
||||||
|
|
||||||
|
type LanguageFs struct {
|
||||||
|
// This Fs is usually created with a BasePathFs
|
||||||
|
basePath string
|
||||||
|
lang string
|
||||||
|
nameMarker string
|
||||||
|
languages map[string]bool
|
||||||
|
afero.Fs
|
||||||
|
}
|
||||||
|
|
||||||
|
func NewLanguageFs(lang string, languages map[string]bool, fs afero.Fs) *LanguageFs {
|
||||||
|
if lang == "" {
|
||||||
|
panic("no lang set for the language fs")
|
||||||
|
}
|
||||||
|
var basePath string
|
||||||
|
|
||||||
|
if bfs, ok := fs.(*afero.BasePathFs); ok {
|
||||||
|
basePath, _ = bfs.RealPath("")
|
||||||
|
}
|
||||||
|
|
||||||
|
marker := hugoFsMarker + "_" + lang + "_"
|
||||||
|
|
||||||
|
return &LanguageFs{lang: lang, languages: languages, basePath: basePath, Fs: fs, nameMarker: marker}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) Lang() string {
|
||||||
|
return fs.lang
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) Stat(name string) (os.FileInfo, error) {
|
||||||
|
name, err := fs.realName(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
fi, err := fs.Fs.Stat(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return fs.newLanguageFileInfo(name, fi)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) Open(name string) (afero.File, error) {
|
||||||
|
name, err := fs.realName(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
f, err := fs.Fs.Open(name)
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &languageFile{File: f, fs: fs}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
|
||||||
|
name, err := fs.realName(name)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var fi os.FileInfo
|
||||||
|
var b bool
|
||||||
|
|
||||||
|
if lif, ok := fs.Fs.(afero.Lstater); ok {
|
||||||
|
fi, b, err = lif.LstatIfPossible(name)
|
||||||
|
} else {
|
||||||
|
fi, err = fs.Fs.Stat(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, b, err
|
||||||
|
}
|
||||||
|
|
||||||
|
lfi, err := fs.newLanguageFileInfo(name, fi)
|
||||||
|
|
||||||
|
return lfi, b, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) realPath(name string) (string, error) {
|
||||||
|
if baseFs, ok := fs.Fs.(*afero.BasePathFs); ok {
|
||||||
|
return baseFs.RealPath(name)
|
||||||
|
}
|
||||||
|
return name, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) realName(name string) (string, error) {
|
||||||
|
if strings.Contains(name, hugoFsMarker) {
|
||||||
|
if !strings.Contains(name, fs.nameMarker) {
|
||||||
|
return "", os.ErrNotExist
|
||||||
|
}
|
||||||
|
return strings.Replace(name, fs.nameMarker, "", 1), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if fs.basePath == "" {
|
||||||
|
return name, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.TrimPrefix(name, fs.basePath), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (fs *LanguageFs) newLanguageFileInfo(filename string, fi os.FileInfo) (*LanguageFileInfo, error) {
|
||||||
|
filename = filepath.Clean(filename)
|
||||||
|
_, name := filepath.Split(filename)
|
||||||
|
|
||||||
|
realName := name
|
||||||
|
virtualName := name
|
||||||
|
|
||||||
|
realPath, err := fs.realPath(filename)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
lang := fs.Lang()
|
||||||
|
|
||||||
|
baseNameNoExt := ""
|
||||||
|
|
||||||
|
if !fi.IsDir() {
|
||||||
|
|
||||||
|
// Try to extract the language from the file name.
|
||||||
|
// Any valid language identificator in the name will win over the
|
||||||
|
// language set on the file system, e.g. "mypost.en.md".
|
||||||
|
baseName := filepath.Base(name)
|
||||||
|
ext := filepath.Ext(baseName)
|
||||||
|
baseNameNoExt = baseName
|
||||||
|
|
||||||
|
if ext != "" {
|
||||||
|
baseNameNoExt = strings.TrimSuffix(baseNameNoExt, ext)
|
||||||
|
}
|
||||||
|
|
||||||
|
fileLangExt := filepath.Ext(baseNameNoExt)
|
||||||
|
fileLang := strings.TrimPrefix(fileLangExt, ".")
|
||||||
|
|
||||||
|
if fs.languages[fileLang] {
|
||||||
|
lang = fileLang
|
||||||
|
}
|
||||||
|
|
||||||
|
baseNameNoExt = strings.TrimSuffix(baseNameNoExt, fileLangExt)
|
||||||
|
|
||||||
|
// This connects the filename to the filesystem, not the language.
|
||||||
|
virtualName = baseNameNoExt + "." + lang + ext
|
||||||
|
|
||||||
|
name = fs.nameMarker + name
|
||||||
|
}
|
||||||
|
|
||||||
|
weight := 1
|
||||||
|
// If this file's language belongs in this directory, add some weight to it
|
||||||
|
// to make it more important.
|
||||||
|
if lang == fs.Lang() {
|
||||||
|
weight = 2
|
||||||
|
}
|
||||||
|
|
||||||
|
if fi.IsDir() {
|
||||||
|
// For directories we always want to start from the union view.
|
||||||
|
realPath = strings.TrimPrefix(realPath, fs.basePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
return &LanguageFileInfo{
|
||||||
|
lang: lang,
|
||||||
|
weight: weight,
|
||||||
|
realFilename: realPath,
|
||||||
|
realName: realName,
|
||||||
|
relFilename: strings.TrimPrefix(strings.TrimPrefix(realPath, fs.basePath), string(os.PathSeparator)),
|
||||||
|
name: name,
|
||||||
|
virtualName: virtualName,
|
||||||
|
translationBaseName: baseNameNoExt,
|
||||||
|
baseDir: fs.basePath,
|
||||||
|
FileInfo: fi}, nil
|
||||||
|
}
|
54
hugofs/language_fs_test.go
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package hugofs
|
||||||
|
|
||||||
|
import (
|
||||||
|
"path/filepath"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/spf13/afero"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestLanguagFs(t *testing.T) {
|
||||||
|
languages := map[string]bool{
|
||||||
|
"sv": true,
|
||||||
|
}
|
||||||
|
base := filepath.FromSlash("/my/base")
|
||||||
|
assert := require.New(t)
|
||||||
|
m := afero.NewMemMapFs()
|
||||||
|
bfs := afero.NewBasePathFs(m, base)
|
||||||
|
lfs := NewLanguageFs("sv", languages, bfs)
|
||||||
|
assert.NotNil(lfs)
|
||||||
|
assert.Equal("sv", lfs.Lang())
|
||||||
|
err := afero.WriteFile(lfs, filepath.FromSlash("sect/page.md"), []byte("abc"), 0777)
|
||||||
|
assert.NoError(err)
|
||||||
|
fi, err := lfs.Stat(filepath.FromSlash("sect/page.md"))
|
||||||
|
assert.NoError(err)
|
||||||
|
assert.Equal("__hugofs_sv_page.md", fi.Name())
|
||||||
|
|
||||||
|
languager, ok := fi.(LanguageAnnouncer)
|
||||||
|
assert.True(ok)
|
||||||
|
|
||||||
|
assert.Equal("sv", languager.Lang())
|
||||||
|
|
||||||
|
lfi, ok := fi.(*LanguageFileInfo)
|
||||||
|
assert.True(ok)
|
||||||
|
assert.Equal(filepath.FromSlash("/my/base/sect/page.md"), lfi.Filename())
|
||||||
|
assert.Equal(filepath.FromSlash("sect/page.md"), lfi.Path())
|
||||||
|
assert.Equal("page.sv.md", lfi.virtualName)
|
||||||
|
assert.Equal("__hugofs_sv_page.md", lfi.Name())
|
||||||
|
assert.Equal("page.md", lfi.RealName())
|
||||||
|
|
||||||
|
}
|
|
@ -130,22 +130,18 @@ func loadLanguageSettings(cfg config.Provider, oldLangs helpers.Languages) error
|
||||||
} else {
|
} else {
|
||||||
languages = make(map[string]interface{})
|
languages = make(map[string]interface{})
|
||||||
for k, v := range languagesFromConfig {
|
for k, v := range languagesFromConfig {
|
||||||
isDisabled := false
|
|
||||||
for _, disabled := range disableLanguages {
|
for _, disabled := range disableLanguages {
|
||||||
if disabled == defaultLang {
|
if disabled == defaultLang {
|
||||||
return fmt.Errorf("cannot disable default language %q", defaultLang)
|
return fmt.Errorf("cannot disable default language %q", defaultLang)
|
||||||
}
|
}
|
||||||
|
|
||||||
if strings.EqualFold(k, disabled) {
|
if strings.EqualFold(k, disabled) {
|
||||||
isDisabled = true
|
v.(map[string]interface{})["disabled"] = true
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !isDisabled {
|
|
||||||
languages[k] = v
|
languages[k] = v
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
|
@ -104,8 +104,8 @@ categories:
|
||||||
|
|
||||||
writeSource(t, fs, "content/sect/p1.md", fmt.Sprintf(pageTemplate, "P1", "- tag1"))
|
writeSource(t, fs, "content/sect/p1.md", fmt.Sprintf(pageTemplate, "P1", "- tag1"))
|
||||||
|
|
||||||
writeNewContentFile(t, fs, "Category Terms", "2017-01-01", "content/categories/_index.md", 10)
|
writeNewContentFile(t, fs.Source, "Category Terms", "2017-01-01", "content/categories/_index.md", 10)
|
||||||
writeNewContentFile(t, fs, "Tag1 List", "2017-01-01", "content/tags/tag1/_index.md", 10)
|
writeNewContentFile(t, fs.Source, "Tag1 List", "2017-01-01", "content/tags/tag1/_index.md", 10)
|
||||||
|
|
||||||
h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
|
h, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
|
||||||
|
|
||||||
|
|
|
@ -14,7 +14,6 @@
|
||||||
package hugolib
|
package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/helpers"
|
"github.com/gohugoio/hugo/helpers"
|
||||||
|
@ -25,11 +24,22 @@ import (
|
||||||
var (
|
var (
|
||||||
_ source.File = (*fileInfo)(nil)
|
_ source.File = (*fileInfo)(nil)
|
||||||
_ source.ReadableFile = (*fileInfo)(nil)
|
_ source.ReadableFile = (*fileInfo)(nil)
|
||||||
|
_ pathLangFile = (*fileInfo)(nil)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// A partial interface to prevent ambigous compiler error.
|
||||||
|
type basePather interface {
|
||||||
|
Filename() string
|
||||||
|
RealName() string
|
||||||
|
BaseDir() string
|
||||||
|
}
|
||||||
|
|
||||||
type fileInfo struct {
|
type fileInfo struct {
|
||||||
bundleTp bundleDirType
|
bundleTp bundleDirType
|
||||||
|
|
||||||
source.ReadableFile
|
source.ReadableFile
|
||||||
|
basePather
|
||||||
|
|
||||||
overriddenLang string
|
overriddenLang string
|
||||||
|
|
||||||
// Set if the content language for this file is disabled.
|
// Set if the content language for this file is disabled.
|
||||||
|
@ -43,6 +53,10 @@ func (fi *fileInfo) Lang() string {
|
||||||
return fi.ReadableFile.Lang()
|
return fi.ReadableFile.Lang()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (fi *fileInfo) Filename() string {
|
||||||
|
return fi.basePather.Filename()
|
||||||
|
}
|
||||||
|
|
||||||
func (fi *fileInfo) isOwner() bool {
|
func (fi *fileInfo) isOwner() bool {
|
||||||
return fi.bundleTp > bundleNot
|
return fi.bundleTp > bundleNot
|
||||||
}
|
}
|
||||||
|
@ -55,12 +69,13 @@ func (fi *fileInfo) isContentFile() bool {
|
||||||
return contentFileExtensionsSet[fi.Ext()]
|
return contentFileExtensionsSet[fi.Ext()]
|
||||||
}
|
}
|
||||||
|
|
||||||
func newFileInfo(sp *source.SourceSpec, baseDir, filename string, fi os.FileInfo, tp bundleDirType) *fileInfo {
|
func newFileInfo(sp *source.SourceSpec, baseDir, filename string, fi pathLangFileFi, tp bundleDirType) *fileInfo {
|
||||||
|
|
||||||
baseFi := sp.NewFileInfo(baseDir, filename, tp == bundleLeaf, fi)
|
baseFi := sp.NewFileInfo(baseDir, filename, tp == bundleLeaf, fi)
|
||||||
f := &fileInfo{
|
f := &fileInfo{
|
||||||
bundleTp: tp,
|
bundleTp: tp,
|
||||||
ReadableFile: baseFi,
|
ReadableFile: baseFi,
|
||||||
|
basePather: fi,
|
||||||
}
|
}
|
||||||
|
|
||||||
lang := f.Lang()
|
lang := f.Lang()
|
||||||
|
|
|
@@ -1,61 +0,0 @@
-// Copyright 2017-present The Hugo Authors. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package hugolib
-
-import (
-	"testing"
-
-	"path/filepath"
-
-	"github.com/gohugoio/hugo/source"
-	"github.com/stretchr/testify/require"
-)
-
-func TestBundleFileInfo(t *testing.T) {
-	t.Parallel()
-
-	assert := require.New(t)
-	cfg, fs := newTestBundleSourcesMultilingual(t)
-	sourceSpec := source.NewSourceSpec(cfg, fs)
-
-	for _, this := range []struct {
-		filename string
-		check    func(f *fileInfo)
-	}{
-		{"/path/to/file.md", func(fi *fileInfo) {
-			assert.Equal("md", fi.Ext())
-			assert.Equal("en", fi.Lang())
-			assert.False(fi.isOwner())
-			assert.True(fi.isContentFile())
-		}},
-		{"/path/to/file.JPG", func(fi *fileInfo) {
-			assert.Equal("jpg", fi.Ext())
-			assert.False(fi.isContentFile())
-		}},
-		{"/path/to/file.nn.png", func(fi *fileInfo) {
-			assert.Equal("png", fi.Ext())
-			assert.Equal("nn", fi.Lang())
-			assert.Equal("file", fi.TranslationBaseName())
-			assert.False(fi.isContentFile())
-		}},
-	} {
-		fi := newFileInfo(
-			sourceSpec,
-			filepath.FromSlash("/work/base"),
-			filepath.FromSlash(this.filename),
-			nil, bundleNot)
-		this.check(fi)
-	}
-
-}
@@ -75,19 +75,8 @@ func (h *HugoSites) langSite() map[string]*Site {
 // GetContentPage finds a Page with content given the absolute filename.
 // Returns nil if none found.
 func (h *HugoSites) GetContentPage(filename string) *Page {
-	s := h.Sites[0]
-	contendDir := filepath.Join(s.PathSpec.AbsPathify(s.Cfg.GetString("contentDir")))
-	if !strings.HasPrefix(filename, contendDir) {
-		return nil
-	}
-
-	rel := strings.TrimPrefix(filename, contendDir)
-	rel = strings.TrimPrefix(rel, helpers.FilePathSeparator)
-
 	for _, s := range h.Sites {
-		pos := s.rawAllPages.findPagePosByFilePath(rel)
+		pos := s.rawAllPages.findPagePosByFilename(filename)
 		if pos == -1 {
 			continue
 		}

@@ -95,19 +84,16 @@ func (h *HugoSites) GetContentPage(filename string) *Page {
 	}
 
 	// If not found already, this may be bundled in another content file.
-	rel = filepath.Dir(rel)
+	dir := filepath.Dir(filename)
 
 	for _, s := range h.Sites {
-		pos := s.rawAllPages.findFirstPagePosByFilePathPrefix(rel)
+		pos := s.rawAllPages.findPagePosByFilnamePrefix(dir)
 		if pos == -1 {
 			continue
 		}
 		return s.rawAllPages[pos]
 	}
 
 	return nil
 
 }
 
 // NewHugoSites creates a new collection of sites given the input sites, building

@@ -126,17 +112,10 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 
 	var contentChangeTracker *contentChangeMap
 
-	// Only needed in server mode.
-	// TODO(bep) clean up the running vs watching terms
-	if cfg.Running {
-		contentChangeTracker = &contentChangeMap{symContent: make(map[string]map[string]bool)}
-	}
-
 	h := &HugoSites{
 		running:      cfg.Running,
 		multilingual: langConfig,
 		multihost:    cfg.Cfg.GetBool("multihost"),
-		ContentChanges: contentChangeTracker,
 		Sites:        sites}
 
 	for _, s := range sites {

@@ -149,6 +128,13 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
 
 	h.Deps = sites[0].Deps
 
+	// Only needed in server mode.
+	// TODO(bep) clean up the running vs watching terms
+	if cfg.Running {
+		contentChangeTracker = &contentChangeMap{pathSpec: h.PathSpec, symContent: make(map[string]map[string]bool)}
+		h.ContentChanges = contentChangeTracker
+	}
+
 	if err := h.initGitInfo(); err != nil {
 		return nil, err
 	}

@@ -212,6 +198,7 @@ func applyDepsIfNeeded(cfg deps.DepsCfg, sites ...*Site) error {
 			d.OutputFormatsConfig = s.outputFormatsConfig
 			s.Deps = d
 		}
 
 		s.resourceSpec, err = resource.NewSpec(s.Deps.PathSpec, s.mediaTypesConfig)
 		if err != nil {
 			return err

@@ -260,6 +247,9 @@ func createSitesFromConfig(cfg deps.DepsCfg) ([]*Site, error) {
 	languages := getLanguages(cfg.Cfg)
 
 	for _, lang := range languages {
+		if lang.Disabled {
+			continue
+		}
 		var s *Site
 		var err error
 		cfg.Language = lang

@@ -517,9 +507,9 @@ func (h *HugoSites) createMissingPages() error {
 	return nil
 }
 
-func (h *HugoSites) removePageByPath(path string) {
+func (h *HugoSites) removePageByFilename(filename string) {
 	for _, s := range h.Sites {
-		s.removePageByPath(path)
+		s.removePageFilename(filename)
 	}
 }
 

@@ -671,6 +661,8 @@ type contentChangeMap struct {
 	branches []string
 	leafs    []string
 
+	pathSpec *helpers.PathSpec
+
 	// Hugo supports symlinked content (both directories and files). This
 	// can lead to situations where the same file can be referenced from several
 	// locations in /content -- which is really cool, but also means we have to

@@ -683,7 +675,7 @@ type contentChangeMap struct {
 
 func (m *contentChangeMap) add(filename string, tp bundleDirType) {
 	m.mu.Lock()
-	dir := filepath.Dir(filename)
+	dir := filepath.Dir(filename) + helpers.FilePathSeparator
 	switch tp {
 	case bundleBranch:
 		m.branches = append(m.branches, dir)

@@ -698,7 +690,7 @@ func (m *contentChangeMap) add(filename string, tp bundleDirType) {
 // Track the addition of bundle dirs.
 func (m *contentChangeMap) handleBundles(b *bundleDirs) {
 	for _, bd := range b.bundles {
-		m.add(bd.fi.Filename(), bd.tp)
+		m.add(bd.fi.Path(), bd.tp)
 	}
 }
 

@@ -709,21 +701,21 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
 	m.mu.RLock()
 	defer m.mu.RUnlock()
 
-	dir, name := filepath.Split(filename)
-	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
-	fileTp, isContent := classifyBundledFile(name)
-
-	// If the file itself is a bundle, no need to look further:
-	if fileTp > bundleNot {
-		return dir, dir, fileTp
+	// Bundles share resources, so we need to start from the virtual root.
+	relPath, _ := m.pathSpec.RelContentDir(filename)
+	dir, name := filepath.Split(relPath)
+	if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
+		dir += helpers.FilePathSeparator
 	}
 
+	fileTp, _ := classifyBundledFile(name)
+
 	// This may be a member of a bundle. Start with branch bundles, the most specific.
-	if !isContent {
+	if fileTp != bundleLeaf {
 		for i, b := range m.branches {
 			if b == dir {
 				m.branches = append(m.branches[:i], m.branches[i+1:]...)
-				return dir, dir, bundleBranch
+				return dir, b, bundleBranch
 			}
 		}
 	}

@@ -732,7 +724,7 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
 	for i, l := range m.leafs {
 		if strings.HasPrefix(dir, l) {
 			m.leafs = append(m.leafs[:i], m.leafs[i+1:]...)
-			return dir, dir, bundleLeaf
+			return dir, l, bundleLeaf
 		}
 	}
 
@@ -3,6 +3,7 @@ package hugolib
 import (
 	"bytes"
 	"fmt"
+	"io"
 	"strings"
 	"testing"
 

@@ -433,7 +434,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 	// t.Parallel() not supported, see https://github.com/fortytw2/leaktest/issues/4
 	// This leaktest seems to be a little bit shaky on Travis.
 	if !isCI() {
-		defer leaktest.CheckTimeout(t, 30*time.Second)()
+		defer leaktest.CheckTimeout(t, 10*time.Second)()
 	}
 
 	assert := require.New(t)

@@ -459,6 +460,8 @@ func TestMultiSitesRebuild(t *testing.T) {
 	b.AssertFileContent("public/fr/sect/doc1/index.html", "Single", "Shortcode: Bonjour")
 	b.AssertFileContent("public/en/sect/doc1-slug/index.html", "Single", "Shortcode: Hello")
 
+	contentFs := b.H.BaseFs.ContentFs
+
 	for i, this := range []struct {
 		preFunc func(t *testing.T)
 		events  []fsnotify.Event

@@ -490,9 +493,9 @@ func TestMultiSitesRebuild(t *testing.T) {
 		},
 		{
 			func(t *testing.T) {
-				writeNewContentFile(t, fs, "new_en_1", "2016-07-31", "content/new1.en.md", -5)
-				writeNewContentFile(t, fs, "new_en_2", "1989-07-30", "content/new2.en.md", -10)
-				writeNewContentFile(t, fs, "new_fr_1", "2016-07-30", "content/new1.fr.md", 10)
+				writeNewContentFile(t, contentFs, "new_en_1", "2016-07-31", "new1.en.md", -5)
+				writeNewContentFile(t, contentFs, "new_en_2", "1989-07-30", "new2.en.md", -10)
+				writeNewContentFile(t, contentFs, "new_fr_1", "2016-07-30", "new1.fr.md", 10)
 			},
 			[]fsnotify.Event{
 				{Name: filepath.FromSlash("content/new1.en.md"), Op: fsnotify.Create},

@@ -513,10 +516,10 @@ func TestMultiSitesRebuild(t *testing.T) {
 		},
 		{
 			func(t *testing.T) {
-				p := "content/sect/doc1.en.md"
-				doc1 := readSource(t, fs, p)
+				p := "sect/doc1.en.md"
+				doc1 := readFileFromFs(t, contentFs, p)
 				doc1 += "CHANGED"
-				writeSource(t, fs, p, doc1)
+				writeToFs(t, contentFs, p, doc1)
 			},
 			[]fsnotify.Event{{Name: filepath.FromSlash("content/sect/doc1.en.md"), Op: fsnotify.Write}},
 			func(t *testing.T) {

@@ -529,7 +532,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 		// Rename a file
 		{
 			func(t *testing.T) {
-				if err := fs.Source.Rename("content/new1.en.md", "content/new1renamed.en.md"); err != nil {
+				if err := contentFs.Rename("new1.en.md", "new1renamed.en.md"); err != nil {
 					t.Fatalf("Rename failed: %s", err)
 				}
 			},

@@ -650,7 +653,7 @@ weight = 15
 title = "Svenska"
 `
 
-	writeNewContentFile(t, fs, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
+	writeNewContentFile(t, fs.Source, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
 	// replace the config
 	b.WithNewConfig(newConfig)
 

@@ -1038,18 +1041,31 @@ func readFileFromFs(t testing.TB, fs afero.Fs, filename string) string {
 	if err != nil {
 		// Print some debug info
 		root := strings.Split(filename, helpers.FilePathSeparator)[0]
-		afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
-			if info != nil && !info.IsDir() {
-				fmt.Println(" ", path)
-			}
-
-			return nil
-		})
+		printFs(fs, root, os.Stdout)
 		Fatalf(t, "Failed to read file: %s", err)
 	}
 	return string(b)
 }
 
+func printFs(fs afero.Fs, path string, w io.Writer) {
+	if fs == nil {
+		return
+	}
+	afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+		if info != nil && !info.IsDir() {
+			s := path
+			if lang, ok := info.(hugofs.LanguageAnnouncer); ok {
+				s = s + "\tLANG: " + lang.Lang()
+			}
+			if fp, ok := info.(hugofs.FilePather); ok {
+				s = s + "\tRF: " + fp.Filename() + "\tBP: " + fp.BaseDir()
+			}
+			fmt.Fprintln(w, " ", s)
+		}
+		return nil
+	})
+}
+
 const testPageTemplate = `---
 title: "%s"
 publishdate: "%s"

@@ -1062,9 +1078,9 @@ func newTestPage(title, date string, weight int) string {
 	return fmt.Sprintf(testPageTemplate, title, date, weight, title)
 }
 
-func writeNewContentFile(t *testing.T, fs *hugofs.Fs, title, date, filename string, weight int) {
+func writeNewContentFile(t *testing.T, fs afero.Fs, title, date, filename string, weight int) {
 	content := newTestPage(title, date, weight)
-	writeSource(t, fs, filename, content)
+	writeToFs(t, fs, filename, content)
 }
 
 type multiSiteTestBuilder struct {
hugolib/language_content_dir_test.go (new file, 253 lines)
@@ -0,0 +1,253 @@
+// Copyright 2018 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+/*
+
+/en/p1.md
+/nn/p1.md
+
+.Readdir
+
+- Name() => p1.en.md, p1.nn.md
+
+.Stat(name)
+
+.Open() --- real file name
+
+*/
+
+func TestLanguageContentRoot(t *testing.T) {
+	t.Parallel()
+	assert := require.New(t)
+
+	config := `
+baseURL = "https://example.org/"
+
+defaultContentLanguage = "en"
+defaultContentLanguageInSubdir = true
+
+contentDir = "content/main"
+workingDir = "/my/project"
+
+[Languages]
+[Languages.en]
+weight = 10
+title = "In English"
+languageName = "English"
+
+[Languages.nn]
+weight = 20
+title = "På Norsk"
+languageName = "Norsk"
+# This tells Hugo that all content in this directory is in the Norwegian language.
+# It does not have to have the "my-page.nn.md" format. It can, but that is optional.
+contentDir = "content/norsk"
+
+[Languages.sv]
+weight = 30
+title = "På Svenska"
+languageName = "Svensk"
+contentDir = "content/svensk"
+`
+
+	pageTemplate := `
+---
+title: %s
+slug: %s
+weight: %d
+---
+
+Content.
+
+`
+
+	pageBundleTemplate := `
+---
+title: %s
+weight: %d
+---
+
+Content.
+
+`
+	var contentFiles []string
+	section := "sect"
+
+	var contentRoot = func(lang string) string {
+		contentRoot := "content/main"
+
+		switch lang {
+		case "nn":
+			contentRoot = "content/norsk"
+		case "sv":
+			contentRoot = "content/svensk"
+		}
+		return contentRoot + "/" + section
+	}
+
+	for _, lang := range []string{"en", "nn", "sv"} {
+		for j := 1; j <= 10; j++ {
+			if (lang == "nn" || lang == "en") && j%4 == 0 {
+				// Skip 4 and 8 for nn
+				// We also skip it for en, but that is added to the Swedish directory below.
+				continue
+			}
+
+			if lang == "sv" && j%5 == 0 {
+				// Skip 5 and 10 for sv
+				continue
+			}
+
+			base := fmt.Sprintf("p-%s-%d", lang, j)
+			slug := fmt.Sprintf("%s", base)
+			langID := ""
+
+			if lang == "sv" && j%4 == 0 {
+				// Put an English page in the Swedish content dir.
+				langID = ".en"
+			}
+
+			if lang == "en" && j == 8 {
+				// This should win over the sv variant above.
+				langID = ".en"
+			}
+
+			slug += langID
+
+			contentRoot := contentRoot(lang)
+
+			filename := filepath.Join(contentRoot, fmt.Sprintf("page%d%s.md", j, langID))
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, j))
+		}
+	}
+
+	// Put common translations in all of them
+	for i, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentRoot(lang)
+
+		slug := fmt.Sprintf("common_%s", lang)
+
+		filename := filepath.Join(contentRoot, "common.md")
+		contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, slug, slug, 100+i))
+
+		for j, lang2 := range []string{"en", "nn", "sv"} {
+			filename := filepath.Join(contentRoot, fmt.Sprintf("translated_all.%s.md", lang2))
+			langSlug := slug + "_translated_all_" + lang2
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 200+i+j))
+		}
+
+		for j, lang2 := range []string{"sv", "nn"} {
+			if lang == "en" {
+				continue
+			}
+			filename := filepath.Join(contentRoot, fmt.Sprintf("translated_some.%s.md", lang2))
+			langSlug := slug + "_translated_some_" + lang2
+			contentFiles = append(contentFiles, filename, fmt.Sprintf(pageTemplate, langSlug, langSlug, 300+i+j))
+		}
+	}
+
+	// Add a bundle with some images
+	for i, lang := range []string{"en", "nn", "sv"} {
+		contentRoot := contentRoot(lang)
+		slug := fmt.Sprintf("bundle_%s", lang)
+		filename := filepath.Join(contentRoot, "mybundle", "index.md")
+		contentFiles = append(contentFiles, filename, fmt.Sprintf(pageBundleTemplate, slug, 400+i))
+		if lang == "en" {
+			imageFilename := filepath.Join(contentRoot, "mybundle", "logo.png")
+			contentFiles = append(contentFiles, imageFilename, "PNG Data")
+		}
+		imageFilename := filepath.Join(contentRoot, "mybundle", "featured.png")
+		contentFiles = append(contentFiles, imageFilename, fmt.Sprintf("PNG Data for %s", lang))
+
+		// Add some bundled pages
+		contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 401+i))
+		contentFiles = append(contentFiles, filepath.Join(contentRoot, "mybundle", "sub", "p1.md"), fmt.Sprintf(pageBundleTemplate, slug, 402+i))
+
+	}
+
+	b := newTestSitesBuilder(t)
+	b.WithWorkingDir("/my/project").WithConfigFile("toml", config).WithContent(contentFiles...).CreateSites()
+
+	_ = os.Stdout
+	//printFs(b.H.BaseFs.ContentFs, "/", os.Stdout)
+
+	b.Build(BuildCfg{})
+
+	assert.Equal(3, len(b.H.Sites))
+
+	enSite := b.H.Sites[0]
+	nnSite := b.H.Sites[1]
+	svSite := b.H.Sites[2]
+
+	//dumpPages(nnSite.RegularPages...)
+	assert.Equal(12, len(nnSite.RegularPages))
+	assert.Equal(13, len(enSite.RegularPages))
+
+	assert.Equal(10, len(svSite.RegularPages))
+
+	for i, p := range enSite.RegularPages {
+		j := i + 1
+		msg := fmt.Sprintf("Test %d", j)
+		assert.Equal("en", p.Lang(), msg)
+		assert.Equal("sect", p.Section())
+		if j < 9 {
+			if j%4 == 0 {
+				assert.Contains(p.Title(), fmt.Sprintf("p-sv-%d.en", i+1), msg)
+			} else {
+				assert.Contains(p.Title(), "p-en", msg)
+			}
+		}
+	}
+
+	// Check bundles
+	bundleEn := enSite.RegularPages[len(enSite.RegularPages)-1]
+	bundleNn := nnSite.RegularPages[len(nnSite.RegularPages)-1]
+	bundleSv := svSite.RegularPages[len(svSite.RegularPages)-1]
+
+	assert.Equal("/en/sect/mybundle/", bundleEn.RelPermalink())
+	assert.Equal("/sv/sect/mybundle/", bundleSv.RelPermalink())
+
+	assert.Equal(4, len(bundleEn.Resources))
+	assert.Equal(4, len(bundleNn.Resources))
+	assert.Equal(4, len(bundleSv.Resources))
+
+	assert.Equal("/en/sect/mybundle/logo.png", bundleEn.Resources.GetMatch("logo*").RelPermalink())
+	assert.Equal("/nn/sect/mybundle/logo.png", bundleNn.Resources.GetMatch("logo*").RelPermalink())
+	assert.Equal("/sv/sect/mybundle/logo.png", bundleSv.Resources.GetMatch("logo*").RelPermalink())
+
+	b.AssertFileContent("/my/project/public/sv/sect/mybundle/featured.png", "PNG Data for sv")
+	b.AssertFileContent("/my/project/public/nn/sect/mybundle/featured.png", "PNG Data for nn")
+	b.AssertFileContent("/my/project/public/en/sect/mybundle/featured.png", "PNG Data for en")
+	b.AssertFileContent("/my/project/public/en/sect/mybundle/logo.png", "PNG Data")
+	b.AssertFileContent("/my/project/public/sv/sect/mybundle/logo.png", "PNG Data")
+	b.AssertFileContent("/my/project/public/nn/sect/mybundle/logo.png", "PNG Data")
+
+	nnSect := nnSite.getPage(KindSection, "sect")
+	assert.NotNil(nnSect)
+	assert.Equal(12, len(nnSect.Pages))
+	nnHome, _ := nnSite.Info.Home()
+	assert.Equal("/nn/", nnHome.RelPermalink())
+
+}
@@ -74,8 +74,8 @@ Menu Main: {{ partial "menu.html" (dict "page" . "menu" "main") }}`,
 	writeSource(t, fs, "content/sect2/p4.md", fmt.Sprintf(menuPageTemplate, "p4", 4, "main", "atitle4", 10))
 	writeSource(t, fs, "content/sect3/p5.md", fmt.Sprintf(menuPageTemplate, "p5", 5, "main", "atitle5", 5))
 
-	writeNewContentFile(t, fs, "Section One", "2017-01-01", "content/sect1/_index.md", 100)
-	writeNewContentFile(t, fs, "Section Five", "2017-01-01", "content/sect5/_index.md", 10)
+	writeNewContentFile(t, fs.Source, "Section One", "2017-01-01", "content/sect1/_index.md", 100)
+	writeNewContentFile(t, fs.Source, "Section Five", "2017-01-01", "content/sect5/_index.md", 10)
 
 	err := h.Build(BuildCfg{})
 
@@ -111,6 +111,10 @@ func toSortedLanguages(cfg config.Provider, l map[string]interface{}) (helpers.L
 			language.LanguageName = cast.ToString(v)
 		case "weight":
 			language.Weight = cast.ToInt(v)
+		case "contentdir":
+			language.ContentDir = cast.ToString(v)
+		case "disabled":
+			language.Disabled = cast.ToBool(v)
 		case "params":
 			m := cast.ToStringMap(v)
 			// Needed for case insensitive fetching of params values
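The `disabled` key parsed above pairs with the `lang.Disabled` check added in `createSitesFromConfig` earlier in this diff. A minimal, hedged TOML sketch of a language section using it (illustrative values only, not taken from this commit):

```toml
[Languages.sv]
weight = 30
title = "På Svenska"
languageName = "Svensk"
# Illustrative: with disabled = true, createSitesFromConfig skips this language.
disabled = true
```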
@@ -388,9 +388,9 @@ func (ps Pages) String() string {
 	return fmt.Sprintf("Pages(%d)", len(ps))
 }
 
-func (ps Pages) findPagePosByFilePath(inPath string) int {
+func (ps Pages) findPagePosByFilename(filename string) int {
 	for i, x := range ps {
-		if x.Source.Path() == inPath {
+		if x.Source.Filename() == filename {
 			return i
 		}
 	}

@@ -412,16 +412,26 @@ func (ps Pages) removeFirstIfFound(p *Page) Pages {
 	return ps
 }
 
-func (ps Pages) findFirstPagePosByFilePathPrefix(prefix string) int {
+func (ps Pages) findPagePosByFilnamePrefix(prefix string) int {
 	if prefix == "" {
 		return -1
 	}
+
+	lenDiff := -1
+	currPos := -1
+	prefixLen := len(prefix)
+
+	// Find the closest match
 	for i, x := range ps {
-		if strings.HasPrefix(x.Source.Path(), prefix) {
-			return i
+		if strings.HasPrefix(x.Source.Filename(), prefix) {
+			diff := len(x.Source.Filename()) - prefixLen
+			if lenDiff == -1 || diff < lenDiff {
+				lenDiff = diff
+				currPos = i
+			}
 		}
 	}
-	return -1
+
+	return currPos
 }
 
 // findPagePos Given a page, it will find the position in Pages
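The rewritten prefix lookup above no longer returns the first match; it keeps the page whose filename has the fewest characters beyond the prefix. A small standalone Go sketch of that selection rule, using plain strings rather than Hugo's `Pages` type (the helper name is made up for illustration):

```go
package main

import (
	"fmt"
	"strings"
)

// closestByPrefix returns the index of the candidate that starts with prefix
// and has the smallest number of extra characters, or -1 if none match.
func closestByPrefix(candidates []string, prefix string) int {
	lenDiff, currPos := -1, -1
	for i, s := range candidates {
		if strings.HasPrefix(s, prefix) {
			diff := len(s) - len(prefix)
			if lenDiff == -1 || diff < lenDiff {
				lenDiff, currPos = diff, i
			}
		}
	}
	return currPos
}

func main() {
	files := []string{
		"/content/sect/mybundle/sub/p1.md",
		"/content/sect/mybundle/index.md",
	}
	// The bundle's own index.md is the closest match for its directory.
	fmt.Println(closestByPrefix(files, "/content/sect/mybundle")) // prints 1
}
```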
@@ -17,7 +17,6 @@ import (
 	"fmt"
 	"math"
 	"runtime"
-	"strings"
 
 	// Use this until errgroup gets ported to context
 	// See https://github.com/golang/go/issues/19781

@@ -26,8 +25,6 @@ import (
 )
 
 type siteContentProcessor struct {
-	baseDir string
-
 	site *Site
 
 	handleContent contentHandler

@@ -41,7 +38,7 @@ type siteContentProcessor struct {
 	fileSinglesChan chan *fileInfo
 
 	// These assets should be just copied to destination.
-	fileAssetsChan chan []string
+	fileAssetsChan chan []pathLangFile
 
 	numWorkers int
 

@@ -67,14 +64,14 @@ func (s *siteContentProcessor) processSingle(fi *fileInfo) {
 	}
 }
 
-func (s *siteContentProcessor) processAssets(assets []string) {
+func (s *siteContentProcessor) processAssets(assets []pathLangFile) {
 	select {
 	case s.fileAssetsChan <- assets:
 	case <-s.ctx.Done():
 	}
 }
 
-func newSiteContentProcessor(ctx context.Context, baseDir string, partialBuild bool, s *Site) *siteContentProcessor {
+func newSiteContentProcessor(ctx context.Context, partialBuild bool, s *Site) *siteContentProcessor {
 	numWorkers := 12
 	if n := runtime.NumCPU() * 3; n > numWorkers {
 		numWorkers = n

@@ -85,12 +82,11 @@ func newSiteContentProcessor(ctx context.Context, baseDir string, partialBuild b
 	return &siteContentProcessor{
 		ctx:             ctx,
 		partialBuild:    partialBuild,
-		baseDir:         baseDir,
 		site:            s,
 		handleContent:   newHandlerChain(s),
 		fileBundlesChan: make(chan *bundleDir, numWorkers),
 		fileSinglesChan: make(chan *fileInfo, numWorkers),
-		fileAssetsChan:  make(chan []string, numWorkers),
+		fileAssetsChan:  make(chan []pathLangFile, numWorkers),
 		numWorkers:      numWorkers,
 		pagesChan:       make(chan *Page, numWorkers),
 	}

@@ -143,18 +139,16 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
 	g2.Go(func() error {
 		for {
 			select {
-			case filenames, ok := <-s.fileAssetsChan:
+			case files, ok := <-s.fileAssetsChan:
 				if !ok {
 					return nil
 				}
-				for _, filename := range filenames {
-					name := strings.TrimPrefix(filename, s.baseDir)
-					f, err := s.site.Fs.Source.Open(filename)
+				for _, file := range files {
+					f, err := s.site.BaseFs.ContentFs.Open(file.Filename())
 					if err != nil {
-						return err
+						return fmt.Errorf("failed to open assets file: %s", err)
 					}
-					err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, name, f)
+					err = s.site.publish(&s.site.PathSpec.ProcessingStats.Files, file.Path(), f)
 					f.Close()
 					if err != nil {
 						return err

@@ -204,11 +198,11 @@ func (s *siteContentProcessor) process(ctx context.Context) error {
 }
 
 func (s *siteContentProcessor) readAndConvertContentFile(file *fileInfo) error {
-	ctx := &handlerContext{source: file, baseDir: s.baseDir, pages: s.pagesChan}
+	ctx := &handlerContext{source: file, pages: s.pagesChan}
 	return s.handleContent(ctx).err
 }
 
 func (s *siteContentProcessor) readAndConvertContentBundle(bundle *bundleDir) error {
-	ctx := &handlerContext{bundle: bundle, baseDir: s.baseDir, pages: s.pagesChan}
+	ctx := &handlerContext{bundle: bundle, pages: s.pagesChan}
 	return s.handleContent(ctx).err
 }
 
@@ -17,17 +17,21 @@ import (
 	"errors"
 	"fmt"
 	"os"
+	"path"
 	"path/filepath"
 	"runtime"
 	"strings"
 	"sync"
 
+	"github.com/spf13/afero"
+
+	"github.com/gohugoio/hugo/hugofs"
+
 	"github.com/gohugoio/hugo/helpers"
 
 	"golang.org/x/sync/errgroup"
 
 	"github.com/gohugoio/hugo/source"
-	"github.com/spf13/afero"
 	jww "github.com/spf13/jwalterweatherman"
 )
 

@@ -44,8 +48,6 @@ type capturer struct {
 	fs     afero.Fs
 	logger *jww.Notepad
 
-	baseDir string
-
 	// Filenames limits the content to process to a list of filenames/directories.
 	// This is used for partial building in server mode.
 	filenames []string

@@ -62,7 +64,7 @@ func newCapturer(
 	sourceSpec *source.SourceSpec,
 	handler captureResultHandler,
 	contentChanges *contentChangeMap,
-	baseDir string, filenames ...string) *capturer {
+	filenames ...string) *capturer {
 
 	numWorkers := 4
 	if n := runtime.NumCPU(); n > numWorkers {

@@ -73,9 +75,10 @@ func newCapturer(
 		sem:        make(chan bool, numWorkers),
 		handler:    handler,
 		sourceSpec: sourceSpec,
+		fs:         sourceSpec.Fs,
 		logger:     logger,
 		contentChanges: contentChanges,
-		fs: sourceSpec.Fs.Source, baseDir: baseDir, seen: make(map[string]bool),
+		seen:      make(map[string]bool),
 		filenames: filenames}
 
 	return c

@@ -85,7 +88,7 @@ func newCapturer(
 // these channels.
 type captureResultHandler interface {
 	handleSingles(fis ...*fileInfo)
-	handleCopyFiles(filenames ...string)
+	handleCopyFiles(fis ...pathLangFile)
 	captureBundlesHandler
 }
 

@@ -110,10 +113,10 @@ func (c *captureResultHandlerChain) handleBundles(b *bundleDirs) {
 	}
 }
 
-func (c *captureResultHandlerChain) handleCopyFiles(filenames ...string) {
+func (c *captureResultHandlerChain) handleCopyFiles(files ...pathLangFile) {
 	for _, h := range c.handlers {
 		if hh, ok := h.(captureResultHandler); ok {
-			hh.handleCopyFiles(filenames...)
+			hh.handleCopyFiles(files...)
 		}
 	}
 }

@@ -139,7 +142,7 @@ func (c *capturer) capturePartial(filenames ...string) error {
 				return err
 			}
 		default:
-			fi, _, err := c.getRealFileInfo(resolvedFilename)
+			fi, err := c.resolveRealPath(resolvedFilename)
 			if os.IsNotExist(err) {
 				// File has been deleted.
 				continue

@@ -147,9 +150,9 @@ func (c *capturer) capturePartial(filenames ...string) error {
 
 			// Just in case the owning dir is a new symlink -- this will
 			// create the proper mapping for it.
-			c.getRealFileInfo(dir)
+			c.resolveRealPath(dir)
 
-			f, active := c.newFileInfo(resolvedFilename, fi, tp)
+			f, active := c.newFileInfo(fi, tp)
 			if active {
 				c.copyOrHandleSingle(f)
 			}

@@ -164,7 +167,7 @@ func (c *capturer) capture() error {
 		return c.capturePartial(c.filenames...)
 	}
 
-	err := c.handleDir(c.baseDir)
+	err := c.handleDir(helpers.FilePathSeparator)
 	if err != nil {
 		return err
 	}

@@ -196,6 +199,7 @@ func (c *capturer) handleNestedDir(dirname string) error {
 func (c *capturer) handleBranchDir(dirname string) error {
 	files, err := c.readDir(dirname)
 	if err != nil {
+
 		return err
 	}
 

@@ -205,7 +209,7 @@ func (c *capturer) handleBranchDir(dirname string) error {
 
 	for _, fi := range files {
 		if !fi.IsDir() {
-			tp, _ := classifyBundledFile(fi.Name())
+			tp, _ := classifyBundledFile(fi.RealName())
 			if dirType == bundleNot {
 				dirType = tp
 			}

@@ -222,25 +226,35 @@ func (c *capturer) handleBranchDir(dirname string) error {
 
 	dirs := newBundleDirs(bundleBranch, c)
 
-	for _, fi := range files {
+	var secondPass []*fileInfo
+
+	// Handle potential bundle headers first.
+	for _, fi := range files {
 		if fi.IsDir() {
 			continue
 		}
 
-		tp, isContent := classifyBundledFile(fi.Name())
+		tp, isContent := classifyBundledFile(fi.RealName())
+
+		f, active := c.newFileInfo(fi, tp)
 
-		f, active := c.newFileInfo(fi.filename, fi.FileInfo, tp)
 		if !active {
 			continue
 		}
-		if f.isOwner() {
-			dirs.addBundleHeader(f)
-		} else if !isContent {
-			// This is a partial update -- we only care about the files that
-			// is in this bundle.
-			dirs.addBundleFiles(f)
+
+		if !f.isOwner() {
+			if !isContent {
+				// This is a partial update -- we only care about the files that
+				// is in this bundle.
+				secondPass = append(secondPass, f)
+			}
+			continue
 		}
+		dirs.addBundleHeader(f)
+	}
+
+	for _, f := range secondPass {
+		dirs.addBundleFiles(f)
 	}
 
 	c.handler.handleBundles(dirs)

@@ -250,6 +264,7 @@ func (c *capturer) handleBranchDir(dirname string) error {
 }
 
 func (c *capturer) handleDir(dirname string) error {
+
 	files, err := c.readDir(dirname)
 	if err != nil {
 		return err

@@ -290,7 +305,8 @@ func (c *capturer) handleDir(dirname string) error {
 
 	for i, fi := range files {
 		if !fi.IsDir() {
-			tp, isContent := classifyBundledFile(fi.Name())
+			tp, isContent := classifyBundledFile(fi.RealName())
+
 			fileBundleTypes[i] = tp
 			if !isBranch {
 				isBranch = tp == bundleBranch

@@ -317,6 +333,7 @@ func (c *capturer) handleDir(dirname string) error {
 	var fileInfos = make([]*fileInfo, 0, len(files))
 
 	for i, fi := range files {
+
 		currentType := bundleNot
 
 		if !fi.IsDir() {

@@ -329,7 +346,9 @@ func (c *capturer) handleDir(dirname string) error {
 		if bundleType == bundleNot && currentType != bundleNot {
 			bundleType = currentType
 		}
-		f, active := c.newFileInfo(fi.filename, fi.FileInfo, currentType)
+
+		f, active := c.newFileInfo(fi, currentType)
+
 		if !active {
 			continue
 		}

@@ -343,8 +362,7 @@ func (c *capturer) handleDir(dirname string) error {
 	for _, fi := range fileInfos {
 		if fi.FileInfo().IsDir() {
 			// Handle potential nested bundles.
-			filename := fi.Filename()
-			if err := c.handleNestedDir(filename); err != nil {
+			if err := c.handleNestedDir(fi.Path()); err != nil {
 				return err
 			}
 		} else if bundleType == bundleNot || (!fi.isOwner() && fi.isContentFile()) {

@@ -376,23 +394,23 @@ func (c *capturer) handleDir(dirname string) error {
 
 func (c *capturer) handleNonBundle(
 	dirname string,
-	fileInfos []fileInfoName,
+	fileInfos pathLangFileFis,
 	singlesOnly bool) error {
 
 	for _, fi := range fileInfos {
 		if fi.IsDir() {
-			if err := c.handleNestedDir(fi.filename); err != nil {
+			if err := c.handleNestedDir(fi.Filename()); err != nil {
 				return err
 			}
 		} else {
 			if singlesOnly {
-				f, active := c.newFileInfo(fi.filename, fi, bundleNot)
+				f, active := c.newFileInfo(fi, bundleNot)
 				if !active {
 					continue
 				}
 				c.handler.handleSingles(f)
 			} else {
-				c.handler.handleCopyFiles(fi.filename)
+				c.handler.handleCopyFiles(fi)
 			}
 		}
 	}

@@ -405,7 +423,7 @@ func (c *capturer) copyOrHandleSingle(fi *fileInfo) {
 		c.handler.handleSingles(fi)
 	} else {
 		// These do not currently need any further processing.
-		c.handler.handleCopyFiles(fi.Filename())
+		c.handler.handleCopyFiles(fi)
 	}
 }
 

@@ -430,7 +448,7 @@ func (c *capturer) createBundleDirs(fileInfos []*fileInfo, bundleType bundleDirT
 				fileInfos = append(fileInfos, fis...)
 			}
 		}
-		err := c.collectFiles(fi.Filename(), collector)
+		err := c.collectFiles(fi.Path(), collector)
 		if err != nil {
 			return nil, err
 		}

@@ -462,6 +480,7 @@ func (c *capturer) createBundleDirs(fileInfos []*fileInfo, bundleType bundleDirT
 }
 
 func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInfo)) error {
+
 	filesInDir, err := c.readDir(dirname)
 	if err != nil {
 		return err

@@ -469,12 +488,12 @@ func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInf
 
 	for _, fi := range filesInDir {
 		if fi.IsDir() {
-			err := c.collectFiles(fi.filename, handleFiles)
+			err := c.collectFiles(fi.Filename(), handleFiles)
 			if err != nil {
 				return err
 			}
 		} else {
-			f, active := c.newFileInfo(fi.filename, fi.FileInfo, bundleNot)
+			f, active := c.newFileInfo(fi, bundleNot)
 			if active {
 				handleFiles(f)
 			}

@@ -484,27 +503,29 @@ func (c *capturer) collectFiles(dirname string, handleFiles func(fis ...*fileInf
 	return nil
 }
 
-func (c *capturer) readDir(dirname string) ([]fileInfoName, error) {
+func (c *capturer) readDir(dirname string) (pathLangFileFis, error) {
 	if c.sourceSpec.IgnoreFile(dirname) {
 		return nil, nil
 	}
 
 	dir, err := c.fs.Open(dirname)
 	if err != nil {
-		return nil, err
+		return nil, fmt.Errorf("readDir: %s", err)
 	}
 	defer dir.Close()
-	names, err := dir.Readdirnames(-1)
+	fis, err := dir.Readdir(-1)
 	if err != nil {
 		return nil, err
 	}
 
-	fis := make([]fileInfoName, 0, len(names))
+	pfis := make(pathLangFileFis, 0, len(fis))
 
-	for _, name := range names {
-		filename := filepath.Join(dirname, name)
-		if !c.sourceSpec.IgnoreFile(filename) {
-			fi, _, err := c.getRealFileInfo(filename)
+	for _, fi := range fis {
+		fip := fi.(pathLangFileFi)
+
+		if !c.sourceSpec.IgnoreFile(fip.Filename()) {
+
+			err := c.resolveRealPathIn(fip)
+
 			if err != nil {
 				// It may have been deleted in the meantime.

@@ -514,23 +535,30 @@ func (c *capturer) readDir(dirname string) ([]fileInfoName, error) {
 				return nil, err
 			}
 
-			fis = append(fis, fileInfoName{filename: filename, FileInfo: fi})
+			pfis = append(pfis, fip)
 		}
 	}
 
-	return fis, nil
+	return pfis, nil
 }
 
-func (c *capturer) newFileInfo(filename string, fi os.FileInfo, tp bundleDirType) (*fileInfo, bool) {
-	f := newFileInfo(c.sourceSpec, c.baseDir, filename, fi, tp)
+func (c *capturer) newFileInfo(fi pathLangFileFi, tp bundleDirType) (*fileInfo, bool) {
+	f := newFileInfo(c.sourceSpec, "", "", fi, tp)
 	return f, !f.disabled
 }
 
-type fileInfoName struct {
-	os.FileInfo
-	filename string
+type pathLangFile interface {
+	hugofs.LanguageAnnouncer
+	hugofs.FilePather
 }
 
+type pathLangFileFi interface {
+	os.FileInfo
+	pathLangFile
+}
+
+type pathLangFileFis []pathLangFileFi
+
 type bundleDirs struct {
 	tp bundleDirType
 	// Maps languages to bundles.

@@ -589,16 +617,17 @@ func (b *bundleDirs) addBundleContentFile(fi *fileInfo) {
 		b.bundles[fi.Lang()] = dir
 	}
 
-	dir.resources[fi.Filename()] = fi
+	dir.resources[fi.Path()] = fi
 }
 
 func (b *bundleDirs) addBundleFiles(fi *fileInfo) {
 	dir := filepath.ToSlash(fi.Dir())
 	p := dir + fi.TranslationBaseName() + "." + fi.Ext()
 	for lang, bdir := range b.bundles {
-		key := lang + p
+		key := path.Join(lang, p)
 
 		// Given mypage.de.md (German translation) and mypage.md we pick the most
-		// the specific for that language.
+		// specific for that language.
 		if fi.Lang() == lang || !b.langOverrides[key] {
 			bdir.resources[key] = fi
 		}

@@ -623,40 +652,53 @@ func (c *capturer) isSeen(dirname string) bool {
 	return false
 }
 
-func (c *capturer) getRealFileInfo(path string) (os.FileInfo, string, error) {
-	fileInfo, err := c.lstatIfOs(path)
-	realPath := path
-
+func (c *capturer) resolveRealPath(path string) (pathLangFileFi, error) {
+	fileInfo, err := c.lstatIfPossible(path)
 	if err != nil {
-		return nil, "", err
+		return nil, err
 	}
+	return fileInfo, c.resolveRealPathIn(fileInfo)
+}
+
+func (c *capturer) resolveRealPathIn(fileInfo pathLangFileFi) error {
+
+	basePath := fileInfo.BaseDir()
+	path := fileInfo.Filename()
+
+	realPath := path
+
 	if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
 		link, err := filepath.EvalSymlinks(path)
 		if err != nil {
-			return nil, "", fmt.Errorf("Cannot read symbolic link %q, error was: %s", path, err)
+			return fmt.Errorf("Cannot read symbolic link %q, error was: %s", path, err)
 		}
 
-		fileInfo, err = c.lstatIfOs(link)
+		// This is a file on the outside of any base fs, so we have to use the os package.
+		sfi, err := os.Stat(link)
 		if err != nil {
-			return nil, "", fmt.Errorf("Cannot stat %q, error was: %s", link, err)
+			return fmt.Errorf("Cannot stat %q, error was: %s", link, err)
+		}
+
+		// TODO(bep) improve all of this.
+		if a, ok := fileInfo.(*hugofs.LanguageFileInfo); ok {
+			a.FileInfo = sfi
 		}
 
 		realPath = link
 
-		if realPath != path && fileInfo.IsDir() && c.isSeen(realPath) {
+		if realPath != path && sfi.IsDir() && c.isSeen(realPath) {
 			// Avoid cyclic symlinks.
 			// Note that this may prevent some uses that isn't cyclic and also
 			// potential useful, but this implementation is both robust and simple:
 			// We stop at the first directory that we have seen before, e.g.
 			// /content/blog will only be processed once.
-			return nil, realPath, errSkipCyclicDir
+			return errSkipCyclicDir
 		}
 
 		if c.contentChanges != nil {
 			// Keep track of symbolic links in watch mode.
 			var from, to string
-			if fileInfo.IsDir() {
+			if sfi.IsDir() {
 				from = realPath
 				to = path

@@ -667,12 +709,11 @@ func (c *capturer) getRealFileInfo(path string) (os.FileInfo, string, error) {
 				from = from + helpers.FilePathSeparator
 			}
 
-			baseDir := c.baseDir
-			if !strings.HasSuffix(baseDir, helpers.FilePathSeparator) {
-				baseDir = baseDir + helpers.FilePathSeparator
+			if !strings.HasSuffix(basePath, helpers.FilePathSeparator) {
+				basePath = basePath + helpers.FilePathSeparator
 			}
 
-			if strings.HasPrefix(from, baseDir) {
+			if strings.HasPrefix(from, basePath) {
 				// With symbolic links inside /content we need to keep
 				// a reference to both. This may be confusing with --navigateToChanged
 				// but the user has chosen this him or herself.

@@ -688,9 +729,13 @@ func (c *capturer) getRealFileInfo(path string) (os.FileInfo, string, error) {
 		}
 	}
 
-	return fileInfo, realPath, nil
+	return nil
 }
 
-func (c *capturer) lstatIfOs(path string) (os.FileInfo, error) {
-	return helpers.LstatIfOs(c.fs, path)
+func (c *capturer) lstatIfPossible(path string) (pathLangFileFi, error) {
+	fi, err := helpers.LstatIfPossible(c.fs, path)
+	if err != nil {
+		return nil, err
+	}
+	return fi.(pathLangFileFi), nil
 }
 
@ -15,6 +15,7 @@ package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"os"
|
||||||
"path"
|
"path"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"sort"
|
"sort"
|
||||||
|
@ -62,13 +63,12 @@ func (s *storeFilenames) handleBundles(d *bundleDirs) {
|
||||||
s.dirKeys = append(s.dirKeys, keys...)
|
s.dirKeys = append(s.dirKeys, keys...)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *storeFilenames) handleCopyFiles(names ...string) {
|
func (s *storeFilenames) handleCopyFiles(files ...pathLangFile) {
|
||||||
s.Lock()
|
s.Lock()
|
||||||
defer s.Unlock()
|
defer s.Unlock()
|
||||||
for _, name := range names {
|
for _, file := range files {
|
||||||
s.copyNames = append(s.copyNames, filepath.ToSlash(name))
|
s.copyNames = append(s.copyNames, filepath.ToSlash(file.Filename()))
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *storeFilenames) sortedStr() string {
|
func (s *storeFilenames) sortedStr() string {
|
||||||
|
@ -83,13 +83,12 @@ func (s *storeFilenames) sortedStr() string {
|
||||||
|
|
||||||
func TestPageBundlerCaptureSymlinks(t *testing.T) {
|
func TestPageBundlerCaptureSymlinks(t *testing.T) {
|
||||||
assert := require.New(t)
|
assert := require.New(t)
|
||||||
cfg, fs, workDir := newTestBundleSymbolicSources(t)
|
ps, workDir := newTestBundleSymbolicSources(t)
|
||||||
contentDir := "base"
|
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)
|
||||||
sourceSpec := source.NewSourceSpec(cfg, fs)
|
|
||||||
|
|
||||||
fileStore := &storeFilenames{}
|
fileStore := &storeFilenames{}
|
||||||
logger := newErrorLogger()
|
logger := newErrorLogger()
|
||||||
c := newCapturer(logger, sourceSpec, fileStore, nil, filepath.Join(workDir, contentDir))
|
c := newCapturer(logger, sourceSpec, fileStore, nil)
|
||||||
|
|
||||||
assert.NoError(c.capture())
|
assert.NoError(c.capture())
|
||||||
|
|
||||||
|
@ -110,6 +109,7 @@ C:
|
||||||
/base/symbolic3/s1.png
|
/base/symbolic3/s1.png
|
||||||
/base/symbolic3/s2.png
|
/base/symbolic3/s2.png
|
||||||
`
|
`
|
||||||
|
|
||||||
got := strings.Replace(fileStore.sortedStr(), filepath.ToSlash(workDir), "", -1)
|
got := strings.Replace(fileStore.sortedStr(), filepath.ToSlash(workDir), "", -1)
|
||||||
got = strings.Replace(got, "//", "/", -1)
|
got = strings.Replace(got, "//", "/", -1)
|
||||||
|
|
||||||
|
@ -120,20 +120,26 @@ C:
}
}

func TestPageBundlerCapture(t *testing.T) {
func TestPageBundlerCaptureBasic(t *testing.T) {
t.Parallel()

assert := require.New(t)
cfg, fs := newTestBundleSources(t)
fs, cfg := newTestBundleSources(t)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))
ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)

sourceSpec := source.NewSourceSpec(cfg, fs)
sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)

fileStore := &storeFilenames{}

c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))
c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil)

assert.NoError(c.capture())

printFs(fs.Source, "", os.Stdout)

expected := `
F:
/work/base/_1.md
@ -165,10 +171,16 @@ func TestPageBundlerCaptureMultilingual(t *testing.T) {
t.Parallel()

assert := require.New(t)
cfg, fs := newTestBundleSourcesMultilingual(t)
fs, cfg := newTestBundleSourcesMultilingual(t)
sourceSpec := source.NewSourceSpec(cfg, fs)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))

ps, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)

sourceSpec := source.NewSourceSpec(ps, ps.BaseFs.ContentFs)
fileStore := &storeFilenames{}
c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil, filepath.FromSlash("/work/base"))
c := newCapturer(newErrorLogger(), sourceSpec, fileStore, nil)

assert.NoError(c.capture())

@ -204,7 +216,7 @@ C:
if expected != got {
diff := helpers.DiffStringSlices(strings.Fields(expected), strings.Fields(got))
t.Log(got)
t.Fatalf("Failed:\n%s", diff)
t.Fatalf("Failed:\n%s", strings.Join(diff, "\n"))
}

}
@ -213,14 +225,15 @@ type noOpFileStore int

func (noOpFileStore) handleSingles(fis ...*fileInfo) {}
func (noOpFileStore) handleBundles(b *bundleDirs) {}
func (noOpFileStore) handleCopyFiles(names ...string) {}
func (noOpFileStore) handleCopyFiles(files ...pathLangFile) {}

func BenchmarkPageBundlerCapture(b *testing.B) {
capturers := make([]*capturer, b.N)

for i := 0; i < b.N; i++ {
cfg, fs := newTestCfg()
sourceSpec := source.NewSourceSpec(cfg, fs)
ps, _ := helpers.NewPathSpec(fs, cfg)
sourceSpec := source.NewSourceSpec(ps, fs.Source)

base := fmt.Sprintf("base%d", i)
for j := 1; j <= 5; j++ {

@ -101,9 +101,6 @@ type handlerContext struct {

bundle *bundleDir

// The source baseDir, e.g. "/myproject/content/"
baseDir string

source *fileInfo

// Relative path to the target.
@ -130,7 +127,7 @@ func (c *handlerContext) targetPath() string {
return c.target
}

return strings.TrimPrefix(c.source.Filename(), c.baseDir)
return c.source.Filename()
}

func (c *handlerContext) file() *fileInfo {
@ -326,7 +323,6 @@ func (c *contentHandlers) createResource() contentHandler {

resource, err := c.s.resourceSpec.NewResourceFromFilename(
ctx.parentPage.subResourceTargetPathFactory,
c.s.absPublishDir(),
ctx.source.Filename(), ctx.target)

return handlerResult{err: err, handled: true, resource: resource}
@ -335,8 +331,9 @@ func (c *contentHandlers) createResource() contentHandler {

func (c *contentHandlers) copyFile() contentHandler {
return func(ctx *handlerContext) handlerResult {
f, err := c.s.Fs.Source.Open(ctx.source.Filename())
f, err := c.s.BaseFs.ContentFs.Open(ctx.source.Filename())
if err != nil {
err := fmt.Errorf("failed to open file in copyFile: %s", err)
return handlerResult{err: err}
}

@ -20,6 +20,8 @@ import (
"strings"
"testing"

"github.com/gohugoio/hugo/helpers"

"io"

"github.com/spf13/afero"
@ -38,7 +40,7 @@ import (
"github.com/stretchr/testify/require"
)

func TestPageBundlerSite(t *testing.T) {
func TestPageBundlerSiteRegular(t *testing.T) {
t.Parallel()

for _, ugly := range []bool{false, true} {
@ -46,7 +48,9 @@ func TestPageBundlerSite(t *testing.T) {
func(t *testing.T) {

assert := require.New(t)
cfg, fs := newTestBundleSources(t)
fs, cfg := newTestBundleSources(t)
assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))

cfg.Set("permalinks", map[string]string{
"a": ":sections/:filename",
@ -141,6 +145,8 @@ func TestPageBundlerSite(t *testing.T) {

assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/c/logo.png"), image.(resource.Source).AbsSourceFilename())
assert.Equal("https://example.com/2017/pageslug/c/logo.png", image.Permalink())

printFs(th.Fs.Destination, "", os.Stdout)
th.assertFileContent(filepath.FromSlash("/work/public/2017/pageslug/c/logo.png"), "content")
th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content")

@ -195,8 +201,7 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
func(t *testing.T) {

assert := require.New(t)
cfg, fs := newTestBundleSourcesMultilingual(t)
fs, cfg := newTestBundleSourcesMultilingual(t)

cfg.Set("uglyURLs", ugly)

assert.NoError(loadDefaultSettingsFor(cfg))
@ -260,7 +265,7 @@ func TestMultilingualDisableDefaultLanguage(t *testing.T) {
t.Parallel()

assert := require.New(t)
cfg, _ := newTestBundleSourcesMultilingual(t)
_, cfg := newTestBundleSourcesMultilingual(t)

cfg.Set("disableLanguages", []string{"en"})

@ -275,10 +280,12 @@ func TestMultilingualDisableLanguage(t *testing.T) {
t.Parallel()

assert := require.New(t)
cfg, fs := newTestBundleSourcesMultilingual(t)
fs, cfg := newTestBundleSourcesMultilingual(t)
cfg.Set("disableLanguages", []string{"nn"})

assert.NoError(loadDefaultSettingsFor(cfg))
assert.NoError(loadLanguageSettings(cfg, nil))

sites, err := NewHugoSites(deps.DepsCfg{Fs: fs, Cfg: cfg})
assert.NoError(err)
assert.Equal(1, len(sites.Sites))
@ -302,7 +309,9 @@ func TestMultilingualDisableLanguage(t *testing.T) {

func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) {
assert := require.New(t)
cfg, fs, workDir := newTestBundleSymbolicSources(t)
ps, workDir := newTestBundleSymbolicSources(t)
cfg := ps.Cfg
fs := ps.Fs

s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg, Logger: newErrorLogger()}, BuildCfg{})

@ -401,7 +410,7 @@ HEADLESS {{< myShort >}}

}

func newTestBundleSources(t *testing.T) (*viper.Viper, *hugofs.Fs) {
func newTestBundleSources(t *testing.T) (*hugofs.Fs, *viper.Viper) {
cfg, fs := newTestCfg()
assert := require.New(t)

@ -543,10 +552,11 @@ Content for 은행.
src.Close()
assert.NoError(err)

return cfg, fs
return fs, cfg

}

func newTestBundleSourcesMultilingual(t *testing.T) (*viper.Viper, *hugofs.Fs) {
func newTestBundleSourcesMultilingual(t *testing.T) (*hugofs.Fs, *viper.Viper) {
cfg, fs := newTestCfg()

workDir := "/work"
@ -626,10 +636,10 @@ TheContent.
writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "index.nn.md"), pageContent)
writeSource(t, fs, filepath.Join(workDir, "base", "bf", "my-bf-bundle", "page.md"), pageContent)

return cfg, fs
return fs, cfg
}

func newTestBundleSymbolicSources(t *testing.T) (*viper.Viper, *hugofs.Fs, string) {
func newTestBundleSymbolicSources(t *testing.T) (*helpers.PathSpec, string) {
assert := require.New(t)
// We need to use the OS fs for this.
cfg := viper.New()
@ -650,6 +660,10 @@ func newTestBundleSymbolicSources(t *testing.T) (*viper.Viper, *hugofs.Fs, strin
cfg.Set("contentDir", contentDir)
cfg.Set("baseURL", "https://example.com")

if err := loadLanguageSettings(cfg, nil); err != nil {
t.Fatal(err)
}

layout := `{{ .Title }}|{{ .Content }}`
pageContent := `---
slug: %s
@ -709,5 +723,7 @@ TheContent.
os.Chdir(workDir)
assert.NoError(err)

return cfg, fs, workDir
ps, _ := helpers.NewPathSpec(fs, cfg)

return ps, workDir
}

@ -179,8 +179,8 @@ func (c *PageCollections) addPage(page *Page) {
c.rawAllPages = append(c.rawAllPages, page)
}

func (c *PageCollections) removePageByPath(path string) {
func (c *PageCollections) removePageFilename(filename string) {
if i := c.rawAllPages.findPagePosByFilePath(path); i >= 0 {
if i := c.rawAllPages.findPagePosByFilename(filename); i >= 0 {
c.clearResourceCacheForPage(c.rawAllPages[i])
c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
@ -218,6 +218,7 @@ func (c *PageCollections) clearResourceCacheForPage(page *Page) {
if len(page.Resources) > 0 {
first := page.Resources[0]
dir := path.Dir(first.RelPermalink())
dir = strings.TrimPrefix(dir, page.LanguagePrefix())
// This is done to keep the memory usage in check when doing live reloads.
page.s.resourceSpec.DeleteCacheByPrefix(dir)
}

@ -1109,60 +1109,6 @@ func TestCreatePage(t *testing.T) {
}
}

func TestPageKind(t *testing.T) {
t.Parallel()
const sep = helpers.FilePathSeparator
var tests = []struct {
file string
kind string
}{
{"_index.md", KindHome},
{"about.md", KindPage},
{"sectionA" + sep + "_index.md", KindSection},
{"sectionA" + sep + "about.md", KindPage},
{"categories" + sep + "_index.md", KindTaxonomyTerm},
{"categories" + sep + "categoryA" + sep + "_index.md", KindTaxonomy},
{"tags" + sep + "_index.md", KindTaxonomyTerm},
{"tags" + sep + "tagA" + sep + "_index.md", KindTaxonomy},

// nn is configured as a language
{"_index.nn.md", KindHome},
{"about.nn.md", KindPage},
{"sectionA" + sep + "_index.nn.md", KindSection},
{"sectionA" + sep + "about.nn.md", KindPage},

// should NOT be categorized as KindHome
{"_indexNOT.md", KindPage},

// To be consistent with FileInfo.TranslationBaseName(),
// language codes not explicitly configured for the site
// are not treated as such. "fr" is not configured as
// a language in the test site, so ALL of the
// following should be KindPage
{"_index.fr.md", KindPage}, //not KindHome
{"about.fr.md", KindPage},
{"sectionA" + sep + "_index.fr.md", KindPage}, // KindSection
{"sectionA" + sep + "about.fr.md", KindPage},
}

for _, test := range tests {
s := newTestSite(t, "languages.nn.languageName", "Nynorsk")
taxonomies := make(map[string]string)
taxonomies["tag"] = "tags"
taxonomies["category"] = "categories"
s.Taxonomies = make(TaxonomyList)
for _, plural := range taxonomies {
s.Taxonomies[plural] = make(Taxonomy)
}

p, _ := s.NewPage(test.file)
p.setValuesForKind(s)
if p.Kind != test.kind {
t.Errorf("for %s expected p.Kind == %s, got %s", test.file, test.kind, p.Kind)
}
}
}

func TestDegenerateInvalidFrontMatterShortDelim(t *testing.T) {
t.Parallel()
var tests = []struct {

@ -25,7 +25,9 @@ import (
// GC requires a build first.
func (h *HugoSites) GC() (int, error) {
s := h.Sites[0]
imageCacheDir := s.resourceSpec.AbsGenImagePath
fs := h.PathSpec.BaseFs.ResourcesFs

imageCacheDir := s.resourceSpec.GenImagePath
if len(imageCacheDir) < 10 {
panic("invalid image cache")
}
@ -43,7 +45,7 @@ func (h *HugoSites) GC() (int, error) {

counter := 0

err := afero.Walk(s.Fs.Source, imageCacheDir, func(path string, info os.FileInfo, err error) error {
err := afero.Walk(fs, imageCacheDir, func(path string, info os.FileInfo, err error) error {
if info == nil {
return nil
}

@ -53,7 +55,7 @@ func (h *HugoSites) GC() (int, error) {
}

if info.IsDir() {
f, err := s.Fs.Source.Open(path)
f, err := fs.Open(path)
if err != nil {
return nil
}

@ -69,7 +71,7 @@ func (h *HugoSites) GC() (int, error) {

inUse := isInUse(path)
if !inUse {
err := s.Fs.Source.Remove(path)
err := fs.Remove(path)
if err != nil && !os.IsNotExist(err) {
s.Log.ERROR.Printf("Failed to remove %q: %s", path, err)
} else {

@ -746,9 +746,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
}
}
if removed && isContentFile(ev.Name) {
path, _ := helpers.GetRelativePath(ev.Name, s.getContentDir(ev.Name))
h.removePageByFilename(ev.Name)

h.removePageByPath(path)
}

sourceReallyChanged = append(sourceReallyChanged, ev)
@ -890,7 +888,7 @@ func (s *Site) handleDataFile(r source.ReadableFile) error {
func (s *Site) readData(f source.ReadableFile) (interface{}, error) {
file, err := f.Open()
if err != nil {
return nil, err
return nil, fmt.Errorf("readData: failed to open data file: %s", err)
}
defer file.Close()
content := helpers.ReaderToBytes(file)
@ -1295,9 +1293,9 @@ func (c *contentCaptureResultHandler) handleBundles(d *bundleDirs) {
}
}

func (c *contentCaptureResultHandler) handleCopyFiles(filenames ...string) {
func (c *contentCaptureResultHandler) handleCopyFiles(files ...pathLangFile) {
for _, proc := range c.contentProcessors {
proc.processAssets(filenames)
proc.processAssets(files)
}
}

@ -1305,15 +1303,16 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
ctx := context.Background()
g, ctx := errgroup.WithContext(ctx)

sourceSpec := source.NewSourceSpec(s.owner.Cfg, s.Fs)
baseDir := s.absContentDir()
defaultContentLanguage := s.SourceSpec.DefaultContentLanguage

contentProcessors := make(map[string]*siteContentProcessor)
var defaultContentProcessor *siteContentProcessor
sites := s.owner.langSite()
for k, v := range sites {
proc := newSiteContentProcessor(ctx, baseDir, len(filenames) > 0, v)
if v.Language.Disabled {
continue
}
proc := newSiteContentProcessor(ctx, len(filenames) > 0, v)
contentProcessors[k] = proc
if k == defaultContentLanguage {
defaultContentProcessor = proc

@ -1330,6 +1329,8 @@ func (s *Site) readAndProcessContent(filenames ...string) error {

mainHandler := &contentCaptureResultHandler{contentProcessors: contentProcessors, defaultContentProcessor: defaultContentProcessor}

sourceSpec := source.NewSourceSpec(s.PathSpec, s.BaseFs.ContentFs)

if s.running() {
// Need to track changes.
bundleMap = s.owner.ContentChanges

@ -1339,7 +1340,7 @@ func (s *Site) readAndProcessContent(filenames ...string) error {
handler = mainHandler
}

c := newCapturer(s.Log, sourceSpec, handler, bundleMap, baseDir, filenames...)
c := newCapturer(s.Log, sourceSpec, handler, bundleMap, filenames...)

err1 := c.capture()

@ -119,10 +119,12 @@ Do not go gentle into that good night.

notUgly := s.getPage(KindPage, "sect1/p1.md")
assert.NotNil(notUgly)
assert.Equal("sect1", notUgly.Section())
assert.Equal("/sect1/p1/", notUgly.RelPermalink())

ugly := s.getPage(KindPage, "sect2/p2.md")
assert.NotNil(ugly)
assert.Equal("sect2", ugly.Section())
assert.Equal("/sect2/p2.html", ugly.RelPermalink())
}

@ -115,8 +115,8 @@ permalinkeds:
writeSource(t, fs, "content/p3.md", fmt.Sprintf(pageTemplate, "t2/c12", "- tag2", "- cat2", "- o1", "- pl1"))
writeSource(t, fs, "content/p4.md", fmt.Sprintf(pageTemplate, "Hello World", "", "", "- \"Hello Hugo world\"", "- pl1"))

writeNewContentFile(t, fs, "Category Terms", "2017-01-01", "content/categories/_index.md", 10)
writeNewContentFile(t, fs.Source, "Category Terms", "2017-01-01", "content/categories/_index.md", 10)
writeNewContentFile(t, fs, "Tag1 List", "2017-01-01", "content/tags/Tag1/_index.md", 10)
writeNewContentFile(t, fs.Source, "Tag1 List", "2017-01-01", "content/tags/Tag1/_index.md", 10)

err := h.Build(BuildCfg{})

@ -51,6 +51,11 @@ type sitesBuilder struct {
// Default toml
configFormat string

// Default is empty.
// TODO(bep) revisit this and consider always setting it to something.
// Consider this in relation to using the BaseFs.PublishFs to all publishing.
workingDir string

// Base data/content
contentFilePairs []string
templateFilePairs []string

@ -83,6 +88,11 @@ func (s *sitesBuilder) Running() *sitesBuilder {
return s
}

func (s *sitesBuilder) WithWorkingDir(dir string) *sitesBuilder {
s.workingDir = dir
return s
}

func (s *sitesBuilder) WithConfigTemplate(data interface{}, format, configTemplate string) *sitesBuilder {
if format == "" {
format = "toml"
@ -233,7 +243,17 @@ func (s *sitesBuilder) writeFilePairs(folder string, filenameContent []string) *
}
for i := 0; i < len(filenameContent); i += 2 {
filename, content := filenameContent[i], filenameContent[i+1]
writeSource(s.T, s.Fs, filepath.Join(folder, filename), content)
target := folder
// TODO(bep) clean up this magic.
if strings.HasPrefix(filename, folder) {
target = ""
}

if s.workingDir != "" {
target = filepath.Join(s.workingDir, target)
}

writeSource(s.T, s.Fs, filepath.Join(target, filename), content)
}
return s
}

@ -458,6 +478,7 @@ func newTestDefaultPathSpec() *helpers.PathSpec {
v := viper.New()
// Easier to reason about in tests.
v.Set("disablePathToLower", true)
v.Set("contentDir", "content")
fs := hugofs.NewDefault(v)
ps, _ := helpers.NewPathSpec(fs, v)
return ps

@ -200,6 +200,7 @@ func TestI18nTranslate(t *testing.T) {
var actual, expected string
v := viper.New()
v.SetDefault("defaultContentLanguage", "en")
v.Set("contentDir", "content")

// Test without and with placeholders
for _, enablePlaceholders := range []bool{false, true} {

@ -39,7 +39,7 @@ func NewTranslationProvider() *TranslationProvider {
// Update updates the i18n func in the provided Deps.
func (tp *TranslationProvider) Update(d *deps.Deps) error {
dir := d.PathSpec.AbsPathify(d.Cfg.GetString("i18nDir"))
sp := source.NewSourceSpec(d.Cfg, d.Fs)
sp := source.NewSourceSpec(d.PathSpec, d.Fs.Source)
sources := []source.Input{sp.NewFilesystem(dir)}

themeI18nDir, err := d.PathSpec.GetThemeI18nDirPath()

@ -419,7 +419,7 @@ func (i *Image) initConfig() error {
config image.Config
)

f, err = i.spec.Fs.Source.Open(i.AbsSourceFilename())
f, err = i.sourceFs().Open(i.AbsSourceFilename())
if err != nil {
return
}

@ -432,13 +432,17 @@ func (i *Image) initConfig() error {
i.config = config
})

return err
if err != nil {
return fmt.Errorf("failed to load image config: %s", err)
}

return nil
}

func (i *Image) decodeSource() (image.Image, error) {
file, err := i.spec.Fs.Source.Open(i.AbsSourceFilename())
file, err := i.sourceFs().Open(i.AbsSourceFilename())
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to open image for decode: %s", err)
}
defer file.Close()
img, _, err := image.Decode(file)
@ -448,32 +452,32 @@ func (i *Image) decodeSource() (image.Image, error) {
func (i *Image) copyToDestination(src string) error {
var res error
i.copyToDestinationInit.Do(func() {
target := filepath.Join(i.absPublishDir, i.target())
target := i.target()

// Fast path:
// This is a processed version of the original.
// If it exists on destination with the same filename and file size, it is
// the same file, so no need to transfer it again.
if fi, err := i.spec.Fs.Destination.Stat(target); err == nil && fi.Size() == i.osFileInfo.Size() {
if fi, err := i.spec.BaseFs.PublishFs.Stat(target); err == nil && fi.Size() == i.osFileInfo.Size() {
return
}

in, err := i.spec.Fs.Source.Open(src)
in, err := i.sourceFs().Open(src)
if err != nil {
res = err
return
}
defer in.Close()

out, err := i.spec.Fs.Destination.Create(target)
out, err := i.spec.BaseFs.PublishFs.Create(target)
if err != nil && os.IsNotExist(err) {
// When called from shortcodes, the target directory may not exist yet.
// See https://github.com/gohugoio/hugo/issues/4202
if err = i.spec.Fs.Destination.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
if err = i.spec.BaseFs.PublishFs.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
res = err
return
}
out, err = i.spec.Fs.Destination.Create(target)
out, err = i.spec.BaseFs.PublishFs.Create(target)
if err != nil {
res = err
return
@ -491,20 +495,23 @@ func (i *Image) copyToDestination(src string) error {
}
})

return res
if res != nil {
return fmt.Errorf("failed to copy image to destination: %s", res)
}
return nil
}

func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resourceCacheFilename, filename string) error {
target := filepath.Join(i.absPublishDir, filename)
target := filepath.Clean(filename)

file1, err := i.spec.Fs.Destination.Create(target)
file1, err := i.spec.BaseFs.PublishFs.Create(target)
if err != nil && os.IsNotExist(err) {
// When called from shortcodes, the target directory may not exist yet.
// See https://github.com/gohugoio/hugo/issues/4202
if err = i.spec.Fs.Destination.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
if err = i.spec.BaseFs.PublishFs.MkdirAll(filepath.Dir(target), os.FileMode(0755)); err != nil {
return err
}
file1, err = i.spec.Fs.Destination.Create(target)
file1, err = i.spec.BaseFs.PublishFs.Create(target)
if err != nil {
return err
}

@ -518,11 +525,11 @@ func (i *Image) encodeToDestinations(img image.Image, conf imageConfig, resource

if resourceCacheFilename != "" {
// Also save it to the image resource cache for later reuse.
if err = i.spec.Fs.Source.MkdirAll(filepath.Dir(resourceCacheFilename), os.FileMode(0755)); err != nil {
if err = i.spec.BaseFs.ResourcesFs.MkdirAll(filepath.Dir(resourceCacheFilename), os.FileMode(0755)); err != nil {
return err
}

file2, err := i.spec.Fs.Source.Create(resourceCacheFilename)
file2, err := i.spec.BaseFs.ResourcesFs.Create(resourceCacheFilename)
if err != nil {
return err
}

@ -24,8 +24,7 @@ import (
)

type imageCache struct {
absPublishDir string
cacheDir string
absCacheDir string
pathSpec *helpers.PathSpec
mu sync.RWMutex

@ -82,14 +81,14 @@ func (c *imageCache) getOrCreate(
parent.createMu.Lock()
defer parent.createMu.Unlock()

cacheFilename := filepath.Join(c.absCacheDir, key)
cacheFilename := filepath.Join(c.cacheDir, key)

// The definition of this counter is not that we have processed that amount
// (e.g. resized etc.), it can be fetched from file cache,
// but the count of processed image variations for this site.
c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)

exists, err := helpers.Exists(cacheFilename, c.pathSpec.Fs.Source)
exists, err := helpers.Exists(cacheFilename, c.pathSpec.BaseFs.ResourcesFs)
if err != nil {
return nil, err
}

@ -97,7 +96,9 @@ func (c *imageCache) getOrCreate(
if exists {
img = parent.clone()
img.relTargetPath.file = relTarget.file
img.absSourceFilename = cacheFilename
img.sourceFilename = cacheFilename
// We have to look resources file system for this.
img.overriddenSourceFs = img.spec.BaseFs.ResourcesFs
} else {
img, err = create(cacheFilename)
if err != nil {

@ -124,8 +125,8 @@ func (c *imageCache) getOrCreate(

}

func newImageCache(ps *helpers.PathSpec, absCacheDir, absPublishDir string) *imageCache {
func newImageCache(ps *helpers.PathSpec, cacheDir string) *imageCache {
return &imageCache{pathSpec: ps, store: make(map[string]*Image), absCacheDir: absCacheDir, absPublishDir: absPublishDir}
return &imageCache{pathSpec: ps, store: make(map[string]*Image), cacheDir: cacheDir}
}

func timeTrack(start time.Time, name string) {

@ -16,6 +16,7 @@ package resource
import (
"fmt"
"math/rand"
"os"
"path/filepath"
"strconv"
"testing"

@ -57,12 +58,14 @@ func TestParseImageConfig(t *testing.T) {
}
}

func TestImageTransform(t *testing.T) {
func TestImageTransformBasic(t *testing.T) {

assert := require.New(t)

image := fetchSunset(assert)

printFs(image.sourceFs(), "", os.Stdout)

assert.Equal("/a/sunset.jpg", image.RelPermalink())
assert.Equal("image", image.ResourceType())

@ -75,19 +78,19 @@ func TestImageTransform(t *testing.T) {
assert.NoError(err)
assert.Equal(320, resized0x.Width())
assert.Equal(200, resized0x.Height())
assertFileCache(assert, image.spec.Fs, resized0x.RelPermalink(), 320, 200)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resized0x.RelPermalink(), 320, 200)

resizedx0, err := image.Resize("200x")
assert.NoError(err)
assert.Equal(200, resizedx0.Width())
assert.Equal(125, resizedx0.Height())
assertFileCache(assert, image.spec.Fs, resizedx0.RelPermalink(), 200, 125)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedx0.RelPermalink(), 200, 125)

resizedAndRotated, err := image.Resize("x200 r90")
assert.NoError(err)
assert.Equal(125, resizedAndRotated.Width())
assert.Equal(200, resizedAndRotated.Height())
assertFileCache(assert, image.spec.Fs, resizedAndRotated.RelPermalink(), 125, 200)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedAndRotated.RelPermalink(), 125, 200)

assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_300x200_resize_q68_linear.jpg", resized.RelPermalink())
assert.Equal(300, resized.Width())

@ -112,20 +115,20 @@ func TestImageTransform(t *testing.T) {
assert.Equal("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_bottomleft.jpg", filled.RelPermalink())
assert.Equal(200, filled.Width())
assert.Equal(100, filled.Height())
assertFileCache(assert, image.spec.Fs, filled.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, filled.RelPermalink(), 200, 100)

smart, err := image.Fill("200x100 smart")
assert.NoError(err)
assert.Equal(fmt.Sprintf("/a/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_200x100_fill_q68_linear_smart%d.jpg", smartCropVersionNumber), smart.RelPermalink())
assert.Equal(200, smart.Width())
assert.Equal(100, smart.Height())
assertFileCache(assert, image.spec.Fs, smart.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, smart.RelPermalink(), 200, 100)

// Check cache
filledAgain, err := image.Fill("200x100 bottomLeft")
assert.NoError(err)
assert.True(filled == filledAgain)
assertFileCache(assert, image.spec.Fs, filledAgain.RelPermalink(), 200, 100)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, filledAgain.RelPermalink(), 200, 100)

}

@ -295,10 +298,10 @@ func TestImageResizeInSubPath(t *testing.T) {
assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resized.RelPermalink())
assert.Equal(101, resized.Width())

assertFileCache(assert, image.spec.Fs, resized.RelPermalink(), 101, 101)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resized.RelPermalink(), 101, 101)
publishedImageFilename := filepath.Join("/public", resized.RelPermalink())
publishedImageFilename := filepath.Clean(resized.RelPermalink())
assertImageFile(assert, image.spec.Fs, publishedImageFilename, 101, 101)
assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
assert.NoError(image.spec.Fs.Destination.Remove(publishedImageFilename))
assert.NoError(image.spec.BaseFs.PublishFs.Remove(publishedImageFilename))

// Cleare mem cache to simulate reading from the file cache.
resized.spec.imageCache.clear()

@ -307,8 +310,8 @@ func TestImageResizeInSubPath(t *testing.T) {
assert.NoError(err)
assert.Equal("/a/sub/gohugoio2_hu0e1b9e4a4be4d6f86c7b37b9ccce3fbc_73886_101x101_resize_linear_2.png", resizedAgain.RelPermalink())
assert.Equal(101, resizedAgain.Width())
assertFileCache(assert, image.spec.Fs, resizedAgain.RelPermalink(), 101, 101)
assertFileCache(assert, image.spec.BaseFs.ResourcesFs, resizedAgain.RelPermalink(), 101, 101)
assertImageFile(assert, image.spec.Fs, publishedImageFilename, 101, 101)
assertImageFile(assert, image.spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)

}

@ -23,6 +23,8 @@ import (
"strings"
"sync"

"github.com/spf13/afero"

"github.com/spf13/cast"

"github.com/gobwas/glob"

@ -214,6 +216,7 @@ func getGlob(pattern string) (glob.Glob, error) {

type Spec struct {
*helpers.PathSpec

mimeTypes media.Types

// Holds default filter settings etc.

@ -221,7 +224,7 @@ type Spec struct {

imageCache *imageCache

AbsGenImagePath string
GenImagePath string
}

func NewSpec(s *helpers.PathSpec, mimeTypes media.Types) (*Spec, error) {
@ -232,41 +235,44 @@ func NewSpec(s *helpers.PathSpec, mimeTypes media.Types) (*Spec, error) {
}
s.GetLayoutDirPath()

genImagePath := s.AbsPathify(filepath.Join(s.Cfg.GetString("resourceDir"), "_gen", "images"))
genImagePath := filepath.FromSlash("_gen/images")

return &Spec{AbsGenImagePath: genImagePath, PathSpec: s, imaging: &imaging, mimeTypes: mimeTypes, imageCache: newImageCache(
return &Spec{PathSpec: s,
GenImagePath: genImagePath,
imaging: &imaging, mimeTypes: mimeTypes, imageCache: newImageCache(
s,
// We're going to write a cache pruning routine later, so make it extremely
// unlikely that the user shoots him or herself in the foot
// and this is set to a value that represents data he/she
// cares about. This should be set in stone once released.
genImagePath,
s.AbsPathify(s.Cfg.GetString("publishDir")))}, nil
)}, nil
}

func (r *Spec) NewResourceFromFile(
targetPathBuilder func(base string) string,
absPublishDir string,
file source.File, relTargetFilename string) (Resource, error) {

return r.newResource(targetPathBuilder, absPublishDir, file.Filename(), file.FileInfo(), relTargetFilename)
return r.newResource(targetPathBuilder, file.Filename(), file.FileInfo(), relTargetFilename)
}

func (r *Spec) NewResourceFromFilename(
targetPathBuilder func(base string) string,
absPublishDir,
absSourceFilename, relTargetFilename string) (Resource, error) {

fi, err := r.Fs.Source.Stat(absSourceFilename)
fi, err := r.sourceFs().Stat(absSourceFilename)
if err != nil {
return nil, err
}
return r.newResource(targetPathBuilder, absPublishDir, absSourceFilename, fi, relTargetFilename)
return r.newResource(targetPathBuilder, absSourceFilename, fi, relTargetFilename)
}

func (r *Spec) sourceFs() afero.Fs {
return r.PathSpec.BaseFs.ContentFs
}

func (r *Spec) newResource(
targetPathBuilder func(base string) string,
absPublishDir,
absSourceFilename string, fi os.FileInfo, relTargetFilename string) (Resource, error) {

var mimeType string
@ -283,7 +289,7 @@ func (r *Spec) newResource(
}
}

gr := r.newGenericResource(targetPathBuilder, fi, absPublishDir, absSourceFilename, relTargetFilename, mimeType)
gr := r.newGenericResource(targetPathBuilder, fi, absSourceFilename, relTargetFilename, mimeType)

if mimeType == "image" {
ext := strings.ToLower(helpers.Ext(absSourceFilename))

@ -295,9 +301,9 @@ func (r *Spec) newResource(
return gr, nil
}

f, err := r.Fs.Source.Open(absSourceFilename)
f, err := gr.sourceFs().Open(absSourceFilename)
if err != nil {
return nil, err
return nil, fmt.Errorf("failed to open image source file: %s", err)
}
defer f.Close()

@ -369,15 +375,30 @@ type genericResource struct {
params map[string]interface{}

// Absolute filename to the source, including any content folder path.
absSourceFilename string
// Note that this is absolute in relation to the filesystem it is stored in.
absPublishDir string
// It can be a base path filesystem, and then this filename will not match
// the path to the file on the real filesystem.
sourceFilename string

// This may be set to tell us to look in another filesystem for this resource.
// We, by default, use the sourceFs filesystem in the spec below.
overriddenSourceFs afero.Fs

spec *Spec

resourceType string
osFileInfo os.FileInfo

spec *Spec
targetPathBuilder func(rel string) string
}

func (l *genericResource) sourceFs() afero.Fs {
if l.overriddenSourceFs != nil {
return l.overriddenSourceFs
}
return l.spec.sourceFs()
}

func (l *genericResource) Permalink() string {
return l.spec.PermalinkForBaseURL(l.relPermalinkForRel(l.relTargetPath.path(), false), l.spec.BaseURL.String())
}
@ -455,19 +476,16 @@ func (l *genericResource) ResourceType() string {
}

func (l *genericResource) AbsSourceFilename() string {
return l.absSourceFilename
return l.sourceFilename
}

func (l *genericResource) Publish() error {
f, err := l.spec.Fs.Source.Open(l.AbsSourceFilename())
f, err := l.sourceFs().Open(l.AbsSourceFilename())
if err != nil {
return err
}
defer f.Close()
return helpers.WriteToDisk(l.target(), f, l.spec.BaseFs.PublishFs)
target := filepath.Join(l.absPublishDir, l.target())

return helpers.WriteToDisk(target, f, l.spec.Fs.Destination)
}

const counterPlaceHolder = ":counter"

@ -574,8 +592,7 @@ func (l *genericResource) target() string {
func (r *Spec) newGenericResource(
targetPathBuilder func(base string) string,
osFileInfo os.FileInfo,
absPublishDir,
sourceFilename,
absSourceFilename,
baseFilename,
resourceType string) *genericResource {

@ -587,8 +604,7 @@ func (r *Spec) newGenericResource(
return &genericResource{
targetPathBuilder: targetPathBuilder,
osFileInfo: osFileInfo,
absPublishDir: absPublishDir,
sourceFilename: sourceFilename,
absSourceFilename: absSourceFilename,
relTargetPath: dirFile{dir: fpath, file: fname},
resourceType: resourceType,
spec: r,

@ -29,7 +29,7 @@ func TestGenericResource(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)

r := spec.newGenericResource(nil, nil, "/public", "/a/foo.css", "foo.css", "css")
r := spec.newGenericResource(nil, nil, "/a/foo.css", "foo.css", "css")

assert.Equal("https://example.com/foo.css", r.Permalink())
assert.Equal("/foo.css", r.RelPermalink())

@ -44,7 +44,7 @@ func TestGenericResourceWithLinkFacory(t *testing.T) {
factory := func(s string) string {
return path.Join("/foo", s)
}
r := spec.newGenericResource(factory, nil, "/public", "/a/foo.css", "foo.css", "css")
r := spec.newGenericResource(factory, nil, "/a/foo.css", "foo.css", "css")

assert.Equal("https://example.com/foo/foo.css", r.Permalink())
assert.Equal("/foo/foo.css", r.RelPermalink())

@ -55,11 +55,11 @@ func TestNewResourceFromFilename(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)

writeSource(t, spec.Fs, "/project/a/b/logo.png", "image")
writeSource(t, spec.Fs, "content/a/b/logo.png", "image")
writeSource(t, spec.Fs, "/root/a/b/data.json", "json")
writeSource(t, spec.Fs, "content/a/b/data.json", "json")

r, err := spec.NewResourceFromFilename(nil, "/public",
r, err := spec.NewResourceFromFilename(nil,
filepath.FromSlash("/project/a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))
filepath.FromSlash("a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))

assert.NoError(err)
assert.NotNil(r)

@ -67,7 +67,7 @@ func TestNewResourceFromFilename(t *testing.T) {
assert.Equal("/a/b/logo.png", r.RelPermalink())
assert.Equal("https://example.com/a/b/logo.png", r.Permalink())

r, err = spec.NewResourceFromFilename(nil, "/public", "/root/a/b/data.json", "a/b/data.json")
r, err = spec.NewResourceFromFilename(nil, "a/b/data.json", "a/b/data.json")

assert.NoError(err)
assert.NotNil(r)

@ -82,10 +82,10 @@ func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpecForBaseURL(assert, "https://example.com/docs")

writeSource(t, spec.Fs, "/project/a/b/logo.png", "image")
writeSource(t, spec.Fs, "content/a/b/logo.png", "image")

r, err := spec.NewResourceFromFilename(nil, "/public",
r, err := spec.NewResourceFromFilename(nil,
filepath.FromSlash("/project/a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))
filepath.FromSlash("a/b/logo.png"), filepath.FromSlash("a/b/logo.png"))

assert.NoError(err)
assert.NotNil(r)
@ -101,10 +101,10 @@ func TestResourcesByType(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/a/logo.png", "logo.css", "image"),
spec.newGenericResource(nil, nil, "/a/logo.png", "logo.css", "image"),
spec.newGenericResource(nil, nil, "/public", "/a/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/a/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/a/foo3.css", "foo3.css", "css")}
spec.newGenericResource(nil, nil, "/a/foo3.css", "foo3.css", "css")}

assert.Len(resources.ByType("css"), 3)
assert.Len(resources.ByType("image"), 1)

@ -115,11 +115,11 @@ func TestResourcesGetByPrefix(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/public", "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css")}
spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css")}

assert.Nil(resources.GetByPrefix("asdf"))
assert.Equal("/logo1.png", resources.GetByPrefix("logo").RelPermalink())

@ -144,14 +144,14 @@ func TestResourcesGetMatch(t *testing.T) {
assert := require.New(t)
spec := newTestResourceSpec(assert)
resources := Resources{
spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image"),
spec.newGenericResource(nil, nil, "/public", "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image"),
spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css"),
spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/b/c/foo4.css", "c/foo4.css", "css"),
spec.newGenericResource(nil, nil, "/b/c/foo4.css", "c/foo4.css", "css"),
spec.newGenericResource(nil, nil, "/public", "/b/c/foo5.css", "c/foo5.css", "css"),
spec.newGenericResource(nil, nil, "/b/c/foo5.css", "c/foo5.css", "css"),
|
||||||
spec.newGenericResource(nil, nil, "/public", "/b/c/d/foo6.css", "c/d/foo6.css", "css"),
|
spec.newGenericResource(nil, nil, "/b/c/d/foo6.css", "c/d/foo6.css", "css"),
|
||||||
}
|
}
|
||||||
|
|
||||||
assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink())
|
assert.Equal("/logo1.png", resources.GetMatch("logo*").RelPermalink())
|
||||||
|
@ -373,12 +373,12 @@ func TestAssignMetadata(t *testing.T) {
|
||||||
}},
|
}},
|
||||||
} {
|
} {
|
||||||
|
|
||||||
foo2 = spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css")
|
foo2 = spec.newGenericResource(nil, nil, "/b/foo2.css", "foo2.css", "css")
|
||||||
logo2 = spec.newGenericResource(nil, nil, "/public", "/b/Logo2.png", "Logo2.png", "image")
|
logo2 = spec.newGenericResource(nil, nil, "/b/Logo2.png", "Logo2.png", "image")
|
||||||
foo1 = spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css")
|
foo1 = spec.newGenericResource(nil, nil, "/a/foo1.css", "foo1.css", "css")
|
||||||
logo1 = spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image")
|
logo1 = spec.newGenericResource(nil, nil, "/a/logo1.png", "logo1.png", "image")
|
||||||
foo3 = spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css")
|
foo3 = spec.newGenericResource(nil, nil, "/b/foo3.css", "foo3.css", "css")
|
||||||
logo3 = spec.newGenericResource(nil, nil, "/public", "/b/logo3.png", "logo3.png", "image")
|
logo3 = spec.newGenericResource(nil, nil, "/b/logo3.png", "logo3.png", "image")
|
||||||
|
|
||||||
resources = Resources{
|
resources = Resources{
|
||||||
foo2,
|
foo2,
|
||||||
|
@ -428,7 +428,7 @@ func BenchmarkResourcesMatchA100(b *testing.B) {
|
||||||
a100 := strings.Repeat("a", 100)
|
a100 := strings.Repeat("a", 100)
|
||||||
pattern := "a*a*a*a*a*a*a*a*b"
|
pattern := "a*a*a*a*a*a*a*a*b"
|
||||||
|
|
||||||
resources := Resources{spec.newGenericResource(nil, nil, "/public", "/a/"+a100, a100, "css")}
|
resources := Resources{spec.newGenericResource(nil, nil, "/a/"+a100, a100, "css")}
|
||||||
|
|
||||||
b.ResetTimer()
|
b.ResetTimer()
|
||||||
for i := 0; i < b.N; i++ {
|
for i := 0; i < b.N; i++ {
|
||||||
|
@ -444,17 +444,17 @@ func benchResources(b *testing.B) Resources {
|
||||||
|
|
||||||
for i := 0; i < 30; i++ {
|
for i := 0; i < 30; i++ {
|
||||||
name := fmt.Sprintf("abcde%d_%d.css", i%5, i)
|
name := fmt.Sprintf("abcde%d_%d.css", i%5, i)
|
||||||
resources = append(resources, spec.newGenericResource(nil, nil, "/public", "/a/"+name, name, "css"))
|
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := 0; i < 30; i++ {
|
for i := 0; i < 30; i++ {
|
||||||
name := fmt.Sprintf("efghi%d_%d.css", i%5, i)
|
name := fmt.Sprintf("efghi%d_%d.css", i%5, i)
|
||||||
resources = append(resources, spec.newGenericResource(nil, nil, "/public", "/a/"+name, name, "css"))
|
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
|
||||||
}
|
}
|
||||||
|
|
||||||
for i := 0; i < 30; i++ {
|
for i := 0; i < 30; i++ {
|
||||||
name := fmt.Sprintf("jklmn%d_%d.css", i%5, i)
|
name := fmt.Sprintf("jklmn%d_%d.css", i%5, i)
|
||||||
resources = append(resources, spec.newGenericResource(nil, nil, "/public", "/b/sub/"+name, "sub/"+name, "css"))
|
resources = append(resources, spec.newGenericResource(nil, nil, "/b/sub/"+name, "sub/"+name, "css"))
|
||||||
}
|
}
|
||||||
|
|
||||||
return resources
|
return resources
|
||||||
|
@ -482,7 +482,7 @@ func BenchmarkAssignMetadata(b *testing.B) {
|
||||||
}
|
}
|
||||||
for i := 0; i < 20; i++ {
|
for i := 0; i < 20; i++ {
|
||||||
name := fmt.Sprintf("foo%d_%d.css", i%5, i)
|
name := fmt.Sprintf("foo%d_%d.css", i%5, i)
|
||||||
resources = append(resources, spec.newGenericResource(nil, nil, "/public", "/a/"+name, name, "css"))
|
resources = append(resources, spec.newGenericResource(nil, nil, "/a/"+name, name, "css"))
|
||||||
}
|
}
|
||||||
b.StartTimer()
|
b.StartTimer()
|
||||||
|
|
||||||
|
|
|
@@ -4,6 +4,7 @@ import (
    "path/filepath"
    "testing"

+   "fmt"
    "image"
    "io"
    "io/ioutil"

@@ -27,7 +28,8 @@ func newTestResourceSpec(assert *require.Assertions) *Spec {
func newTestResourceSpecForBaseURL(assert *require.Assertions, baseURL string) *Spec {
    cfg := viper.New()
    cfg.Set("baseURL", baseURL)
-   cfg.Set("resourceDir", "/res")
+   cfg.Set("resourceDir", "resources")
+   cfg.Set("contentDir", "content")

    imagingCfg := map[string]interface{}{
        "resampleFilter": "linear",

@@ -60,9 +62,8 @@ func newTestResourceOsFs(assert *require.Assertions) *Spec {
        workDir = "/private" + workDir
    }

-   contentDir := "base"
    cfg.Set("workingDir", workDir)
-   cfg.Set("contentDir", contentDir)
+   cfg.Set("contentDir", filepath.Join(workDir, "content"))
    cfg.Set("resourceDir", filepath.Join(workDir, "res"))

    fs := hugofs.NewFrom(hugofs.Os, cfg)

@@ -97,10 +98,8 @@ func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) R
    src, err := os.Open(filepath.FromSlash("testdata/" + name))
    assert.NoError(err)

-   workingDir := spec.Cfg.GetString("workingDir")
-   f := filepath.Join(workingDir, name)
-
-   out, err := spec.Fs.Source.Create(f)
+   assert.NoError(spec.BaseFs.ContentFs.MkdirAll(filepath.Dir(name), 0755))
+   out, err := spec.BaseFs.ContentFs.Create(name)
    assert.NoError(err)
    _, err = io.Copy(out, src)
    out.Close()

@@ -111,14 +110,17 @@ func fetchResourceForSpec(spec *Spec, assert *require.Assertions, name string) R
        return path.Join("/a", s)
    }

-   r, err := spec.NewResourceFromFilename(factory, "/public", f, name)
+   r, err := spec.NewResourceFromFilename(factory, name, name)
    assert.NoError(err)

    return r
}

-func assertImageFile(assert *require.Assertions, fs *hugofs.Fs, filename string, width, height int) {
-   f, err := fs.Source.Open(filename)
+func assertImageFile(assert *require.Assertions, fs afero.Fs, filename string, width, height int) {
+   f, err := fs.Open(filename)
+   if err != nil {
+       printFs(fs, "", os.Stdout)
+   }
    assert.NoError(err)
    defer f.Close()

@@ -129,8 +131,8 @@ func assertImageFile(assert *require.Assertions, fs *hugofs.Fs, filename string,
    assert.Equal(height, config.Height)
}

-func assertFileCache(assert *require.Assertions, fs *hugofs.Fs, filename string, width, height int) {
-   assertImageFile(assert, fs, filepath.Join("/res/_gen/images", filename), width, height)
+func assertFileCache(assert *require.Assertions, fs afero.Fs, filename string, width, height int) {
+   assertImageFile(assert, fs, filepath.Join("_gen/images", filename), width, height)
}

func writeSource(t testing.TB, fs *hugofs.Fs, filename, content string) {

@@ -142,3 +144,22 @@ func writeToFs(t testing.TB, fs afero.Fs, filename, content string) {
        t.Fatalf("Failed to write file: %s", err)
    }
}

+func printFs(fs afero.Fs, path string, w io.Writer) {
+   if fs == nil {
+       return
+   }
+   afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+       if info != nil && !info.IsDir() {
+           s := path
+           if lang, ok := info.(hugofs.LanguageAnnouncer); ok {
+               s = s + "\t" + lang.Lang()
+           }
+           if fp, ok := info.(hugofs.FilePather); ok {
+               s += "\tFilename: " + fp.Filename() + "\tBase: " + fp.BaseDir()
+           }
+           fmt.Fprintln(w, " ", s)
+       }
+       return nil
+   })
+}

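The `printFs` helper added above walks any afero filesystem and, for entries decorated by the new language-aware filesystem, also prints the announced language and source path. A minimal sketch of calling it from a test, reusing `hugofs.NewLanguageFs` and the afero helpers that appear elsewhere in this commit (the file name below is made up):

```go
// Sketch only: dump the contents of a language-annotated in-memory fs.
m := afero.NewMemMapFs()
lfs := hugofs.NewLanguageFs("en", map[string]bool{"en": true}, m)
afero.WriteFile(lfs, "post/first.md", []byte("hi"), 0644)

// Prints each file, plus its language and filename when the fs announces them.
printFs(lfs, "", os.Stdout)
```
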
@@ -17,11 +17,15 @@ import (
    "path/filepath"
    "testing"

+   "github.com/gohugoio/hugo/helpers"
+
    "github.com/gohugoio/hugo/hugofs"
    "github.com/spf13/viper"
+   "github.com/stretchr/testify/require"
)

func TestIgnoreDotFilesAndDirectories(t *testing.T) {
+   assert := require.New(t)

    tests := []struct {
        path string

@@ -35,7 +39,6 @@ func TestIgnoreDotFilesAndDirectories(t *testing.T) {
        {"foobar/.barfoo.md", true, nil},
        {".barfoo.md", true, nil},
        {".md", true, nil},
-       {"", true, nil},
        {"foobar/barfoo.md~", true, nil},
        {".foobar/barfoo.md~", true, nil},
        {"foobar~/barfoo.md", false, nil},

@@ -51,9 +54,13 @@ func TestIgnoreDotFilesAndDirectories(t *testing.T) {
    for i, test := range tests {

        v := viper.New()
+       v.Set("contentDir", "content")
        v.Set("ignoreFiles", test.ignoreFilesRegexpes)
+       fs := hugofs.NewMem(v)
+       ps, err := helpers.NewPathSpec(fs, v)
+       assert.NoError(err)

-       s := NewSourceSpec(v, hugofs.NewMem(v))
+       s := NewSourceSpec(ps, fs.Source)

        if ignored := s.IgnoreFile(filepath.FromSlash(test.path)); test.ignore != ignored {
            t.Errorf("[%d] File not ignored", i)

@@ -101,6 +101,8 @@ func TestStaticDirs(t *testing.T) {
    for i, test := range tests {
        msg := fmt.Sprintf("Test %d", i)
        v := viper.New()
+       v.Set("contentDir", "content")
+
        fs := hugofs.NewMem(v)
        cfg := test.setup(v, fs)
        cfg.Set("workingDir", filepath.FromSlash("/work"))

@@ -134,6 +136,7 @@ func TestStaticDirsFs(t *testing.T) {
    v.Set("workingDir", filepath.FromSlash("/work"))
    v.Set("theme", "mytheme")
    v.Set("themesDir", "themes")
+   v.Set("contentDir", "content")
    v.Set("staticDir", []string{"s1", "s2"})
    v.Set("languagesSorted", helpers.Languages{helpers.NewDefaultLanguage(v)})

@@ -14,12 +14,17 @@
package source

import (
+   "fmt"
    "io"
    "os"
    "path/filepath"
    "strings"
    "sync"

+   "github.com/spf13/afero"
+
+   "github.com/gohugoio/hugo/hugofs"
+
    "github.com/gohugoio/hugo/helpers"
)

@@ -86,6 +91,9 @@ type FileInfo struct {

    // Absolute filename to the file on disk.
    filename string

+   sp *SourceSpec
+
    fi os.FileInfo

    // Derived from filename

@@ -104,8 +112,6 @@ type FileInfo struct {

    uniqueID string

-   sp *SourceSpec
-
    lazyInit sync.Once
}

@@ -146,7 +152,6 @@ func (fi *FileInfo) init() {
    fi.lazyInit.Do(func() {
        relDir := strings.Trim(fi.relDir, helpers.FilePathSeparator)
        parts := strings.Split(relDir, helpers.FilePathSeparator)
-
        var section string
        if (!fi.isLeafBundle && len(parts) == 1) || len(parts) > 1 {
            section = parts[0]

@@ -161,6 +166,19 @@ func (fi *FileInfo) init() {

func (sp *SourceSpec) NewFileInfo(baseDir, filename string, isLeafBundle bool, fi os.FileInfo) *FileInfo {

+   var lang, translationBaseName, relPath string
+
+   if fp, ok := fi.(hugofs.FilePather); ok {
+       filename = fp.Filename()
+       baseDir = fp.BaseDir()
+       relPath = fp.Path()
+   }
+
+   if fl, ok := fi.(hugofs.LanguageAnnouncer); ok {
+       lang = fl.Lang()
+       translationBaseName = fl.TranslationBaseName()
+   }
+
    dir, name := filepath.Split(filename)
    if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
        dir = dir + helpers.FilePathSeparator

@@ -175,19 +193,20 @@ func (sp *SourceSpec) NewFileInfo(baseDir, filename string, isLeafBundle bool, f

    relDir = strings.TrimPrefix(relDir, helpers.FilePathSeparator)

-   relPath := filepath.Join(relDir, name)
+   if relPath == "" {
+       relPath = filepath.Join(relDir, name)
+   }

    ext := strings.ToLower(strings.TrimPrefix(filepath.Ext(name), "."))
    baseName := helpers.Filename(name)

-   lang := strings.TrimPrefix(filepath.Ext(baseName), ".")
-   var translationBaseName string
-
-   if _, ok := sp.Languages[lang]; lang == "" || !ok {
-       lang = sp.DefaultContentLanguage
-       translationBaseName = baseName
-   } else {
-       translationBaseName = helpers.Filename(baseName)
+   if translationBaseName == "" {
+       // This is usyally provided by the filesystem. But this FileInfo is also
+       // created in a standalone context when doing "hugo new". This is
+       // an approximate implementation, which is "good enough" in that case.
+       translationBaseName = strings.TrimSuffix(baseName, ext)
+       fileLangExt := filepath.Ext(translationBaseName)
+       translationBaseName = strings.TrimSuffix(translationBaseName, fileLangExt)
    }

    f := &FileInfo{

@@ -211,5 +230,27 @@ func (sp *SourceSpec) NewFileInfo(baseDir, filename string, isLeafBundle bool, f

// Open implements ReadableFile.
func (fi *FileInfo) Open() (io.ReadCloser, error) {
-   return fi.sp.Fs.Source.Open(fi.Filename())
+   f, err := fi.sp.PathSpec.Fs.Source.Open(fi.Filename())
+   return f, err
}
+
+func printFs(fs afero.Fs, path string, w io.Writer) {
+   if fs == nil {
+       return
+   }
+   afero.Walk(fs, path, func(path string, info os.FileInfo, err error) error {
+
+       if info != nil && !info.IsDir() {
+
+           s := path
+           if lang, ok := info.(hugofs.LanguageAnnouncer); ok {
+               s = s + "\t" + lang.Lang()
+           }
+           if fp, ok := info.(hugofs.FilePather); ok {
+               s = s + "\t" + fp.Filename()
+           }
+           fmt.Fprintln(w, " ", s)
+       }
+       return nil
+   })
+}

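`NewFileInfo` above now asks the incoming `os.FileInfo` for extra metadata via type assertions on two hugofs interfaces. Their exact definitions live in the hugofs package; judging only from the methods called in this diff, they look roughly like the following sketch (not the verbatim source):

```go
// LanguageAnnouncer announces which language a file in the merged virtual
// content filesystem belongs to.
type LanguageAnnouncer interface {
    Lang() string
    TranslationBaseName() string
}

// FilePather announces the real filename, the path relative to the content
// dir, and the base directory the file was mounted from.
type FilePather interface {
    Filename() string
    Path() string
    BaseDir() string
}
```
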
@@ -17,6 +17,12 @@ import (
    "path/filepath"
    "testing"

+   "github.com/gohugoio/hugo/helpers"
+
+   "github.com/spf13/viper"
+
+   "github.com/gohugoio/hugo/hugofs"
+   "github.com/spf13/afero"
    "github.com/stretchr/testify/require"
)

@@ -35,6 +41,8 @@ func TestFileInfo(t *testing.T) {
            assert.Equal(filepath.FromSlash("b/"), f.Dir())
            assert.Equal(filepath.FromSlash("b/page.md"), f.Path())
            assert.Equal("b", f.Section())
+           assert.Equal(filepath.FromSlash("page"), f.TranslationBaseName())
+           assert.Equal(filepath.FromSlash("page"), f.BaseFileName())

        }},
        {filepath.FromSlash("/a/"), filepath.FromSlash("/a/b/c/d/page.md"), func(f *FileInfo) {

@@ -47,3 +55,39 @@ func TestFileInfo(t *testing.T) {
    }

}

+func TestFileInfoLanguage(t *testing.T) {
+   assert := require.New(t)
+   langs := map[string]bool{
+       "sv": true,
+       "en": true,
+   }
+
+   m := afero.NewMemMapFs()
+   lfs := hugofs.NewLanguageFs("sv", langs, m)
+   v := viper.New()
+   v.Set("contentDir", "content")
+
+   fs := hugofs.NewFrom(m, v)
+
+   ps, err := helpers.NewPathSpec(fs, v)
+   assert.NoError(err)
+   s := SourceSpec{Fs: lfs, PathSpec: ps}
+   s.Languages = map[string]interface{}{
+       "en": true,
+   }
+
+   err = afero.WriteFile(lfs, "page.md", []byte("abc"), 0777)
+   assert.NoError(err)
+   err = afero.WriteFile(lfs, "page.en.md", []byte("abc"), 0777)
+   assert.NoError(err)
+
+   sv, _ := lfs.Stat("page.md")
+   en, _ := lfs.Stat("page.en.md")
+
+   fiSv := s.NewFileInfo("", "page.md", false, sv)
+   fiEn := s.NewFileInfo("", "page.en.md", false, en)
+
+   assert.Equal("sv", fiSv.Lang())
+   assert.Equal("en", fiEn.Lang())
+}

@@ -82,11 +82,11 @@ func (f *Filesystem) captureFiles() {
    if f.Fs == nil {
        panic("Must have a fs")
    }
-   err := helpers.SymbolicWalk(f.Fs.Source, f.Base, walker)
+   err := helpers.SymbolicWalk(f.Fs, f.Base, walker)

    if err != nil {
        jww.ERROR.Println(err)
-       if err == helpers.ErrWalkRootTooShort {
+       if err == helpers.ErrPathTooShort {
            panic("The root path is too short. If this is a test, make sure to init the content paths.")
        }
    }

@@ -100,7 +100,7 @@ func (f *Filesystem) shouldRead(filename string, fi os.FileInfo) (bool, error) {
        jww.ERROR.Printf("Cannot read symbolic link '%s', error was: %s", filename, err)
        return false, nil
    }
-   linkfi, err := f.Fs.Source.Stat(link)
+   linkfi, err := f.Fs.Stat(link)
    if err != nil {
        jww.ERROR.Printf("Cannot stat '%s', error was: %s", link, err)
        return false, nil

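The walk and stat calls above now go through `helpers.LstatIfPossible` instead of the old OS-only variant. A hedged sketch of what such a helper can look like on top of afero 1.1.0's optional `Lstater` interface; the real implementation lives in the helpers package and may differ:

```go
// lstatIfPossible uses Lstat when the wrapped filesystem supports it
// (for example the OS fs), and falls back to a plain Stat otherwise.
// Assumes imports of "os" and "github.com/spf13/afero".
func lstatIfPossible(fs afero.Fs, path string) (os.FileInfo, error) {
    if lfs, ok := fs.(afero.Lstater); ok {
        fi, _, err := lfs.LstatIfPossible(path)
        return fi, err
    }
    return fs.Stat(path)
}
```
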
@@ -18,6 +18,8 @@ import (
    "runtime"
    "testing"

+   "github.com/gohugoio/hugo/helpers"
+
    "github.com/gohugoio/hugo/hugofs"

    "github.com/spf13/viper"

@@ -69,5 +71,7 @@ func TestUnicodeNorm(t *testing.T) {

func newTestSourceSpec() SourceSpec {
    v := viper.New()
-   return SourceSpec{Fs: hugofs.NewMem(v), Cfg: v}
+   v.Set("contentDir", "content")
+   ps, _ := helpers.NewPathSpec(hugofs.NewMem(v), v)
+   return SourceSpec{Fs: hugofs.NewMem(v).Source, PathSpec: ps}
}

@@ -18,17 +18,18 @@ import (
    "path/filepath"
    "regexp"

-   "github.com/gohugoio/hugo/config"
+   "github.com/spf13/afero"

    "github.com/gohugoio/hugo/helpers"
-   "github.com/gohugoio/hugo/hugofs"
    "github.com/spf13/cast"
)

// SourceSpec abstracts language-specific file creation.
// TODO(bep) rename to Spec
type SourceSpec struct {
-   Cfg config.Provider
-   Fs *hugofs.Fs
+   *helpers.PathSpec
+
+   Fs afero.Fs

    // This is set if the ignoreFiles config is set.
    ignoreFilesRe []*regexp.Regexp

@@ -38,8 +39,9 @@ type SourceSpec struct {
    DisabledLanguages map[string]bool
}

-// NewSourceSpec initializes SourceSpec using languages from a given configuration.
-func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) *SourceSpec {
+// NewSourceSpec initializes SourceSpec using languages the given filesystem and PathSpec.
+func NewSourceSpec(ps *helpers.PathSpec, fs afero.Fs) *SourceSpec {
+   cfg := ps.Cfg
    defaultLang := cfg.GetString("defaultContentLanguage")
    languages := cfg.GetStringMap("languages")

@@ -69,10 +71,17 @@ func NewSourceSpec(cfg config.Provider, fs *hugofs.Fs) *SourceSpec {
        }
    }

-   return &SourceSpec{ignoreFilesRe: regexps, Cfg: cfg, Fs: fs, Languages: languages, DefaultContentLanguage: defaultLang, DisabledLanguages: disabledLangsSet}
+   return &SourceSpec{ignoreFilesRe: regexps, PathSpec: ps, Fs: fs, Languages: languages, DefaultContentLanguage: defaultLang, DisabledLanguages: disabledLangsSet}
}

func (s *SourceSpec) IgnoreFile(filename string) bool {
+   if filename == "" {
+       if _, ok := s.Fs.(*afero.OsFs); ok {
+           return true
+       }
+       return false
+   }
+
    base := filepath.Base(filename)

    if len(base) > 0 {

@@ -99,7 +108,7 @@ func (s *SourceSpec) IgnoreFile(filename string) bool {
}

func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) {
-   fi, err := helpers.LstatIfOs(s.Fs.Source, filename)
+   fi, err := helpers.LstatIfPossible(s.Fs, filename)
    if err != nil {
        return false, err
    }

@@ -110,7 +119,7 @@ func (s *SourceSpec) IsRegularSourceFile(filename string) (bool, error) {

    if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
        link, err := filepath.EvalSymlinks(filename)
-       fi, err = helpers.LstatIfOs(s.Fs.Source, link)
+       fi, err = helpers.LstatIfPossible(s.Fs, link)
        if err != nil {
            return false, err
        }

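With the new signature, a `SourceSpec` is built from a `*helpers.PathSpec` plus a plain `afero.Fs` rather than from a raw config. The updated tests in this commit follow the pattern below; this sketch just restates it with the usual imports assumed and an illustrative filename:

```go
v := viper.New()
v.Set("contentDir", "content")

fs := hugofs.NewMem(v)
ps, err := helpers.NewPathSpec(fs, v)
if err != nil {
    // handle the error in real code
}

s := source.NewSourceSpec(ps, fs.Source)
_ = s.IgnoreFile(filepath.FromSlash("content/.hidden.md"))
```
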
@@ -369,7 +369,7 @@ func TestIntersect(t *testing.T) {
func TestIsSet(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   ns := newTestNs()

    for i, test := range []struct {
        a interface{}

@@ -787,3 +787,9 @@ func newDeps(cfg config.Provider) *deps.Deps {
        Log: jww.NewNotepad(jww.LevelError, jww.LevelError, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime),
    }
}

+func newTestNs() *Namespace {
+   v := viper.New()
+   v.Set("contentDir", "content")
+   return New(newDeps(v))
+}

@@ -21,7 +21,6 @@ import (
    "strings"
    "testing"

-   "github.com/spf13/viper"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

@@ -29,7 +28,7 @@ import (
func TestGetCSV(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   ns := newTestNs()

    for i, test := range []struct {
        sep string

@@ -123,7 +122,7 @@ func TestGetCSV(t *testing.T) {
func TestGetJSON(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   ns := newTestNs()

    for i, test := range []struct {
        url string

@@ -127,7 +127,7 @@ func TestScpGetRemote(t *testing.T) {
func TestScpGetRemoteParallel(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   ns := newTestNs()

    content := []byte(`T€st Content 123`)
    srv, cl := getTestServer(func(w http.ResponseWriter, r *http.Request) {

@@ -176,3 +176,9 @@ func newDeps(cfg config.Provider) *deps.Deps {
        ContentSpec: cs,
    }
}

+func newTestNs() *Namespace {
+   v := viper.New()
+   v.Set("contentDir", "content")
+   return New(newDeps(v))
+}

tpl/os/os.go
@@ -25,13 +25,28 @@ import (

// New returns a new instance of the os-namespaced template functions.
func New(deps *deps.Deps) *Namespace {

+   // Since Hugo 0.38 we can have multiple content dirs. This can make it hard to
+   // reason about where the file is placed relative to the project root.
+   // To make the {{ readFile .Filename }} variant just work, we create a composite
+   // filesystem that first checks the work dir fs and then the content fs.
+   var rfs afero.Fs
+   if deps.Fs != nil {
+       rfs = deps.Fs.WorkingDir
+       if deps.PathSpec != nil && deps.PathSpec.BaseFs != nil {
+           rfs = afero.NewReadOnlyFs(afero.NewCopyOnWriteFs(deps.PathSpec.BaseFs.ContentFs, deps.Fs.WorkingDir))
+       }
+   }
+
    return &Namespace{
+       readFileFs: rfs,
        deps: deps,
    }
}

// Namespace provides template functions for the "os" namespace.
type Namespace struct {
+   readFileFs afero.Fs
    deps *deps.Deps
}

@@ -46,10 +61,10 @@ func (ns *Namespace) Getenv(key interface{}) (string, error) {
    return _os.Getenv(skey), nil
}

-// readFile reads the file named by filename relative to the given basepath
+// readFile reads the file named by filename in the given filesystem
// and returns the contents as a string.
// There is a upper size limit set at 1 megabytes.
-func readFile(fs *afero.BasePathFs, filename string) (string, error) {
+func readFile(fs afero.Fs, filename string) (string, error) {
    if filename == "" {
        return "", errors.New("readFile needs a filename")
    }

@@ -79,7 +94,7 @@ func (ns *Namespace) ReadFile(i interface{}) (string, error) {
        return "", err
    }

-   return readFile(ns.deps.Fs.WorkingDir, s)
+   return readFile(ns.readFileFs, s)
}

// ReadDir lists the directory contents relative to the configured WorkingDir.

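The comment in `New` above describes the lookup order for `readFile`: the working-directory filesystem is consulted first, then the per-language content filesystem, and the result is read-only. A small self-contained sketch of that composition, using in-memory filesystems as stand-ins (file names and contents are made up):

```go
package main

import (
    "fmt"

    "github.com/spf13/afero"
)

func main() {
    workDir := afero.NewMemMapFs()   // stand-in for deps.Fs.WorkingDir
    contentFs := afero.NewMemMapFs() // stand-in for deps.PathSpec.BaseFs.ContentFs

    afero.WriteFile(workDir, "README.md", []byte("from the project root"), 0644)
    afero.WriteFile(contentFs, "post/first.md", []byte("from a content dir"), 0644)

    // Same shape as in New: the overlay (work dir) wins, the content fs is
    // the fallback, and writes are disabled.
    rfs := afero.NewReadOnlyFs(afero.NewCopyOnWriteFs(contentFs, workDir))

    a, _ := afero.ReadFile(rfs, "README.md")     // resolved in the work dir
    b, _ := afero.ReadFile(rfs, "post/first.md") // falls back to the content fs
    fmt.Println(string(a), "|", string(b))
}
```
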
@@ -65,6 +65,7 @@ func TestTemplateFuncsExamples(t *testing.T) {

    v.Set("workingDir", workingDir)
    v.Set("multilingual", true)
+   v.Set("contentDir", "content")
    v.Set("baseURL", "http://mysite.com/hugo/")
    v.Set("CurrentContentLanguage", helpers.NewLanguage("en", v))

@@ -125,7 +126,10 @@ func TestPartialCached(t *testing.T) {
    var data struct {
    }

-   config := newDepsConfig(viper.New())
+   v := viper.New()
+   v.Set("contentDir", "content")
+
+   config := newDepsConfig(v)

    config.WithTemplate = func(templ tpl.TemplateHandler) error {
        err := templ.AddTemplate("partials/"+name, partial)

@@ -35,6 +35,7 @@ func TestHTMLEscape(t *testing.T) {
        "other": "<h1>Hi!</h1>",
    }
    v := viper.New()
+   v.Set("contentDir", "content")
    fs := hugofs.NewMem(v)

    //afero.WriteFile(fs.Source, filepath.Join(workingDir, "README.txt"), []byte("Hugo Rocks!"), 0755)

@@ -25,7 +25,9 @@ import (
func TestRemarshal(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))
    assert := require.New(t)

    tomlExample := `title = "Test Metadata"

@@ -111,7 +113,10 @@ title: Test Metadata
func TestRemarshalComments(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+
+   ns := New(newDeps(v))

    assert := require.New(t)

    input := `

@@ -153,7 +158,9 @@ Hugo = "Rules"
func TestTestRemarshalError(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))
    assert := require.New(t)

    _, err := ns.Remarshal("asdf", "asdf")

@@ -32,7 +32,9 @@ type tstNoStringer struct{}
func TestEmojify(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}

@@ -60,7 +62,9 @@ func TestEmojify(t *testing.T) {
func TestHighlight(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}

@@ -90,7 +94,9 @@ func TestHighlight(t *testing.T) {
func TestHTMLEscape(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}

@@ -118,7 +124,9 @@ func TestHTMLEscape(t *testing.T) {
func TestHTMLUnescape(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}

@@ -146,7 +154,9 @@ func TestHTMLUnescape(t *testing.T) {
func TestMarkdownify(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}

@@ -176,7 +186,9 @@ func TestMarkdownifyBlocksOfText(t *testing.T) {

    assert := require.New(t)

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    text := `
#First

@@ -201,7 +213,9 @@ And then some.
func TestPlainify(t *testing.T) {
    t.Parallel()

-   ns := New(newDeps(viper.New()))
+   v := viper.New()
+   v.Set("contentDir", "content")
+   ns := New(newDeps(v))

    for i, test := range []struct {
        s interface{}
