Mirror of https://github.com/gohugoio/hugo.git
hugofs: Make FileMeta a struct
This commit started out as an investigation into a `concurrent map read write` issue and ended with the map being replaced by a struct. This is easier to reason about, and it's also more efficient:

```
name                                  old time/op    new time/op    delta
SiteNew/Regular_Deep_content_tree-16    71.5ms ± 3%    69.4ms ± 5%     ~     (p=0.200 n=4+4)

name                                  old alloc/op   new alloc/op   delta
SiteNew/Regular_Deep_content_tree-16    29.7MB ± 0%    27.9MB ± 0%  -5.82%  (p=0.029 n=4+4)

name                                  old allocs/op  new allocs/op  delta
SiteNew/Regular_Deep_content_tree-16      313k ± 0%      303k ± 0%  -3.35%  (p=0.029 n=4+4)
```

See #8749
Parent: f27e542442 · Commit: 022c479551
44 changed files with 434 additions and 451 deletions
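For orientation only (not part of the commit): a minimal, self-contained Go sketch of the access pattern this change moves to. Call sites switch from getter calls such as `meta.Filename()` to plain field access (`meta.Filename`), and merging fills only fields that still hold their zero value, so existing values win. The real `FileMeta.Merge` uses `hreflect.IsTruthfulValue`; this sketch substitutes `reflect.Value.IsZero` and a simplified, hypothetical `fileMeta` type.

```go
package main

import (
	"fmt"
	"reflect"
)

// fileMeta mirrors, in simplified form, the idea of the new hugofs.FileMeta:
// plain struct fields instead of a map[string]interface{} keyed by strings.
type fileMeta struct {
	Filename string
	Path     string
	Lang     string
	Weight   int
}

// merge copies fields from `from` into m, but only where m still has the
// zero value -- i.e. values already set on m are kept.
func (m *fileMeta) merge(from *fileMeta) {
	if m == nil || from == nil {
		return
	}
	dst := reflect.Indirect(reflect.ValueOf(m))
	src := reflect.Indirect(reflect.ValueOf(from))
	for i := 0; i < dst.NumField(); i++ {
		if dst.Field(i).IsZero() {
			dst.Field(i).Set(src.Field(i))
		}
	}
}

func main() {
	dst := &fileMeta{Filename: "fd1"}
	src := &fileMeta{Filename: "fs1", Path: "ps1", Lang: "en"}
	dst.merge(src)
	// Filename keeps its existing value; the empty fields are filled from src.
	fmt.Println(dst.Filename, dst.Path, dst.Lang) // fd1 ps1 en
}
```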
@@ -697,7 +697,7 @@ func (c *commandeer) getDirList() ([]string, error) {
 return filepath.SkipDir
 }

-filenames = append(filenames, fi.Meta().Filename())
+filenames = append(filenames, fi.Meta().Filename)
 }

 return nil
@@ -706,7 +706,7 @@ func (c *commandeer) getDirList() ([]string, error) {
 watchFiles := c.hugo().PathSpec.BaseFs.WatchDirs()
 for _, fi := range watchFiles {
 if !fi.IsDir() {
-filenames = append(filenames, fi.Meta().Filename())
+filenames = append(filenames, fi.Meta().Filename)
 continue
 }

@@ -118,7 +118,7 @@ func newContentPathSection(h *hugolib.HugoSites, path string) (string, string) {

 if h != nil {
 for _, dir := range h.BaseFs.Content.Dirs {
-createpath = strings.TrimPrefix(createpath, dir.Meta().Filename())
+createpath = strings.TrimPrefix(createpath, dir.Meta().Filename)
 }
 }

@@ -102,7 +102,7 @@ func GetExecEnviron(workDir string, cfg config.Provider, fs afero.Fs) []string {
 if err == nil {
 for _, fi := range fis {
 key := fmt.Sprintf("HUGO_FILE_%s", strings.ReplaceAll(strings.ToUpper(fi.Name()), ".", "_"))
-value := fi.(hugofs.FileMetaInfo).Meta().Filename()
+value := fi.(hugofs.FileMetaInfo).Meta().Filename
 config.SetEnvVars(&env, key, value)
 }
 }
@@ -123,7 +123,7 @@ func NewContent(

 func targetSite(sites *hugolib.HugoSites, fi hugofs.FileMetaInfo) *hugolib.Site {
 for _, s := range sites.Sites {
-if fi.Meta().Lang() == s.Language().Lang {
+if fi.Meta().Lang == s.Language().Lang {
 return s
 }
 }
@@ -137,7 +137,7 @@ func newContentFromDir(
 cm archetypeMap, name, targetPath string) error {
 for _, f := range cm.otherFiles {
 meta := f.Meta()
-filename := meta.Path()
+filename := meta.Path
 // Just copy the file to destination.
 in, err := meta.Open()
 if err != nil {
@@ -166,7 +166,7 @@ func newContentFromDir(
 }

 for _, f := range cm.contentFiles {
-filename := f.Meta().Path()
+filename := f.Meta().Path
 s := targetSite(sites, f)
 targetFilename := filepath.Join(targetPath, strings.TrimPrefix(filename, archetypeDir))

@@ -274,7 +274,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string

 for _, dir := range sites.BaseFs.Content.Dirs {
 meta := dir.Meta()
-contentDir := meta.Filename()
+contentDir := meta.Filename

 if !strings.HasSuffix(contentDir, helpers.FilePathSeparator) {
 contentDir += helpers.FilePathSeparator
@@ -282,7 +282,7 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string

 if strings.HasPrefix(targetPath, contentDir) {
 siteContentDir = contentDir
-dirLang = meta.Lang()
+dirLang = meta.Lang
 break
 }
 }
@@ -317,8 +317,8 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
 } else {
 var contentDir string
 for _, dir := range sites.BaseFs.Content.Dirs {
-contentDir = dir.Meta().Filename()
-if dir.Meta().Lang() == s.Lang() {
+contentDir = dir.Meta().Filename
+if dir.Meta().Lang == s.Lang() {
 break
 }
 }
@@ -497,7 +497,7 @@ func PrintFs(fs afero.Fs, path string, w io.Writer) {
 var filename string
 var meta interface{}
 if fim, ok := info.(hugofs.FileMetaInfo); ok {
-filename = fim.Meta().Filename()
+filename = fim.Meta().Filename
 meta = fim.Meta()
 }
 fmt.Fprintf(w, " %q %q\t\t%v\n", path, filename, meta)
@@ -23,7 +23,7 @@ import (
 "github.com/spf13/afero"
 )

-func decorateDirs(fs afero.Fs, meta FileMeta) afero.Fs {
+func decorateDirs(fs afero.Fs, meta *FileMeta) afero.Fs {
 ffs := &baseFileDecoratorFs{Fs: fs}

 decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
@@ -82,9 +82,11 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero

 decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {
 // Store away the original in case it's a symlink.
-meta := FileMeta{metaKeyName: fi.Name()}
+meta := NewFileMeta()
+meta.Name = fi.Name()

 if fi.IsDir() {
-meta[metaKeyJoinStat] = func(name string) (FileMetaInfo, error) {
+meta.JoinStatFunc = func(name string) (FileMetaInfo, error) {
 joinedFilename := filepath.Join(filename, name)
 fi, _, err := lstatIfPossible(fs, joinedFilename)
 if err != nil {
@@ -102,7 +104,7 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero

 isSymlink := isSymlink(fi)
 if isSymlink {
-meta[metaKeyOriginalFilename] = filename
+meta.OriginalFilename = filename
 var link string
 var err error
 link, fi, err = evalSymlinks(fs, filename)
@@ -110,7 +112,7 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero
 return nil, err
 }
 filename = link
-meta[metaKeyIsSymlink] = true
+meta.IsSymlink = true
 }

 opener := func() (afero.File, error) {
@@ -17,6 +17,7 @@ package hugofs
 import (
 "os"
 "path/filepath"
+"reflect"
 "runtime"
 "sort"
 "strings"
@@ -27,242 +28,128 @@ import (
 "github.com/pkg/errors"

-"github.com/spf13/cast"
-
 "github.com/gohugoio/hugo/common/hreflect"

 "github.com/spf13/afero"
 )

-const (
-metaKeyFilename = "filename"
-
-metaKeySourceRoot = "sourceRoot"
-metaKeyBaseDir = "baseDir" // Abs base directory of source file.
-metaKeyMountRoot = "mountRoot"
-metaKeyModule = "module"
-metaKeyOriginalFilename = "originalFilename"
-metaKeyName = "name"
-metaKeyPath = "path"
-metaKeyPathWalk = "pathWalk"
-metaKeyLang = "lang"
-metaKeyWeight = "weight"
-metaKeyOrdinal = "ordinal"
-metaKeyFs = "fs"
-metaKeyOpener = "opener"
-metaKeyIsOrdered = "isOrdered"
-metaKeyIsSymlink = "isSymlink"
-metaKeyJoinStat = "joinStat"
-metaKeySkipDir = "skipDir"
-metaKeyClassifier = "classifier"
-metaKeyTranslationBaseName = "translationBaseName"
-metaKeyTranslationBaseNameWithExt = "translationBaseNameWithExt"
-metaKeyTranslations = "translations"
-metaKeyDecoraterPath = "decoratorPath"
-)
-
-type FileMeta map[string]interface{}
-
-func (f FileMeta) GetInt(key string) int {
-return cast.ToInt(f[key])
-}
-
-func (f FileMeta) GetString(key string) string {
-return cast.ToString(f[key])
-}
-
-func (f FileMeta) GetBool(key string) bool {
-return cast.ToBool(f[key])
-}
-
-func (f FileMeta) Filename() string {
-return f.stringV(metaKeyFilename)
-}
-
-func (f FileMeta) OriginalFilename() string {
-return f.stringV(metaKeyOriginalFilename)
-}
-
-func (f FileMeta) SkipDir() bool {
-return f.GetBool(metaKeySkipDir)
-}
-
-func (f FileMeta) TranslationBaseName() string {
-return f.stringV(metaKeyTranslationBaseName)
-}
-
-func (f FileMeta) TranslationBaseNameWithExt() string {
-return f.stringV(metaKeyTranslationBaseNameWithExt)
-}
-
-func (f FileMeta) Translations() []string {
-return cast.ToStringSlice(f[metaKeyTranslations])
-}
-
-func (f FileMeta) Name() string {
-return f.stringV(metaKeyName)
-}
-
-func (f FileMeta) Classifier() files.ContentClass {
-c, found := f[metaKeyClassifier]
-if found {
-return c.(files.ContentClass)
-}
-
-return files.ContentClassFile // For sorting
-}
-
-func (f FileMeta) Lang() string {
-return f.stringV(metaKeyLang)
-}
-
-// Path returns the relative file path to where this file is mounted.
-func (f FileMeta) Path() string {
-return f.stringV(metaKeyPath)
+func NewFileMeta() *FileMeta {
+return &FileMeta{}
 }

 // PathFile returns the relative file path for the file source.
-func (f FileMeta) PathFile() string {
-base := f.stringV(metaKeyBaseDir)
-if base == "" {
+func (f *FileMeta) PathFile() string {
+if f.BaseDir == "" {
 return ""
 }
-return strings.TrimPrefix(strings.TrimPrefix(f.Filename(), base), filepathSeparator)
+return strings.TrimPrefix(strings.TrimPrefix(f.Filename, f.BaseDir), filepathSeparator)
 }

-func (f FileMeta) SourceRoot() string {
-return f.stringV(metaKeySourceRoot)
+type FileMeta struct {
+Name string
+Filename string
+Path string
+PathWalk string
+OriginalFilename string
+BaseDir string
+
+SourceRoot string
+MountRoot string
+Module string
+
+Weight int
+Ordinal int
+IsOrdered bool
+IsSymlink bool
+IsRootFile bool
+Watch bool
+
+Classifier files.ContentClass
+
+SkipDir bool
+
+Lang string
+TranslationBaseName string
+TranslationBaseNameWithExt string
+Translations []string
+
+Fs afero.Fs
+OpenFunc func() (afero.File, error)
+JoinStatFunc func(name string) (FileMetaInfo, error)
 }

-func (f FileMeta) MountRoot() string {
-return f.stringV(metaKeyMountRoot)
-}
-
-func (f FileMeta) Module() string {
-return f.stringV(metaKeyModule)
-}
-
-func (f FileMeta) Weight() int {
-return f.GetInt(metaKeyWeight)
-}
-
-func (f FileMeta) Ordinal() int {
-return f.GetInt(metaKeyOrdinal)
-}
-
-func (f FileMeta) IsOrdered() bool {
-return f.GetBool(metaKeyIsOrdered)
-}
-
-// IsSymlink returns whether this comes from a symlinked file or directory.
-func (f FileMeta) IsSymlink() bool {
-return f.GetBool(metaKeyIsSymlink)
-}
-
-func (f FileMeta) Watch() bool {
-if v, found := f["watch"]; found {
-return v.(bool)
+func (m *FileMeta) Copy() *FileMeta {
+if m == nil {
+return NewFileMeta()
 }
-return false
+c := *m
+return &c
 }

-func (f FileMeta) Fs() afero.Fs {
-if v, found := f[metaKeyFs]; found {
-return v.(afero.Fs)
-}
-return nil
-}
-
-func (f FileMeta) GetOpener() func() (afero.File, error) {
-o, found := f[metaKeyOpener]
-if !found {
-return nil
-}
-return o.(func() (afero.File, error))
-}
-
-func (f FileMeta) Open() (afero.File, error) {
-v, found := f[metaKeyOpener]
-if !found {
-return nil, errors.New("file opener not found")
-}
-return v.(func() (afero.File, error))()
-}
-
-func (f FileMeta) JoinStat(name string) (FileMetaInfo, error) {
-v, found := f[metaKeyJoinStat]
-if !found {
-return nil, os.ErrNotExist
-}
-return v.(func(name string) (FileMetaInfo, error))(name)
-}
-
-func (f FileMeta) stringV(key string) string {
-if v, found := f[key]; found {
-return v.(string)
-}
-return ""
-}
-
-func (f FileMeta) setIfNotZero(key string, val interface{}) {
-if !hreflect.IsTruthful(val) {
+func (m *FileMeta) Merge(from *FileMeta) {
+if m == nil || from == nil {
 return
 }
-f[key] = val
+dstv := reflect.Indirect(reflect.ValueOf(m))
+srcv := reflect.Indirect(reflect.ValueOf(from))
+
+for i := 0; i < dstv.NumField(); i++ {
+v := dstv.Field(i)
+if !hreflect.IsTruthfulValue(v) {
+v.Set(srcv.Field(i))
+}
+}
 }

+func (f *FileMeta) Open() (afero.File, error) {
+if f.OpenFunc == nil {
+return nil, errors.New("OpenFunc not set")
+}
+return f.OpenFunc()
+}
+
+func (f *FileMeta) JoinStat(name string) (FileMetaInfo, error) {
+if f.JoinStatFunc == nil {
+return nil, os.ErrNotExist
+}
+return f.JoinStatFunc(name)
+}
+
 type FileMetaInfo interface {
 os.FileInfo
-Meta() FileMeta
+Meta() *FileMeta
 }

 type fileInfoMeta struct {
 os.FileInfo

-m FileMeta
+m *FileMeta
 }

 // Name returns the file's name. Note that we follow symlinks,
 // if supported by the file system, and the Name given here will be the
 // name of the symlink, which is what Hugo needs in all situations.
 func (fi *fileInfoMeta) Name() string {
-if name := fi.m.Name(); name != "" {
+if name := fi.m.Name; name != "" {
 return name
 }
 return fi.FileInfo.Name()
 }

-func (fi *fileInfoMeta) Meta() FileMeta {
+func (fi *fileInfoMeta) Meta() *FileMeta {
 return fi.m
 }

-func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo {
+func NewFileMetaInfo(fi os.FileInfo, m *FileMeta) FileMetaInfo {
+if m == nil {
+panic("FileMeta must be set")
+}
 if fim, ok := fi.(FileMetaInfo); ok {
-mergeFileMeta(fim.Meta(), m)
+m.Merge(fim.Meta())
 }
 return &fileInfoMeta{FileInfo: fi, m: m}
 }

-func copyFileMeta(m FileMeta) FileMeta {
-c := make(FileMeta)
-for k, v := range m {
-c[k] = v
-}
-return c
-}
-
-// Merge metadata, last entry wins.
-func mergeFileMeta(from, to FileMeta) {
-if from == nil {
-return
-}
-for k, v := range from {
-if _, found := to[k]; !found {
-to[k] = v
-}
-}
-}
-
 type dirNameOnlyFileInfo struct {
 name string
 modTime time.Time
@@ -292,16 +179,16 @@ func (fi *dirNameOnlyFileInfo) Sys() interface{} {
 return nil
 }

-func newDirNameOnlyFileInfo(name string, meta FileMeta, fileOpener func() (afero.File, error)) FileMetaInfo {
+func newDirNameOnlyFileInfo(name string, meta *FileMeta, fileOpener func() (afero.File, error)) FileMetaInfo {
 name = normalizeFilename(name)
 _, base := filepath.Split(name)

-m := copyFileMeta(meta)
-if _, found := m[metaKeyFilename]; !found {
-m.setIfNotZero(metaKeyFilename, name)
+m := meta.Copy()
+if m.Filename == "" {
+m.Filename = name
 }
-m[metaKeyOpener] = fileOpener
-m[metaKeyIsOrdered] = false
+m.OpenFunc = fileOpener
+m.IsOrdered = false

 return NewFileMetaInfo(
 &dirNameOnlyFileInfo{name: base, modTime: time.Now()},
@@ -312,8 +199,8 @@ func newDirNameOnlyFileInfo(name string, meta FileMeta, fileOpener func() (afero
 func decorateFileInfo(
 fi os.FileInfo,
 fs afero.Fs, opener func() (afero.File, error),
-filename, filepath string, inMeta FileMeta) FileMetaInfo {
-var meta FileMeta
+filename, filepath string, inMeta *FileMeta) FileMetaInfo {
+var meta *FileMeta
 var fim FileMetaInfo

 filepath = strings.TrimPrefix(filepath, filepathSeparator)
@@ -322,16 +209,26 @@ func decorateFileInfo(
 if fim, ok = fi.(FileMetaInfo); ok {
 meta = fim.Meta()
 } else {
-meta = make(FileMeta)
+meta = NewFileMeta()
 fim = NewFileMetaInfo(fi, meta)
 }

-meta.setIfNotZero(metaKeyOpener, opener)
-meta.setIfNotZero(metaKeyFs, fs)
-meta.setIfNotZero(metaKeyPath, normalizeFilename(filepath))
-meta.setIfNotZero(metaKeyFilename, normalizeFilename(filename))
+if opener != nil {
+meta.OpenFunc = opener
+}
+if fs != nil {
+meta.Fs = fs
+}
+nfilepath := normalizeFilename(filepath)
+nfilename := normalizeFilename(filename)
+if nfilepath != "" {
+meta.Path = nfilepath
+}
+if nfilename != "" {
+meta.Filename = nfilename
+}

-mergeFileMeta(inMeta, meta)
+meta.Merge(inMeta)

 return fim
 }
@@ -377,6 +274,6 @@ func fromSlash(filenames []string) []string {
 func sortFileInfos(fis []os.FileInfo) {
 sort.Slice(fis, func(i, j int) bool {
 fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo)
-return fimi.Meta().Filename() < fimj.Meta().Filename()
+return fimi.Meta().Filename < fimj.Meta().Filename
 })
 }
hugofs/fileinfo_test.go (new file, 51 lines)
@@ -0,0 +1,51 @@
+// Copyright 2021 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugofs
+
+import (
+"testing"
+
+qt "github.com/frankban/quicktest"
+)
+
+func TestFileMeta(t *testing.T) {
+c := qt.New(t)
+
+c.Run("Merge", func(c *qt.C) {
+src := &FileMeta{
+Filename: "fs1",
+Path: "ps1",
+}
+dst := &FileMeta{
+Filename: "fd1",
+}
+
+dst.Merge(src)
+
+c.Assert(dst.Path, qt.Equals, "ps1")
+c.Assert(dst.Filename, qt.Equals, "fd1")
+})
+
+c.Run("Copy", func(c *qt.C) {
+src := &FileMeta{
+Filename: "fs1",
+Path: "ps1",
+}
+dst := src.Copy()
+
+c.Assert(dst, qt.Not(qt.Equals), src)
+c.Assert(dst, qt.DeepEquals, src)
+})
+
+}
@@ -44,7 +44,7 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
 }

 meta := fi.(FileMetaInfo).Meta()
-lang := meta.Lang()
+lang := meta.Lang

 fileLang, translationBaseName, translationBaseNameWithExt := langInfoFrom(langs, fi.Name())
 weight := 0
@@ -58,14 +58,16 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
 lang = fileLang
 }

-fim := NewFileMetaInfo(fi, FileMeta{
-metaKeyLang: lang,
-metaKeyWeight: weight,
-metaKeyOrdinal: langs[lang],
-metaKeyTranslationBaseName: translationBaseName,
-metaKeyTranslationBaseNameWithExt: translationBaseNameWithExt,
-metaKeyClassifier: files.ClassifyContentFile(fi.Name(), meta.GetOpener()),
-})
+fim := NewFileMetaInfo(
+fi,
+&FileMeta{
+Lang: lang,
+Weight: weight,
+Ordinal: langs[lang],
+TranslationBaseName: translationBaseName,
+TranslationBaseNameWithExt: translationBaseNameWithExt,
+Classifier: files.ClassifyContentFile(fi.Name(), meta.OpenFunc),
+})

 fis[i] = fim
 }
@@ -74,9 +76,9 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
 all := func(fis []os.FileInfo) {
 // Maps translation base name to a list of language codes.
 translations := make(map[string][]string)
-trackTranslation := func(meta FileMeta) {
-name := meta.TranslationBaseNameWithExt()
-translations[name] = append(translations[name], meta.Lang())
+trackTranslation := func(meta *FileMeta) {
+name := meta.TranslationBaseNameWithExt
+translations[name] = append(translations[name], meta.Lang)
 }
 for _, fi := range fis {
 if fi.IsDir() {
@@ -90,9 +92,9 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {

 for _, fi := range fis {
 fim := fi.(FileMetaInfo)
-langs := translations[fim.Meta().TranslationBaseNameWithExt()]
+langs := translations[fim.Meta().TranslationBaseNameWithExt]
 if len(langs) > 0 {
-fim.Meta()["translations"] = sortAndremoveStringDuplicates(langs)
+fim.Meta().Translations = sortAndremoveStringDuplicates(langs)
 }
 }
 }
@@ -108,7 +110,7 @@ func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
 applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
 for i, fi := range fis {
 if fi.IsDir() {
-fis[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename()), "", "", nil)
+fis[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil)
 }
 }
 }
@@ -35,7 +35,7 @@ func TestGlob(t *testing.T) {
 collect := func(pattern string) []string {
 var paths []string
 h := func(fi FileMetaInfo) (bool, error) {
-paths = append(paths, fi.Meta().Path())
+paths = append(paths, fi.Meta().Path)
 return false, nil
 }
 err := Glob(fs, pattern, h)
@@ -59,7 +59,7 @@ var LanguageDirsMerger = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
 m := make(map[string]FileMetaInfo)

 getKey := func(fim FileMetaInfo) string {
-return path.Join(fim.Meta().Lang(), fim.Name())
+return path.Join(fim.Meta().Lang, fim.Name())
 }

 for _, fi := range lofi {
@@ -103,7 +103,7 @@ func (fs *noSymlinkFs) checkSymlinkStatus(name string, fi os.FileInfo) (os.FileI

 if fim, ok := fi.(FileMetaInfo); ok {
 meta := fim.Meta()
-metaIsSymlink = meta.IsSymlink()
+metaIsSymlink = meta.IsSymlink
 }

 if metaIsSymlink {
@@ -55,19 +55,19 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
 // Extract "blog" from "content/blog"
 rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator)
 if rm.Meta == nil {
-rm.Meta = make(FileMeta)
+rm.Meta = NewFileMeta()
 }

-rm.Meta[metaKeySourceRoot] = rm.To
-rm.Meta[metaKeyBaseDir] = rm.ToBasedir
-rm.Meta[metaKeyMountRoot] = rm.path
-rm.Meta[metaKeyModule] = rm.Module
+rm.Meta.SourceRoot = rm.To
+rm.Meta.BaseDir = rm.ToBasedir
+rm.Meta.MountRoot = rm.path
+rm.Meta.Module = rm.Module

-meta := copyFileMeta(rm.Meta)
+meta := rm.Meta.Copy()

 if !fi.IsDir() {
 _, name := filepath.Split(rm.From)
-meta[metaKeyName] = name
+meta.Name = name
 }

 rm.fi = NewFileMetaInfo(fi, meta)
@@ -114,11 +114,11 @@ func newRootMappingFsFromFromTo(

 // RootMapping describes a virtual file or directory mount.
 type RootMapping struct {
 From string // The virtual mount.
 To string // The source directory or file.
 ToBasedir string // The base of To. May be empty if an absolute path was provided.
 Module string // The module path/ID.
-Meta FileMeta // File metadata (lang etc.)
+Meta *FileMeta // File metadata (lang etc.)

 fi FileMetaInfo
 path string // The virtual mount point, e.g. "blog".
@@ -177,7 +177,7 @@ func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) {
 }

 if !fi.IsDir() {
-mergeFileMeta(r.Meta, fi.(FileMetaInfo).Meta())
+fi.(FileMetaInfo).Meta().Merge(r.Meta)
 }

 fss[i] = fi.(FileMetaInfo)
@@ -304,7 +304,7 @@ func (fs *RootMappingFs) newUnionFile(fis ...FileMetaInfo) (afero.File, error) {
 return f, nil
 }

-rf := &rootMappingFile{File: f, fs: fs, name: meta.Name(), meta: meta}
+rf := &rootMappingFile{File: f, fs: fs, name: meta.Name, meta: meta}
 if len(fis) == 1 {
 return rf, err
 }
@@ -367,7 +367,7 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error)

 for _, fi := range direntries {
 meta := fi.(FileMetaInfo).Meta()
-mergeFileMeta(rm.Meta, meta)
+meta.Merge(rm.Meta)
 if fi.IsDir() {
 name := fi.Name()
 if seen[name] {
@@ -556,7 +556,7 @@ func (fs *RootMappingFs) virtualDirOpener(name string) func() (afero.File, error
 return func() (afero.File, error) { return &rootMappingFile{name: name, fs: fs}, nil }
 }

-func (fs *RootMappingFs) realDirOpener(name string, meta FileMeta) func() (afero.File, error) {
+func (fs *RootMappingFs) realDirOpener(name string, meta *FileMeta) func() (afero.File, error) {
 return func() (afero.File, error) {
 f, err := fs.Fs.Open(name)
 if err != nil {
@@ -570,7 +570,7 @@ type rootMappingFile struct {
 afero.File
 fs *RootMappingFs
 name string
-meta FileMeta
+meta *FileMeta
 }

 func (f *rootMappingFile) Close() error {
@@ -49,27 +49,27 @@ func TestLanguageRootMapping(t *testing.T) {
 RootMapping{
 From: "content/blog", // Virtual path, first element is one of content, static, layouts etc.
 To: "themes/a/mysvblogcontent", // Real path
-Meta: FileMeta{"lang": "sv"},
+Meta: &FileMeta{Lang: "sv"},
 },
 RootMapping{
 From: "content/blog",
 To: "themes/a/myenblogcontent",
-Meta: FileMeta{"lang": "en"},
+Meta: &FileMeta{Lang: "en"},
 },
 RootMapping{
 From: "content/blog",
 To: "content/sv",
-Meta: FileMeta{"lang": "sv"},
+Meta: &FileMeta{Lang: "sv"},
 },
 RootMapping{
 From: "content/blog",
 To: "themes/a/myotherenblogcontent",
-Meta: FileMeta{"lang": "en"},
+Meta: &FileMeta{Lang: "en"},
 },
 RootMapping{
 From: "content/docs",
 To: "themes/a/mysvdocs",
-Meta: FileMeta{"lang": "sv"},
+Meta: &FileMeta{Lang: "sv"},
 },
 )

@@ -122,13 +122,13 @@ func TestLanguageRootMapping(t *testing.T) {
 }

 rfsEn := rfs.Filter(func(rm RootMapping) bool {
-return rm.Meta.Lang() == "en"
+return rm.Meta.Lang == "en"
 })

 c.Assert(getDirnames("content/blog", rfsEn), qt.DeepEquals, []string{"d1", "en-f.txt", "en-f2.txt"})

 rfsSv := rfs.Filter(func(rm RootMapping) bool {
-return rm.Meta.Lang() == "sv"
+return rm.Meta.Lang == "sv"
 })

 c.Assert(getDirnames("content/blog", rfsSv), qt.DeepEquals, []string{"d1", "sv-f.txt", "svdir"})
@@ -157,7 +157,7 @@ func TestRootMappingFsDirnames(t *testing.T) {
 c.Assert(err, qt.IsNil)
 c.Assert(fif.Name(), qt.Equals, "myfile.txt")
 fifm := fif.(FileMetaInfo).Meta()
-c.Assert(fifm.Filename(), qt.Equals, filepath.FromSlash("f2t/myfile.txt"))
+c.Assert(fifm.Filename, qt.Equals, filepath.FromSlash("f2t/myfile.txt"))

 root, err := rfs.Open("static")
 c.Assert(err, qt.IsNil)
@@ -185,7 +185,7 @@ func TestRootMappingFsFilename(t *testing.T) {
 fi, err := rfs.Stat(filepath.FromSlash("static/f1/foo/file.txt"))
 c.Assert(err, qt.IsNil)
 fim := fi.(FileMetaInfo)
-c.Assert(fim.Meta().Filename(), qt.Equals, testfilename)
+c.Assert(fim.Meta().Filename, qt.Equals, testfilename)
 _, err = rfs.Stat(filepath.FromSlash("static/f1"))
 c.Assert(err, qt.IsNil)
 }
@@ -209,30 +209,30 @@ func TestRootMappingFsMount(t *testing.T) {
 {
 From: "content/blog",
 To: "mynoblogcontent",
-Meta: FileMeta{"lang": "no"},
+Meta: &FileMeta{Lang: "no"},
 },
 {
 From: "content/blog",
 To: "myenblogcontent",
-Meta: FileMeta{"lang": "en"},
+Meta: &FileMeta{Lang: "en"},
 },
 {
 From: "content/blog",
 To: "mysvblogcontent",
-Meta: FileMeta{"lang": "sv"},
+Meta: &FileMeta{Lang: "sv"},
 },
 // Files
 {
 From: "content/singles/p1.md",
 To: "singlefiles/no.txt",
 ToBasedir: "singlefiles",
-Meta: FileMeta{"lang": "no"},
+Meta: &FileMeta{Lang: "no"},
 },
 {
 From: "content/singles/p1.md",
 To: "singlefiles/sv.txt",
 ToBasedir: "singlefiles",
-Meta: FileMeta{"lang": "sv"},
+Meta: &FileMeta{Lang: "sv"},
 },
 }

@@ -243,7 +243,7 @@ func TestRootMappingFsMount(t *testing.T) {
 c.Assert(err, qt.IsNil)
 c.Assert(blog.IsDir(), qt.Equals, true)
 blogm := blog.(FileMetaInfo).Meta()
-c.Assert(blogm.Lang(), qt.Equals, "no") // First match
+c.Assert(blogm.Lang, qt.Equals, "no") // First match

 f, err := blogm.Open()
 c.Assert(err, qt.IsNil)
@@ -261,7 +261,7 @@ func TestRootMappingFsMount(t *testing.T) {
 c.Assert(testfilefi.Name(), qt.Equals, testfile)

 testfilem := testfilefi.(FileMetaInfo).Meta()
-c.Assert(testfilem.Filename(), qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt"))
+c.Assert(testfilem.Filename, qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt"))

 tf, err := testfilem.Open()
 c.Assert(err, qt.IsNil)
@@ -283,7 +283,7 @@ func TestRootMappingFsMount(t *testing.T) {
 for i, lang := range []string{"no", "sv"} {
 fi := singles[i].(FileMetaInfo)
 c.Assert(fi.Meta().PathFile(), qt.Equals, filepath.FromSlash("themes/a/singlefiles/"+lang+".txt"))
-c.Assert(fi.Meta().Lang(), qt.Equals, lang)
+c.Assert(fi.Meta().Lang, qt.Equals, lang)
 c.Assert(fi.Name(), qt.Equals, "p1.md")
 }
 }
@@ -431,7 +431,7 @@ func TestRootMappingFsOs(t *testing.T) {
 }
 i++
 meta := fi.(FileMetaInfo).Meta()
-c.Assert(meta.Filename(), qt.Equals, filepath.Join(d, fmt.Sprintf("/d1/d2/d3/f-%d.txt", i)))
+c.Assert(meta.Filename, qt.Equals, filepath.Join(d, fmt.Sprintf("/d1/d2/d3/f-%d.txt", i)))
 c.Assert(meta.PathFile(), qt.Equals, filepath.FromSlash(fmt.Sprintf("d1/d2/d3/f-%d.txt", i)))
 }
@@ -144,7 +144,7 @@ func (fs *SliceFs) getOpener(name string) func() (afero.File, error) {
 func (fs *SliceFs) pickFirst(name string) (os.FileInfo, int, error) {
 for i, mfs := range fs.dirs {
 meta := mfs.Meta()
-fs := meta.Fs()
+fs := meta.Fs
 fi, _, err := lstatIfPossible(fs, name)
 if err == nil {
 // Gotta match!
@@ -162,8 +162,8 @@ func (fs *SliceFs) pickFirst(name string) (os.FileInfo, int, error) {
 }

 func (fs *SliceFs) readDirs(name string, startIdx, count int) ([]os.FileInfo, error) {
-collect := func(lfs FileMeta) ([]os.FileInfo, error) {
-d, err := lfs.Fs().Open(name)
+collect := func(lfs *FileMeta) ([]os.FileInfo, error) {
+d, err := lfs.Fs.Open(name)
 if err != nil {
 if !os.IsNotExist(err) {
 return nil, err
@@ -204,7 +204,7 @@ func (fs *SliceFs) readDirs(name string, startIdx, count int) ([]os.FileInfo, er
 duplicates = append(duplicates, i)
 } else {
 // Make sure it's opened by this filesystem.
-dirs[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename()), "", "", nil)
+dirs[i] = decorateFileInfo(fi, fs, fs.getOpener(fi.(FileMetaInfo).Meta().Filename), "", "", nil)
 seen[fi.Name()] = true
 }
 }
@@ -73,7 +73,7 @@ type WalkwayConfig struct {
 func NewWalkway(cfg WalkwayConfig) *Walkway {
 var fs afero.Fs
 if cfg.Info != nil {
-fs = cfg.Info.Meta().Fs()
+fs = cfg.Info.Meta().Fs
 } else {
 fs = cfg.Fs
 }
@@ -184,7 +184,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 }

 meta := info.Meta()
-filename := meta.Filename()
+filename := meta.Filename

 if dirEntries == nil {
 f, err := w.fs.Open(path)
@@ -206,7 +206,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo

 dirEntries = fileInfosToFileMetaInfos(fis)

-if !meta.IsOrdered() {
+if !meta.IsOrdered {
 sort.Slice(dirEntries, func(i, j int) bool {
 fii := dirEntries[i]
 fij := dirEntries[j]
@@ -214,7 +214,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 fim, fjm := fii.Meta(), fij.Meta()

 // Pull bundle headers to the top.
-ficlass, fjclass := fim.Classifier(), fjm.Classifier()
+ficlass, fjclass := fim.Classifier, fjm.Classifier
 if ficlass != fjclass {
 return ficlass < fjclass
 }
@@ -222,20 +222,20 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 // With multiple content dirs with different languages,
 // there can be duplicate files, and a weight will be added
 // to the closest one.
-fiw, fjw := fim.Weight(), fjm.Weight()
+fiw, fjw := fim.Weight, fjm.Weight
 if fiw != fjw {
 return fiw > fjw
 }

 // Explicit order set.
-fio, fjo := fim.Ordinal(), fjm.Ordinal()
+fio, fjo := fim.Ordinal, fjm.Ordinal
 if fio != fjo {
 return fio < fjo
 }

 // When we walk into a symlink, we keep the reference to
 // the original name.
-fin, fjn := fim.Name(), fjm.Name()
+fin, fjn := fim.Name, fjm.Name
 if fin != "" && fjn != "" {
 return fin < fjn
 }
@@ -252,7 +252,7 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 meta := fim.Meta()

 // Note that we use the original Name even if it's a symlink.
-name := meta.Name()
+name := meta.Name
 if name == "" {
 name = fim.Name()
 }
@@ -267,13 +267,13 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 pathMeta = strings.TrimPrefix(pathn, w.basePath)
 }

-meta[metaKeyPath] = normalizeFilename(pathMeta)
-meta[metaKeyPathWalk] = pathn
+meta.Path = normalizeFilename(pathMeta)
+meta.PathWalk = pathn

-if fim.IsDir() && w.isSeen(meta.Filename()) {
+if fim.IsDir() && w.isSeen(meta.Filename) {
 // Prevent infinite recursion
 // Possible cyclic reference
-meta[metaKeySkipDir] = true
+meta.SkipDir = true
 }
 }

@@ -291,11 +291,11 @@ func (w *Walkway) walk(path string, info FileMetaInfo, dirEntries []FileMetaInfo
 fim := fi.(FileMetaInfo)
 meta := fim.Meta()

-if meta.SkipDir() {
+if meta.SkipDir {
 continue
 }

-err := w.walk(meta.GetString(metaKeyPathWalk), fim, nil, walkFn)
+err := w.walk(meta.PathWalk, fim, nil, walkFn)
 if err != nil {
 if !fi.IsDir() || err != filepath.SkipDir {
 return err
@@ -14,6 +14,7 @@
 package hugofs

 import (
+"context"
 "fmt"
 "os"
 "path/filepath"
@@ -23,6 +24,7 @@ import (

 "github.com/pkg/errors"

+"github.com/gohugoio/hugo/common/para"
 "github.com/gohugoio/hugo/htesting"

 "github.com/spf13/afero"
@@ -47,38 +49,76 @@ func TestWalk(t *testing.T) {

 func TestWalkRootMappingFs(t *testing.T) {
 c := qt.New(t)
-fs := NewBaseFileDecorator(afero.NewMemMapFs())

-testfile := "test.txt"
+prepare := func(c *qt.C) afero.Fs {
+fs := NewBaseFileDecorator(afero.NewMemMapFs())

-c.Assert(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0755), qt.IsNil)
-c.Assert(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0755), qt.IsNil)
-c.Assert(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0755), qt.IsNil)
+testfile := "test.txt"

-rm := []RootMapping{
-{
-From: "static/b",
-To: "e/f",
-},
-{
-From: "static/a",
-To: "c/d",
-},
+c.Assert(afero.WriteFile(fs, filepath.Join("a/b", testfile), []byte("some content"), 0755), qt.IsNil)
+c.Assert(afero.WriteFile(fs, filepath.Join("c/d", testfile), []byte("some content"), 0755), qt.IsNil)
+c.Assert(afero.WriteFile(fs, filepath.Join("e/f", testfile), []byte("some content"), 0755), qt.IsNil)

-{
-From: "static/c",
-To: "a/b",
-},
+rm := []RootMapping{
+{
+From: "static/b",
+To: "e/f",
+},
+{
+From: "static/a",
+To: "c/d",
+},
+
+{
+From: "static/c",
+To: "a/b",
+},
+}
+
+rfs, err := NewRootMappingFs(fs, rm...)
+c.Assert(err, qt.IsNil)
+return afero.NewBasePathFs(rfs, "static")
 }

-rfs, err := NewRootMappingFs(fs, rm...)
-c.Assert(err, qt.IsNil)
-bfs := afero.NewBasePathFs(rfs, "static")
+c.Run("Basic", func(c *qt.C) {
+bfs := prepare(c)

-names, err := collectFilenames(bfs, "", "")
+names, err := collectFilenames(bfs, "", "")

-c.Assert(err, qt.IsNil)
-c.Assert(names, qt.DeepEquals, []string{"a/test.txt", "b/test.txt", "c/test.txt"})
+c.Assert(err, qt.IsNil)
+c.Assert(names, qt.DeepEquals, []string{"a/test.txt", "b/test.txt", "c/test.txt"})
+})
+
+c.Run("Para", func(c *qt.C) {
+bfs := prepare(c)
+
+p := para.New(4)
+r, _ := p.Start(context.Background())
+
+for i := 0; i < 8; i++ {
+r.Run(func() error {
+_, err := collectFilenames(bfs, "", "")
+if err != nil {
+return err
+}
+fi, err := bfs.Stat("b/test.txt")
+if err != nil {
+return err
+}
+meta := fi.(FileMetaInfo).Meta()
+if meta.Filename == "" {
+return errors.New("fail")
+}
+return nil
+})
+}
+
+c.Assert(r.Wait(), qt.IsNil)
+})
 }

 func skipSymlink() bool {
@@ -157,7 +197,7 @@ func collectFilenames(fs afero.Fs, base, root string) ([]string, error) {
 return nil
 }

-filename := info.Meta().Path()
+filename := info.Meta().Path
 filename = filepath.ToSlash(filename)

 names = append(names, filename)
@@ -221,7 +261,7 @@ func BenchmarkWalk(b *testing.B) {
 return nil
 }

-filename := info.Meta().Filename()
+filename := info.Meta().Filename
 if !strings.HasPrefix(filename, "root") {
 return errors.New(filename)
 }
@@ -101,9 +101,9 @@ func newContentMap(cfg contentMapConfig) *contentMap {
 n := v.(*contentNode)
 if n.p != nil && !n.p.File().IsZero() {
 meta := n.p.File().FileInfo().Meta()
-if meta.Path() != meta.PathFile() {
+if meta.Path != meta.PathFile() {
 // Keep track of the original mount source.
-mountKey := filepath.ToSlash(filepath.Join(meta.Module(), meta.PathFile()))
+mountKey := filepath.ToSlash(filepath.Join(meta.Module, meta.PathFile()))
 addToReverseMap(mountKey, n, m)
 }
 }
@@ -198,9 +198,9 @@ func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilde
 b.newTopLevel()
 m := b.m
 meta := fi.Meta()
-p := cleanTreeKey(meta.Path())
+p := cleanTreeKey(meta.Path)
 bundlePath := m.getBundleDir(meta)
-isBundle := meta.Classifier().IsBundle()
+isBundle := meta.Classifier.IsBundle()
 if isBundle {
 panic("not implemented")
 }
@@ -211,7 +211,7 @@ func (b *cmInsertKeyBuilder) WithFile(fi hugofs.FileMetaInfo) *cmInsertKeyBuilde
 return b
 }

-id := k + m.reduceKeyPart(p, fi.Meta().Path())
+id := k + m.reduceKeyPart(p, fi.Meta().Path)
 b.tree = b.m.resources
 b.key = id
 b.baseKey = p
@@ -347,7 +347,7 @@ func (m *contentMap) AddFiles(fis ...hugofs.FileMetaInfo) error {
 func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hugofs.FileMetaInfo) error {
 var (
 meta = header.Meta()
-classifier = meta.Classifier()
+classifier = meta.Classifier
 isBranch = classifier == files.ContentClassBranch
 bundlePath = m.getBundleDir(meta)

@@ -387,7 +387,7 @@ func (m *contentMap) AddFilesBundle(header hugofs.FileMetaInfo, resources ...hug
 }

 for _, r := range resources {
-rb := b.ForResource(cleanTreeKey(r.Meta().Path()))
+rb := b.ForResource(cleanTreeKey(r.Meta().Path))
 rb.Insert(&contentNode{fi: r})
 }

@@ -462,12 +462,12 @@ func (m *contentMap) CreateMissingNodes() error {
 return nil
 }

-func (m *contentMap) getBundleDir(meta hugofs.FileMeta) string {
-dir := cleanTreeKey(filepath.Dir(meta.Path()))
+func (m *contentMap) getBundleDir(meta *hugofs.FileMeta) string {
+dir := cleanTreeKey(filepath.Dir(meta.Path))

-switch meta.Classifier() {
+switch meta.Classifier {
 case files.ContentClassContent:
-return path.Join(dir, meta.TranslationBaseName())
+return path.Join(dir, meta.TranslationBaseName)
 default:
 return dir
 }
@@ -476,7 +476,7 @@ func (m *contentMap) getBundleDir(meta hugofs.FileMeta) string {
 func (m *contentMap) newContentNodeFromFi(fi hugofs.FileMetaInfo) *contentNode {
 return &contentNode{
 fi: fi,
-path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path()), "/"),
+path: strings.TrimPrefix(filepath.ToSlash(fi.Meta().Path), "/"),
 }
 }

@@ -704,7 +704,7 @@ func (m *contentMap) testDump() string {
 sb.WriteString("|p:" + c.p.Title())
 }
 if c.fi != nil {
-sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path()))
+sb.WriteString("|f:" + filepath.ToSlash(c.fi.Meta().Path))
 }
 return sb.String()
 }
@@ -716,13 +716,13 @@ func (m *contentMap) testDump() string {
 resourcesPrefix += cmLeafSeparator

 m.pages.WalkPrefix(s+cmBranchSeparator, func(s string, v interface{}) bool {
-sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
+sb.WriteString("\t - P: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
 return false
 })
 }

 m.resources.WalkPrefix(resourcesPrefix, func(s string, v interface{}) bool {
-sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename()) + "\n")
+sb.WriteString("\t - R: " + filepath.ToSlash((v.(*contentNode).fi.(hugofs.FileMetaInfo)).Meta().Filename) + "\n")
 return false
 })
@@ -117,7 +117,7 @@ func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapB
 return nil, err
 }

-if n.fi.Meta().GetBool(walkIsRootFileMetaKey) {
+if n.fi.Meta().IsRootFile {
 // Make sure that the bundle/section we start walking from is always
 // rendered.
 // This is only relevant in server fast render mode.
@@ -249,7 +249,7 @@ func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resour
 return meta.Open()
 }

-target := strings.TrimPrefix(meta.Path(), owner.File().Dir())
+target := strings.TrimPrefix(meta.Path, owner.File().Dir())

 return owner.s.ResourceSpec.New(
 resources.ResourceSourceDescriptor{
@@ -394,7 +394,7 @@ func (m *pageMap) assembleResources(s string, p *pageState, parentBucket *pagesM
 m.resources.WalkPrefix(s, func(s string, v interface{}) bool {
 n := v.(*contentNode)
 meta := n.fi.Meta()
-classifier := meta.Classifier()
+classifier := meta.Classifier
 var r resource.Resource
 switch classifier {
 case files.ContentClassContent:
@@ -51,9 +51,9 @@ func BenchmarkContentMap(b *testing.B) {
 meta := fi.Meta()
 // We have a more elaborate filesystem setup in the
 // real flow, so simulate this here.
-meta["lang"] = lang
-meta["path"] = meta.Filename()
-meta["classifier"] = files.ClassifyContentFile(fi.Name(), meta.GetOpener())
+meta.Lang = lang
+meta.Path = meta.Filename
+meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc)
 })
 }

@@ -109,10 +109,10 @@ func TestContentMap(t *testing.T) {
 meta := fi.Meta()
 // We have a more elaborate filesystem setup in the
 // real flow, so simulate this here.
-meta["lang"] = lang
-meta["path"] = meta.Filename()
-meta["classifier"] = files.ClassifyContentFile(fi.Name(), meta.GetOpener())
-meta["translationBaseName"] = paths.Filename(fi.Name())
+meta.Lang = lang
+meta.Path = meta.Filename
+meta.TranslationBaseName = paths.Filename(fi.Name())
+meta.Classifier = files.ClassifyContentFile(fi.Name(), meta.OpenFunc)
 })
 }

@@ -127,7 +127,7 @@ func TestContentMap(t *testing.T) {

 header := writeFile(c, fs, "blog/a/index.md", "page")

-c.Assert(header.Meta().Lang(), qt.Equals, "en")
+c.Assert(header.Meta().Lang, qt.Equals, "en")

 resources := []hugofs.FileMetaInfo{
 writeFile(c, fs, "blog/a/b/data.json", "data"),
@@ -63,7 +63,7 @@ type BaseFs struct {
 func (fs *BaseFs) WatchDirs() []hugofs.FileMetaInfo {
 var dirs []hugofs.FileMetaInfo
 for _, dir := range fs.AllDirs() {
-if dir.Meta().Watch() {
+if dir.Meta().Watch {
 dirs = append(dirs, dir)
 }
 }
@@ -92,9 +92,9 @@ func (fs *BaseFs) AllDirs() []hugofs.FileMetaInfo {
 // the given filename. The return value is the path and language code.
 func (b *BaseFs) RelContentDir(filename string) string {
 for _, dir := range b.SourceFilesystems.Content.Dirs {
-dirname := dir.Meta().Filename()
+dirname := dir.Meta().Filename
 if strings.HasPrefix(filename, dirname) {
-rel := path.Join(dir.Meta().Path(), strings.TrimPrefix(filename, dirname))
+rel := path.Join(dir.Meta().Path, strings.TrimPrefix(filename, dirname))
 return strings.TrimPrefix(rel, filePathSeparator)
 }
 }
@@ -108,12 +108,12 @@ func (fs *BaseFs) ResolveJSConfigFile(name string) string {
 // First look in assets/_jsconfig
 fi, err := fs.Assets.Fs.Stat(filepath.Join(files.FolderJSConfig, name))
 if err == nil {
-return fi.(hugofs.FileMetaInfo).Meta().Filename()
+return fi.(hugofs.FileMetaInfo).Meta().Filename
 }
 // Fall back to the work dir.
 fi, err = fs.Work.Stat(name)
 if err == nil {
-return fi.(hugofs.FileMetaInfo).Meta().Filename()
+return fi.(hugofs.FileMetaInfo).Meta().Filename
 }

 return ""
@@ -276,11 +276,11 @@ func (s SourceFilesystems) MakeStaticPathRelative(filename string) string {
 func (d *SourceFilesystem) MakePathRelative(filename string) (string, bool) {
 for _, dir := range d.Dirs {
 meta := dir.(hugofs.FileMetaInfo).Meta()
-currentPath := meta.Filename()
+currentPath := meta.Filename

 if strings.HasPrefix(filename, currentPath) {
 rel := strings.TrimPrefix(filename, currentPath)
-if mp := meta.Path(); mp != "" {
+if mp := meta.Path; mp != "" {
 rel = filepath.Join(mp, rel)
 }
 return strings.TrimPrefix(rel, filePathSeparator), true
@@ -295,7 +295,7 @@ func (d *SourceFilesystem) RealFilename(rel string) string {
 return rel
 }
 if realfi, ok := fi.(hugofs.FileMetaInfo); ok {
-return realfi.Meta().Filename()
+return realfi.Meta().Filename
 }

 return rel
@@ -304,7 +304,7 @@ func (d *SourceFilesystem) RealFilename(rel string) string {
 // Contains returns whether the given filename is a member of the current filesystem.
 func (d *SourceFilesystem) Contains(filename string) bool {
 for _, dir := range d.Dirs {
-if strings.HasPrefix(filename, dir.Meta().Filename()) {
+if strings.HasPrefix(filename, dir.Meta().Filename) {
 return true
 }
 }
@@ -316,9 +316,9 @@ func (d *SourceFilesystem) Contains(filename string) bool {
 func (d *SourceFilesystem) Path(filename string) string {
 for _, dir := range d.Dirs {
 meta := dir.Meta()
-if strings.HasPrefix(filename, meta.Filename()) {
-p := strings.TrimPrefix(strings.TrimPrefix(filename, meta.Filename()), filePathSeparator)
-if mountRoot := meta.MountRoot(); mountRoot != "" {
+if strings.HasPrefix(filename, meta.Filename) {
+p := strings.TrimPrefix(strings.TrimPrefix(filename, meta.Filename), filePathSeparator)
+if mountRoot := meta.MountRoot; mountRoot != "" {
 return filepath.Join(mountRoot, p)
 }
 return p
@@ -333,8 +333,8 @@ func (d *SourceFilesystem) RealDirs(from string) []string {
 var dirnames []string
 for _, dir := range d.Dirs {
 meta := dir.Meta()
-dirname := filepath.Join(meta.Filename(), from)
-_, err := meta.Fs().Stat(from)
+dirname := filepath.Join(meta.Filename, from)
+_, err := meta.Fs.Stat(from)

 if err == nil {
 dirnames = append(dirnames, dirname)
@@ -568,9 +568,10 @@ func (b *sourceFilesystemsBuilder) createModFs(
 To: filename,
 ToBasedir: base,
 Module: md.Module.Path(),
-Meta: hugofs.FileMeta{
-"watch": md.Watch(),
-"mountWeight": mountWeight,
+Meta: &hugofs.FileMeta{
+Watch: md.Watch(),
+Weight: mountWeight,
+Classifier: files.ContentClassContent,
 },
 }

@@ -581,7 +582,7 @@ func (b *sourceFilesystemsBuilder) createModFs(
 lang = b.p.DefaultContentLanguage
 }

-rm.Meta["lang"] = lang
+rm.Meta.Lang = lang

 if isContentMount {
 fromToContent = append(fromToContent, rm)
@@ -622,7 +623,7 @@ func (b *sourceFilesystemsBuilder) createModFs(
 lang := l.Lang

 lfs := rmfsStatic.Filter(func(rm hugofs.RootMapping) bool {
-rlang := rm.Meta.Lang()
+rlang := rm.Meta.Lang
 return rlang == "" || rlang == lang
 })

@@ -676,7 +677,7 @@ func printFs(fs afero.Fs, path string, w io.Writer) {
 }
 var filename string
 if fim, ok := info.(hugofs.FileMetaInfo); ok {
-filename = fim.Meta().Filename()
+filename = fim.Meta().Filename
 }
 fmt.Fprintf(w, " %q %q\n", path, filename)
 return nil
@@ -993,7 +993,7 @@ title: P1

 p := b.GetPage("blog/p1.md")
 f := p.File().FileInfo().Meta()
-b.Assert(filepath.ToSlash(f.Path()), qt.Equals, "blog/p1.md")
+b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/p1.md")
 b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "content/blog/p1.md")

 b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(test.workingDir, "layouts", "_default", "single.html")), qt.Equals, filepath.FromSlash("_default/single.html"))
@@ -1046,7 +1046,7 @@ title: P1
 b.Assert(p1_2, qt.Equals, p1_1)

 f := p1_1.File().FileInfo().Meta()
-b.Assert(filepath.ToSlash(f.Path()), qt.Equals, "blog/sub/p1.md")
+b.Assert(filepath.ToSlash(f.Path), qt.Equals, "blog/sub/p1.md")
 b.Assert(filepath.ToSlash(f.PathFile()), qt.Equals, "mycontent/sub/p1.md")
 b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(myPartialsDir, "mypartial.html")), qt.Equals, filepath.FromSlash("partials/mypartial.html"))
 b.Assert(b.H.BaseFs.Layouts.Path(filepath.Join(absShortcodesDir, "myshort.html")), qt.Equals, filepath.FromSlash("shortcodes/myshort.html"))
@@ -281,7 +281,7 @@ func (h *HugoSites) GetContentPage(filename string) page.Page {
 return false
 }

-if b.fi.Meta().Filename() == filename {
+if b.fi.Meta().Filename == filename {
 p = b.p
 return true
 }
@@ -769,7 +769,7 @@ func (h *HugoSites) removePageByFilename(filename string) {
 return false
 }

-return b.fi.Meta().Filename() == filename
+return b.fi.Meta().Filename == filename
 })
 return nil
 })
@@ -919,7 +919,7 @@ func (h *HugoSites) errWithFileContext(err error, f source.File) error {
 return err
 }

-realFilename := fim.Meta().Filename()
+realFilename := fim.Meta().Filename

 err, _ = herrors.WithFileContextForFile(
 err,
@@ -1079,12 +1079,12 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, bundleDirT

 func (m *contentChangeMap) addSymbolicLinkMapping(fim hugofs.FileMetaInfo) {
 meta := fim.Meta()
-if !meta.IsSymlink() {
+if !meta.IsSymlink {
 return
 }
 m.symContentMu.Lock()

-from, to := meta.Filename(), meta.OriginalFilename()
+from, to := meta.Filename, meta.OriginalFilename
 if fim.IsDir() {
 if !strings.HasSuffix(from, helpers.FilePathSeparator) {
 from += helpers.FilePathSeparator
@@ -356,7 +356,7 @@ func (h *HugoSites) postProcess() error {
 h.Log.Warnf("Failed to resolve jsconfig.json dir: %s", err)
 } else {
 m := fi.(hugofs.FileMetaInfo).Meta()
-assetsDir := m.SourceRoot()
+assetsDir := m.SourceRoot
 if strings.HasPrefix(assetsDir, h.ResourceSpec.WorkingDir) {
 if jsConfig := h.ResourceSpec.JSConfigBuilder.Build(assetsDir); jsConfig != nil {
@@ -1006,7 +1006,7 @@ func (s *Site) sectionsFromFile(fi source.File) []string {
 parts := strings.Split(dirname, helpers.FilePathSeparator)

 if fii, ok := fi.(*fileInfo); ok {
-if len(parts) > 0 && fii.FileInfo().Meta().Classifier() == files.ContentClassLeaf {
+if len(parts) > 0 && fii.FileInfo().Meta().Classifier == files.ContentClassLeaf {
 // my-section/mybundle/index.md => my-section
 return parts[:len(parts)-1]
 }
@@ -714,7 +714,7 @@ func (p *pageMeta) applyDefaultValues(n *contentNode) error {
 } else {
 source := p.File()
 if fi, ok := source.(*fileInfo); ok {
-class := fi.FileInfo().Meta().Classifier()
+class := fi.FileInfo().Meta().Classifier
 switch class {
 case files.ContentClassBranch, files.ContentClassLeaf:
 p.bundleType = class
@@ -290,7 +290,8 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {

 c.Assert(len(s.RegularPages()), qt.Equals, 8)
 c.Assert(len(s.Pages()), qt.Equals, 16)
-// dumpPages(s.AllPages()...)
+//dumpPages(s.AllPages()...)
+
 c.Assert(len(s.AllPages()), qt.Equals, 31)

 bundleWithSubPath := s.getPage(page.KindPage, "lb/index")
@@ -246,8 +246,8 @@ func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref
base = context.SectionsPath()
} else {
meta := context.File().FileInfo().Meta()
base = filepath.ToSlash(filepath.Dir(meta.Path()))
if meta.Classifier() == files.ContentClassLeaf {
base = filepath.ToSlash(filepath.Dir(meta.Path))
if meta.Classifier == files.ContentClassLeaf {
// Bundles are stored in subfolders e.g. blog/mybundle/index.md,
// so if the user has not explicitly asked to go up,
// look on the "blog" level.

@@ -304,11 +304,11 @@ func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref

var module string
if context != nil && !context.File().IsZero() {
module = context.File().FileInfo().Meta().Module()
module = context.File().FileInfo().Meta().Module
}

if module == "" && !c.pageMap.s.home.File().IsZero() {
module = c.pageMap.s.home.File().FileInfo().Meta().Module()
module = c.pageMap.s.home.File().FileInfo().Meta().Module
}

if module != "" {
@@ -105,7 +105,7 @@ func (c *pagesCollector) isCascadingEdit(dir contentDirKey) (bool, string) {
var isCascade bool

c.contentMap.walkBranchesPrefix(prefix, func(s string, n *contentNode) bool {
if n.fi == nil || dir.filename != n.fi.Meta().Filename() {
if n.fi == nil || dir.filename != n.fi.Meta().Filename {
return false
}

@@ -198,7 +198,7 @@ func (c *pagesCollector) Collect() (collectErr error) {
default:
// We always start from a directory.
collectErr = c.collectDir(dir.dirname, true, func(fim hugofs.FileMetaInfo) bool {
return dir.filename == fim.Meta().Filename()
return dir.filename == fim.Meta().Filename
})
}

@@ -213,12 +213,12 @@ func (c *pagesCollector) Collect() (collectErr error) {
}

func (c *pagesCollector) isBundleHeader(fi hugofs.FileMetaInfo) bool {
class := fi.Meta().Classifier()
class := fi.Meta().Classifier
return class == files.ContentClassLeaf || class == files.ContentClassBranch
}

func (c *pagesCollector) getLang(fi hugofs.FileMetaInfo) string {
lang := fi.Meta().Lang()
lang := fi.Meta().Lang
if lang != "" {
return lang
}

@@ -253,7 +253,7 @@ func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirTyp
}

clone := c.cloneFileInfo(source.header)
clone.Meta()["lang"] = lang
clone.Meta().Lang = lang

return &fileinfoBundle{
header: clone,

@@ -265,10 +265,10 @@ func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirTyp
isBundleHeader := c.isBundleHeader(info)
if bundle != nil && isBundleHeader {
// index.md file inside a bundle, see issue 6208.
info.Meta()["classifier"] = files.ContentClassContent
info.Meta().Classifier = files.ContentClassContent
isBundleHeader = false
}
classifier := info.Meta().Classifier()
classifier := info.Meta().Classifier
isContent := classifier == files.ContentClassContent
if bundle == nil {
if isBundleHeader {

@@ -295,14 +295,14 @@ func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirTyp
}

if classifier == files.ContentClassFile {
translations := info.Meta().Translations()
translations := info.Meta().Translations

for lang, b := range bundles {
if !stringSliceContains(lang, translations...) && !b.containsResource(info.Name()) {

// Clone and add it to the bundle.
clone := c.cloneFileInfo(info)
clone.Meta()["lang"] = lang
clone.Meta().Lang = lang
b.resources = append(b.resources, clone)
}
}
@@ -312,16 +312,7 @@ func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirTyp
}

func (c *pagesCollector) cloneFileInfo(fi hugofs.FileMetaInfo) hugofs.FileMetaInfo {
cm := hugofs.FileMeta{}
meta := fi.Meta()
if meta == nil {
panic(fmt.Sprintf("not meta: %v", fi.Name()))
}
for k, v := range meta {
cm[k] = v
}

return hugofs.NewFileMetaInfo(fi, cm)
return hugofs.NewFileMetaInfo(fi, hugofs.NewFileMeta())
}

func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(fim hugofs.FileMetaInfo) bool) error {
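With the map gone, `cloneFileInfo` above no longer copies keys one by one, and the bundle code earlier in this file simply overwrites a single field on the clone (`clone.Meta().Lang = lang`). A rough sketch of why a struct makes that cheap, again using an illustrative stand-in type rather than the real `hugofs.FileMeta` (the field values are hypothetical):

```go
package main

import "fmt"

// Illustrative stand-in for the struct-based FileMeta (not the hugofs type).
type FileMeta struct {
	Filename string
	Lang     string
}

func main() {
	src := FileMeta{Filename: "/work/content/post/index.md", Lang: "en"} // hypothetical values

	// A struct copy is a plain value copy; no map iteration is needed,
	// and the per-language clone can then override a single field.
	clone := src
	clone.Lang = "nn"

	fmt.Println(src.Lang, clone.Lang) // prints: en nn
}
```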
@@ -365,11 +356,11 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
}

filter := func(fim hugofs.FileMetaInfo) bool {
if fim.Meta().SkipDir() {
if fim.Meta().SkipDir {
return false
}

if c.sp.IgnoreFile(fim.Meta().Filename()) {
if c.sp.IgnoreFile(fim.Meta().Filename) {
return false
}

@@ -393,7 +384,7 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
}
}
}
walkRoot := dir.Meta().GetBool(walkIsRootFileMetaKey)
walkRoot := dir.Meta().IsRootFile
readdir = filtered

// We merge language directories, so there can be duplicates, but they

@@ -408,12 +399,10 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
}

meta := fi.Meta()
if walkRoot {
meta[walkIsRootFileMetaKey] = true
}
class := meta.Classifier()
translationBase := meta.TranslationBaseNameWithExt()
key := pth.Join(meta.Lang(), translationBase)
meta.IsRootFile = walkRoot
class := meta.Classifier
translationBase := meta.TranslationBaseNameWithExt
key := pth.Join(meta.Lang, translationBase)

if seen[key] {
duplicates = append(duplicates, i)

@@ -435,10 +424,10 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
// The branch variant will win because of sort order, but log
// a warning about it.
if thisBtype > bundleNot && btype > bundleNot && thisBtype != btype {
c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename())
c.logger.Warnf("Content directory %q have both index.* and _index.* files, pick one.", dir.Meta().Filename)
// Reclassify it so it will be handled as a content file inside the
// section, which is in line with the <= 0.55 behaviour.
meta["classifier"] = files.ContentClassContent
meta.Classifier = files.ContentClassContent
} else if thisBtype > bundleNot {
btype = thisBtype
}

@@ -488,7 +477,7 @@ func (c *pagesCollector) collectDir(dirname string, partial bool, inFilter func(
fim := fi.(hugofs.FileMetaInfo)
// Make sure the pages in this directory gets re-rendered,
// even in fast render mode.
fim.Meta()[walkIsRootFileMetaKey] = true
fim.Meta().IsRootFile = true

w := hugofs.NewWalkway(hugofs.WalkwayConfig{
Fs: c.fs,

@@ -517,7 +506,7 @@ func (c *pagesCollector) handleBundleBranch(readdir []hugofs.FileMetaInfo) error

meta := fim.Meta()

switch meta.Classifier() {
switch meta.Classifier {
case files.ContentClassContent:
contentFiles = append(contentFiles, fim)
default:
@@ -89,7 +89,7 @@ func (proc *pagesProcessor) Wait() error {
}

func (proc *pagesProcessor) getProcFromFi(fi hugofs.FileMetaInfo) pagesCollectorProcessorProvider {
if p, found := proc.procs[fi.Meta().Lang()]; found {
if p, found := proc.procs[fi.Meta().Lang]; found {
return p
}
return defaultPageProcessor

@@ -151,7 +151,7 @@ func (p *sitePagesProcessor) copyFile(fim hugofs.FileMetaInfo) error {

s := p.m.s

target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path())
target := filepath.Join(s.PathSpec.GetTargetLanguageBasePath(), meta.Path)

defer f.Close()

@@ -171,7 +171,7 @@ func (p *sitePagesProcessor) doProcess(item interface{}) error {
}
meta := v.Meta()

classifier := meta.Classifier()
classifier := meta.Classifier
switch classifier {
case files.ContentClassContent:
if err := m.AddFilesBundle(v); err != nil {

@@ -192,5 +192,5 @@ func (p *sitePagesProcessor) doProcess(item interface{}) error {

func (p *sitePagesProcessor) shouldSkip(fim hugofs.FileMetaInfo) bool {
// TODO(ep) unify
return p.m.s.SourceSpec.DisabledLanguages[fim.Meta().Lang()]
return p.m.s.SourceSpec.DisabledLanguages[fim.Meta().Lang]
}
@@ -126,7 +126,7 @@ func errWithFileContext(inerr error, r source.File) error {
}

meta := fim.Meta()
realFilename := meta.Filename()
realFilename := meta.Filename
f, err := meta.Open()
if err != nil {
return inerr
@@ -80,12 +80,12 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error {
}

meta := fi.(hugofs.FileMetaInfo).Meta()
masterFilename := meta.Filename()
masterFilename := meta.Filename
f, err := meta.Open()
if err != nil {
return errors.Wrap(err, "npm pack: failed to open package file")
}
b = newPackageBuilder(meta.Module(), f)
b = newPackageBuilder(meta.Module, f)
f.Close()

for _, fi := range fis {

@@ -100,7 +100,7 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error {

meta := fi.(hugofs.FileMetaInfo).Meta()

if meta.Filename() == masterFilename {
if meta.Filename == masterFilename {
continue
}

@@ -108,7 +108,7 @@ func Pack(fs afero.Fs, fis []hugofs.FileMetaInfo) error {
if err != nil {
return errors.Wrap(err, "npm pack: failed to open package file")
}
b.Add(meta.Module(), f)
b.Add(meta.Module, f)
f.Close()
}
@@ -311,7 +311,7 @@ func (i *imageResource) doWithImageConfig(conf images.ImageConfig, f func(src im
})
if err != nil {
if i.root != nil && i.root.getFileInfo() != nil {
return nil, errors.Wrapf(err, "image %q", i.root.getFileInfo().Meta().Filename())
return nil, errors.Wrapf(err, "image %q", i.root.getFileInfo().Meta().Filename)
}
}
return img, nil

@@ -365,7 +365,7 @@ func (i *imageResource) getImageMetaCacheTargetPath() string {
cfgHash := i.getSpec().imaging.Cfg.CfgHash
df := i.getResourcePaths().relTargetDirFile
if fi := i.getFileInfo(); fi != nil {
df.dir = filepath.Dir(fi.Meta().Path())
df.dir = filepath.Dir(fi.Meta().Path)
}
p1, _ := paths.FileAndExt(df.file)
h, _ := i.hash()
@@ -72,7 +72,7 @@ func (c *imageCache) getOrCreate(
// For the file cache we want to generate and store it once if possible.
fileKeyPath := relTarget
if fi := parent.root.getFileInfo(); fi != nil {
fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path()))
fileKeyPath.dir = filepath.ToSlash(filepath.Dir(fi.Meta().Path))
}
fileKey := fileKeyPath.path()
@@ -93,7 +93,7 @@ func (c *Client) match(pattern string, firstOnly bool) (resource.Resources, erro
OpenReadSeekCloser: func() (hugio.ReadSeekCloser, error) {
return meta.Open()
},
RelTargetFilename: meta.Path(),
RelTargetFilename: meta.Path,
})
if err != nil {
return true, err
@@ -118,7 +118,7 @@ func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx
return errors.Errorf("inject: file %q not found", ext)
}

opts.Inject[i] = m.Filename()
opts.Inject[i] = m.Filename

}

@@ -150,7 +150,7 @@ func (t *buildTransformation) Transform(ctx *resources.ResourceTransformationCtx
fi, err = t.c.sfs.Fs.Stat(path)
if err == nil {
m := fi.(hugofs.FileMetaInfo).Meta()
path = m.Filename()
path = m.Filename
f, err = m.Open()
}
@@ -143,8 +143,8 @@ func loaderFromFilename(filename string) api.Loader {
return api.LoaderJS
}

func resolveComponentInAssets(fs afero.Fs, impPath string) hugofs.FileMeta {
findFirst := func(base string) hugofs.FileMeta {
func resolveComponentInAssets(fs afero.Fs, impPath string) *hugofs.FileMeta {
findFirst := func(base string) *hugofs.FileMeta {
// This is the most common sub-set of ESBuild's default extensions.
// We assume that imports of JSON, CSS etc. will be using their full
// name with extension.

@@ -158,7 +158,7 @@ func resolveComponentInAssets(fs afero.Fs, impPath string) hugofs.FileMeta {
return nil
}

var m hugofs.FileMeta
var m *hugofs.FileMeta

// First the path as is.
fi, err := fs.Stat(impPath)

@@ -217,8 +217,8 @@ func createBuildPlugins(c *Client, opts Options) ([]api.Plugin, error) {
// This should be a small number of elements, and when
// in server mode, we may get stale entries on renames etc.,
// but that shouldn't matter too much.
c.rs.JSConfigBuilder.AddSourceRoot(m.SourceRoot())
return api.OnResolveResult{Path: m.Filename(), Namespace: nsImportHugo}, nil
c.rs.JSConfigBuilder.AddSourceRoot(m.SourceRoot)
return api.OnResolveResult{Path: m.Filename, Namespace: nsImportHugo}, nil
}

// Fall back to ESBuild's resolve.
@@ -394,7 +394,7 @@ func (imp *importResolver) toFileError(output string) error {
if err != nil {
return inErr
}
realFilename := fi.(hugofs.FileMetaInfo).Meta().Filename()
realFilename := fi.(hugofs.FileMetaInfo).Meta().Filename

ferr := herrors.NewFileError("css", -1, file.Offset+1, 1, inErr)
@@ -90,7 +90,7 @@ func (t *transform) Transform(ctx *resources.ResourceTransformationCtx) error {
for _, ip := range opts.IncludePaths {
info, err := t.c.workFs.Stat(filepath.Clean(ip))
if err == nil {
filename := info.(hugofs.FileMetaInfo).Meta().Filename()
filename := info.(hugofs.FileMetaInfo).Meta().Filename
args.IncludePaths = append(args.IncludePaths, filename)
}
}

@@ -191,7 +191,7 @@ func (t importResolver) CanonicalizeURL(url string) (string, error) {
fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
if err == nil {
if fim, ok := fi.(hugofs.FileMetaInfo); ok {
return "file://" + filepath.ToSlash(fim.Meta().Filename()), nil
return "file://" + filepath.ToSlash(fim.Meta().Filename), nil
}
}
}
@@ -55,7 +55,7 @@ func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx
for _, ip := range options.from.IncludePaths {
info, err := t.c.workFs.Stat(filepath.Clean(ip))
if err == nil {
filename := info.(hugofs.FileMetaInfo).Meta().Filename()
filename := info.(hugofs.FileMetaInfo).Meta().Filename
options.to.IncludePaths = append(options.to.IncludePaths, filename)
}
}

@@ -105,7 +105,7 @@ func (t *toCSSTransformation) Transform(ctx *resources.ResourceTransformationCtx
fi, err := t.c.sfs.Fs.Stat(filenameToCheck)
if err == nil {
if fim, ok := fi.(hugofs.FileMetaInfo); ok {
return fim.Meta().Filename(), "", true
return fim.Meta().Filename, "", true
}
}
}
@@ -1,4 +1,4 @@
// Copyright 2019 The Hugo Authors. All rights reserved.
// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

@@ -225,9 +225,9 @@ func NewTestFile(filename string) *FileInfo {
}

func (sp *SourceSpec) NewFileInfoFrom(path, filename string) (*FileInfo, error) {
meta := hugofs.FileMeta{
"filename": filename,
"path": path,
meta := &hugofs.FileMeta{
Filename: filename,
Path: path,
}

return sp.NewFileInfo(hugofs.NewFileMetaInfo(nil, meta))

@@ -236,16 +236,16 @@ func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) {
func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) {
m := fi.Meta()

filename := m.Filename()
relPath := m.Path()
isLeafBundle := m.Classifier() == files.ContentClassLeaf
filename := m.Filename
relPath := m.Path
isLeafBundle := m.Classifier == files.ContentClassLeaf

if relPath == "" {
return nil, errors.Errorf("no Path provided by %v (%T)", m, m.Fs())
return nil, errors.Errorf("no Path provided by %v (%T)", m, m.Fs)
}

if filename == "" {
return nil, errors.Errorf("no Filename provided by %v (%T)", m, m.Fs())
return nil, errors.Errorf("no Filename provided by %v (%T)", m, m.Fs)
}

relDir := filepath.Dir(relPath)

@@ -256,8 +256,8 @@ func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) {
relDir = relDir + helpers.FilePathSeparator
}

lang := m.Lang()
translationBaseName := m.GetString("translationBaseName")
lang := m.Lang
translationBaseName := m.TranslationBaseName

dir, name := filepath.Split(relPath)
if !strings.HasSuffix(dir, helpers.FilePathSeparator) {
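The `NewFileInfoFrom` hunk above shows the construction side of the same change: a string-keyed map literal becomes a typed struct literal, so a misspelled key turns into a compile error rather than a silent missing value. A hedged sketch of that shape, again with an illustrative stand-in type and hypothetical helper and values (not the real hugofs API):

```go
package main

import "fmt"

// Illustrative stand-in for the struct-based FileMeta (not the hugofs type).
type FileMeta struct {
	Filename string
	Path     string
}

// newFileMetaFrom is a hypothetical helper mirroring the shape of the
// constructor in the hunk above: callers now pass named fields instead of
// "filename"/"path" map keys.
func newFileMetaFrom(path, filename string) *FileMeta {
	return &FileMeta{
		Filename: filename,
		Path:     path,
	}
}

func main() {
	m := newFileMetaFrom("blog/p1.md", "/work/content/blog/p1.md") // hypothetical values
	fmt.Println(m.Filename, m.Path)
}
```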
@@ -80,7 +80,7 @@ func (f *Filesystem) captureFiles() error {
}

meta := fi.Meta()
filename := meta.Filename()
filename := meta.Filename

b, err := f.shouldRead(filename, fi)
if err != nil {

@@ -105,7 +105,7 @@ func (f *Filesystem) captureFiles() error {
}

func (f *Filesystem) shouldRead(filename string, fi hugofs.FileMetaInfo) (bool, error) {
ignore := f.SourceSpec.IgnoreFile(fi.Meta().Filename())
ignore := f.SourceSpec.IgnoreFile(fi.Meta().Filename)

if fi.IsDir() {
if ignore {
@@ -60,7 +60,7 @@ func TestUnicodeNorm(t *testing.T) {
}

ss := newTestSourceSpec()
fi := hugofs.NewFileMetaInfo(nil, hugofs.FileMeta{})
fi := hugofs.NewFileMetaInfo(nil, hugofs.NewFileMeta())

for i, path := range paths {
base := fmt.Sprintf("base%d", i)
@@ -538,7 +538,7 @@ func (t *templateHandler) addTemplateFile(name, path string) error {
realFilename := filename
if fi, err := fs.Stat(filename); err == nil {
if fim, ok := fi.(hugofs.FileMetaInfo); ok {
realFilename = fim.Meta().Filename()
realFilename = fim.Meta().Filename
}
}