hugofs: Add includeFiles and excludeFiles to mount configuration
Fixes #9042
This commit is contained in: parent 94a5bac5b2 / commit 471ed91c60
15 changed files with 797 additions and 133 deletions
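For orientation before the diffs: the new options are set per mount under [module.mounts] in the site configuration. A minimal sketch; the patterns are taken from the test added in hugolib/mount_filters_test.go below, while the slice form is shown as documented rather than copied from the test:

    [module]
    [[module.mounts]]
    source = 'content'
    target = 'content'
    excludeFiles = '/a/c/**'       # a single glob pattern as a string
    [[module.mounts]]
    source = 'data'
    target = 'data'
    includeFiles = ['/mydata/**']  # or one or more patterns as a slice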
@@ -137,8 +137,9 @@ func (b *contentBuilder) buildDir() error {
     if !b.dirMap.siteUsed {
         // We don't need to build everything.
         contentInclusionFilter = glob.NewFilenameFilterForInclusionFunc(func(filename string) bool {
+            filename = strings.TrimPrefix(filename, string(os.PathSeparator))
             for _, cn := range contentTargetFilenames {
-                if strings.HasPrefix(cn, filename) {
+                if strings.Contains(cn, filename) {
                     return true
                 }
             }

@@ -205,7 +206,8 @@ func (b *contentBuilder) buildFile() error {
     if !usesSite {
         // We don't need to build everything.
         contentInclusionFilter = glob.NewFilenameFilterForInclusionFunc(func(filename string) bool {
-            return strings.HasPrefix(contentPlaceholderAbsFilename, filename)
+            filename = strings.TrimPrefix(filename, string(os.PathSeparator))
+            return strings.Contains(contentPlaceholderAbsFilename, filename)
         })
     }
@@ -155,3 +155,15 @@ target
 lang
 : The language code, e.g. "en". Only relevant for `content` mounts, and `static` mounts when in multihost mode.
+
+includeFiles (string or slice)
+: One or more [glob](https://github.com/gobwas/glob) patterns matching files or directories to include. If `excludeFiles` is not set, the files matching `includeFiles` will be the files mounted.
+
+The glob patterns are matched against filenames starting from the `source` root. They should use Unix-style slashes, even on Windows. `/` matches the mount root, and `**` can be used as a super-asterisk to match recursively down all directories, e.g. `/posts/**.jpg`.
+
+The search is case-insensitive.
+
+{{< new-in "0.89.0" >}}
+
+excludeFiles (string or slice)
+: One or more glob patterns matching files to exclude.
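A hedged illustration of the pattern rules just described; the mount and the patterns here are hypothetical, not taken from this commit:

    [[module.mounts]]
    source = 'assets'
    target = 'assets'
    # Patterns are matched from the mount's source root using Unix-style slashes:
    # '/' is the mount root and '**' matches recursively, so this keeps only jpg files under /posts.
    includeFiles = '/posts/**.jpg'
    # Inclusions are checked first (see NewFilenameFilter in hugofs/glob/filename_filter.go),
    # so a file matching both lists is still included.
    excludeFiles = ['/**.tmp']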
@@ -23,6 +23,8 @@ import (
     "strings"
     "time"
 
+    "github.com/gohugoio/hugo/hugofs/glob"
+
     "github.com/gohugoio/hugo/hugofs/files"
     "golang.org/x/text/unicode/norm"
 

@@ -76,6 +78,9 @@ type FileMeta struct {
     Fs           afero.Fs
     OpenFunc     func() (afero.File, error)
     JoinStatFunc func(name string) (FileMetaInfo, error)
+
+    // Include only files or directories that match.
+    InclusionFilter *glob.FilenameFilter
 }
 
 func (m *FileMeta) Copy() *FileMeta {

@@ -95,10 +100,17 @@ func (m *FileMeta) Merge(from *FileMeta) {
 
     for i := 0; i < dstv.NumField(); i++ {
         v := dstv.Field(i)
+        if !v.CanSet() {
+            continue
+        }
         if !hreflect.IsTruthfulValue(v) {
             v.Set(srcv.Field(i))
         }
     }
 
+    if m.InclusionFilter == nil {
+        m.InclusionFilter = from.InclusionFilter
+    }
 }
 
 func (f *FileMeta) Open() (afero.File, error) {
hugofs/filename_filter_fs.go (new file, 170 lines)

// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugofs

import (
    "os"
    "strings"
    "syscall"
    "time"

    "github.com/gohugoio/hugo/hugofs/glob"
    "github.com/spf13/afero"
)

func newFilenameFilterFs(fs afero.Fs, base string, filter *glob.FilenameFilter) afero.Fs {
    return &filenameFilterFs{
        fs:     fs,
        base:   base,
        filter: filter,
    }
}

// filenameFilterFs is a filesystem that filters by filename.
type filenameFilterFs struct {
    base string
    fs   afero.Fs

    filter *glob.FilenameFilter
}

func (fs *filenameFilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
    fi, b, err := fs.fs.(afero.Lstater).LstatIfPossible(name)
    if err != nil {
        return nil, false, err
    }
    if !fs.filter.Match(name, fi.IsDir()) {
        return nil, false, os.ErrNotExist
    }
    return fi, b, nil
}

func (fs *filenameFilterFs) Open(name string) (afero.File, error) {
    fi, err := fs.fs.Stat(name)
    if err != nil {
        return nil, err
    }

    if !fs.filter.Match(name, fi.IsDir()) {
        return nil, os.ErrNotExist
    }

    f, err := fs.fs.Open(name)
    if err != nil {
        return nil, err
    }

    if !fi.IsDir() {
        return f, nil
    }

    return &filenameFilterDir{
        File:   f,
        base:   fs.base,
        filter: fs.filter,
    }, nil
}

func (fs *filenameFilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
    return fs.Open(name)
}

func (fs *filenameFilterFs) Stat(name string) (os.FileInfo, error) {
    fi, _, err := fs.LstatIfPossible(name)
    return fi, err
}

func (fs *filenameFilterFs) getOpener(name string) func() (afero.File, error) {
    return func() (afero.File, error) {
        return fs.Open(name)
    }
}

type filenameFilterDir struct {
    afero.File
    base   string
    filter *glob.FilenameFilter
}

func (f *filenameFilterDir) Readdir(count int) ([]os.FileInfo, error) {
    fis, err := f.File.Readdir(-1)
    if err != nil {
        return nil, err
    }

    var result []os.FileInfo
    for _, fi := range fis {
        fim := fi.(FileMetaInfo)
        if f.filter.Match(strings.TrimPrefix(fim.Meta().Filename, f.base), fim.IsDir()) {
            result = append(result, fi)
        }
    }

    return result, nil
}

func (f *filenameFilterDir) Readdirnames(count int) ([]string, error) {
    dirsi, err := f.Readdir(count)
    if err != nil {
        return nil, err
    }

    dirs := make([]string, len(dirsi))
    for i, d := range dirsi {
        dirs[i] = d.Name()
    }
    return dirs, nil
}

func (fs *filenameFilterFs) Chmod(n string, m os.FileMode) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) Chtimes(n string, a, m time.Time) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) Chown(n string, uid, gid int) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) ReadDir(name string) ([]os.FileInfo, error) {
    panic("not implemented")
}

func (fs *filenameFilterFs) Remove(n string) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) RemoveAll(p string) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) Rename(o, n string) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) Create(n string) (afero.File, error) {
    return nil, syscall.EPERM
}

func (fs *filenameFilterFs) Name() string {
    return "FinameFilterFS"
}

func (fs *filenameFilterFs) Mkdir(n string, p os.FileMode) error {
    return syscall.EPERM
}

func (fs *filenameFilterFs) MkdirAll(n string, p os.FileMode) error {
    return syscall.EPERM
}
hugofs/filename_filter_fs_test.go (new file, 83 lines)

// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugofs

import (
    "errors"
    "fmt"
    "os"
    "path/filepath"
    "testing"

    "github.com/gohugoio/hugo/hugofs/glob"

    "github.com/spf13/afero"

    qt "github.com/frankban/quicktest"
)

func TestFilenameFilterFs(t *testing.T) {
    c := qt.New(t)

    base := filepath.FromSlash("/mybase")

    fs := NewBaseFileDecorator(afero.NewMemMapFs())

    for _, letter := range []string{"a", "b", "c"} {
        for i := 1; i <= 3; i++ {
            c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.txt", i)), []byte("some text file for"+letter), 0755), qt.IsNil)
            c.Assert(afero.WriteFile(fs, filepath.Join(base, letter, fmt.Sprintf("my%d.json", i)), []byte("some json file for"+letter), 0755), qt.IsNil)
        }
    }

    fs = afero.NewBasePathFs(fs, base)

    filter, err := glob.NewFilenameFilter(nil, []string{"/b/**.txt"})
    c.Assert(err, qt.IsNil)

    fs = newFilenameFilterFs(fs, base, filter)

    assertExists := func(filename string, shouldExist bool) {
        filename = filepath.Clean(filename)
        _, err1 := fs.Stat(filename)
        f, err2 := fs.Open(filename)
        if shouldExist {
            c.Assert(err1, qt.IsNil)
            c.Assert(err2, qt.IsNil)
            defer f.Close()

        } else {
            for _, err := range []error{err1, err2} {
                c.Assert(err, qt.Not(qt.IsNil))
                c.Assert(errors.Is(err, os.ErrNotExist), qt.IsTrue)
            }
        }
    }

    assertExists("/a/my1.txt", true)
    assertExists("/b/my1.txt", false)

    dirB, err := fs.Open("/b")
    defer dirB.Close()
    c.Assert(err, qt.IsNil)
    dirBEntries, err := dirB.Readdirnames(-1)
    c.Assert(dirBEntries, qt.DeepEquals, []string{"my1.json", "my2.json", "my3.json"})

    dirC, err := fs.Open("/c")
    defer dirC.Close()
    c.Assert(err, qt.IsNil)
    dirCEntries, err := dirC.Readdirnames(-1)
    c.Assert(dirCEntries, qt.DeepEquals, []string{"my1.json", "my1.txt", "my2.json", "my2.txt", "my3.json", "my3.txt"})

}
hugofs/glob/filename_filter.go (new file, 159 lines)

// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package glob

import (
    "path"
    "path/filepath"
    "strings"

    "github.com/gobwas/glob"
)

type FilenameFilter struct {
    shouldInclude func(filename string) bool
    inclusions    []glob.Glob
    dirInclusions []glob.Glob
    exclusions    []glob.Glob
    isWindows     bool
}

func normalizeFilenameGlobPattern(s string) string {
    // Use Unix separators even on Windows.
    s = filepath.ToSlash(s)
    if !strings.HasPrefix(s, "/") {
        s = "/" + s
    }
    return s
}

// NewFilenameFilter creates a new Glob where the Match method will
// return true if the file should be included.
// Note that the inclusions will be checked first.
func NewFilenameFilter(inclusions, exclusions []string) (*FilenameFilter, error) {
    if inclusions == nil && exclusions == nil {
        return nil, nil
    }
    filter := &FilenameFilter{isWindows: isWindows}

    for _, include := range inclusions {
        include = normalizeFilenameGlobPattern(include)
        g, err := filenamesGlobCache.GetGlob(include)
        if err != nil {
            return nil, err
        }
        filter.inclusions = append(filter.inclusions, g)

        // For mounts that do directory walking (e.g. content) we
        // must make sure that all directories up to this inclusion also
        // gets included.
        dir := path.Dir(include)
        parts := strings.Split(dir, "/")
        for i, _ := range parts {
            pattern := "/" + filepath.Join(parts[:i+1]...)
            g, err := filenamesGlobCache.GetGlob(pattern)
            if err != nil {
                return nil, err
            }
            filter.dirInclusions = append(filter.dirInclusions, g)
        }
    }

    for _, exclude := range exclusions {
        exclude = normalizeFilenameGlobPattern(exclude)
        g, err := filenamesGlobCache.GetGlob(exclude)
        if err != nil {
            return nil, err
        }
        filter.exclusions = append(filter.exclusions, g)
    }

    return filter, nil
}

// MustNewFilenameFilter invokes NewFilenameFilter and panics on error.
func MustNewFilenameFilter(inclusions, exclusions []string) *FilenameFilter {
    filter, err := NewFilenameFilter(inclusions, exclusions)
    if err != nil {
        panic(err)
    }
    return filter
}

// NewFilenameFilterForInclusionFunc create a new filter using the provided inclusion func.
func NewFilenameFilterForInclusionFunc(shouldInclude func(filename string) bool) *FilenameFilter {
    return &FilenameFilter{shouldInclude: shouldInclude, isWindows: isWindows}
}

// Match returns whether filename should be included.
func (f *FilenameFilter) Match(filename string, isDir bool) bool {
    if f == nil {
        return true
    }
    return f.doMatch(filename, isDir)
    /*if f.shouldInclude == nil {
        fmt.Printf("Match: %q (%t) => %t\n", filename, isDir, isMatch)
    }
    return isMatch*/
}

func (f *FilenameFilter) doMatch(filename string, isDir bool) bool {
    if f == nil {
        return true
    }

    if !strings.HasPrefix(filename, filepathSeparator) {
        filename = filepathSeparator + filename
    }

    if f.shouldInclude != nil {
        if f.shouldInclude(filename) {
            return true
        }
        if f.isWindows {
            // The Glob matchers below handles this by themselves,
            // for the shouldInclude we need to take some extra steps
            // to make this robust.
            winFilename := filepath.FromSlash(filename)
            if filename != winFilename {
                if f.shouldInclude(winFilename) {
                    return true
                }
            }
        }
    }

    for _, inclusion := range f.inclusions {
        if inclusion.Match(filename) {
            return true
        }
    }

    if isDir && f.inclusions != nil {
        for _, inclusion := range f.dirInclusions {
            if inclusion.Match(filename) {
                return true
            }
        }
    }

    for _, exclusion := range f.exclusions {
        if exclusion.Match(filename) {
            return false
        }
    }

    return f.inclusions == nil && f.shouldInclude == nil
}
hugofs/glob/filename_filter_test.go (new file, 70 lines)

// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package glob

import (
    "path/filepath"
    "strings"
    "testing"

    qt "github.com/frankban/quicktest"
)

func TestFilenameFilter(t *testing.T) {
    c := qt.New(t)

    excludeAlmostAllJSON, err := NewFilenameFilter([]string{"/a/b/c/foo.json"}, []string{"**.json"})
    c.Assert(err, qt.IsNil)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/data/my.json"), false), qt.Equals, false)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c/foo.json"), false), qt.Equals, true)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c/foo.bar"), false), qt.Equals, false)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b/c"), true), qt.Equals, true)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a/b"), true), qt.Equals, true)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/a"), true), qt.Equals, true)
    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("/"), true), qt.Equals, true)
    c.Assert(excludeAlmostAllJSON.Match("", true), qt.Equals, true)

    excludeAllButFooJSON, err := NewFilenameFilter([]string{"/a/**/foo.json"}, []string{"**.json"})
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/data/my.json"), false), qt.Equals, false)
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c/d/e/foo.json"), false), qt.Equals, true)
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/c"), true), qt.Equals, true)
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/a/b/"), true), qt.Equals, true)
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/"), true), qt.Equals, true)
    c.Assert(excludeAllButFooJSON.Match(filepath.FromSlash("/b"), true), qt.Equals, false)
    c.Assert(err, qt.IsNil)

    nopFilter, err := NewFilenameFilter(nil, nil)
    c.Assert(err, qt.IsNil)
    c.Assert(nopFilter.Match("ab.txt", false), qt.Equals, true)

    includeOnlyFilter, err := NewFilenameFilter([]string{"**.json", "**.jpg"}, nil)
    c.Assert(err, qt.IsNil)
    c.Assert(includeOnlyFilter.Match("ab.json", false), qt.Equals, true)
    c.Assert(includeOnlyFilter.Match("ab.jpg", false), qt.Equals, true)
    c.Assert(includeOnlyFilter.Match("ab.gif", false), qt.Equals, false)

    exlcudeOnlyFilter, err := NewFilenameFilter(nil, []string{"**.json", "**.jpg"})
    c.Assert(err, qt.IsNil)
    c.Assert(exlcudeOnlyFilter.Match("ab.json", false), qt.Equals, false)
    c.Assert(exlcudeOnlyFilter.Match("ab.jpg", false), qt.Equals, false)
    c.Assert(exlcudeOnlyFilter.Match("ab.gif", false), qt.Equals, true)

    var nilFilter *FilenameFilter
    c.Assert(nilFilter.Match("ab.gif", false), qt.Equals, true)

    funcFilter := NewFilenameFilterForInclusionFunc(func(s string) bool { return strings.HasSuffix(s, ".json") })
    c.Assert(funcFilter.Match("ab.json", false), qt.Equals, true)
    c.Assert(funcFilter.Match("ab.bson", false), qt.Equals, false)

}
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2021 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -14,6 +14,7 @@
 package glob
 
 import (
+    "os"
     "path"
     "path/filepath"
     "runtime"

@@ -24,6 +25,8 @@ import (
     "github.com/gobwas/glob/syntax"
 )
 
+const filepathSeparator = string(os.PathSeparator)
+
 var (
     isWindows = runtime.GOOS == "windows"
     defaultGlobCache = &globCache{

@@ -33,7 +36,7 @@ var (
     }
 
     filenamesGlobCache = &globCache{
-        isCaseSensitive: true, // TODO(bep) bench
+        isCaseSensitive: false, // As long as the search strings are all lower case, this does not allocate.
         isWindows:       isWindows,
         cache:           make(map[string]globErr),
     }

@@ -161,78 +164,3 @@ func HasGlobChar(s string) bool {
     }
     return false
 }
-
-type FilenameFilter struct {
-    shouldInclude func(filename string) bool
-    inclusions    []glob.Glob
-    exclusions    []glob.Glob
-    isWindows     bool
-}
-
-// NewFilenameFilter creates a new Glob where the Match method will
-// return true if the file should be exluded.
-// Note that the inclusions will be checked first.
-func NewFilenameFilter(inclusions, exclusions []string) (*FilenameFilter, error) {
-    filter := &FilenameFilter{isWindows: isWindows}
-
-    for _, include := range inclusions {
-        g, err := filenamesGlobCache.GetGlob(filepath.FromSlash(include))
-        if err != nil {
-            return nil, err
-        }
-        filter.inclusions = append(filter.inclusions, g)
-    }
-    for _, exclude := range exclusions {
-        g, err := filenamesGlobCache.GetGlob(filepath.FromSlash(exclude))
-        if err != nil {
-            return nil, err
-        }
-        filter.exclusions = append(filter.exclusions, g)
-    }
-
-    return filter, nil
-}
-
-// NewFilenameFilterForInclusionFunc create a new filter using the provided inclusion func.
-func NewFilenameFilterForInclusionFunc(shouldInclude func(filename string) bool) *FilenameFilter {
-    return &FilenameFilter{shouldInclude: shouldInclude, isWindows: isWindows}
-}
-
-// Match returns whether filename should be included.
-func (f *FilenameFilter) Match(filename string) bool {
-    if f == nil {
-        return true
-    }
-
-    if f.shouldInclude != nil {
-        if f.shouldInclude(filename) {
-            return true
-        }
-        if f.isWindows {
-            // The Glob matchers below handles this by themselves,
-            // for the shouldInclude we need to take some extra steps
-            // to make this robust.
-            winFilename := filepath.FromSlash(filename)
-            if filename != winFilename {
-                if f.shouldInclude(winFilename) {
-                    return true
-                }
-            }
-        }
-    }
-
-    for _, inclusion := range f.inclusions {
-        if inclusion.Match(filename) {
-            return true
-        }
-    }
-
-    for _, exclusion := range f.exclusions {
-        if exclusion.Match(filename) {
-            return false
-        }
-    }
-
-    return f.inclusions == nil && f.shouldInclude == nil
-}
@@ -1,4 +1,4 @@
-// Copyright 2019 The Hugo Authors. All rights reserved.
+// Copyright 2021 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.

@@ -15,7 +15,6 @@ package glob
 
 import (
     "path/filepath"
-    "strings"
     "testing"
 
     qt "github.com/frankban/quicktest"

@@ -67,51 +66,38 @@ func TestNormalizePath(t *testing.T) {
 }
 
 func TestGetGlob(t *testing.T) {
-    c := qt.New(t)
-    g, err := GetGlob("**.JSON")
-    c.Assert(err, qt.IsNil)
-    c.Assert(g.Match("data/my.json"), qt.Equals, true)
-}
-
-func TestFilenameFilter(t *testing.T) {
-    c := qt.New(t)
-
-    excludeAlmostAllJSON, err := NewFilenameFilter([]string{"a/b/c/foo.json"}, []string{"**.json"})
-    c.Assert(err, qt.IsNil)
-    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("data/my.json")), qt.Equals, false)
-    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("a/b/c/foo.json")), qt.Equals, true)
-    c.Assert(excludeAlmostAllJSON.Match(filepath.FromSlash("a/b/c/foo.bar")), qt.Equals, false)
-
-    nopFilter, err := NewFilenameFilter(nil, nil)
-    c.Assert(err, qt.IsNil)
-    c.Assert(nopFilter.Match("ab.txt"), qt.Equals, true)
-
-    includeOnlyFilter, err := NewFilenameFilter([]string{"**.json", "**.jpg"}, nil)
-    c.Assert(err, qt.IsNil)
-    c.Assert(includeOnlyFilter.Match("ab.json"), qt.Equals, true)
-    c.Assert(includeOnlyFilter.Match("ab.jpg"), qt.Equals, true)
-    c.Assert(includeOnlyFilter.Match("ab.gif"), qt.Equals, false)
-
-    exlcudeOnlyFilter, err := NewFilenameFilter(nil, []string{"**.json", "**.jpg"})
-    c.Assert(err, qt.IsNil)
-    c.Assert(exlcudeOnlyFilter.Match("ab.json"), qt.Equals, false)
-    c.Assert(exlcudeOnlyFilter.Match("ab.jpg"), qt.Equals, false)
-    c.Assert(exlcudeOnlyFilter.Match("ab.gif"), qt.Equals, true)
-
-    var nilFilter *FilenameFilter
-    c.Assert(nilFilter.Match("ab.gif"), qt.Equals, true)
-
-    funcFilter := NewFilenameFilterForInclusionFunc(func(s string) bool { return strings.HasSuffix(s, ".json") })
-    c.Assert(funcFilter.Match("ab.json"), qt.Equals, true)
-    c.Assert(funcFilter.Match("ab.bson"), qt.Equals, false)
-
+    for _, cache := range []*globCache{defaultGlobCache, filenamesGlobCache} {
+        c := qt.New(t)
+        g, err := cache.GetGlob("**.JSON")
+        c.Assert(err, qt.IsNil)
+        c.Assert(g.Match("data/my.jSon"), qt.Equals, true)
+    }
 }
 
 func BenchmarkGetGlob(b *testing.B) {
-    for i := 0; i < b.N; i++ {
-        _, err := GetGlob("**/foo")
-        if err != nil {
-            b.Fatal(err)
+    runBench := func(name string, cache *globCache, search string) {
+        b.Run(name, func(b *testing.B) {
+            g, err := GetGlob("**/foo")
+            if err != nil {
+                b.Fatal(err)
+            }
+            for i := 0; i < b.N; i++ {
+                _ = g.Match(search)
+            }
+        })
     }
+
+    runBench("Default cache", defaultGlobCache, "abcde")
+    runBench("Filenames cache, lowercase searchs", filenamesGlobCache, "abcde")
+    runBench("Filenames cache, mixed case searchs", filenamesGlobCache, "abCDe")
+
+    b.Run("GetGlob", func(b *testing.B) {
+        for i := 0; i < b.N; i++ {
+            _, err := GetGlob("**/foo")
+            if err != nil {
+                b.Fatal(err)
+            }
+        }
+    })
 }
@@ -142,6 +142,13 @@ func (r RootMapping) filename(name string) string {
     return filepath.Join(r.To, strings.TrimPrefix(name, r.From))
 }
 
+func (r RootMapping) trimFrom(name string) string {
+    if name == "" {
+        return ""
+    }
+    return strings.TrimPrefix(name, r.From)
+}
+
 // A RootMappingFs maps several roots into one. Note that the root of this filesystem
 // is directories only, and they will be returned in Readdir and Readdirnames
 // in the order given.

@@ -170,7 +177,12 @@ func (fs *RootMappingFs) Dirs(base string) ([]FileMetaInfo, error) {
         p = strings.TrimLeft(p, filepathSeparator)
         return p
     })
-    fs := decorateDirs(bfs, r.Meta)
+
+    fs := bfs
+    if r.Meta.InclusionFilter != nil {
+        fs = newFilenameFilterFs(fs, r.To, r.Meta.InclusionFilter)
+    }
+    fs = decorateDirs(fs, r.Meta)
     fi, err := fs.Stat("")
     if err != nil {
         return nil, errors.Wrap(err, "RootMappingFs.Dirs")

@@ -368,6 +380,10 @@ func (fs *RootMappingFs) collectDirEntries(prefix string) ([]os.FileInfo, error)
     for _, fi := range direntries {
         meta := fi.(FileMetaInfo).Meta()
         meta.Merge(rm.Meta)
+        if !rm.Meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), fi.IsDir()) {
+            continue
+        }
+
         if fi.IsDir() {
             name := fi.Name()
             if seen[name] {

@@ -508,7 +524,14 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) {
     }
 
     fileCount := 0
+    var wasFiltered bool
     for _, root := range roots {
+        meta := root.fi.Meta()
+        if !meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), root.fi.IsDir()) {
+            wasFiltered = true
+            continue
+        }
+
         if !root.fi.IsDir() {
             fileCount++
         }

@@ -518,6 +541,9 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) {
     }
 
     if fileCount == 0 {
+        if wasFiltered {
+            return nil, os.ErrNotExist
+        }
         // Dir only.
         return []FileMetaInfo{newDirNameOnlyFileInfo(name, roots[0].Meta, fs.virtualDirOpener(name))}, nil
     }

@@ -531,6 +557,9 @@ func (fs *RootMappingFs) doLstat(name string) ([]FileMetaInfo, error) {
 }
 
 func (fs *RootMappingFs) statRoot(root RootMapping, name string) (FileMetaInfo, bool, error) {
+    if !root.Meta.InclusionFilter.Match(root.trimFrom(name), root.fi.IsDir()) {
+        return nil, false, os.ErrNotExist
+    }
     filename := root.filename(name)
 
     fi, b, err := lstatIfPossible(fs.Fs, filename)

@@ -586,16 +615,23 @@ func (f *rootMappingFile) Name() string {
 
 func (f *rootMappingFile) Readdir(count int) ([]os.FileInfo, error) {
     if f.File != nil {
 
         fis, err := f.File.Readdir(count)
         if err != nil {
             return nil, err
         }
 
-        for i, fi := range fis {
-            fis[i] = decorateFileInfo(fi, f.fs, nil, "", "", f.meta)
+        var result []os.FileInfo
+        for _, fi := range fis {
+            fim := decorateFileInfo(fi, f.fs, nil, "", "", f.meta)
+            meta := fim.Meta()
+            if f.meta.InclusionFilter.Match(strings.TrimPrefix(meta.Filename, meta.SourceRoot), fim.IsDir()) {
+                result = append(result, fim)
+            }
         }
-        return fis, nil
+        return result, nil
     }
 
     return f.fs.collectDirEntries(f.name)
 }
@@ -20,6 +20,8 @@ import (
     "sort"
     "testing"
 
+    "github.com/gohugoio/hugo/hugofs/glob"
+
     "github.com/gohugoio/hugo/config"
 
     qt "github.com/frankban/quicktest"

@@ -483,3 +485,70 @@ func TestRootMappingFsOsBase(t *testing.T) {
 
     c.Assert(getDirnames("static/a/b/c"), qt.DeepEquals, []string{"d4", "f-1.txt", "f-2.txt", "f-3.txt", "ms-1.txt"})
 }
+
+func TestRootMappingFileFilter(t *testing.T) {
+    c := qt.New(t)
+    fs := NewBaseFileDecorator(afero.NewMemMapFs())
+
+    for _, lang := range []string{"no", "en", "fr"} {
+        for i := 1; i <= 3; i++ {
+            c.Assert(afero.WriteFile(fs, filepath.Join(lang, fmt.Sprintf("my%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil)
+        }
+    }
+
+    for _, lang := range []string{"no", "en", "fr"} {
+        for i := 1; i <= 3; i++ {
+            c.Assert(afero.WriteFile(fs, filepath.Join(lang, "sub", fmt.Sprintf("mysub%s%d.txt", lang, i)), []byte("some text file for"+lang), 0755), qt.IsNil)
+        }
+    }
+
+    rm := []RootMapping{
+        {
+            From: "content",
+            To:   "no",
+            Meta: &FileMeta{Lang: "no", InclusionFilter: glob.MustNewFilenameFilter(nil, []string{"**.txt"})},
+        },
+        {
+            From: "content",
+            To:   "en",
+            Meta: &FileMeta{Lang: "en"},
+        },
+        {
+            From: "content",
+            To:   "fr",
+            Meta: &FileMeta{Lang: "fr", InclusionFilter: glob.MustNewFilenameFilter(nil, []string{"**.txt"})},
+        },
+    }
+
+    rfs, err := NewRootMappingFs(fs, rm...)
+    c.Assert(err, qt.IsNil)
+
+    assertExists := func(filename string, shouldExist bool) {
+        c.Helper()
+        filename = filepath.Clean(filename)
+        _, err1 := rfs.Stat(filename)
+        f, err2 := rfs.Open(filename)
+        if shouldExist {
+            c.Assert(err1, qt.IsNil)
+            c.Assert(err2, qt.IsNil)
+            c.Assert(f.Close(), qt.IsNil)
+        } else {
+            c.Assert(err1, qt.Not(qt.IsNil))
+            c.Assert(err2, qt.Not(qt.IsNil))
+        }
+    }
+
+    assertExists("content/myno1.txt", false)
+    assertExists("content/myen1.txt", true)
+    assertExists("content/myfr1.txt", false)
+
+    dirEntriesSub, err := afero.ReadDir(rfs, filepath.Join("content", "sub"))
+    c.Assert(err, qt.IsNil)
+    c.Assert(len(dirEntriesSub), qt.Equals, 3)
+
+    dirEntries, err := afero.ReadDir(rfs, "content")
+
+    c.Assert(err, qt.IsNil)
+    c.Assert(len(dirEntries), qt.Equals, 4)
+
+}
@@ -25,6 +25,9 @@ import (
     "sync"
 
     "github.com/gohugoio/hugo/htesting"
+    "github.com/gohugoio/hugo/hugofs/glob"
+
+    "github.com/gohugoio/hugo/common/types"
 
     "github.com/gohugoio/hugo/common/loggers"
     "github.com/rogpeppe/go-internal/lockedfile"

@@ -127,7 +130,8 @@ func (b *BaseFs) RelContentDir(filename string) string {
     return filename
 }
 
-// AbsProjectContentDir tries to create a TODO1
+// AbsProjectContentDir tries to construct a filename below the most
+// relevant content directory.
 func (b *BaseFs) AbsProjectContentDir(filename string) (string, string) {
     isAbs := filepath.IsAbs(filename)
     for _, dir := range b.SourceFilesystems.Content.Dirs {

@@ -623,6 +627,14 @@ func (b *sourceFilesystemsBuilder) createModFs(
         mountWeight++
     }
 
+    inclusionFilter, err := glob.NewFilenameFilter(
+        types.ToStringSlicePreserveString(mount.IncludeFiles),
+        types.ToStringSlicePreserveString(mount.ExcludeFiles),
+    )
+    if err != nil {
+        return err
+    }
+
     base, filename := absPathify(mount.Source)
 
     rm := hugofs.RootMapping{

@@ -631,9 +643,10 @@ func (b *sourceFilesystemsBuilder) createModFs(
         ToBasedir: base,
         Module:    md.Module.Path(),
         Meta: &hugofs.FileMeta{
            Watch:      md.Watch(),
            Weight:     mountWeight,
            Classifier: files.ContentClassContent,
+           InclusionFilter: inclusionFilter,
         },
     }
 
hugolib/mount_filters_test.go (new file, 119 lines)

// Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
    "fmt"
    "os"
    "path/filepath"
    "testing"

    "github.com/gohugoio/hugo/common/loggers"

    "github.com/gohugoio/hugo/hugofs/files"

    "github.com/gohugoio/hugo/htesting"
    "github.com/gohugoio/hugo/hugofs"

    qt "github.com/frankban/quicktest"
)

func TestMountFilters(t *testing.T) {
    t.Parallel()
    b := newTestSitesBuilder(t)
    workingDir, clean, err := htesting.CreateTempDir(hugofs.Os, "hugo-test-mountfilters")
    b.Assert(err, qt.IsNil)
    defer clean()

    for _, component := range files.ComponentFolders {
        b.Assert(os.MkdirAll(filepath.Join(workingDir, component), 0777), qt.IsNil)
    }
    b.WithWorkingDir(workingDir).WithLogger(loggers.NewInfoLogger())
    b.WithConfigFile("toml", fmt.Sprintf(`
workingDir = %q

[module]
[[module.mounts]]
source = 'content'
target = 'content'
excludeFiles = "/a/c/**"
[[module.mounts]]
source = 'static'
target = 'static'
[[module.mounts]]
source = 'layouts'
target = 'layouts'
excludeFiles = "/**/foo.html"
[[module.mounts]]
source = 'data'
target = 'data'
includeFiles = "/mydata/**"
[[module.mounts]]
source = 'assets'
target = 'assets'
excludeFiles = "/**exclude.*"
[[module.mounts]]
source = 'i18n'
target = 'i18n'
[[module.mounts]]
source = 'archetypes'
target = 'archetypes'


`, workingDir))

    b.WithContent("/a/b/p1.md", "---\ntitle: Include\n---")
    b.WithContent("/a/c/p2.md", "---\ntitle: Exclude\n---")

    b.WithSourceFile(
        "data/mydata/b.toml", `b1='bval'`,
        "data/nodata/c.toml", `c1='bval'`,
        "layouts/partials/foo.html", `foo`,
        "assets/exclude.txt", `foo`,
        "assets/js/exclude.js", `foo`,
        "assets/js/include.js", `foo`,
        "assets/js/exclude.js", `foo`,
    )

    b.WithTemplatesAdded("index.html", `

Data: {{ site.Data }}:END

Template: {{ templates.Exists "partials/foo.html" }}:END
Resource1: {{ resources.Get "js/include.js" }}:END
Resource2: {{ resources.Get "js/exclude.js" }}:END
Resource3: {{ resources.Get "exclude.txt" }}:END
Resources: {{ resources.Match "**.js" }}
`)

    b.Build(BuildCfg{})

    assertExists := func(name string, shouldExist bool) {
        b.Helper()
        b.Assert(b.CheckExists(filepath.Join(workingDir, name)), qt.Equals, shouldExist)
    }

    assertExists("public/a/b/p1/index.html", true)
    assertExists("public/a/c/p2/index.html", false)

    b.AssertFileContent(filepath.Join(workingDir, "public", "index.html"), `
Data: map[mydata:map[b:map[b1:bval]]]:END
Template: false
Resource1: js/include.js:END
Resource2: :END
Resource3: :END
Resources: [js/include.js]
`)

}
@@ -379,6 +379,11 @@ type Mount struct {
 
     Lang string // any language code associated with this mount.
 
+    // Include only files matching the given Glob patterns (string or slice).
+    IncludeFiles interface{}
+
+    // Exclude all files matching the given Glob patterns (string or slice).
+    ExcludeFiles interface{}
 }
 
 func (m Mount) Component() string {
@@ -74,7 +74,7 @@ func NewSourceSpec(ps *helpers.PathSpec, inclusionFilter *glob.FilenameFilter, f
         }
     }
     shouldInclude := func(filename string) bool {
-        if !inclusionFilter.Match(filename) {
+        if !inclusionFilter.Match(filename, false) {
             return false
         }
         for _, r := range regexps {