Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)

parent 32471b57bd
commit d90e37e0c6

442 changed files with 1426 additions and 2254 deletions
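The hunks below are dominated by a handful of mechanical style changes. The Go sketch that follows is illustrative only: the package, type, and function names are made up and do not appear in the commit. It simply demonstrates three patterns that recur throughout the diff, namely short variable declarations replacing "var x = ...", single-declaration var blocks collapsed to one line, and composite literals reformatted with one element per line and a trailing comma.

package style

import "strings"

type thing struct{}

func (t *thing) Error() string { return "thing" }

// A single-spec group such as
//
//	var (
//		_ error = (*thing)(nil)
//	)
//
// is collapsed onto one line, as in several of the command files below.
var _ error = (*thing)(nil)

// Composite literals are split one element per line with a trailing comma,
// as in the import_jekyll and test-table hunks below.
var exceptSuffix = []string{
	".md", ".markdown",
	".html", ".htm",
}

// Inside functions, "var x = make(...)" becomes a short variable declaration,
// as in the excludes/sigs/knownDirectories hunks below.
func countSuffixes(names []string) map[string]int {
	counts := make(map[string]int) // was: var counts = make(map[string]int)
	for _, name := range names {
		for _, suffix := range exceptSuffix {
			if strings.HasSuffix(strings.ToLower(name), suffix) {
				counts[suffix]++
			}
		}
	}
	return counts
}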

cache/filecache/filecache.go (vendored): 5 changes

@@ -158,7 +158,6 @@ func (c *Cache) ReadOrCreate(id string,
err = create(info, f)
return
}
// GetOrCreate tries to get the file with the given id from cache. If not found or expired, create will

@@ -220,7 +219,6 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
return info, nil, err
}
return info, b, nil
}
// GetBytes gets the file content with the given id from the cahce, nil if none found.

@@ -276,7 +274,6 @@ func (c *Cache) getOrRemove(id string) hugio.ReadSeekCloser {
}
f, err := c.Fs.Open(id)
if err != nil {
return nil
}

@@ -299,7 +296,6 @@ func (c *Cache) getString(id string) string {
defer c.nlocker.Unlock(id)
f, err := c.Fs.Open(id)
if err != nil {
return ""
}

@@ -307,7 +303,6 @@ func (c *Cache) getString(id string) string {
b, _ := ioutil.ReadAll(f)
return string(b)
}
// Caches is a named set of caches.

cache/filecache/filecache_config_test.go (vendored): 3 changes

@@ -69,7 +69,6 @@ dir = "/path/to/c3"
c3 := decoded["images"]
c.Assert(c3.MaxAge, qt.Equals, time.Duration(-1))
c.Assert(c3.Dir, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
}
func TestDecodeConfigIgnoreCache(t *testing.T) {

@@ -110,7 +109,6 @@ dir = "/path/to/c3"
for _, v := range decoded {
c.Assert(v.MaxAge, qt.Equals, time.Duration(0))
}
}
func TestDecodeConfigDefault(t *testing.T) {

@@ -178,7 +176,6 @@ dir = "/"
_, err = DecodeConfig(fs, cfg)
c.Assert(err, qt.Not(qt.IsNil))
}
func newTestConfig() *viper.Viper {

cache/filecache/filecache_pruner.go (vendored): 2 changes

@@ -110,7 +110,6 @@ func (c *Cache) Prune(force bool) (int, error) {
}
func (c *Cache) pruneRootDir(force bool) (int, error) {
info, err := c.Fs.Stat(c.pruneAllRootDir)
if err != nil {
if os.IsNotExist(err) {

@@ -124,5 +123,4 @@ func (c *Cache) pruneRootDir(force bool) (int, error) {
}
return hugofs.MakeReadableAndRemoveAllModulePkgDir(c.Fs, c.pruneAllRootDir)
}

cache/filecache/filecache_pruner_test.go (vendored): 1 change

@@ -107,5 +107,4 @@ dir = ":resourceDir/_gen"
}
}
}

cache/filecache/filecache_test.go (vendored): 3 changes

@@ -183,7 +183,6 @@ dir = ":cacheDir/c"
c.Assert(string(b), qt.Equals, "Hugo is great!")
}
}
func TestFileCacheConcurrent(t *testing.T) {

@@ -253,7 +252,6 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
var result string
rf := func(failLevel int) func(info ItemInfo, r io.ReadSeeker) error {
return func(info ItemInfo, r io.ReadSeeker) error {
if failLevel > 0 {
if failLevel > 1 {

@@ -347,5 +345,4 @@ func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec
p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, nil)
c.Assert(err, qt.IsNil)
return p
}

cache/namedmemcache/named_cache.go (vendored): 1 change

@@ -49,7 +49,6 @@ func (c *Cache) Clear() {
c.cache = make(map[string]cacheEntry)
c.nlocker = locker.NewLocker()
}
// GetOrCreate tries to get the value with the given cache key, if not found

@@ -58,7 +58,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
var methods Methods
var excludes = make(map[string]bool)
excludes := make(map[string]bool)
if len(exclude) > 0 {
for _, m := range c.MethodsFromTypes(exclude, nil) {

@@ -99,11 +99,9 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
name = pkgPrefix + name
return name, pkg
}
for _, t := range include {
for i := 0; i < t.NumMethod(); i++ {
m := t.Method(i)

@@ -153,7 +151,6 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
methods = append(methods, method)
}
}
sort.SliceStable(methods, func(i, j int) bool {

@@ -167,16 +164,13 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
}
return wi < wj
})
return methods
}
func (c *Inspector) parseSource() {
c.init.Do(func() {
if !strings.Contains(c.ProjectRootDir, "hugo") {
panic("dir must be set to the Hugo root")
}

@@ -200,7 +194,6 @@ func (c *Inspector) parseSource() {
filenames = append(filenames, path)
return nil
})
for _, filename := range filenames {

@@ -230,7 +223,6 @@ func (c *Inspector) parseSource() {
c.methodWeight[iface] = weights
}
}
}
return true
})

@@ -247,7 +239,6 @@ func (c *Inspector) parseSource() {
}
}
}
})
}

@@ -385,7 +376,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
var methods Methods
var excludeRes = make([]*regexp.Regexp, len(excludes))
excludeRes := make([]*regexp.Regexp, len(excludes))
for i, exclude := range excludes {
excludeRes[i] = regexp.MustCompile(exclude)

@@ -450,7 +441,6 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
}
return sb.String(), pkgImports
}
func collectMethodsRecursive(pkg string, f []*ast.Field) []string {

@@ -481,7 +471,6 @@ func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
}
return methodNames
}
func firstToLower(name string) string {

@@ -544,5 +533,4 @@ func varName(name string) string {
}
return name
}

@@ -25,7 +25,6 @@ import (
)
func TestMethods(t *testing.T) {
var (
zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem()
zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()

@@ -58,7 +57,6 @@ func TestMethods(t *testing.T) {
methodsStr := fmt.Sprint(methods)
c.Assert(methodsStr, qt.Contains, "MethodEmbed3(arg0 string) string")
})
t.Run("ToMarshalJSON", func(t *testing.T) {

@@ -76,9 +74,7 @@ func TestMethods(t *testing.T) {
c.Assert(pkg, qt.Contains, "encoding/json")
fmt.Println(pkg)
})
}
type I interface {

@@ -26,7 +26,8 @@ type checkCmd struct {
}
func newCheckCmd() *checkCmd {
return &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
return &checkCmd{baseCmd: &baseCmd{
cmd: &cobra.Command{
Use: "check",
Short: "Contains some verification checks",
},

@@ -24,7 +24,8 @@ type checkCmd struct {
}
func newCheckCmd() *checkCmd {
cc := &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
cc := &checkCmd{baseCmd: &baseCmd{
cmd: &cobra.Command{
Use: "check",
Short: "Contains some verification checks",
},

@@ -16,24 +16,22 @@ package commands
import (
"bytes"
"errors"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"sync"
"time"
hconfig "github.com/gohugoio/hugo/config"
"golang.org/x/sync/semaphore"
"io/ioutil"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo"
jww "github.com/spf13/jwalterweatherman"
"os"
"path/filepath"
"regexp"
"time"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"

@@ -156,7 +154,6 @@ func (c *commandeer) initFs(fs *hugofs.Fs) error {
}
func newCommandeer(mustHaveConfigFile, running bool, h *hugoBuilderCommon, f flagsToConfigHandler, cfgInit func(c *commandeer) error, subCmdVs ...*cobra.Command) (*commandeer, error) {
var rebuildDebouncer func(f func())
if running {
// The time value used is tested with mass content replacements in a fairly big Hugo site.

@@ -248,7 +245,6 @@ func (f *fileChangeDetector) PrepareNew() {
}
func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
if c.DepsCfg == nil {
c.DepsCfg = &deps.DepsCfg{}
}

@@ -277,7 +273,6 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
environment := c.h.getEnvironment(running)
doWithConfig := func(cfg config.Provider) error {
if c.ftch != nil {
c.ftch.flagsToConfig(cfg)
}

@@ -309,7 +304,8 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
Filename: c.h.cfgFile,
AbsConfigDir: c.h.getConfigDir(dir),
Environ: os.Environ(),
Environment: environment},
Environment: environment,
},
cfgSetAndInit,
doWithConfig)

@@ -402,7 +398,6 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
h, err = hugolib.NewHugoSites(*c.DepsCfg)
c.hugoSites = h
close(c.created)
})
if err != nil {

@@ -418,5 +413,4 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
cfg.Logger.Infoln("Using config file:", config.ConfigFileUsed())
return nil
}

@@ -88,6 +88,7 @@ var _ commandsBuilderGetter = (*baseBuilderCmd)(nil)
type commandsBuilderGetter interface {
getCommandsBuilder() *commandsBuilder
}
type baseBuilderCmd struct {
*baseCmd
*commandsBuilder

@@ -138,7 +139,6 @@ func (c *nilCommand) getCommand() *cobra.Command {
}
func (c *nilCommand) flagsToConfig(cfg config.Provider) {
}
func (b *commandsBuilder) newHugoCmd() *hugoCmd {

@@ -35,7 +35,6 @@ import (
)
func TestExecute(t *testing.T) {
c := qt.New(t)
createSite := func(c *qt.C) (string, func()) {

@@ -124,7 +123,6 @@ func TestExecute(t *testing.T) {
c.Assert(config, qt.Contains, "baseURL = \"http://example.org/\"")
checkNewSiteInited(c, siteDir)
})
}
func checkNewSiteInited(c *qt.C, basepath string) {

@@ -185,7 +183,8 @@ func TestFlags(t *testing.T) {
},
{
name: "Persistent flags",
args: []string{"server",
args: []string{
"server",
"--config=myconfig.toml",
"--configDir=myconfigdir",
"--contentDir=mycontent",

@@ -235,12 +234,12 @@ func TestFlags(t *testing.T) {
// The flag is named i18n-warnings
c.Assert(cfg.GetBool("logI18nWarnings"), qt.Equals, true)
}}}
},
},
}
for _, test := range tests {
c.Run(test.name, func(c *qt.C) {
b := newCommandsBuilder()
root := b.addAll().build()

@@ -257,11 +256,9 @@ func TestFlags(t *testing.T) {
test.check(c, b.commands[0].(*serverCmd))
})
}
}
func TestCommandsExecute(t *testing.T) {
c := qt.New(t)
dir, clean, err := createSimpleTestSite(t, testSiteConfig{})

@@ -330,7 +327,6 @@ func TestCommandsExecute(t *testing.T) {
}
}
}
type testSiteConfig struct {

@@ -399,7 +395,6 @@ Environment: {{ hugo.Environment }}
`)
return d, clean, nil
}
func writeFile(t *testing.T, filename, content string) {

@@ -16,6 +16,7 @@ package commands
import (
"bytes"
"fmt"
"path/filepath"
"strings"
"time"

@@ -34,14 +35,10 @@ import (
"github.com/gohugoio/hugo/hugolib"
"path/filepath"
"github.com/spf13/cobra"
)
var (
_ cmder = (*convertCmd)(nil)
)
var _ cmder = (*convertCmd)(nil)
type convertCmd struct {
outputDir string

@@ -27,7 +27,8 @@ type envCmd struct {
}
func newEnvCmd() *envCmd {
return &envCmd{baseCmd: newBaseCmd(&cobra.Command{
return &envCmd{
baseCmd: newBaseCmd(&cobra.Command{
Use: "env",
Short: "Print Hugo version and environment info",
Long: `Print Hugo version and environment info. This is useful in Hugo bug reports.`,

@@ -59,7 +59,6 @@ or just source them in directly:
}
err := cmd.Root().GenBashCompletionFile(cc.autocompleteTarget)
if err != nil {
return err
}

@@ -22,9 +22,7 @@ import (
"github.com/spf13/cobra"
)
var (
_ cmder = (*genChromaStyles)(nil)
)
var _ cmder = (*genChromaStyles)(nil)
type genChromaStyles struct {
style string

@@ -23,9 +23,7 @@ import (
"github.com/spf13/cobra"
)
var (
_ cmder = (*genDocsHelper)(nil)
)
var _ cmder = (*genDocsHelper)(nil)
type genDocsHelper struct {
target string

@@ -70,5 +68,4 @@ func (g *genDocsHelper) generate() error {
fmt.Println("Done!")
return nil
}

@@ -19,10 +19,16 @@ import (
"context"
"fmt"
"io/ioutil"
"os"
"os/signal"
"path/filepath"
"runtime"
"runtime/pprof"
"runtime/trace"
"strings"
"sync/atomic"
"syscall"
"time"
"github.com/gohugoio/hugo/hugofs"

@@ -34,18 +40,10 @@ import (
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/terminal"
"syscall"
"github.com/gohugoio/hugo/hugolib/filesystems"
"golang.org/x/sync/errgroup"
"os"
"path/filepath"
"runtime"
"strings"
"time"
"github.com/gohugoio/hugo/config"
flag "github.com/spf13/pflag"

@@ -82,7 +80,6 @@ func (r Response) IsUserError() bool {
// Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
// The args are usually filled with os.Args[1:].
func Execute(args []string) Response {
hugoCmd := newCommandsBuilder().addAll().build()
cmd := hugoCmd.getCommand()
cmd.SetArgs(args)

@@ -120,14 +117,12 @@ func initializeConfig(mustHaveConfigFile, running bool,
h *hugoBuilderCommon,
f flagsToConfigHandler,
cfgInit func(c *commandeer) error) (*commandeer, error) {
c, err := newCommandeer(mustHaveConfigFile, running, h, f, cfgInit)
if err != nil {
return nil, err
}
return c, nil
}
func (c *commandeer) createLogger(cfg config.Provider, running bool) (loggers.Logger, error) {

@@ -246,7 +241,6 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false)
setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false)
}
func setValueFromFlag(flags *flag.FlagSet, key string, cfg config.Provider, targetKey string, force bool) {

@@ -282,7 +276,6 @@ func isTerminal() bool {
}
func (c *commandeer) fullBuild() error {
var (
g errgroup.Group
langCount map[string]uint64

@@ -298,7 +291,6 @@ func (c *commandeer) fullBuild() error {
}
copyStaticFunc := func() error {
cnt, err := c.copyStatic()
if err != nil {
return errors.Wrap(err, "Error copying static files")

@@ -346,7 +338,6 @@ func (c *commandeer) fullBuild() error {
}
return nil
}
func (c *commandeer) initCPUProfile() (func(), error) {

@@ -419,7 +410,6 @@ func (c *commandeer) initMutexProfile() (func(), error) {
pprof.Lookup("mutex").WriteTo(f, 0)
f.Close()
}, nil
}
func (c *commandeer) initMemTicker() func() {

@@ -429,7 +419,6 @@ func (c *commandeer) initMemTicker() func() {
var m runtime.MemStats
runtime.ReadMemStats(&m)
fmt.Printf("\n\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\n\n", formatByteCount(m.Alloc), formatByteCount(m.TotalAlloc), formatByteCount(m.Sys), m.NumGC)
}
go func() {

@@ -442,7 +431,6 @@ func (c *commandeer) initMemTicker() func() {
printMem()
return
}
}
}()

@@ -452,7 +440,6 @@ func (c *commandeer) initMemTicker() func() {
}
func (c *commandeer) initProfiling() (func(), error) {
stopCPUProf, err := c.initCPUProfile()
if err != nil {
return nil, err

@@ -538,7 +525,7 @@ func (c *commandeer) build() error {
checkErr(c.Logger, err)
defer watcher.Close()
var sigs = make(chan os.Signal, 1)
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
<-sigs

@@ -584,7 +571,6 @@ func (c *commandeer) copyStatic() (map[string]uint64, error) {
}
func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesystem) (uint64, error)) (map[string]uint64, error) {
langCount := make(map[string]uint64)
staticFilesystems := c.hugo().BaseFs.SourceFilesystems.Static

@@ -712,7 +698,6 @@ func (c *commandeer) getDirList() ([]string, error) {
}
return nil
}
watchFiles := c.hugo().PathSpec.BaseFs.WatchDirs()

@@ -753,7 +738,6 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
c.buildErr = nil
visited := c.visitedURLs.PeekAllSet()
if c.fastRenderMode {
// Make sure we always render the home pages
for _, l := range c.languages {
langPath := c.hugo().PathSpec.GetLangSubDir(l.Lang)

@@ -763,7 +747,6 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
home := c.hugo().PathSpec.PrependBasePath("/"+langPath, false)
visited[home] = true
}
}
return c.hugo().Build(hugolib.BuildCfg{RecentlyVisited: visited, ErrRecovery: c.wasError}, events...)
}

@@ -793,13 +776,11 @@ func (c *commandeer) fullRebuild(changeType string) {
c.fullRebuildSem.Acquire(context.Background(), 1)
go func() {
defer c.fullRebuildSem.Release(1)
c.printChangeDetected(changeType)
defer func() {
// Allow any file system events to arrive back.
// This will block any rebuild on config changes for the
// duration of the sleep.

@@ -848,7 +829,6 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
}
watcher, err := watcher.New(1 * time.Second)
if err != nil {
return nil, err
}

@@ -909,7 +889,6 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
staticSyncer *staticSyncer,
evs []fsnotify.Event,
configSet map[string]bool) {
defer func() {
c.wasError = false
}()

@@ -950,7 +929,6 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
time.Sleep(100 * time.Millisecond)
}
}
}
// Config file(s) changed. Need full rebuild.

@@ -1194,7 +1172,6 @@ func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fs
}
}
return
}
func pickOneWriteOrCreatePath(events []fsnotify.Event) string {

@@ -44,5 +44,4 @@ contentDir = "thisdoesnotexist"
_, err = cmd.ExecuteC()
c.Assert(err, qt.IsNil)
}

@@ -74,11 +74,9 @@ Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root
cc.cmd.AddCommand(importJekyllCmd)
return cc
}
func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
if len(args) < 2 {
return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
}

@@ -255,13 +253,11 @@ func (i *importCmd) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]
defer f.Close()
b, err := ioutil.ReadAll(f)
if err != nil {
return nil
}
c, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
if err != nil {
return nil
}

@@ -338,8 +334,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
}
} else {
lowerEntryName := strings.ToLower(entry.Name())
exceptSuffix := []string{".md", ".markdown", ".html", ".htm",
".xml", ".textile", "rakefile", "gemfile", ".lock"}
exceptSuffix := []string{
".md", ".markdown", ".html", ".htm",
".xml", ".textile", "rakefile", "gemfile", ".lock",
}
isExcept := false
for _, suffix := range exceptSuffix {
if strings.HasSuffix(lowerEntryName, suffix) {

@@ -602,8 +600,8 @@ func replaceImageTag(match string) string {
}
result.WriteString(">}}")
return result.String()
}
func replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
if len(part) > 0 {
buffer.WriteString(partName + "=\"" + part + "\" ")

@@ -53,25 +53,41 @@ func TestConvertJekyllMetadata(t *testing.T) {
draft bool
expect string
}{
{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"date":"2015-10-01T00:00:00Z"}`},
{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
`{"date":"2015-10-01T00:00:00Z","draft":true}`},
{map[interface{}]interface{}{"Permalink": "/permalink.html", "layout": "post"},
{
map[interface{}]interface{}{},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
{map[interface{}]interface{}{"permalink": "/permalink.html"},
`{"date":"2015-10-01T00:00:00Z"}`,
},
{
map[interface{}]interface{}{},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
`{"date":"2015-10-01T00:00:00Z","draft":true}`,
},
{
map[interface{}]interface{}{"Permalink": "/permalink.html", "layout": "post"},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
{map[interface{}]interface{}{"category": nil, "permalink": 123},
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
},
{
map[interface{}]interface{}{"permalink": "/permalink.html"},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"date":"2015-10-01T00:00:00Z"}`},
{map[interface{}]interface{}{"Excerpt_Separator": "sep"},
`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
},
{
map[interface{}]interface{}{"category": nil, "permalink": 123},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`},
{map[interface{}]interface{}{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
`{"date":"2015-10-01T00:00:00Z"}`,
},
{
map[interface{}]interface{}{"Excerpt_Separator": "sep"},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`},
`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`,
},
{
map[interface{}]interface{}{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`,
},
}
for _, data := range testDataList {

@@ -90,44 +106,68 @@ func TestConvertJekyllContent(t *testing.T) {
content string
expect string
}{
{map[interface{}]interface{}{},
"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
{map[interface{}]interface{}{},
"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
{map[interface{}]interface{}{"excerpt_separator": "<!--sep-->"},
{
map[interface{}]interface{}{},
"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content",
},
{
map[interface{}]interface{}{},
"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content",
},
{
map[interface{}]interface{}{"excerpt_separator": "<!--sep-->"},
"Test content\n<!--sep-->\npart2 content",
"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content"},
"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content",
},
{map[interface{}]interface{}{}, "{% raw %}text{% endraw %}", "text"},
{map[interface{}]interface{}{}, "{%raw%} text2 {%endraw %}", "text2"},
{map[interface{}]interface{}{},
{
map[interface{}]interface{}{},
"{% highlight go %}\nvar s int\n{% endhighlight %}",
"{{< highlight go >}}\nvar s int\n{{< / highlight >}}"},
{map[interface{}]interface{}{},
"{{< highlight go >}}\nvar s int\n{{< / highlight >}}",
},
{
map[interface{}]interface{}{},
"{% highlight go linenos hl_lines=\"1 2\" %}\nvar s string\nvar i int\n{% endhighlight %}",
"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}"},
"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}",
},
// Octopress image tag
{map[interface{}]interface{}{},
{
map[interface{}]interface{}{},
"{% img http://placekitten.com/890/280 %}",
"{{< figure src=\"http://placekitten.com/890/280\" >}}"},
{map[interface{}]interface{}{},
"{{< figure src=\"http://placekitten.com/890/280\" >}}",
},
{
map[interface{}]interface{}{},
"{% img left http://placekitten.com/320/250 Place Kitten #2 %}",
"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}"},
{map[interface{}]interface{}{},
"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}",
},
{
map[interface{}]interface{}{},
"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #3' %}",
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}"},
{map[interface{}]interface{}{},
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}",
},
{
map[interface{}]interface{}{},
"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
{map[interface{}]interface{}{},
"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
},
{
map[interface{}]interface{}{},
"{% img http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
{map[interface{}]interface{}{},
"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
},
{
map[interface{}]interface{}{},
"{% img right /placekitten/300/500 'Place Kitten #4' 'An image of a very cute kitten' %}",
"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
{map[interface{}]interface{}{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
},
{
map[interface{}]interface{}{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
"somecontent",
"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent"},
"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent",
},
}
for _, data := range testDataList {
result, err := convertJekyllContent(data.metadata, data.content)

@@ -46,7 +46,6 @@ func (lc *listCmd) buildSites(config map[string]interface{}) (*hugolib.HugoSites
}
sites, err := hugolib.NewHugoSites(*c.DepsCfg)
if err != nil {
return nil, newSystemError("Error creating sites", err)
}

@@ -77,7 +76,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
Long: `List all of the drafts in your content directory.`,
RunE: func(cmd *cobra.Command, args []string) error {
sites, err := cc.buildSites(map[string]interface{}{"buildDrafts": true})
if err != nil {
return newSystemError("Error building sites", err)
}

@@ -97,7 +95,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
Long: `List all of the posts in your content directory which will be posted in the future.`,
RunE: func(cmd *cobra.Command, args []string) error {
sites, err := cc.buildSites(map[string]interface{}{"buildFuture": true})
if err != nil {
return newSystemError("Error building sites", err)
}

@@ -126,7 +123,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
Long: `List all of the posts in your content directory which has already expired.`,
RunE: func(cmd *cobra.Command, args []string) error {
sites, err := cc.buildSites(map[string]interface{}{"buildExpired": true})
if err != nil {
return newSystemError("Error building sites", err)
}

@@ -159,7 +155,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
"buildDrafts": true,
"buildFuture": true,
})
if err != nil {
return newSystemError("Error building sites", err)
}

@@ -91,7 +91,6 @@ Also note that if you configure a positive maxAge for the "modules" file cache,
}
func (b *commandsBuilder) newModCmd() *modCmd {
c := &modCmd{}
const commonUsage = `

@@ -264,7 +263,6 @@ If a module is vendored, that is where Hugo will look for it's dependencies.
c.baseBuilderCmd = b.newBuilderCmd(cmd)
return c
}
func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {

@@ -20,7 +20,6 @@ import (
)
func newModNPMCmd(c *modCmd) *cobra.Command {
cmd := &cobra.Command{
Use: "npm",
Short: "Various npm helpers.",

@@ -47,7 +46,6 @@ removed from Hugo, but we need to test this out in "real life" to get a feel of
so this may/will change in future versions of Hugo.
`,
RunE: func(cmd *cobra.Command, args []string) error {
return c.withHugo(func(h *hugolib.HugoSites) error {
return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
})

@@ -72,7 +72,6 @@ func (n *newCmd) newContent(cmd *cobra.Command, args []string) error {
}
c, err := initializeConfig(true, false, &n.hugoBuilderCommon, n, cfgInit)
if err != nil {
return err
}

@@ -58,7 +58,6 @@ Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
return cc
}
func (n *newSiteCmd) doNewSite(fs *hugofs.Fs, basepath string, force bool) error {

@@ -53,7 +53,6 @@ as you see fit.`,
// newTheme creates a new Hugo theme template
func (n *newThemeCmd) newTheme(cmd *cobra.Command, args []string) error {
c, err := initializeConfig(false, false, &n.hugoBuilderCommon, n, nil)
if err != nil {
return err
}

@@ -145,7 +144,6 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
}
func (n *newThemeCmd) createThemeMD(fs *hugofs.Fs, inpath string) (err error) {
by := []byte(`# theme.toml template for a Hugo theme
# See https://github.com/gohugoio/hugoThemes#themetoml for an example

@@ -17,6 +17,7 @@ package commands
import (
"errors"
"github.com/spf13/cobra"
)

@@ -61,7 +61,6 @@ func (c *releaseCommandeer) getCommand() *cobra.Command {
}
func (c *releaseCommandeer) flagsToConfig(cfg config.Provider) {
}
func (r *releaseCommandeer) release() error {

@@ -228,7 +228,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
}
return err
}
if err := memStats(); err != nil {

@@ -262,7 +261,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
jww.FEEDBACK.Printf("Watching for changes in %s\n", group)
}
watcher, err := c.newWatcher(watchDirs...)
if err != nil {
return err
}

@@ -272,7 +270,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
}
return c.serve(sc)
}
func getRootWatchDirsStr(baseDir string, watchDirs []string) string {

@@ -301,7 +298,6 @@ func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Reques
r2.Header.Set("X-Rewrite-Original-URI", r.URL.RequestURI())
return r2
}
func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, error) {

@@ -393,7 +389,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
} else {
doRedirect = false
}
}
}

@@ -413,7 +408,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
}
if f.c.fastRenderMode && f.c.buildErr == nil {
if strings.HasSuffix(requestURI, "/") || strings.HasSuffix(requestURI, "html") || strings.HasSuffix(requestURI, "htm") {
if !f.c.visitedURLs.Contains(requestURI) {
// If not already on stack, re-render that single page.

@@ -453,8 +447,8 @@ var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{
func removeErrorPrefixFromLog(content string) string {
return logErrorRe.ReplaceAllLiteralString(content, "")
}
func (c *commandeer) serve(s *serverCmd) error {
func (c *commandeer) serve(s *serverCmd) error {
isMultiHost := c.hugo().IsMultihost()
var (

@@ -496,7 +490,7 @@ func (c *commandeer) serve(s *serverCmd) error {
livereload.Initialize()
}
var sigs = make(chan os.Signal, 1)
sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
for i := range baseURLs {

@@ -73,7 +73,6 @@ func TestServer(t *testing.T) {
// Stop the server.
stop <- true
}
func TestFixURL(t *testing.T) {

@@ -127,7 +126,6 @@ ERROR 2018/10/07 13:11:12 Rebuild failed: logged 1 error(s)
withoutError := removeErrorPrefixFromLog(content)
c.Assert(strings.Contains(withoutError, "ERROR"), qt.Equals, false)
}
func isWindowsCI() bool {

@@ -128,5 +128,4 @@ func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
_, err := c.doWithPublishDirs(syncFn)
return err
}

@@ -51,7 +51,6 @@ func Append(to interface{}, from ...interface{}) (interface{}, error) {
} else if !fromt.AssignableTo(tot) {
// Fall back to a []interface{} slice.
return appendToInterfaceSliceFromValues(tov, fromv)
}
}
}

@@ -36,28 +36,44 @@ func TestAppend(t *testing.T) {
{nil, []interface{}{"a", "b"}, []string{"a", "b"}},
{nil, []interface{}{nil}, []interface{}{nil}},
{[]interface{}{}, []interface{}{[]string{"c", "d", "e"}}, []string{"c", "d", "e"}},
{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
{
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
[]interface{}{&tstSlicer{"c"}},
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}}},
{&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}},
},
{
&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
[]interface{}{&tstSlicer{"c"}},
tstSlicers{&tstSlicer{"a"},
tstSlicers{
&tstSlicer{"a"},
&tstSlicer{"b"},
&tstSlicer{"c"}}},
{testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
&tstSlicer{"c"},
},
},
{
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
[]interface{}{&tstSlicerIn1{"c"}},
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}}},
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}},
},
//https://github.com/gohugoio/hugo/issues/5361
{[]string{"a", "b"}, []interface{}{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
[]interface{}{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}}},
{[]string{"a", "b"}, []interface{}{&tstSlicer{"a"}},
[]interface{}{"a", "b", &tstSlicer{"a"}}},
{
[]string{"a", "b"},
[]interface{}{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
[]interface{}{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}},
},
{
[]string{"a", "b"},
[]interface{}{&tstSlicer{"a"}},
[]interface{}{"a", "b", &tstSlicer{"a"}},
},
// Errors
{"", []interface{}{[]string{"a", "b"}}, false},
// No string concatenation.
{"ab",
{
"ab",
[]interface{}{"c"},
false},
false,
},
} {
result, err := Append(test.start, test.addend...)

@@ -71,5 +87,4 @@ func TestAppend(t *testing.T) {
c.Assert(err, qt.IsNil)
c.Assert(result, qt.DeepEquals, test.expected)
}
}

@@ -20,11 +20,13 @@ import (
qt "github.com/frankban/quicktest"
)
var _ Slicer = (*tstSlicer)(nil)
var _ Slicer = (*tstSlicerIn1)(nil)
var _ Slicer = (*tstSlicerIn2)(nil)
var _ testSlicerInterface = (*tstSlicerIn1)(nil)
var _ testSlicerInterface = (*tstSlicerIn1)(nil)
var (
_ Slicer = (*tstSlicer)(nil)
_ Slicer = (*tstSlicerIn1)(nil)
_ Slicer = (*tstSlicerIn2)(nil)
_ testSlicerInterface = (*tstSlicerIn1)(nil)
_ testSlicerInterface = (*tstSlicerIn1)(nil)
)
type testSlicerInterface interface {
Name() string

@@ -54,7 +56,6 @@ func (p *tstSlicerIn1) Slice(in interface{}) (interface{}, error) {
default:
return nil, errors.New("invalid type")
}
}
return result, nil
}

@@ -120,5 +121,4 @@ func TestSlice(t *testing.T) {
c.Assert(test.expected, qt.DeepEquals, result, errMsg)
}
}

@@ -125,5 +125,4 @@ E`, offsetMatcher)
c.Assert(location.Lines, qt.DeepEquals, []string{"A", "B", "C", "D"})
c.Assert(location.Position().LineNumber, qt.Equals, 2)
c.Assert(location.LinesPos, qt.Equals, 1)
}
|
|||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
var (
|
||||
_ causer = (*fileError)(nil)
|
||||
)
|
||||
var _ causer = (*fileError)(nil)
|
||||
|
||||
// FileError represents an error when handling a file: Parsing a config file,
|
||||
// execute a template etc.
|
||||
|
|
|

@@ -52,5 +52,4 @@ func TestToLineNumberError(t *testing.T) {
c.Assert(pos.ColumnNumber, qt.Equals, test.columnNumber, errMsg)
c.Assert(errors.Cause(got), qt.Not(qt.IsNil))
}
}

@@ -67,7 +67,6 @@ func IsTruthful(in interface{}) bool {
default:
return IsTruthfulValue(reflect.ValueOf(in))
}
}
var zeroType = reflect.TypeOf((*types.Zeroer)(nil)).Elem()

@@ -35,5 +35,4 @@ func TestHugoInfo(t *testing.T) {
devHugoInfo := NewInfo("development")
c.Assert(devHugoInfo.IsProduction(), qt.Equals, false)
}

@@ -16,7 +16,6 @@ package hugo
import (
"fmt"
"io"
"runtime"
"strings"

@@ -146,7 +145,6 @@ func BuildVersionString() string {
}
return fmt.Sprintf("%s %s %s BuildDate: %s", program, version, osArch, date)
}
func version(version float32, patchVersion int, suffix string) string {

@@ -34,7 +34,6 @@ func NewIgnorableLogger(logger Logger, statements ...string) IgnorableLogger {
statementsSet := make(map[string]bool)
for _, s := range statements {
statementsSet[strings.ToLower(s)] = true
}
return ignorableLogger{
Logger: logger,

@@ -29,10 +29,8 @@ import (
jww "github.com/spf13/jwalterweatherman"
)
var (
// Counts ERROR logs to the global jww logger.
GlobalErrorCounter *jww.Counter
)
var GlobalErrorCounter *jww.Counter
func init() {
GlobalErrorCounter = &jww.Counter{}

@@ -253,7 +251,6 @@ func (a labelColorizer) Write(p []byte) (n int, err error) {
// bytes, so we lie a little.
_, err = a.w.Write([]byte(replaced))
return len(p), err
}
// InitGlobalLogger initializes the global logger, used in some rare cases.

@@ -264,7 +261,6 @@ func InitGlobalLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, lo
jww.SetLogOutput(logHandle)
jww.SetLogThreshold(logThreshold)
jww.SetStdoutThreshold(stdoutThreshold)
}
func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {

@@ -279,7 +275,6 @@ func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {
}
return outHandle, logHandle
}
type fatalLogWriter int

@@ -31,7 +31,6 @@ func TestLogger(t *testing.T) {
l.Warnln("A warning")
c.Assert(l.LogCounters().ErrorCounter.Count(), qt.Equals, uint64(2))
}
func TestLoggerToWriterWithPrefix(t *testing.T) {

@@ -154,5 +154,4 @@ func TestRenameKeys(t *testing.T) {
if !reflect.DeepEqual(expected, m) {
t.Errorf("Expected\n%#v, got\n%#v\n", expected, m)
}
}

@@ -80,7 +80,6 @@ func GetNestedParam(keyStr, separator string, candidates ...Params) (interface{}
}
return nil, nil
}
func GetNestedParamFn(keyStr, separator string, lookupFn func(key string) interface{}) (interface{}, string, map[string]interface{}, error) {

@@ -20,7 +20,6 @@ import (
)
func TestGetNestedParam(t *testing.T) {
m := map[string]interface{}{
"string": "value",
"first": 1,

@@ -48,12 +47,10 @@ func TestGetNestedParam(t *testing.T) {
c.Assert(must("nested.nestednested.color", ".", m), qt.Equals, "green")
c.Assert(must("string.name", ".", m), qt.IsNil)
c.Assert(must("nested.foo", ".", m), qt.IsNil)
}
// https://github.com/gohugoio/hugo/issues/7903
func TestGetNestedParamFnNestedNewKey(t *testing.T) {
c := qt.New(t)
nested := map[string]interface{}{

@@ -71,5 +68,4 @@ func TestGetNestedParamFnNestedNewKey(t *testing.T) {
c.Assert(existing, qt.IsNil)
c.Assert(nestedKey, qt.Equals, "new")
c.Assert(owner, qt.DeepEquals, nested)
}
|
|||
//
|
||||
// If the first add for a key is an array or slice, then the next value(s) will be appended.
|
||||
func (c *Scratch) Add(key string, newAddend interface{}) (string, error) {
|
||||
|
||||
var newVal interface{}
|
||||
c.mu.RLock()
|
||||
existingAddend, found := c.values[key]
|
||||
|
|
|

@@ -53,7 +53,6 @@ func TestScratchAdd(t *testing.T) {
if err == nil {
t.Errorf("Expected error from invalid arithmetic")
}
}
func TestScratchAddSlice(t *testing.T) {

@@ -96,7 +95,6 @@ func TestScratchAddTypedSliceToInterfaceSlice(t *testing.T) {
_, err := scratch.Add("slice", []int{1, 2})
c.Assert(err, qt.IsNil)
c.Assert(scratch.Get("slice"), qt.DeepEquals, []int{1, 2})
}
// https://github.com/gohugoio/hugo/issues/5361

@@ -110,7 +108,6 @@ func TestScratchAddDifferentTypedSliceToInterfaceSlice(t *testing.T) {
_, err := scratch.Add("slice", []int{1, 2})
c.Assert(err, qt.IsNil)
c.Assert(scratch.Get("slice"), qt.DeepEquals, []interface{}{"foo", 1, 2})
}
func TestScratchSet(t *testing.T) {

@@ -16,7 +16,6 @@ package para
import (
"context"
"runtime"
"sort"
"sync"
"sync/atomic"

@@ -60,7 +59,6 @@ func TestPara(t *testing.T) {
c.Assert(sort.IntsAreSorted(result), qt.Equals, false, qt.Commentf("Para does not seem to be parallel"))
sort.Ints(result)
c.Assert(result, qt.DeepEquals, ints)
})
c.Run("Time", func(c *qt.C) {

@@ -84,7 +82,5 @@ func TestPara(t *testing.T) {
c.Assert(r.Wait(), qt.IsNil)
c.Assert(counter, qt.Equals, int64(n))
c.Assert(time.Since(start) < n/2*time.Millisecond, qt.Equals, true)
})
}

@@ -50,12 +50,11 @@ func (pos Position) IsValid() bool {
var positionStringFormatfunc func(p Position) string
func createPositionStringFormatter(formatStr string) func(p Position) string {
if formatStr == "" {
formatStr = "\":file::line::col\""
}
var identifiers = []string{":file", ":line", ":col"}
identifiers := []string{":file", ":line", ":col"}
var identifiersFound []string
for i := range formatStr {

@@ -29,5 +29,4 @@ func TestPositionStringFormatter(t *testing.T) {
c.Assert(createPositionStringFormatter("好::col")(pos), qt.Equals, "好:13")
c.Assert(createPositionStringFormatter("")(pos), qt.Equals, "\"/my/file.txt:12:13\"")
c.Assert(pos.String(), qt.Equals, "\"/my/file.txt:12:13\"")
}

@@ -25,5 +25,4 @@ func TestRemoveAccents(t *testing.T) {
c.Assert(string(RemoveAccents([]byte("Resumé"))), qt.Equals, "Resume")
c.Assert(string(RemoveAccents([]byte("Hugo Rocks!"))), qt.Equals, "Hugo Rocks!")
c.Assert(string(RemoveAccentsString("Resumé")), qt.Equals, "Resume")
}

@@ -26,7 +26,6 @@ func TestToStringSlicePreserveString(t *testing.T) {
c.Assert(ToStringSlicePreserveString("Hugo"), qt.DeepEquals, []string{"Hugo"})
c.Assert(ToStringSlicePreserveString([]interface{}{"A", "B"}), qt.DeepEquals, []string{"A", "B"})
c.Assert(ToStringSlicePreserveString(nil), qt.IsNil)
}
func TestToString(t *testing.T) {

@@ -34,5 +33,4 @@ func TestToString(t *testing.T) {
c.Assert(ToString([]byte("Hugo")), qt.Equals, "Hugo")
c.Assert(ToString(json.RawMessage("Hugo")), qt.Equals, "Hugo")
}
|
@ -61,5 +61,4 @@ func TestLexicographicSort(t *testing.T) {
|
|||
})
|
||||
|
||||
c.Assert(s, qt.DeepEquals, []string{"A", "b", "Ba", "ba", "ba", "Bz"})
|
||||
|
||||
}
|
||||
|
|
|

@@ -14,12 +14,12 @@
package config
import (
"github.com/pkg/errors"
"sort"
"strings"
"sync"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/common/types"
"github.com/gobwas/glob"

@@ -88,7 +88,6 @@ type Sitemap struct {
}
func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap {
for key, value := range input {
switch key {
case "changefreq":

@@ -116,7 +115,6 @@ type Server struct {
}
func (s *Server) init() {
s.compiledInit.Do(func() {
for _, h := range s.Headers {
s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))

@@ -150,7 +148,6 @@ func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
})
return matches
}
func (s *Server) MatchRedirect(pattern string) Redirect {

@@ -176,7 +173,6 @@ func (s *Server) MatchRedirect(pattern string) Redirect {
}
return Redirect{}
}
type Headers struct {

@@ -57,7 +57,6 @@ func TestBuild(t *testing.T) {
c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, false)
c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, false)
c.Assert(b.UseResourceCache(nil), qt.Equals, false)
}
func TestServer(t *testing.T) {

@@ -98,7 +97,8 @@ status = 301
c.Assert(s.MatchHeaders("/foo.jpg"), qt.DeepEquals, []types.KeyValueStr{
{Key: "X-Content-Type-Options", Value: "nosniff"},
{Key: "X-Frame-Options", Value: "DENY"},
{Key: "X-XSS-Protection", Value: "1; mode=block"}})
{Key: "X-XSS-Protection", Value: "1; mode=block"},
})
c.Assert(s.MatchRedirect("/foo/bar/baz"), qt.DeepEquals, Redirect{
From: "/foo/**",

@@ -122,7 +122,8 @@ status = 301
c.Assert(s.MatchRedirect("/default/index.html"), qt.DeepEquals, Redirect{})
c.Assert(s.MatchRedirect("/default/"), qt.DeepEquals, Redirect{})
for _, errorCase := range []string{`[[server.redirects]]
for _, errorCase := range []string{
`[[server.redirects]]
from = "/**"
to = "/file"
status = 301`,

@@ -138,5 +139,4 @@ status = 301`,
c.Assert(err, qt.Not(qt.IsNil))
}
}

@@ -86,7 +86,6 @@ func readConfig(format metadecoders.Format, data []byte) (map[string]interface{}
RenameKeys(m)
return m, nil
}
func loadConfigFromFile(fs afero.Fs, filename string) (map[string]interface{}, error) {

@@ -69,7 +69,6 @@ simple = true
}
c.Assert(got, qt.All(qt.Equals), true)
}
func TestDecodeConfigFromTOMLCaseInsensitive(t *testing.T) {

@@ -65,5 +65,4 @@ func TestUseSettingsFromRootIfSet(t *testing.T) {
c.Assert(config.Disqus.Shortname, qt.Equals, "root_short")
c.Assert(config.GoogleAnalytics.ID, qt.Equals, "ga_root")
}

@@ -16,15 +16,14 @@ package create
import (
"bytes"
"github.com/pkg/errors"
"io"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs"

@@ -131,7 +130,6 @@ func newContentFromDir(
sites *hugolib.HugoSites,
targetFs afero.Fs,
cm archetypeMap, name, targetPath string) error {
for _, f := range cm.otherFiles {
meta := f.Meta()
filename := meta.Path()

@@ -196,11 +194,9 @@ func mapArcheTypeDir(
ps *helpers.PathSpec,
fs afero.Fs,
archetypeDir string) (archetypeMap, error) {
var m archetypeMap
walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
if err != nil {
return err
}

@@ -308,7 +304,6 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
}
if siteContentDir == "" {
}
if siteContentDir != "" {

@@ -324,7 +319,6 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
}
return s.PathSpec.AbsPathify(filepath.Join(contentDir, targetPath)), s
}
}
// FindArchetype takes a given kind/archetype of content and returns the path

@@ -83,7 +83,6 @@ var (
)
func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archetypeFilename string) ([]byte, error) {
var (
archetypeContent []byte
archetypeTemplate []byte

@@ -145,5 +144,4 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archety
archetypeContent = []byte(archetypeShortcodeReplacementsPost.Replace(buff.String()))
return archetypeContent, nil
}
|
|||
package create_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
@ -23,8 +24,6 @@ import (
|
|||
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
|
||||
"fmt"
|
||||
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
|
@ -35,7 +34,6 @@ import (
|
|||
)
|
||||
|
||||
func TestNewContent(t *testing.T) {
|
||||
|
||||
cases := []struct {
|
||||
kind string
|
||||
path string
|
||||
|
@ -59,7 +57,8 @@ func TestNewContent(t *testing.T) {
|
|||
`title = "GO"`,
|
||||
"{{< myshortcode >}}",
|
||||
"{{% myshortcode %}}",
|
||||
"{{</* comment */>}}\n{{%/* comment */%}}"}}, // shortcodes
|
||||
"{{</* comment */>}}\n{{%/* comment */%}}",
|
||||
}}, // shortcodes
|
||||
}
|
||||
|
||||
for i, cas := range cases {
|
||||
|
@ -140,7 +139,6 @@ i18n: {{ T "hugo" }}
|
|||
c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post"), qt.IsNil)
|
||||
cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`)
|
||||
cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
|
||||
|
||||
}
|
||||
|
||||
func initFs(fs afero.Fs) error {
|
||||
|
@ -248,7 +246,6 @@ func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string {
|
|||
}
|
||||
|
||||
func newTestCfg(c *qt.C, mm afero.Fs) (*viper.Viper, *hugofs.Fs) {
|
||||
|
||||
cfg := `
|
||||
|
||||
theme = "mytheme"
|
||||
|
@ -281,5 +278,4 @@ other = "Hugo Rokkar!"`), 0755), qt.IsNil)
|
|||
c.Assert(err, qt.IsNil)
|
||||
|
||||
return v, hugofs.NewFrom(mm, v)
|
||||
|
||||
}
|
||||
|
|
|
@@ -466,7 +466,7 @@ func (lf *localFile) MD5() []byte {
// knownHiddenDirectory checks if the specified name is a well known
// hidden directory.
func knownHiddenDirectory(name string) bool {
var knownDirectories = []string{
knownDirectories := []string{
".well-known",
}

@@ -697,7 +697,6 @@ func findDiffs(localFiles map[string]*localFile, remoteFiles map[string]*blob.Li
//
// The subslices are sorted by Local.SlashPath.
func applyOrdering(ordering []*regexp.Regexp, uploads []*fileToUpload) [][]*fileToUpload {
// Sort the whole slice by Local.SlashPath first.
sort.Slice(uploads, func(i, j int) bool { return uploads[i].Local.SlashPath < uploads[j].Local.SlashPath })

@@ -112,7 +112,6 @@ func (m *matcher) Matches(path string) bool {
// decode creates a config from a given Hugo configuration.
func decodeConfig(cfg config.Provider) (deployConfig, error) {
var (
mediaTypesConfig []map[string]interface{}
dcfg deployConfig

@@ -211,7 +211,6 @@ func TestFindDiffs(t *testing.T) {
}
func TestWalkLocal(t *testing.T) {
tests := map[string]struct {
Given []string
Expect []string

@@ -355,7 +354,10 @@ func TestLocalFile(t *testing.T) {
MediaTypesConfig: []map[string]interface{}{
{
"hugo/custom": map[string]interface{}{
"suffixes": []string{"hugo"}}}},
"suffixes": []string{"hugo"},
},
},
},
WantContent: contentBytes,
WantSize: contentLen,
WantMD5: contentMD5[:],
2
deps/deps.go
vendored

@@ -231,7 +231,6 @@ func New(cfg DepsCfg) (*Deps, error) {
}
ps, err := helpers.NewPathSpec(fs, cfg.Language, logger)
if err != nil {
return nil, errors.Wrap(err, "create PathSpec")
}

@@ -347,7 +346,6 @@ func (d Deps) ForLanguage(cfg DepsCfg, onCreated func(d *Deps) error) (*Deps, er
d.BuildStartListeners = &Listeners{}
return &d, nil
}
// DepsCfg contains configuration options that can be used to configure Hugo
2
deps/deps_test.go
vendored

@@ -20,7 +20,6 @@ import (
)
func TestBuildFlags(t *testing.T) {
c := qt.New(t)
var bf BuildState
bf.Incr()

@@ -28,5 +27,4 @@ func TestBuildFlags(t *testing.T) {
bf.Incr()
c.Assert(bf.Incr(), qt.Equals, 4)
}
@@ -20,6 +20,7 @@ package helpers
import (
"bytes"
"html/template"
"strings"
"unicode"
"unicode/utf8"

@@ -33,8 +34,6 @@ import (
bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/config"
"strings"
)
// SummaryDivider denotes where content summarization should end. The default is "<!--more-->".

@@ -66,7 +65,6 @@ type ContentSpec struct {
// NewContentSpec returns a ContentSpec initialized
// with the appropriate fields from the given config.Provider.
func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.Fs) (*ContentSpec, error) {
spec := &ContentSpec{
summaryLength: cfg.GetInt("summaryLength"),
BuildFuture: cfg.GetBool("buildFuture"),

@@ -81,7 +79,6 @@ func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.
ContentFs: contentFs,
Logger: logger,
})
if err != nil {
return nil, err
}

@@ -112,7 +109,6 @@ var stripHTMLReplacer = strings.NewReplacer("\n", " ", "</p>", "\n", "<br>", "\n
// StripHTML accepts a string, strips out all HTML tags and returns it.
func StripHTML(s string) string {
// Shortcut strings with no tags in them
if !strings.ContainsAny(s, "<>") {
return s

@@ -118,7 +118,6 @@ func TestNewContentSpec(t *testing.T) {
c.Assert(spec.BuildFuture, qt.Equals, true)
c.Assert(spec.BuildExpired, qt.Equals, true)
c.Assert(spec.BuildDrafts, qt.Equals, true)
}
var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20)

@@ -255,7 +254,6 @@ func TestExtractNoTOC(t *testing.T) {
var totalWordsBenchmarkString = strings.Repeat("Hugo Rocks ", 200)
func TestTotalWords(t *testing.T) {
for i, this := range []struct {
s string
words int
@@ -11,9 +11,7 @@ import (
// This is is just some helpers used to create some JSON used in the Hugo docs.
func init() {
docsProvider := func() docshelper.DocProvider {
var chromaLexers []interface{}
sort.Sort(lexers.Registry.Lexers)

@@ -50,7 +48,6 @@ func init() {
}
return docshelper.DocProvider{"chroma": map[string]interface{}{"lexers": chromaLexers}}
}
docshelper.AddDocProviderFunc(docsProvider)

@@ -93,5 +93,4 @@ func initEmoji() {
emojiMaxSize = len(k)
}
}
}

@@ -74,7 +74,6 @@ func TestEmojiCustom(t *testing.T) {
// Hugo have a byte slice, wants a byte slice and doesn't mind if the original is modified.
func BenchmarkEmojiKyokomiFprint(b *testing.B) {
f := func(in []byte) []byte {
buff := bufferpool.GetBuffer()
defer bufferpool.PutBuffer(buff)

@@ -89,7 +88,6 @@ func BenchmarkEmojiKyokomiFprint(b *testing.B) {
}
func BenchmarkEmojiKyokomiSprint(b *testing.B) {
f := func(in []byte) []byte {
return []byte(emoji.Sprint(string(in)))
}

@@ -102,7 +100,6 @@ func BenchmarkHugoEmoji(b *testing.B) {
}
func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
type input struct {
in []byte
expect []byte

@@ -119,8 +116,8 @@ func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
{"No smiles for you or " + strings.Repeat("you ", 1000), "No smiles for you or " + strings.Repeat("you ", 1000)},
}
var in = make([]input, b.N*len(data))
var cnt = 0
in := make([]input, b.N*len(data))
cnt := 0
for i := 0; i < b.N; i++ {
for _, this := range data {
in[cnt] = input{[]byte(this.input), []byte(this.expect)}

@@ -142,6 +139,5 @@ func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
b.Fatalf("[%d] emoji std, got \n%q but expected \n%q", j, result, currIn.expect)
}
}
}
}
@@ -157,7 +157,6 @@ func ReaderToString(lines io.Reader) string {
// ReaderContains reports whether subslice is within r.
func ReaderContains(r io.Reader, subslice []byte) bool {
if r == nil || len(subslice) == 0 {
return false
}

@@ -345,7 +344,6 @@ func InitLoggers() {
func Deprecated(item, alternative string, err bool) {
if err {
DistinctErrorLog.Printf("%s is deprecated and will be removed in Hugo %s. %s", item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
} else {
DistinctWarnLog.Printf("%s is deprecated and will be removed in a future release. %s", item, alternative)
}

@@ -219,7 +219,6 @@ func TestGetTitleFunc(t *testing.T) {
c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
c.Assert(GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow")
c.Assert(GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow")
}
func BenchmarkReaderContains(b *testing.B) {

@@ -354,7 +353,6 @@ func BenchmarkMD5FromFileFast(b *testing.B) {
}
})
}
}
func BenchmarkUniqueStrings(b *testing.B) {

@@ -406,7 +404,6 @@ func BenchmarkUniqueStrings(b *testing.B) {
}
}
})
}
func TestHashString(t *testing.T) {
@@ -36,10 +36,8 @@ import (
"github.com/spf13/afero"
)
var (
// ErrThemeUndefined is returned when a theme has not be defined by the user.
ErrThemeUndefined = errors.New("no theme set")
)
var ErrThemeUndefined = errors.New("no theme set")
// filepathPathBridge is a bridge for common functionality in filepath vs path
type filepathPathBridge interface {

@@ -169,7 +167,6 @@ func ReplaceExtension(path string, newExt string) string {
}
func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
for _, currentPath := range possibleDirectories {
if strings.HasPrefix(inPath, currentPath) {
return strings.TrimPrefix(inPath, currentPath), nil

@@ -281,7 +278,6 @@ func fileAndExt(in string, b filepathPathBridge) (name string, ext string) {
}
func extractFilename(in, ext, base, pathSeparator string) (name string) {
// No file name cases. These are defined as:
// 1. any "in" path that ends in a pathSeparator
// 2. any "base" consisting of just an pathSeparator

@@ -299,7 +295,6 @@ func extractFilename(in, ext, base, pathSeparator string) (name string) {
name = base
}
return
}
// GetRelativePath returns the relative path of a given path.

@@ -474,21 +469,18 @@ func ExtractRootPaths(paths []string) []string {
r[i] = root
}
return r
}
// FindCWD returns the current working directory from where the Hugo
// executable is run.
func FindCWD() (string, error) {
serverFile, err := filepath.Abs(os.Args[0])
if err != nil {
return "", fmt.Errorf("can't get absolute path for executable: %v", err)
}
path := filepath.Dir(serverFile)
realFile, err := filepath.EvalSymlinks(serverFile)
if err != nil {
if _, err = os.Stat(serverFile + ".exe"); err == nil {
realFile = filepath.Clean(serverFile + ".exe")

@@ -516,7 +508,6 @@ func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error {
})
return w.Walk()
}
// LstatIfPossible can be used to call Lstat if possible, else Stat.

@@ -555,7 +546,6 @@ func OpenFilesForWriting(fs afero.Fs, filenames ...string) (io.WriteCloser, erro
}
return hugio.NewMultiWriteCloser(writeClosers...), nil
}
// OpenFileForWriting opens or creates the given file. If the target directory

@@ -598,7 +588,6 @@ func GetCacheDir(fs afero.Fs, cfg config.Provider) (string, error) {
// Fall back to a cache in /tmp.
return GetTempDir("hugo_cache", fs), nil
}
func getCacheDir(cfg config.Provider) string {

@@ -614,7 +603,6 @@ func getCacheDir(cfg config.Provider) string {
// is this project:
// https://github.com/philhawksworth/content-shards/blob/master/gulpfile.js
return "/opt/build/cache/hugo_cache/"
}
// This will fall back to an hugo_cache folder in the tmp dir, which should work fine for most CI
@@ -184,7 +184,6 @@ func TestGetDottedRelativePath(t *testing.T) {
for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
doTestGetDottedRelativePath(f, t)
}
}
func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {

@@ -422,7 +421,6 @@ func createTempDirWithZeroLengthFiles() (string, error) {
}
// the dir now has one, zero length file in it
return d, nil
}
func createTempDirWithNonZeroLengthFiles() (string, error) {

@@ -451,7 +449,6 @@ func createTempDirWithNonZeroLengthFiles() (string, error) {
// the dir now has one, zero length file in it
return d, nil
}
func deleteTempDir(d string) {

@@ -490,7 +487,6 @@ func TestExists(t *testing.T) {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expectedErr, err)
}
}
}
func TestAbsPathify(t *testing.T) {

@@ -544,7 +540,6 @@ func TestAbsPathify(t *testing.T) {
}
}
}
}
func TestExtNoDelimiter(t *testing.T) {

@@ -611,15 +606,12 @@ func TestFileAndExt(t *testing.T) {
t.Errorf("Test %d failed. Expected extension %q got %q.", i, d.expectedExt, ext)
}
}
}
func TestPathPrep(t *testing.T) {
}
func TestPrettifyPath(t *testing.T) {
}
func TestExtractAndGroupRootPaths(t *testing.T) {

@@ -642,16 +634,19 @@ func TestExtractAndGroupRootPaths(t *testing.T) {
// Make sure the original is preserved
c.Assert(in, qt.DeepEquals, inCopy)
}
func TestExtractRootPaths(t *testing.T) {
tests := []struct {
input []string
expected []string
}{{[]string{filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b",
filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//")},
[]string{"a", "a", "b", "c", "d", "e"}}}
}{{
[]string{
filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b",
filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//"),
},
[]string{"a", "a", "b", "c", "d", "e"},
}}
for _, test := range tests {
output := ExtractRootPaths(test.input)
@@ -45,7 +45,6 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger) (*Pa
// NewPathSpecWithBaseBaseFsProvided creats a new PathSpec from the given filesystems and language.
// If an existing BaseFs is provided, parts of that is reused.
func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) {
p, err := paths.New(fs, cfg)
if err != nil {
return nil, err

@@ -85,5 +84,4 @@ func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string {
baseURL += "/"
}
return baseURL + link
}

@@ -56,5 +56,4 @@ func TestNewPathSpecFromConfig(t *testing.T) {
c.Assert(p.BaseURL.String(), qt.Equals, "http://base.com")
c.Assert(p.ThemesDir, qt.Equals, "thethemes")
c.Assert(p.WorkingDir, qt.Equals, "thework")
}
@@ -83,7 +83,6 @@ func (s *ProcessingStats) Table(w io.Writer) {
table.SetHeader([]string{"", s.Name})
table.SetBorder(false)
table.Render()
}
// ProcessingStatsTable writes a table-formatted representation of stats to w.

@@ -108,7 +107,6 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
} else {
data[j] = append(data[j], strconv.Itoa(int(tv.val)))
}
}
}

@@ -119,5 +117,4 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
table.SetHeader(names)
table.SetBorder(false)
table.Render()
}

@@ -32,7 +32,6 @@ func newTestCfgFor(fs *hugofs.Fs) *viper.Viper {
v.SetFs(fs.Source)
return v
}
func newTestCfg() *viper.Viper {
@@ -103,7 +103,6 @@ func SanitizeURLKeepTrailingSlash(in string) string {
// urlize: vim-text-editor
func (p *PathSpec) URLize(uri string) string {
return p.URLEscape(p.MakePathSanitized(uri))
}
// URLizeFilename creates an URL from a filename by esacaping unicode letters

@@ -130,7 +129,6 @@ func (p *PathSpec) URLEscape(uri string) string {
// path: post/how-i-blog
// result: http://spf13.com/post/how-i-blog
func MakePermalink(host, plink string) *url.URL {
base, err := url.Parse(host)
if err != nil {
panic(err)

@@ -275,7 +273,6 @@ func (p *PathSpec) RelURL(in string, addLanguage bool) string {
// For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set.
func AddContextRoot(baseURL, relativePath string) string {
url, err := url.Parse(baseURL)
if err != nil {
panic(err)

@@ -23,7 +23,6 @@ import (
)
func TestURLize(t *testing.T) {
v := newTestCfg()
l := langs.NewDefaultLanguage(v)
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)

@@ -113,7 +112,6 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
} else {
expected = strings.Replace(expected, "MULTI", lang+"/", 1)
}
} else {
expected = strings.Replace(expected, "MULTI", "", 1)
}

@@ -294,7 +292,6 @@ func TestURLPrep(t *testing.T) {
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
}
}
}
func TestAddContextRoot(t *testing.T) {
@@ -48,7 +48,6 @@ func BailOut(after time.Duration) {
runtime.Stack(buf, true)
panic(string(buf))
})
}
// Rnd is used only for testing.
@@ -38,11 +38,9 @@ func decorateDirs(fs afero.Fs, meta FileMeta) afero.Fs {
ffs.decorate = decorator
return ffs
}
func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {

@@ -54,7 +52,6 @@ func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
ffs.decorate = decorator
return ffs
}
// DecorateBasePathFs adds Path info to files and directories in the

@@ -81,7 +78,6 @@ func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs {
// NewBaseFileDecorator decorates the given Fs to provide the real filename
// and an Opener func.
func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {

@@ -128,7 +124,6 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero
}
return fim, nil
}
ffs.decorate = decorator

@@ -161,7 +156,6 @@ func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) {
}
return fs.decorate(fi, name)
}
func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {

@@ -86,6 +86,7 @@ func (f FileMeta) OriginalFilename() string {
func (f FileMeta) SkipDir() bool {
return f.GetBool(metaKeySkipDir)
}
func (f FileMeta) TranslationBaseName() string {
return f.stringV(metaKeyTranslationBaseName)
}

@@ -236,7 +237,6 @@ func (fi *fileInfoMeta) Meta() FileMeta {
}
func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo {
if fim, ok := fi.(FileMetaInfo); ok {
mergeFileMeta(fim.Meta(), m)
}

@@ -312,7 +312,6 @@ func decorateFileInfo(
fi os.FileInfo,
fs afero.Fs, opener func() (afero.File, error),
filename, filepath string, inMeta FileMeta) FileMetaInfo {
var meta FileMeta
var fim FileMetaInfo

@@ -334,7 +333,6 @@ func decorateFileInfo(
mergeFileMeta(inMeta, meta)
return fim
}
func isSymlink(fi os.FileInfo) bool {

@@ -379,6 +377,5 @@ func sortFileInfos(fis []os.FileInfo) {
sort.Slice(fis, func(i, j int) bool {
fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo)
return fimi.Meta().Filename() < fimj.Meta().Filename()
})
}
@@ -42,12 +42,14 @@ var (
"rest", "rst",
"mmark",
"org",
"pandoc", "pdc"}
"pandoc", "pdc",
}
contentFileExtensionsSet map[string]bool
htmlFileExtensions = []string{
"html", "htm"}
"html", "htm",
}
htmlFileExtensionsSet map[string]bool
)

@@ -39,7 +39,6 @@ func TestIsHTMLContent(t *testing.T) {
c.Assert(isHTMLContent(strings.NewReader(" <!--")), qt.Equals, true)
c.Assert(isHTMLContent(strings.NewReader(" ---<")), qt.Equals, false)
c.Assert(isHTMLContent(strings.NewReader(" foo <")), qt.Equals, false)
}
func TestComponentFolders(t *testing.T) {

@@ -57,5 +56,4 @@ func TestComponentFolders(t *testing.T) {
c.Assert(IsComponentFolder("content"), qt.Equals, true)
c.Assert(IsComponentFolder("foo"), qt.Equals, false)
c.Assert(IsComponentFolder(""), qt.Equals, false)
}
@@ -35,9 +35,7 @@ var (
)
func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis {
if fi.IsDir() {
filename := filepath.Join(name, fi.Name())

@@ -104,11 +102,9 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
applyPerSource: applyMeta,
applyAll: all,
}, nil
}
func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis {
if fi.IsDir() {

@@ -123,7 +119,6 @@ func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
}
return ffs, nil
}
// FilterFs is an ordered composite filesystem.

@@ -144,7 +139,6 @@ func (fs *FilterFs) Chtimes(n string, a, m time.Time) error {
func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, b, err := lstatIfPossible(fs.fs, name)
if err != nil {
return nil, false, err
}

@@ -157,7 +151,6 @@ func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fs.applyFilters(parent, -1, fi)
return fi, b, nil
}
func (fs *FilterFs) Mkdir(n string, p os.FileMode) error {

@@ -182,7 +175,6 @@ func (fs *FilterFs) Open(name string) (afero.File, error) {
File: f,
ffs: fs,
}, nil
}
func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {

@@ -255,7 +247,6 @@ func (fs *FilterFs) applyFilters(name string, count int, fis ...os.FileInfo) ([]
}
return fis, nil
}
type filterDir struct {

@@ -313,7 +304,6 @@ func langInfoFrom(languages map[string]int, name string) (string, string, string
}
return lang, translationBaseName, translationBaseNameWithExt
}
func printFs(fs afero.Fs, path string, w io.Writer) {

@@ -21,7 +21,6 @@ import (
)
func TestLangInfoFrom(t *testing.T) {
langs := map[string]int{
"sv": 10,
"en": 20,

@@ -44,5 +43,4 @@ func TestLangInfoFrom(t *testing.T) {
v1, v2, v3 := langInfoFrom(langs, test.input)
c.Assert([]string{v1, v2, v3}, qt.DeepEquals, test.expected)
}
}
@@ -23,10 +23,8 @@ import (
"github.com/spf13/afero"
)
var (
// Os points to the (real) Os filesystem.
Os = &afero.OsFs{}
)
var Os = &afero.OsFs{}
// Fs abstracts the file system to separate source and destination file systems
// and allows both to be mocked for testing.

@@ -31,7 +31,6 @@ func TestNewDefault(t *testing.T) {
c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs))
c.Assert(f.Os, qt.Not(qt.IsNil))
c.Assert(f.WorkingDir, qt.IsNil)
}
func TestNewMem(t *testing.T) {

@@ -57,5 +56,4 @@ func TestWorkingDir(t *testing.T) {
c.Assert(f.WorkingDir, qt.Not(qt.IsNil))
c.Assert(f.WorkingDir, hqt.IsSameType, new(afero.BasePathFs))
}
@@ -81,5 +81,4 @@ func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error
}
return nil
}

@@ -53,7 +53,6 @@ func GetGlob(pattern string) (glob.Glob, error) {
globMu.Unlock()
return eg.glob, eg.err
}
func NormalizePath(p string) string {

@@ -98,5 +97,4 @@ func HasGlobChar(s string) bool {
}
}
return false
}

@@ -32,7 +32,6 @@ func TestResolveRootDir(t *testing.T) {
{"dat?a/foo.json", ""},
{"a/b[a-c]/foo.json", "a"},
} {
c.Assert(ResolveRootDir(test.input), qt.Equals, test.expected)
}
}

@@ -46,7 +45,6 @@ func TestFilterGlobParts(t *testing.T) {
}{
{[]string{"a", "*", "c"}, []string{"a", "c"}},
} {
c.Assert(FilterGlobParts(test.input), qt.DeepEquals, test.expected)
}
}

@@ -63,7 +61,6 @@ func TestNormalizePath(t *testing.T) {
{filepath.FromSlash("./FOO.json"), "foo.json"},
{"//", ""},
} {
c.Assert(NormalizePath(test.input), qt.Equals, test.expected)
}
}

@@ -82,5 +79,4 @@ func BenchmarkGetGlob(b *testing.B) {
b.Fatal(err)
}
}
}

@@ -57,5 +57,4 @@ func TestGlob(t *testing.T) {
c.Assert(collect("*.json"), qt.HasLen, 1)
c.Assert(collect("**.xml"), qt.HasLen, 1)
c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2)
}

@@ -22,9 +22,7 @@ import (
"github.com/spf13/afero"
)
var (
_ afero.Fs = (*md5HashingFs)(nil)
)
var _ afero.Fs = (*md5HashingFs)(nil)
// FileHashReceiver will receive the filename an the content's MD5 sum on file close.
type FileHashReceiver interface {

@@ -49,5 +49,4 @@ func TestHashingFs(t *testing.T) {
c.Assert(err, qt.IsNil)
c.Assert(f.Close(), qt.IsNil)
c.Assert(observer.sum, qt.Equals, "d41d8cd98f00b204e9800998ecf8427e")
}
@@ -23,9 +23,7 @@ import (
"github.com/spf13/afero"
)
var (
ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
)
var ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
// NewNoSymlinkFs creates a new filesystem that prevents symlinks.
func NewNoSymlinkFs(fs afero.Fs, logger loggers.Logger, allowFiles bool) afero.Fs {

@@ -79,7 +77,6 @@ func (fs *noSymlinkFs) Stat(name string) (os.FileInfo, error) {
}
func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) {
var (
fi os.FileInfo
wasLstat bool
Some files were not shown because too many files have changed in this diff.