all: Format code with gofumpt

See https://github.com/mvdan/gofumpt
Bjørn Erik Pedersen 2020-12-02 13:23:25 +01:00
parent 32471b57bd
commit d90e37e0c6
442 changed files with 1426 additions and 2254 deletions
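
The snippet below is a minimal, hand-written sketch (not taken from any file in this commit; the "example" type and the strings are illustrative only) of the rewrite patterns that recur across these 442 files: adjacent one-line declarations grouped into a single var block, single-spec var groups collapsed to one line, "var x = make(...)" replaced by "x := make(...)" where the type is inferred, multi-element composite literals split one element per line with a trailing comma, and stray blank lines removed.

package main

import "fmt"

// Adjacent assertions are grouped into a single var block; conversely, a
// group with only one spec, such as "var ( _ fmt.Stringer = (*example)(nil) )",
// is collapsed back to a plain one-line var statement.
var (
	_ fmt.Stringer = (*example)(nil)
	_ error        = (*example)(nil)
)

type example struct{}

func (e *example) String() string { return "example" }
func (e *example) Error() string  { return "example" }

func main() {
	// Before: var msgs = make(chan string, 1)
	// After: the short declaration form is used when the type is inferred.
	msgs := make(chan string, 1)
	msgs <- "Hugo is great!"

	// Composite literals split across lines keep one element per line
	// and a trailing comma.
	exceptSuffix := []string{
		".md", ".markdown",
		".html", ".htm",
	}
	fmt.Println(<-msgs, exceptSuffix)
}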


@@ -158,7 +158,6 @@ func (c *Cache) ReadOrCreate(id string,
 err = create(info, f)
 return
 }
 // GetOrCreate tries to get the file with the given id from cache. If not found or expired, create will
@@ -220,7 +219,6 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
 return info, nil, err
 }
 return info, b, nil
 }
 // GetBytes gets the file content with the given id from the cahce, nil if none found.
@@ -276,7 +274,6 @@ func (c *Cache) getOrRemove(id string) hugio.ReadSeekCloser {
 }
 f, err := c.Fs.Open(id)
 if err != nil {
 return nil
 }
@@ -299,7 +296,6 @@ func (c *Cache) getString(id string) string {
 defer c.nlocker.Unlock(id)
 f, err := c.Fs.Open(id)
 if err != nil {
 return ""
 }
@@ -307,7 +303,6 @@ func (c *Cache) getString(id string) string {
 b, _ := ioutil.ReadAll(f)
 return string(b)
 }
 // Caches is a named set of caches.


@@ -69,7 +69,6 @@ dir = "/path/to/c3"
 c3 := decoded["images"]
 c.Assert(c3.MaxAge, qt.Equals, time.Duration(-1))
 c.Assert(c3.Dir, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
 }
 func TestDecodeConfigIgnoreCache(t *testing.T) {
@@ -110,7 +109,6 @@ dir = "/path/to/c3"
 for _, v := range decoded {
 c.Assert(v.MaxAge, qt.Equals, time.Duration(0))
 }
 }
 func TestDecodeConfigDefault(t *testing.T) {
@@ -178,7 +176,6 @@ dir = "/"
 _, err = DecodeConfig(fs, cfg)
 c.Assert(err, qt.Not(qt.IsNil))
 }
 func newTestConfig() *viper.Viper {


@@ -110,7 +110,6 @@ func (c *Cache) Prune(force bool) (int, error) {
 }
 func (c *Cache) pruneRootDir(force bool) (int, error) {
 info, err := c.Fs.Stat(c.pruneAllRootDir)
 if err != nil {
 if os.IsNotExist(err) {
@@ -124,5 +123,4 @@ func (c *Cache) pruneRootDir(force bool) (int, error) {
 }
 return hugofs.MakeReadableAndRemoveAllModulePkgDir(c.Fs, c.pruneAllRootDir)
 }


@@ -107,5 +107,4 @@ dir = ":resourceDir/_gen"
 }
 }
 }


@@ -183,7 +183,6 @@ dir = ":cacheDir/c"
 c.Assert(string(b), qt.Equals, "Hugo is great!")
 }
 }
 func TestFileCacheConcurrent(t *testing.T) {
@@ -253,7 +252,6 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
 var result string
 rf := func(failLevel int) func(info ItemInfo, r io.ReadSeeker) error {
 return func(info ItemInfo, r io.ReadSeeker) error {
 if failLevel > 0 {
 if failLevel > 1 {
@@ -347,5 +345,4 @@ func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec
 p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, nil)
 c.Assert(err, qt.IsNil)
 return p
 }


@@ -49,7 +49,6 @@ func (c *Cache) Clear() {
 c.cache = make(map[string]cacheEntry)
 c.nlocker = locker.NewLocker()
 }
 // GetOrCreate tries to get the value with the given cache key, if not found


@@ -58,7 +58,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 var methods Methods
-var excludes = make(map[string]bool)
+excludes := make(map[string]bool)
 if len(exclude) > 0 {
 for _, m := range c.MethodsFromTypes(exclude, nil) {
@@ -99,11 +99,9 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 name = pkgPrefix + name
 return name, pkg
 }
 for _, t := range include {
 for i := 0; i < t.NumMethod(); i++ {
 m := t.Method(i)
@@ -153,7 +151,6 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 methods = append(methods, method)
 }
 }
 sort.SliceStable(methods, func(i, j int) bool {
@@ -167,16 +164,13 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 }
 return wi < wj
 })
 return methods
 }
 func (c *Inspector) parseSource() {
 c.init.Do(func() {
 if !strings.Contains(c.ProjectRootDir, "hugo") {
 panic("dir must be set to the Hugo root")
 }
@@ -200,7 +194,6 @@ func (c *Inspector) parseSource() {
 filenames = append(filenames, path)
 return nil
 })
 for _, filename := range filenames {
@@ -230,7 +223,6 @@ func (c *Inspector) parseSource() {
 c.methodWeight[iface] = weights
 }
 }
 }
 return true
 })
@@ -247,7 +239,6 @@ func (c *Inspector) parseSource() {
 }
 }
 }
 })
 }
@@ -385,7 +376,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
 fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
 var methods Methods
-var excludeRes = make([]*regexp.Regexp, len(excludes))
+excludeRes := make([]*regexp.Regexp, len(excludes))
 for i, exclude := range excludes {
 excludeRes[i] = regexp.MustCompile(exclude)
@@ -450,7 +441,6 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
 }
 return sb.String(), pkgImports
 }
 func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
@@ -481,7 +471,6 @@ func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
 }
 return methodNames
 }
 func firstToLower(name string) string {
@@ -544,5 +533,4 @@ func varName(name string) string {
 }
 return name
 }


@@ -25,7 +25,6 @@ import (
 )
 func TestMethods(t *testing.T) {
 var (
 zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem()
 zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
@@ -58,7 +57,6 @@ func TestMethods(t *testing.T) {
 methodsStr := fmt.Sprint(methods)
 c.Assert(methodsStr, qt.Contains, "MethodEmbed3(arg0 string) string")
 })
 t.Run("ToMarshalJSON", func(t *testing.T) {
@@ -76,9 +74,7 @@ func TestMethods(t *testing.T) {
 c.Assert(pkg, qt.Contains, "encoding/json")
 fmt.Println(pkg)
 })
 }
 type I interface {


@@ -26,7 +26,8 @@ type checkCmd struct {
 }
 func newCheckCmd() *checkCmd {
-return &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
+return &checkCmd{baseCmd: &baseCmd{
+cmd: &cobra.Command{
 Use: "check",
 Short: "Contains some verification checks",
 },


@@ -24,7 +24,8 @@ type checkCmd struct {
 }
 func newCheckCmd() *checkCmd {
-cc := &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
+cc := &checkCmd{baseCmd: &baseCmd{
+cmd: &cobra.Command{
 Use: "check",
 Short: "Contains some verification checks",
 },


@@ -16,24 +16,22 @@ package commands
 import (
 "bytes"
 "errors"
+"io/ioutil"
+"os"
+"path/filepath"
+"regexp"
 "sync"
+"time"
 hconfig "github.com/gohugoio/hugo/config"
 "golang.org/x/sync/semaphore"
-"io/ioutil"
 "github.com/gohugoio/hugo/common/herrors"
 "github.com/gohugoio/hugo/common/hugo"
 jww "github.com/spf13/jwalterweatherman"
-"os"
-"path/filepath"
-"regexp"
-"time"
 "github.com/gohugoio/hugo/common/loggers"
 "github.com/gohugoio/hugo/config"
@@ -156,7 +154,6 @@ func (c *commandeer) initFs(fs *hugofs.Fs) error {
 }
 func newCommandeer(mustHaveConfigFile, running bool, h *hugoBuilderCommon, f flagsToConfigHandler, cfgInit func(c *commandeer) error, subCmdVs ...*cobra.Command) (*commandeer, error) {
 var rebuildDebouncer func(f func())
 if running {
 // The time value used is tested with mass content replacements in a fairly big Hugo site.
@@ -248,7 +245,6 @@ func (f *fileChangeDetector) PrepareNew() {
 }
 func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 if c.DepsCfg == nil {
 c.DepsCfg = &deps.DepsCfg{}
 }
@@ -277,7 +273,6 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 environment := c.h.getEnvironment(running)
 doWithConfig := func(cfg config.Provider) error {
 if c.ftch != nil {
 c.ftch.flagsToConfig(cfg)
 }
@@ -309,7 +304,8 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 Filename: c.h.cfgFile,
 AbsConfigDir: c.h.getConfigDir(dir),
 Environ: os.Environ(),
-Environment: environment},
+Environment: environment,
+},
 cfgSetAndInit,
 doWithConfig)
@@ -402,7 +398,6 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 h, err = hugolib.NewHugoSites(*c.DepsCfg)
 c.hugoSites = h
 close(c.created)
 })
 if err != nil {
@@ -418,5 +413,4 @@ func (c *commandeer) loadConfig(mustHaveConfigFile, running bool) error {
 cfg.Logger.Infoln("Using config file:", config.ConfigFileUsed())
 return nil
 }


@@ -88,6 +88,7 @@ var _ commandsBuilderGetter = (*baseBuilderCmd)(nil)
 type commandsBuilderGetter interface {
 getCommandsBuilder() *commandsBuilder
 }
 type baseBuilderCmd struct {
 *baseCmd
 *commandsBuilder
@@ -138,7 +139,6 @@ func (c *nilCommand) getCommand() *cobra.Command {
 }
 func (c *nilCommand) flagsToConfig(cfg config.Provider) {
 }
 func (b *commandsBuilder) newHugoCmd() *hugoCmd {


@@ -35,7 +35,6 @@ import (
 )
 func TestExecute(t *testing.T) {
 c := qt.New(t)
 createSite := func(c *qt.C) (string, func()) {
@@ -124,7 +123,6 @@ func TestExecute(t *testing.T) {
 c.Assert(config, qt.Contains, "baseURL = \"http://example.org/\"")
 checkNewSiteInited(c, siteDir)
 })
 }
 func checkNewSiteInited(c *qt.C, basepath string) {
@@ -185,7 +183,8 @@ func TestFlags(t *testing.T) {
 },
 {
 name: "Persistent flags",
-args: []string{"server",
+args: []string{
+"server",
 "--config=myconfig.toml",
 "--configDir=myconfigdir",
 "--contentDir=mycontent",
@@ -235,12 +234,12 @@ func TestFlags(t *testing.T) {
 // The flag is named i18n-warnings
 c.Assert(cfg.GetBool("logI18nWarnings"), qt.Equals, true)
-}}}
+},
+},
+}
 for _, test := range tests {
 c.Run(test.name, func(c *qt.C) {
 b := newCommandsBuilder()
 root := b.addAll().build()
@@ -257,11 +256,9 @@ func TestFlags(t *testing.T) {
 test.check(c, b.commands[0].(*serverCmd))
 })
 }
 }
 func TestCommandsExecute(t *testing.T) {
 c := qt.New(t)
 dir, clean, err := createSimpleTestSite(t, testSiteConfig{})
@@ -330,7 +327,6 @@ func TestCommandsExecute(t *testing.T) {
 }
 }
 }
 type testSiteConfig struct {
@@ -399,7 +395,6 @@ Environment: {{ hugo.Environment }}
 `)
 return d, clean, nil
 }
 func writeFile(t *testing.T, filename, content string) {


@@ -16,6 +16,7 @@ package commands
 import (
 "bytes"
 "fmt"
+"path/filepath"
 "strings"
 "time"
@@ -34,14 +35,10 @@ import (
 "github.com/gohugoio/hugo/hugolib"
-"path/filepath"
 "github.com/spf13/cobra"
 )
-var (
-_ cmder = (*convertCmd)(nil)
-)
+var _ cmder = (*convertCmd)(nil)
 type convertCmd struct {
 outputDir string


@@ -27,7 +27,8 @@ type envCmd struct {
 }
 func newEnvCmd() *envCmd {
-return &envCmd{baseCmd: newBaseCmd(&cobra.Command{
+return &envCmd{
+baseCmd: newBaseCmd(&cobra.Command{
 Use: "env",
 Short: "Print Hugo version and environment info",
 Long: `Print Hugo version and environment info. This is useful in Hugo bug reports.`,


@@ -59,7 +59,6 @@ or just source them in directly:
 }
 err := cmd.Root().GenBashCompletionFile(cc.autocompleteTarget)
 if err != nil {
 return err
 }


@@ -22,9 +22,7 @@ import (
 "github.com/spf13/cobra"
 )
-var (
-_ cmder = (*genChromaStyles)(nil)
-)
+var _ cmder = (*genChromaStyles)(nil)
 type genChromaStyles struct {
 style string


@@ -23,9 +23,7 @@ import (
 "github.com/spf13/cobra"
 )
-var (
-_ cmder = (*genDocsHelper)(nil)
-)
+var _ cmder = (*genDocsHelper)(nil)
 type genDocsHelper struct {
 target string
@@ -70,5 +68,4 @@ func (g *genDocsHelper) generate() error {
 fmt.Println("Done!")
 return nil
 }


@@ -19,10 +19,16 @@ import (
 "context"
 "fmt"
 "io/ioutil"
+"os"
 "os/signal"
+"path/filepath"
+"runtime"
 "runtime/pprof"
 "runtime/trace"
+"strings"
 "sync/atomic"
+"syscall"
+"time"
 "github.com/gohugoio/hugo/hugofs"
@@ -34,18 +40,10 @@ import (
 "github.com/gohugoio/hugo/common/loggers"
 "github.com/gohugoio/hugo/common/terminal"
-"syscall"
 "github.com/gohugoio/hugo/hugolib/filesystems"
 "golang.org/x/sync/errgroup"
-"os"
-"path/filepath"
-"runtime"
-"strings"
-"time"
 "github.com/gohugoio/hugo/config"
 flag "github.com/spf13/pflag"
@@ -82,7 +80,6 @@ func (r Response) IsUserError() bool {
 // Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
 // The args are usually filled with os.Args[1:].
 func Execute(args []string) Response {
 hugoCmd := newCommandsBuilder().addAll().build()
 cmd := hugoCmd.getCommand()
 cmd.SetArgs(args)
@@ -120,14 +117,12 @@ func initializeConfig(mustHaveConfigFile, running bool,
 h *hugoBuilderCommon,
 f flagsToConfigHandler,
 cfgInit func(c *commandeer) error) (*commandeer, error) {
 c, err := newCommandeer(mustHaveConfigFile, running, h, f, cfgInit)
 if err != nil {
 return nil, err
 }
 return c, nil
 }
 func (c *commandeer) createLogger(cfg config.Provider, running bool) (loggers.Logger, error) {
@@ -246,7 +241,6 @@ func initializeFlags(cmd *cobra.Command, cfg config.Provider) {
 setValueFromFlag(cmd.Flags(), "destination", cfg, "publishDir", false)
 setValueFromFlag(cmd.Flags(), "i18n-warnings", cfg, "logI18nWarnings", false)
 setValueFromFlag(cmd.Flags(), "path-warnings", cfg, "logPathWarnings", false)
 }
 func setValueFromFlag(flags *flag.FlagSet, key string, cfg config.Provider, targetKey string, force bool) {
@@ -282,7 +276,6 @@ func isTerminal() bool {
 }
 func (c *commandeer) fullBuild() error {
 var (
 g errgroup.Group
 langCount map[string]uint64
@@ -298,7 +291,6 @@ func (c *commandeer) fullBuild() error {
 }
 copyStaticFunc := func() error {
 cnt, err := c.copyStatic()
 if err != nil {
 return errors.Wrap(err, "Error copying static files")
@@ -346,7 +338,6 @@ func (c *commandeer) fullBuild() error {
 }
 return nil
 }
 func (c *commandeer) initCPUProfile() (func(), error) {
@@ -419,7 +410,6 @@ func (c *commandeer) initMutexProfile() (func(), error) {
 pprof.Lookup("mutex").WriteTo(f, 0)
 f.Close()
 }, nil
 }
 func (c *commandeer) initMemTicker() func() {
@@ -429,7 +419,6 @@ func (c *commandeer) initMemTicker() func() {
 var m runtime.MemStats
 runtime.ReadMemStats(&m)
 fmt.Printf("\n\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\n\n", formatByteCount(m.Alloc), formatByteCount(m.TotalAlloc), formatByteCount(m.Sys), m.NumGC)
 }
 go func() {
@@ -442,7 +431,6 @@ func (c *commandeer) initMemTicker() func() {
 printMem()
 return
 }
 }
 }()
@@ -452,7 +440,6 @@ func (c *commandeer) initMemTicker() func() {
 }
 func (c *commandeer) initProfiling() (func(), error) {
 stopCPUProf, err := c.initCPUProfile()
 if err != nil {
 return nil, err
@@ -538,7 +525,7 @@ func (c *commandeer) build() error {
 checkErr(c.Logger, err)
 defer watcher.Close()
-var sigs = make(chan os.Signal, 1)
+sigs := make(chan os.Signal, 1)
 signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
 <-sigs
@@ -584,7 +571,6 @@ func (c *commandeer) copyStatic() (map[string]uint64, error) {
 }
 func (c *commandeer) doWithPublishDirs(f func(sourceFs *filesystems.SourceFilesystem) (uint64, error)) (map[string]uint64, error) {
 langCount := make(map[string]uint64)
 staticFilesystems := c.hugo().BaseFs.SourceFilesystems.Static
@@ -712,7 +698,6 @@ func (c *commandeer) getDirList() ([]string, error) {
 }
 return nil
 }
 watchFiles := c.hugo().PathSpec.BaseFs.WatchDirs()
@@ -753,7 +738,6 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
 c.buildErr = nil
 visited := c.visitedURLs.PeekAllSet()
 if c.fastRenderMode {
 // Make sure we always render the home pages
 for _, l := range c.languages {
 langPath := c.hugo().PathSpec.GetLangSubDir(l.Lang)
@@ -763,7 +747,6 @@ func (c *commandeer) rebuildSites(events []fsnotify.Event) error {
 home := c.hugo().PathSpec.PrependBasePath("/"+langPath, false)
 visited[home] = true
 }
 }
 return c.hugo().Build(hugolib.BuildCfg{RecentlyVisited: visited, ErrRecovery: c.wasError}, events...)
 }
@@ -793,13 +776,11 @@ func (c *commandeer) fullRebuild(changeType string) {
 c.fullRebuildSem.Acquire(context.Background(), 1)
 go func() {
 defer c.fullRebuildSem.Release(1)
 c.printChangeDetected(changeType)
 defer func() {
 // Allow any file system events to arrive back.
 // This will block any rebuild on config changes for the
 // duration of the sleep.
@@ -848,7 +829,6 @@ func (c *commandeer) newWatcher(dirList ...string) (*watcher.Batcher, error) {
 }
 watcher, err := watcher.New(1 * time.Second)
 if err != nil {
 return nil, err
 }
@@ -909,7 +889,6 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
 staticSyncer *staticSyncer,
 evs []fsnotify.Event,
 configSet map[string]bool) {
 defer func() {
 c.wasError = false
 }()
@@ -950,7 +929,6 @@ func (c *commandeer) handleEvents(watcher *watcher.Batcher,
 time.Sleep(100 * time.Millisecond)
 }
 }
 }
 // Config file(s) changed. Need full rebuild.
@@ -1194,7 +1172,6 @@ func partitionDynamicEvents(sourceFs *filesystems.SourceFilesystems, events []fs
 }
 }
 return
 }
 func pickOneWriteOrCreatePath(events []fsnotify.Event) string {


@@ -44,5 +44,4 @@ contentDir = "thisdoesnotexist"
 _, err = cmd.ExecuteC()
 c.Assert(err, qt.IsNil)
 }


@@ -74,11 +74,9 @@ Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root
 cc.cmd.AddCommand(importJekyllCmd)
 return cc
 }
 func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
 if len(args) < 2 {
 return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
 }
@@ -255,13 +253,11 @@ func (i *importCmd) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]
 defer f.Close()
 b, err := ioutil.ReadAll(f)
 if err != nil {
 return nil
 }
 c, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
 if err != nil {
 return nil
 }
@@ -338,8 +334,10 @@ func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPos
 }
 } else {
 lowerEntryName := strings.ToLower(entry.Name())
-exceptSuffix := []string{".md", ".markdown", ".html", ".htm",
-".xml", ".textile", "rakefile", "gemfile", ".lock"}
+exceptSuffix := []string{
+".md", ".markdown", ".html", ".htm",
+".xml", ".textile", "rakefile", "gemfile", ".lock",
+}
 isExcept := false
 for _, suffix := range exceptSuffix {
 if strings.HasSuffix(lowerEntryName, suffix) {
@@ -602,8 +600,8 @@ func replaceImageTag(match string) string {
 }
 result.WriteString(">}}")
 return result.String()
 }
 func replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
 if len(part) > 0 {
 buffer.WriteString(partName + "=\"" + part + "\" ")


@@ -53,25 +53,41 @@ func TestConvertJekyllMetadata(t *testing.T) {
 draft bool
 expect string
 }{
-{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"date":"2015-10-01T00:00:00Z"}`},
+{
+map[interface{}]interface{}{},
+"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
+`{"date":"2015-10-01T00:00:00Z"}`,
+},
-{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
-`{"date":"2015-10-01T00:00:00Z","draft":true}`},
+{
+map[interface{}]interface{}{},
+"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
+`{"date":"2015-10-01T00:00:00Z","draft":true}`,
+},
-{map[interface{}]interface{}{"Permalink": "/permalink.html", "layout": "post"},
+{
+map[interface{}]interface{}{"Permalink": "/permalink.html", "layout": "post"},
 "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
+`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
+},
-{map[interface{}]interface{}{"permalink": "/permalink.html"},
+{
+map[interface{}]interface{}{"permalink": "/permalink.html"},
 "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
+`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`,
+},
-{map[interface{}]interface{}{"category": nil, "permalink": 123},
+{
+map[interface{}]interface{}{"category": nil, "permalink": 123},
 "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"date":"2015-10-01T00:00:00Z"}`},
+`{"date":"2015-10-01T00:00:00Z"}`,
+},
-{map[interface{}]interface{}{"Excerpt_Separator": "sep"},
+{
+map[interface{}]interface{}{"Excerpt_Separator": "sep"},
 "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`},
+`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`,
+},
-{map[interface{}]interface{}{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
+{
+map[interface{}]interface{}{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
 "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
-`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`},
+`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`,
+},
 }
 for _, data := range testDataList {
@@ -90,44 +106,68 @@ func TestConvertJekyllContent(t *testing.T) {
 content string
 expect string
 }{
-{map[interface{}]interface{}{},
-"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
+{
+map[interface{}]interface{}{},
+"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content",
+},
-{map[interface{}]interface{}{},
-"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
+{
+map[interface{}]interface{}{},
+"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content",
+},
-{map[interface{}]interface{}{"excerpt_separator": "<!--sep-->"},
+{
+map[interface{}]interface{}{"excerpt_separator": "<!--sep-->"},
 "Test content\n<!--sep-->\npart2 content",
-"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content"},
+"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content",
+},
 {map[interface{}]interface{}{}, "{% raw %}text{% endraw %}", "text"},
 {map[interface{}]interface{}{}, "{%raw%} text2 {%endraw %}", "text2"},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% highlight go %}\nvar s int\n{% endhighlight %}",
-"{{< highlight go >}}\nvar s int\n{{< / highlight >}}"},
+"{{< highlight go >}}\nvar s int\n{{< / highlight >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% highlight go linenos hl_lines=\"1 2\" %}\nvar s string\nvar i int\n{% endhighlight %}",
-"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}"},
+"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}",
+},
 // Octopress image tag
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img http://placekitten.com/890/280 %}",
-"{{< figure src=\"http://placekitten.com/890/280\" >}}"},
+"{{< figure src=\"http://placekitten.com/890/280\" >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img left http://placekitten.com/320/250 Place Kitten #2 %}",
-"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}"},
+"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img right http://placekitten.com/300/500 150 250 'Place Kitten #3' %}",
-"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}"},
+"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img right http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
-"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
+"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
-"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
+"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+},
-{map[interface{}]interface{}{},
+{
+map[interface{}]interface{}{},
 "{% img right /placekitten/300/500 'Place Kitten #4' 'An image of a very cute kitten' %}",
-"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
+"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}",
+},
-{map[interface{}]interface{}{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
+{
+map[interface{}]interface{}{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
 "somecontent",
-"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent"},
+"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent",
+},
 }
 for _, data := range testDataList {
 result, err := convertJekyllContent(data.metadata, data.content)


@@ -46,7 +46,6 @@ func (lc *listCmd) buildSites(config map[string]interface{}) (*hugolib.HugoSites
 }
 sites, err := hugolib.NewHugoSites(*c.DepsCfg)
 if err != nil {
 return nil, newSystemError("Error creating sites", err)
 }
@@ -77,7 +76,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
 Long: `List all of the drafts in your content directory.`,
 RunE: func(cmd *cobra.Command, args []string) error {
 sites, err := cc.buildSites(map[string]interface{}{"buildDrafts": true})
 if err != nil {
 return newSystemError("Error building sites", err)
 }
@@ -97,7 +95,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
 Long: `List all of the posts in your content directory which will be posted in the future.`,
 RunE: func(cmd *cobra.Command, args []string) error {
 sites, err := cc.buildSites(map[string]interface{}{"buildFuture": true})
 if err != nil {
 return newSystemError("Error building sites", err)
 }
@@ -126,7 +123,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
 Long: `List all of the posts in your content directory which has already expired.`,
 RunE: func(cmd *cobra.Command, args []string) error {
 sites, err := cc.buildSites(map[string]interface{}{"buildExpired": true})
 if err != nil {
 return newSystemError("Error building sites", err)
 }
@@ -159,7 +155,6 @@ List requires a subcommand, e.g. ` + "`hugo list drafts`.",
 "buildDrafts": true,
 "buildFuture": true,
 })
 if err != nil {
 return newSystemError("Error building sites", err)
 }


@@ -91,7 +91,6 @@ Also note that if you configure a positive maxAge for the "modules" file cache,
 }
 func (b *commandsBuilder) newModCmd() *modCmd {
 c := &modCmd{}
 const commonUsage = `
@@ -264,7 +263,6 @@ If a module is vendored, that is where Hugo will look for it's dependencies.
 c.baseBuilderCmd = b.newBuilderCmd(cmd)
 return c
 }
 func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {


@@ -20,7 +20,6 @@ import (
 )
 func newModNPMCmd(c *modCmd) *cobra.Command {
 cmd := &cobra.Command{
 Use: "npm",
 Short: "Various npm helpers.",
@@ -47,7 +46,6 @@ removed from Hugo, but we need to test this out in "real life" to get a feel of
 so this may/will change in future versions of Hugo.
 `,
 RunE: func(cmd *cobra.Command, args []string) error {
 return c.withHugo(func(h *hugolib.HugoSites) error {
 return npm.Pack(h.BaseFs.SourceFs, h.BaseFs.Assets.Dirs)
 })


@@ -72,7 +72,6 @@ func (n *newCmd) newContent(cmd *cobra.Command, args []string) error {
 }
 c, err := initializeConfig(true, false, &n.hugoBuilderCommon, n, cfgInit)
 if err != nil {
 return err
 }


@@ -58,7 +58,6 @@ Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
 cc.baseBuilderCmd = b.newBuilderBasicCmd(cmd)
 return cc
 }
 func (n *newSiteCmd) doNewSite(fs *hugofs.Fs, basepath string, force bool) error {


@@ -53,7 +53,6 @@ as you see fit.`,
 // newTheme creates a new Hugo theme template
 func (n *newThemeCmd) newTheme(cmd *cobra.Command, args []string) error {
 c, err := initializeConfig(false, false, &n.hugoBuilderCommon, n, nil)
 if err != nil {
 return err
 }
@@ -145,7 +144,6 @@ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 }
 func (n *newThemeCmd) createThemeMD(fs *hugofs.Fs, inpath string) (err error) {
 by := []byte(`# theme.toml template for a Hugo theme
 # See https://github.com/gohugoio/hugoThemes#themetoml for an example


@@ -17,6 +17,7 @@ package commands
 import (
 "errors"
 "github.com/spf13/cobra"
 )


@@ -61,7 +61,6 @@ func (c *releaseCommandeer) getCommand() *cobra.Command {
 }
 func (c *releaseCommandeer) flagsToConfig(cfg config.Provider) {
 }
 func (r *releaseCommandeer) release() error {


@@ -228,7 +228,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
 }
 return err
 }
 if err := memStats(); err != nil {
@@ -262,7 +261,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
 jww.FEEDBACK.Printf("Watching for changes in %s\n", group)
 }
 watcher, err := c.newWatcher(watchDirs...)
 if err != nil {
 return err
 }
@@ -272,7 +270,6 @@ func (sc *serverCmd) server(cmd *cobra.Command, args []string) error {
 }
 return c.serve(sc)
 }
 func getRootWatchDirsStr(baseDir string, watchDirs []string) string {
@@ -301,7 +298,6 @@ func (f *fileServer) rewriteRequest(r *http.Request, toPath string) *http.Reques
 r2.Header.Set("X-Rewrite-Original-URI", r.URL.RequestURI())
 return r2
 }
 func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, error) {
@@ -393,7 +389,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
 } else {
 doRedirect = false
 }
 }
 }
@@ -413,7 +408,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, string, string, erro
 }
 if f.c.fastRenderMode && f.c.buildErr == nil {
 if strings.HasSuffix(requestURI, "/") || strings.HasSuffix(requestURI, "html") || strings.HasSuffix(requestURI, "htm") {
 if !f.c.visitedURLs.Contains(requestURI) {
 // If not already on stack, re-render that single page.
@@ -453,8 +447,8 @@ var logErrorRe = regexp.MustCompile(`(?s)ERROR \d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{
 func removeErrorPrefixFromLog(content string) string {
 return logErrorRe.ReplaceAllLiteralString(content, "")
 }
 func (c *commandeer) serve(s *serverCmd) error {
 isMultiHost := c.hugo().IsMultihost()
 var (
@@ -496,7 +490,7 @@ func (c *commandeer) serve(s *serverCmd) error {
 livereload.Initialize()
 }
-var sigs = make(chan os.Signal, 1)
+sigs := make(chan os.Signal, 1)
 signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
 for i := range baseURLs {


@@ -73,7 +73,6 @@ func TestServer(t *testing.T) {
 // Stop the server.
 stop <- true
 }
 func TestFixURL(t *testing.T) {
@@ -127,7 +126,6 @@ ERROR 2018/10/07 13:11:12 Rebuild failed: logged 1 error(s)
 withoutError := removeErrorPrefixFromLog(content)
 c.Assert(strings.Contains(withoutError, "ERROR"), qt.Equals, false)
 }
 func isWindowsCI() bool {


@@ -128,5 +128,4 @@ func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
 _, err := c.doWithPublishDirs(syncFn)
 return err
 }


@@ -51,7 +51,6 @@ func Append(to interface{}, from ...interface{}) (interface{}, error) {
 } else if !fromt.AssignableTo(tot) {
 // Fall back to a []interface{} slice.
 return appendToInterfaceSliceFromValues(tov, fromv)
 }
 }
 }


@@ -36,28 +36,44 @@ func TestAppend(t *testing.T) {
 {nil, []interface{}{"a", "b"}, []string{"a", "b"}},
 {nil, []interface{}{nil}, []interface{}{nil}},
 {[]interface{}{}, []interface{}{[]string{"c", "d", "e"}}, []string{"c", "d", "e"}},
-{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
+{
+tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
 []interface{}{&tstSlicer{"c"}},
-tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}}},
+tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}},
+},
-{&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
+{
+&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
 []interface{}{&tstSlicer{"c"}},
-tstSlicers{&tstSlicer{"a"},
+tstSlicers{
+&tstSlicer{"a"},
 &tstSlicer{"b"},
-&tstSlicer{"c"}}},
+&tstSlicer{"c"},
+},
+},
-{testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
+{
+testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
 []interface{}{&tstSlicerIn1{"c"}},
-testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}}},
+testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}},
+},
 //https://github.com/gohugoio/hugo/issues/5361
-{[]string{"a", "b"}, []interface{}{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
-[]interface{}{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}}},
+{
+[]string{"a", "b"},
+[]interface{}{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
+[]interface{}{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}},
+},
-{[]string{"a", "b"}, []interface{}{&tstSlicer{"a"}},
-[]interface{}{"a", "b", &tstSlicer{"a"}}},
+{
+[]string{"a", "b"},
+[]interface{}{&tstSlicer{"a"}},
+[]interface{}{"a", "b", &tstSlicer{"a"}},
+},
 // Errors
 {"", []interface{}{[]string{"a", "b"}}, false},
 // No string concatenation.
-{"ab",
+{
+"ab",
 []interface{}{"c"},
-false},
+false,
+},
 } {
 result, err := Append(test.start, test.addend...)
@@ -71,5 +87,4 @@ func TestAppend(t *testing.T) {
 c.Assert(err, qt.IsNil)
 c.Assert(result, qt.DeepEquals, test.expected)
 }
 }


@@ -20,11 +20,13 @@ import (
 qt "github.com/frankban/quicktest"
 )
-var _ Slicer = (*tstSlicer)(nil)
-var _ Slicer = (*tstSlicerIn1)(nil)
-var _ Slicer = (*tstSlicerIn2)(nil)
-var _ testSlicerInterface = (*tstSlicerIn1)(nil)
-var _ testSlicerInterface = (*tstSlicerIn1)(nil)
+var (
+_ Slicer = (*tstSlicer)(nil)
+_ Slicer = (*tstSlicerIn1)(nil)
+_ Slicer = (*tstSlicerIn2)(nil)
+_ testSlicerInterface = (*tstSlicerIn1)(nil)
+_ testSlicerInterface = (*tstSlicerIn1)(nil)
+)
 type testSlicerInterface interface {
 Name() string
@@ -54,7 +56,6 @@ func (p *tstSlicerIn1) Slice(in interface{}) (interface{}, error) {
 default:
 return nil, errors.New("invalid type")
 }
 }
 return result, nil
 }
@@ -120,5 +121,4 @@ func TestSlice(t *testing.T) {
 c.Assert(test.expected, qt.DeepEquals, result, errMsg)
 }
 }


@@ -125,5 +125,4 @@ E`, offsetMatcher)
 c.Assert(location.Lines, qt.DeepEquals, []string{"A", "B", "C", "D"})
 c.Assert(location.Position().LineNumber, qt.Equals, 2)
 c.Assert(location.LinesPos, qt.Equals, 1)
 }


@@ -21,9 +21,7 @@ import (
 "github.com/pkg/errors"
 )
-var (
-_ causer = (*fileError)(nil)
-)
+var _ causer = (*fileError)(nil)
 // FileError represents an error when handling a file: Parsing a config file,
 // execute a template etc.


@@ -52,5 +52,4 @@ func TestToLineNumberError(t *testing.T) {
 c.Assert(pos.ColumnNumber, qt.Equals, test.columnNumber, errMsg)
 c.Assert(errors.Cause(got), qt.Not(qt.IsNil))
 }
 }


@@ -67,7 +67,6 @@ func IsTruthful(in interface{}) bool {
 default:
 return IsTruthfulValue(reflect.ValueOf(in))
 }
 }
 var zeroType = reflect.TypeOf((*types.Zeroer)(nil)).Elem()


@@ -35,5 +35,4 @@ func TestHugoInfo(t *testing.T) {
 devHugoInfo := NewInfo("development")
 c.Assert(devHugoInfo.IsProduction(), qt.Equals, false)
 }


@@ -16,7 +16,6 @@ package hugo
 import (
 "fmt"
 "io"
 "runtime"
 "strings"
@@ -146,7 +145,6 @@ func BuildVersionString() string {
 }
 return fmt.Sprintf("%s %s %s BuildDate: %s", program, version, osArch, date)
 }
 func version(version float32, patchVersion int, suffix string) string {


@@ -34,7 +34,6 @@ func NewIgnorableLogger(logger Logger, statements ...string) IgnorableLogger {
 statementsSet := make(map[string]bool)
 for _, s := range statements {
 statementsSet[strings.ToLower(s)] = true
 }
 return ignorableLogger{
 Logger: logger,


@@ -29,10 +29,8 @@ import (
 jww "github.com/spf13/jwalterweatherman"
 )
-var (
 // Counts ERROR logs to the global jww logger.
-GlobalErrorCounter *jww.Counter
+var GlobalErrorCounter *jww.Counter
-)
 func init() {
 GlobalErrorCounter = &jww.Counter{}
@@ -253,7 +251,6 @@ func (a labelColorizer) Write(p []byte) (n int, err error) {
 // bytes, so we lie a little.
 _, err = a.w.Write([]byte(replaced))
 return len(p), err
 }
 // InitGlobalLogger initializes the global logger, used in some rare cases.
@@ -264,7 +261,6 @@ func InitGlobalLogger(stdoutThreshold, logThreshold jww.Threshold, outHandle, lo
 jww.SetLogOutput(logHandle)
 jww.SetLogThreshold(logThreshold)
 jww.SetStdoutThreshold(stdoutThreshold)
 }
 func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {
@@ -279,7 +275,6 @@ func getLogWriters(outHandle, logHandle io.Writer) (io.Writer, io.Writer) {
 }
 return outHandle, logHandle
 }
 type fatalLogWriter int


@@ -31,7 +31,6 @@ func TestLogger(t *testing.T) {
 l.Warnln("A warning")
 c.Assert(l.LogCounters().ErrorCounter.Count(), qt.Equals, uint64(2))
 }
 func TestLoggerToWriterWithPrefix(t *testing.T) {


@@ -154,5 +154,4 @@ func TestRenameKeys(t *testing.T) {
 if !reflect.DeepEqual(expected, m) {
 t.Errorf("Expected\n%#v, got\n%#v\n", expected, m)
 }
 }


@@ -80,7 +80,6 @@ func GetNestedParam(keyStr, separator string, candidates ...Params) (interface{}
 }
 return nil, nil
 }
 func GetNestedParamFn(keyStr, separator string, lookupFn func(key string) interface{}) (interface{}, string, map[string]interface{}, error) {


@@ -20,7 +20,6 @@ import (
 )
 func TestGetNestedParam(t *testing.T) {
 m := map[string]interface{}{
 "string": "value",
 "first": 1,
@@ -48,12 +47,10 @@ func TestGetNestedParam(t *testing.T) {
 c.Assert(must("nested.nestednested.color", ".", m), qt.Equals, "green")
 c.Assert(must("string.name", ".", m), qt.IsNil)
 c.Assert(must("nested.foo", ".", m), qt.IsNil)
 }
 // https://github.com/gohugoio/hugo/issues/7903
 func TestGetNestedParamFnNestedNewKey(t *testing.T) {
 c := qt.New(t)
 nested := map[string]interface{}{
@@ -71,5 +68,4 @@ func TestGetNestedParamFnNestedNewKey(t *testing.T) {
 c.Assert(existing, qt.IsNil)
 c.Assert(nestedKey, qt.Equals, "new")
 c.Assert(owner, qt.DeepEquals, nested)
 }


@@ -51,7 +51,6 @@ func NewScratcher() Scratcher {
 //
 // If the first add for a key is an array or slice, then the next value(s) will be appended.
 func (c *Scratch) Add(key string, newAddend interface{}) (string, error) {
 var newVal interface{}
 c.mu.RLock()
 existingAddend, found := c.values[key]

View file

@ -53,7 +53,6 @@ func TestScratchAdd(t *testing.T) {
if err == nil { if err == nil {
t.Errorf("Expected error from invalid arithmetic") t.Errorf("Expected error from invalid arithmetic")
} }
} }
func TestScratchAddSlice(t *testing.T) { func TestScratchAddSlice(t *testing.T) {
@ -96,7 +95,6 @@ func TestScratchAddTypedSliceToInterfaceSlice(t *testing.T) {
_, err := scratch.Add("slice", []int{1, 2}) _, err := scratch.Add("slice", []int{1, 2})
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(scratch.Get("slice"), qt.DeepEquals, []int{1, 2}) c.Assert(scratch.Get("slice"), qt.DeepEquals, []int{1, 2})
} }
// https://github.com/gohugoio/hugo/issues/5361 // https://github.com/gohugoio/hugo/issues/5361
@ -110,7 +108,6 @@ func TestScratchAddDifferentTypedSliceToInterfaceSlice(t *testing.T) {
_, err := scratch.Add("slice", []int{1, 2}) _, err := scratch.Add("slice", []int{1, 2})
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(scratch.Get("slice"), qt.DeepEquals, []interface{}{"foo", 1, 2}) c.Assert(scratch.Get("slice"), qt.DeepEquals, []interface{}{"foo", 1, 2})
} }
func TestScratchSet(t *testing.T) { func TestScratchSet(t *testing.T) {

View file

@ -16,7 +16,6 @@ package para
import ( import (
"context" "context"
"runtime" "runtime"
"sort" "sort"
"sync" "sync"
"sync/atomic" "sync/atomic"
@ -60,7 +59,6 @@ func TestPara(t *testing.T) {
c.Assert(sort.IntsAreSorted(result), qt.Equals, false, qt.Commentf("Para does not seem to be parallel")) c.Assert(sort.IntsAreSorted(result), qt.Equals, false, qt.Commentf("Para does not seem to be parallel"))
sort.Ints(result) sort.Ints(result)
c.Assert(result, qt.DeepEquals, ints) c.Assert(result, qt.DeepEquals, ints)
}) })
c.Run("Time", func(c *qt.C) { c.Run("Time", func(c *qt.C) {
@ -84,7 +82,5 @@ func TestPara(t *testing.T) {
c.Assert(r.Wait(), qt.IsNil) c.Assert(r.Wait(), qt.IsNil)
c.Assert(counter, qt.Equals, int64(n)) c.Assert(counter, qt.Equals, int64(n))
c.Assert(time.Since(start) < n/2*time.Millisecond, qt.Equals, true) c.Assert(time.Since(start) < n/2*time.Millisecond, qt.Equals, true)
}) })
} }

View file

@ -50,12 +50,11 @@ func (pos Position) IsValid() bool {
var positionStringFormatfunc func(p Position) string var positionStringFormatfunc func(p Position) string
func createPositionStringFormatter(formatStr string) func(p Position) string { func createPositionStringFormatter(formatStr string) func(p Position) string {
if formatStr == "" { if formatStr == "" {
formatStr = "\":file::line::col\"" formatStr = "\":file::line::col\""
} }
var identifiers = []string{":file", ":line", ":col"} identifiers := []string{":file", ":line", ":col"}
var identifiersFound []string var identifiersFound []string
for i := range formatStr { for i := range formatStr {
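
A minimal sketch of the local style this hunk converges on: no blank line directly after a function's opening brace, and a short assignment (identifiers := ...) instead of a local "var identifiers = ..." declaration. The package and names below are illustrative only and not part of this commit.

package main

import (
	"fmt"
	"strings"
)

// formatPosition renders a ":file::line::col"-style position string.
// Gofumpt-friendly layout: no blank line right after the opening brace,
// and a short assignment instead of "var identifiers = ...".
func formatPosition(file string, line, col int) string {
	identifiers := []string{file, fmt.Sprint(line), fmt.Sprint(col)}
	return strings.Join(identifiers, ":")
}

func main() {
	fmt.Println(formatPosition("/my/file.txt", 12, 13)) // "/my/file.txt:12:13"
}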

View file

@ -29,5 +29,4 @@ func TestPositionStringFormatter(t *testing.T) {
c.Assert(createPositionStringFormatter("好::col")(pos), qt.Equals, "好:13") c.Assert(createPositionStringFormatter("好::col")(pos), qt.Equals, "好:13")
c.Assert(createPositionStringFormatter("")(pos), qt.Equals, "\"/my/file.txt:12:13\"") c.Assert(createPositionStringFormatter("")(pos), qt.Equals, "\"/my/file.txt:12:13\"")
c.Assert(pos.String(), qt.Equals, "\"/my/file.txt:12:13\"") c.Assert(pos.String(), qt.Equals, "\"/my/file.txt:12:13\"")
} }

View file

@ -25,5 +25,4 @@ func TestRemoveAccents(t *testing.T) {
c.Assert(string(RemoveAccents([]byte("Resumé"))), qt.Equals, "Resume") c.Assert(string(RemoveAccents([]byte("Resumé"))), qt.Equals, "Resume")
c.Assert(string(RemoveAccents([]byte("Hugo Rocks!"))), qt.Equals, "Hugo Rocks!") c.Assert(string(RemoveAccents([]byte("Hugo Rocks!"))), qt.Equals, "Hugo Rocks!")
c.Assert(string(RemoveAccentsString("Resumé")), qt.Equals, "Resume") c.Assert(string(RemoveAccentsString("Resumé")), qt.Equals, "Resume")
} }

View file

@ -26,7 +26,6 @@ func TestToStringSlicePreserveString(t *testing.T) {
c.Assert(ToStringSlicePreserveString("Hugo"), qt.DeepEquals, []string{"Hugo"}) c.Assert(ToStringSlicePreserveString("Hugo"), qt.DeepEquals, []string{"Hugo"})
c.Assert(ToStringSlicePreserveString([]interface{}{"A", "B"}), qt.DeepEquals, []string{"A", "B"}) c.Assert(ToStringSlicePreserveString([]interface{}{"A", "B"}), qt.DeepEquals, []string{"A", "B"})
c.Assert(ToStringSlicePreserveString(nil), qt.IsNil) c.Assert(ToStringSlicePreserveString(nil), qt.IsNil)
} }
func TestToString(t *testing.T) { func TestToString(t *testing.T) {
@ -34,5 +33,4 @@ func TestToString(t *testing.T) {
c.Assert(ToString([]byte("Hugo")), qt.Equals, "Hugo") c.Assert(ToString([]byte("Hugo")), qt.Equals, "Hugo")
c.Assert(ToString(json.RawMessage("Hugo")), qt.Equals, "Hugo") c.Assert(ToString(json.RawMessage("Hugo")), qt.Equals, "Hugo")
} }

View file

@ -61,5 +61,4 @@ func TestLexicographicSort(t *testing.T) {
}) })
c.Assert(s, qt.DeepEquals, []string{"A", "b", "Ba", "ba", "ba", "Bz"}) c.Assert(s, qt.DeepEquals, []string{"A", "b", "Ba", "ba", "ba", "Bz"})
} }

View file

@ -14,12 +14,12 @@
package config package config
import ( import (
"github.com/pkg/errors"
"sort" "sort"
"strings" "strings"
"sync" "sync"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
"github.com/gobwas/glob" "github.com/gobwas/glob"
@ -88,7 +88,6 @@ type Sitemap struct {
} }
func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap { func DecodeSitemap(prototype Sitemap, input map[string]interface{}) Sitemap {
for key, value := range input { for key, value := range input {
switch key { switch key {
case "changefreq": case "changefreq":
@ -116,7 +115,6 @@ type Server struct {
} }
func (s *Server) init() { func (s *Server) init() {
s.compiledInit.Do(func() { s.compiledInit.Do(func() {
for _, h := range s.Headers { for _, h := range s.Headers {
s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For)) s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))
@ -150,7 +148,6 @@ func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
}) })
return matches return matches
} }
func (s *Server) MatchRedirect(pattern string) Redirect { func (s *Server) MatchRedirect(pattern string) Redirect {
@ -176,7 +173,6 @@ func (s *Server) MatchRedirect(pattern string) Redirect {
} }
return Redirect{} return Redirect{}
} }
type Headers struct { type Headers struct {
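
The import reshuffle above, which moves github.com/pkg/errors out of the standard-library block, matches gofumpt's rule that standard-library imports form their own group at the top, separated from third-party imports by a blank line. A small hypothetical package laid out that way (not taken from this commit):

// Package confsketch illustrates gofumpt-style import grouping:
// standard-library imports first, a blank line, then third-party imports.
package confsketch

import (
	"sort"
	"strings"

	"github.com/pkg/errors"
)

// SortedNonEmpty trims and sorts keys, returning an error for empty input.
func SortedNonEmpty(keys []string) ([]string, error) {
	if len(keys) == 0 {
		return nil, errors.New("no keys given")
	}
	out := make([]string, 0, len(keys))
	for _, k := range keys {
		out = append(out, strings.TrimSpace(k))
	}
	sort.Strings(out)
	return out, nil
}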

View file

@ -57,7 +57,6 @@ func TestBuild(t *testing.T) {
c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, false) c.Assert(b.UseResourceCache(herrors.ErrFeatureNotAvailable), qt.Equals, false)
c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, false) c.Assert(b.UseResourceCache(errors.New("err")), qt.Equals, false)
c.Assert(b.UseResourceCache(nil), qt.Equals, false) c.Assert(b.UseResourceCache(nil), qt.Equals, false)
} }
func TestServer(t *testing.T) { func TestServer(t *testing.T) {
@ -98,7 +97,8 @@ status = 301
c.Assert(s.MatchHeaders("/foo.jpg"), qt.DeepEquals, []types.KeyValueStr{ c.Assert(s.MatchHeaders("/foo.jpg"), qt.DeepEquals, []types.KeyValueStr{
{Key: "X-Content-Type-Options", Value: "nosniff"}, {Key: "X-Content-Type-Options", Value: "nosniff"},
{Key: "X-Frame-Options", Value: "DENY"}, {Key: "X-Frame-Options", Value: "DENY"},
{Key: "X-XSS-Protection", Value: "1; mode=block"}}) {Key: "X-XSS-Protection", Value: "1; mode=block"},
})
c.Assert(s.MatchRedirect("/foo/bar/baz"), qt.DeepEquals, Redirect{ c.Assert(s.MatchRedirect("/foo/bar/baz"), qt.DeepEquals, Redirect{
From: "/foo/**", From: "/foo/**",
@ -122,7 +122,8 @@ status = 301
c.Assert(s.MatchRedirect("/default/index.html"), qt.DeepEquals, Redirect{}) c.Assert(s.MatchRedirect("/default/index.html"), qt.DeepEquals, Redirect{})
c.Assert(s.MatchRedirect("/default/"), qt.DeepEquals, Redirect{}) c.Assert(s.MatchRedirect("/default/"), qt.DeepEquals, Redirect{})
for _, errorCase := range []string{`[[server.redirects]] for _, errorCase := range []string{
`[[server.redirects]]
from = "/**" from = "/**"
to = "/file" to = "/file"
status = 301`, status = 301`,
@ -138,5 +139,4 @@ status = 301`,
c.Assert(err, qt.Not(qt.IsNil)) c.Assert(err, qt.Not(qt.IsNil))
} }
} }
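
The test hunks above reflow multiline composite literals so that each element sits on its own line with a trailing comma and the closing braces get their own lines. A tiny hypothetical example of that layout (the KeyValueStr type here is a stand-in, not Hugo's):

package main

import "fmt"

// KeyValueStr is a stand-in for the key/value pair type used in the test above.
type KeyValueStr struct {
	Key, Value string
}

func main() {
	// Formatted style: one element per line, a trailing comma on the last
	// element, and the closing braces on their own lines.
	headers := []KeyValueStr{
		{Key: "X-Content-Type-Options", Value: "nosniff"},
		{Key: "X-Frame-Options", Value: "DENY"},
		{Key: "X-XSS-Protection", Value: "1; mode=block"},
	}
	for _, h := range headers {
		fmt.Printf("%s: %s\n", h.Key, h.Value)
	}
}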

View file

@ -86,7 +86,6 @@ func readConfig(format metadecoders.Format, data []byte) (map[string]interface{}
RenameKeys(m) RenameKeys(m)
return m, nil return m, nil
} }
func loadConfigFromFile(fs afero.Fs, filename string) (map[string]interface{}, error) { func loadConfigFromFile(fs afero.Fs, filename string) (map[string]interface{}, error) {

View file

@ -69,7 +69,6 @@ simple = true
} }
c.Assert(got, qt.All(qt.Equals), true) c.Assert(got, qt.All(qt.Equals), true)
} }
func TestDecodeConfigFromTOMLCaseInsensitive(t *testing.T) { func TestDecodeConfigFromTOMLCaseInsensitive(t *testing.T) {

View file

@ -65,5 +65,4 @@ func TestUseSettingsFromRootIfSet(t *testing.T) {
c.Assert(config.Disqus.Shortname, qt.Equals, "root_short") c.Assert(config.Disqus.Shortname, qt.Equals, "root_short")
c.Assert(config.GoogleAnalytics.ID, qt.Equals, "ga_root") c.Assert(config.GoogleAnalytics.ID, qt.Equals, "ga_root")
} }

View file

@ -16,15 +16,14 @@ package create
import ( import (
"bytes" "bytes"
"github.com/pkg/errors"
"io" "io"
"os" "os"
"os/exec" "os/exec"
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/pkg/errors"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
@ -131,7 +130,6 @@ func newContentFromDir(
sites *hugolib.HugoSites, sites *hugolib.HugoSites,
targetFs afero.Fs, targetFs afero.Fs,
cm archetypeMap, name, targetPath string) error { cm archetypeMap, name, targetPath string) error {
for _, f := range cm.otherFiles { for _, f := range cm.otherFiles {
meta := f.Meta() meta := f.Meta()
filename := meta.Path() filename := meta.Path()
@ -196,11 +194,9 @@ func mapArcheTypeDir(
ps *helpers.PathSpec, ps *helpers.PathSpec,
fs afero.Fs, fs afero.Fs,
archetypeDir string) (archetypeMap, error) { archetypeDir string) (archetypeMap, error) {
var m archetypeMap var m archetypeMap
walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error { walkFn := func(path string, fi hugofs.FileMetaInfo, err error) error {
if err != nil { if err != nil {
return err return err
} }
@ -308,7 +304,6 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
} }
if siteContentDir == "" { if siteContentDir == "" {
} }
if siteContentDir != "" { if siteContentDir != "" {
@ -324,7 +319,6 @@ func resolveContentPath(sites *hugolib.HugoSites, fs afero.Fs, targetPath string
} }
return s.PathSpec.AbsPathify(filepath.Join(contentDir, targetPath)), s return s.PathSpec.AbsPathify(filepath.Join(contentDir, targetPath)), s
} }
} }
// FindArchetype takes a given kind/archetype of content and returns the path // FindArchetype takes a given kind/archetype of content and returns the path

View file

@ -83,7 +83,6 @@ var (
) )
func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archetypeFilename string) ([]byte, error) { func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archetypeFilename string) ([]byte, error) {
var ( var (
archetypeContent []byte archetypeContent []byte
archetypeTemplate []byte archetypeTemplate []byte
@ -145,5 +144,4 @@ func executeArcheTypeAsTemplate(s *hugolib.Site, name, kind, targetPath, archety
archetypeContent = []byte(archetypeShortcodeReplacementsPost.Replace(buff.String())) archetypeContent = []byte(archetypeShortcodeReplacementsPost.Replace(buff.String()))
return archetypeContent, nil return archetypeContent, nil
} }

View file

@ -14,6 +14,7 @@
package create_test package create_test
import ( import (
"fmt"
"os" "os"
"path/filepath" "path/filepath"
"strings" "strings"
@ -23,8 +24,6 @@ import (
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
"fmt"
"github.com/gohugoio/hugo/hugofs" "github.com/gohugoio/hugo/hugofs"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
@ -35,7 +34,6 @@ import (
) )
func TestNewContent(t *testing.T) { func TestNewContent(t *testing.T) {
cases := []struct { cases := []struct {
kind string kind string
path string path string
@ -59,7 +57,8 @@ func TestNewContent(t *testing.T) {
`title = "GO"`, `title = "GO"`,
"{{< myshortcode >}}", "{{< myshortcode >}}",
"{{% myshortcode %}}", "{{% myshortcode %}}",
"{{</* comment */>}}\n{{%/* comment */%}}"}}, // shortcodes "{{</* comment */>}}\n{{%/* comment */%}}",
}}, // shortcodes
} }
for i, cas := range cases { for i, cas := range cases {
@ -140,7 +139,6 @@ i18n: {{ T "hugo" }}
c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post"), qt.IsNil) c.Assert(create.NewContent(h, "my-theme-bundle", "post/my-theme-post"), qt.IsNil)
cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`) cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/index.md")), `File: index.md`, `Site Lang: en`, `Name: My Theme Post`, `i18n: Hugo Rocks!`)
cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`) cContains(c, readFileFromFs(t, fs.Source, filepath.Join("content", "post/my-theme-post/resources/hugo1.json")), `hugo1: {{ printf "no template handling in here" }}`)
} }
func initFs(fs afero.Fs) error { func initFs(fs afero.Fs) error {
@ -248,7 +246,6 @@ func readFileFromFs(t *testing.T, fs afero.Fs, filename string) string {
} }
func newTestCfg(c *qt.C, mm afero.Fs) (*viper.Viper, *hugofs.Fs) { func newTestCfg(c *qt.C, mm afero.Fs) (*viper.Viper, *hugofs.Fs) {
cfg := ` cfg := `
theme = "mytheme" theme = "mytheme"
@ -281,5 +278,4 @@ other = "Hugo Rokkar!"`), 0755), qt.IsNil)
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
return v, hugofs.NewFrom(mm, v) return v, hugofs.NewFrom(mm, v)
} }

View file

@ -466,7 +466,7 @@ func (lf *localFile) MD5() []byte {
// knownHiddenDirectory checks if the specified name is a well known // knownHiddenDirectory checks if the specified name is a well known
// hidden directory. // hidden directory.
func knownHiddenDirectory(name string) bool { func knownHiddenDirectory(name string) bool {
var knownDirectories = []string{ knownDirectories := []string{
".well-known", ".well-known",
} }
@ -697,7 +697,6 @@ func findDiffs(localFiles map[string]*localFile, remoteFiles map[string]*blob.Li
// //
// The subslices are sorted by Local.SlashPath. // The subslices are sorted by Local.SlashPath.
func applyOrdering(ordering []*regexp.Regexp, uploads []*fileToUpload) [][]*fileToUpload { func applyOrdering(ordering []*regexp.Regexp, uploads []*fileToUpload) [][]*fileToUpload {
// Sort the whole slice by Local.SlashPath first. // Sort the whole slice by Local.SlashPath first.
sort.Slice(uploads, func(i, j int) bool { return uploads[i].Local.SlashPath < uploads[j].Local.SlashPath }) sort.Slice(uploads, func(i, j int) bool { return uploads[i].Local.SlashPath < uploads[j].Local.SlashPath })

View file

@ -112,7 +112,6 @@ func (m *matcher) Matches(path string) bool {
// decode creates a config from a given Hugo configuration. // decode creates a config from a given Hugo configuration.
func decodeConfig(cfg config.Provider) (deployConfig, error) { func decodeConfig(cfg config.Provider) (deployConfig, error) {
var ( var (
mediaTypesConfig []map[string]interface{} mediaTypesConfig []map[string]interface{}
dcfg deployConfig dcfg deployConfig

View file

@ -211,7 +211,6 @@ func TestFindDiffs(t *testing.T) {
} }
func TestWalkLocal(t *testing.T) { func TestWalkLocal(t *testing.T) {
tests := map[string]struct { tests := map[string]struct {
Given []string Given []string
Expect []string Expect []string
@ -355,7 +354,10 @@ func TestLocalFile(t *testing.T) {
MediaTypesConfig: []map[string]interface{}{ MediaTypesConfig: []map[string]interface{}{
{ {
"hugo/custom": map[string]interface{}{ "hugo/custom": map[string]interface{}{
"suffixes": []string{"hugo"}}}}, "suffixes": []string{"hugo"},
},
},
},
WantContent: contentBytes, WantContent: contentBytes,
WantSize: contentLen, WantSize: contentLen,
WantMD5: contentMD5[:], WantMD5: contentMD5[:],

deps/deps.go vendored
View file

@ -231,7 +231,6 @@ func New(cfg DepsCfg) (*Deps, error) {
} }
ps, err := helpers.NewPathSpec(fs, cfg.Language, logger) ps, err := helpers.NewPathSpec(fs, cfg.Language, logger)
if err != nil { if err != nil {
return nil, errors.Wrap(err, "create PathSpec") return nil, errors.Wrap(err, "create PathSpec")
} }
@ -347,7 +346,6 @@ func (d Deps) ForLanguage(cfg DepsCfg, onCreated func(d *Deps) error) (*Deps, er
d.BuildStartListeners = &Listeners{} d.BuildStartListeners = &Listeners{}
return &d, nil return &d, nil
} }
// DepsCfg contains configuration options that can be used to configure Hugo // DepsCfg contains configuration options that can be used to configure Hugo
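
The deps.go hunk above drops the blank line between a call and its error check, in line with gofumpt's preference that a simple error check follow the call directly. A hypothetical sketch of that shape:

package main

import (
	"fmt"
	"os"
)

// configSize stats a file and reports its size. The error check follows the
// call directly, with no blank line in between, as gofumpt prefers.
func configSize(name string) (int64, error) {
	fi, err := os.Stat(name)
	if err != nil {
		return 0, fmt.Errorf("stat %s: %w", name, err)
	}
	return fi.Size(), nil
}

func main() {
	size, err := configSize("go.mod")
	if err != nil {
		fmt.Println("no config found:", err)
		return
	}
	fmt.Println("config size:", size)
}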

deps/deps_test.go vendored
View file

@ -20,7 +20,6 @@ import (
) )
func TestBuildFlags(t *testing.T) { func TestBuildFlags(t *testing.T) {
c := qt.New(t) c := qt.New(t)
var bf BuildState var bf BuildState
bf.Incr() bf.Incr()
@ -28,5 +27,4 @@ func TestBuildFlags(t *testing.T) {
bf.Incr() bf.Incr()
c.Assert(bf.Incr(), qt.Equals, 4) c.Assert(bf.Incr(), qt.Equals, 4)
} }

View file

@ -20,6 +20,7 @@ package helpers
import ( import (
"bytes" "bytes"
"html/template" "html/template"
"strings"
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
@ -33,8 +34,6 @@ import (
bp "github.com/gohugoio/hugo/bufferpool" bp "github.com/gohugoio/hugo/bufferpool"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"strings"
) )
// SummaryDivider denotes where content summarization should end. The default is "<!--more-->". // SummaryDivider denotes where content summarization should end. The default is "<!--more-->".
@ -66,7 +65,6 @@ type ContentSpec struct {
// NewContentSpec returns a ContentSpec initialized // NewContentSpec returns a ContentSpec initialized
// with the appropriate fields from the given config.Provider. // with the appropriate fields from the given config.Provider.
func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.Fs) (*ContentSpec, error) { func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.Fs) (*ContentSpec, error) {
spec := &ContentSpec{ spec := &ContentSpec{
summaryLength: cfg.GetInt("summaryLength"), summaryLength: cfg.GetInt("summaryLength"),
BuildFuture: cfg.GetBool("buildFuture"), BuildFuture: cfg.GetBool("buildFuture"),
@ -81,7 +79,6 @@ func NewContentSpec(cfg config.Provider, logger loggers.Logger, contentFs afero.
ContentFs: contentFs, ContentFs: contentFs,
Logger: logger, Logger: logger,
}) })
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -112,7 +109,6 @@ var stripHTMLReplacer = strings.NewReplacer("\n", " ", "</p>", "\n", "<br>", "\n
// StripHTML accepts a string, strips out all HTML tags and returns it. // StripHTML accepts a string, strips out all HTML tags and returns it.
func StripHTML(s string) string { func StripHTML(s string) string {
// Shortcut strings with no tags in them // Shortcut strings with no tags in them
if !strings.ContainsAny(s, "<>") { if !strings.ContainsAny(s, "<>") {
return s return s

View file

@ -118,7 +118,6 @@ func TestNewContentSpec(t *testing.T) {
c.Assert(spec.BuildFuture, qt.Equals, true) c.Assert(spec.BuildFuture, qt.Equals, true)
c.Assert(spec.BuildExpired, qt.Equals, true) c.Assert(spec.BuildExpired, qt.Equals, true)
c.Assert(spec.BuildDrafts, qt.Equals, true) c.Assert(spec.BuildDrafts, qt.Equals, true)
} }
var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20) var benchmarkTruncateString = strings.Repeat("This is a sentence about nothing.", 20)
@ -255,7 +254,6 @@ func TestExtractNoTOC(t *testing.T) {
var totalWordsBenchmarkString = strings.Repeat("Hugo Rocks ", 200) var totalWordsBenchmarkString = strings.Repeat("Hugo Rocks ", 200)
func TestTotalWords(t *testing.T) { func TestTotalWords(t *testing.T) {
for i, this := range []struct { for i, this := range []struct {
s string s string
words int words int

View file

@ -11,9 +11,7 @@ import (
// This is is just some helpers used to create some JSON used in the Hugo docs. // This is is just some helpers used to create some JSON used in the Hugo docs.
func init() { func init() {
docsProvider := func() docshelper.DocProvider { docsProvider := func() docshelper.DocProvider {
var chromaLexers []interface{} var chromaLexers []interface{}
sort.Sort(lexers.Registry.Lexers) sort.Sort(lexers.Registry.Lexers)
@ -50,7 +48,6 @@ func init() {
} }
return docshelper.DocProvider{"chroma": map[string]interface{}{"lexers": chromaLexers}} return docshelper.DocProvider{"chroma": map[string]interface{}{"lexers": chromaLexers}}
} }
docshelper.AddDocProviderFunc(docsProvider) docshelper.AddDocProviderFunc(docsProvider)

View file

@ -93,5 +93,4 @@ func initEmoji() {
emojiMaxSize = len(k) emojiMaxSize = len(k)
} }
} }
} }

View file

@ -74,7 +74,6 @@ func TestEmojiCustom(t *testing.T) {
// Hugo have a byte slice, wants a byte slice and doesn't mind if the original is modified. // Hugo have a byte slice, wants a byte slice and doesn't mind if the original is modified.
func BenchmarkEmojiKyokomiFprint(b *testing.B) { func BenchmarkEmojiKyokomiFprint(b *testing.B) {
f := func(in []byte) []byte { f := func(in []byte) []byte {
buff := bufferpool.GetBuffer() buff := bufferpool.GetBuffer()
defer bufferpool.PutBuffer(buff) defer bufferpool.PutBuffer(buff)
@ -89,7 +88,6 @@ func BenchmarkEmojiKyokomiFprint(b *testing.B) {
} }
func BenchmarkEmojiKyokomiSprint(b *testing.B) { func BenchmarkEmojiKyokomiSprint(b *testing.B) {
f := func(in []byte) []byte { f := func(in []byte) []byte {
return []byte(emoji.Sprint(string(in))) return []byte(emoji.Sprint(string(in)))
} }
@ -102,7 +100,6 @@ func BenchmarkHugoEmoji(b *testing.B) {
} }
func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) { func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
type input struct { type input struct {
in []byte in []byte
expect []byte expect []byte
@ -119,8 +116,8 @@ func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
{"No smiles for you or " + strings.Repeat("you ", 1000), "No smiles for you or " + strings.Repeat("you ", 1000)}, {"No smiles for you or " + strings.Repeat("you ", 1000), "No smiles for you or " + strings.Repeat("you ", 1000)},
} }
var in = make([]input, b.N*len(data)) in := make([]input, b.N*len(data))
var cnt = 0 cnt := 0
for i := 0; i < b.N; i++ { for i := 0; i < b.N; i++ {
for _, this := range data { for _, this := range data {
in[cnt] = input{[]byte(this.input), []byte(this.expect)} in[cnt] = input{[]byte(this.input), []byte(this.expect)}
@ -142,6 +139,5 @@ func doBenchmarkEmoji(b *testing.B, f func(in []byte) []byte) {
b.Fatalf("[%d] emoji std, got \n%q but expected \n%q", j, result, currIn.expect) b.Fatalf("[%d] emoji std, got \n%q but expected \n%q", j, result, currIn.expect)
} }
} }
} }
} }

View file

@ -157,7 +157,6 @@ func ReaderToString(lines io.Reader) string {
// ReaderContains reports whether subslice is within r. // ReaderContains reports whether subslice is within r.
func ReaderContains(r io.Reader, subslice []byte) bool { func ReaderContains(r io.Reader, subslice []byte) bool {
if r == nil || len(subslice) == 0 { if r == nil || len(subslice) == 0 {
return false return false
} }
@ -345,7 +344,6 @@ func InitLoggers() {
func Deprecated(item, alternative string, err bool) { func Deprecated(item, alternative string, err bool) {
if err { if err {
DistinctErrorLog.Printf("%s is deprecated and will be removed in Hugo %s. %s", item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative) DistinctErrorLog.Printf("%s is deprecated and will be removed in Hugo %s. %s", item, hugo.CurrentVersion.Next().ReleaseVersion(), alternative)
} else { } else {
DistinctWarnLog.Printf("%s is deprecated and will be removed in a future release. %s", item, alternative) DistinctWarnLog.Printf("%s is deprecated and will be removed in a future release. %s", item, alternative)
} }

View file

@ -219,7 +219,6 @@ func TestGetTitleFunc(t *testing.T) {
c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow") c.Assert(GetTitleFunc("ap")(title), qt.Equals, "Somewhere Over the Rainbow")
c.Assert(GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow") c.Assert(GetTitleFunc("")(title), qt.Equals, "Somewhere Over the Rainbow")
c.Assert(GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow") c.Assert(GetTitleFunc("unknown")(title), qt.Equals, "Somewhere Over the Rainbow")
} }
func BenchmarkReaderContains(b *testing.B) { func BenchmarkReaderContains(b *testing.B) {
@ -354,7 +353,6 @@ func BenchmarkMD5FromFileFast(b *testing.B) {
} }
}) })
} }
} }
func BenchmarkUniqueStrings(b *testing.B) { func BenchmarkUniqueStrings(b *testing.B) {
@ -406,7 +404,6 @@ func BenchmarkUniqueStrings(b *testing.B) {
} }
} }
}) })
} }
func TestHashString(t *testing.T) { func TestHashString(t *testing.T) {

View file

@ -36,10 +36,8 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
var (
// ErrThemeUndefined is returned when a theme has not be defined by the user. // ErrThemeUndefined is returned when a theme has not be defined by the user.
ErrThemeUndefined = errors.New("no theme set") var ErrThemeUndefined = errors.New("no theme set")
)
// filepathPathBridge is a bridge for common functionality in filepath vs path // filepathPathBridge is a bridge for common functionality in filepath vs path
type filepathPathBridge interface { type filepathPathBridge interface {
@ -169,7 +167,6 @@ func ReplaceExtension(path string, newExt string) string {
} }
func makePathRelative(inPath string, possibleDirectories ...string) (string, error) { func makePathRelative(inPath string, possibleDirectories ...string) (string, error) {
for _, currentPath := range possibleDirectories { for _, currentPath := range possibleDirectories {
if strings.HasPrefix(inPath, currentPath) { if strings.HasPrefix(inPath, currentPath) {
return strings.TrimPrefix(inPath, currentPath), nil return strings.TrimPrefix(inPath, currentPath), nil
@ -281,7 +278,6 @@ func fileAndExt(in string, b filepathPathBridge) (name string, ext string) {
} }
func extractFilename(in, ext, base, pathSeparator string) (name string) { func extractFilename(in, ext, base, pathSeparator string) (name string) {
// No file name cases. These are defined as: // No file name cases. These are defined as:
// 1. any "in" path that ends in a pathSeparator // 1. any "in" path that ends in a pathSeparator
// 2. any "base" consisting of just an pathSeparator // 2. any "base" consisting of just an pathSeparator
@ -299,7 +295,6 @@ func extractFilename(in, ext, base, pathSeparator string) (name string) {
name = base name = base
} }
return return
} }
// GetRelativePath returns the relative path of a given path. // GetRelativePath returns the relative path of a given path.
@ -474,21 +469,18 @@ func ExtractRootPaths(paths []string) []string {
r[i] = root r[i] = root
} }
return r return r
} }
// FindCWD returns the current working directory from where the Hugo // FindCWD returns the current working directory from where the Hugo
// executable is run. // executable is run.
func FindCWD() (string, error) { func FindCWD() (string, error) {
serverFile, err := filepath.Abs(os.Args[0]) serverFile, err := filepath.Abs(os.Args[0])
if err != nil { if err != nil {
return "", fmt.Errorf("can't get absolute path for executable: %v", err) return "", fmt.Errorf("can't get absolute path for executable: %v", err)
} }
path := filepath.Dir(serverFile) path := filepath.Dir(serverFile)
realFile, err := filepath.EvalSymlinks(serverFile) realFile, err := filepath.EvalSymlinks(serverFile)
if err != nil { if err != nil {
if _, err = os.Stat(serverFile + ".exe"); err == nil { if _, err = os.Stat(serverFile + ".exe"); err == nil {
realFile = filepath.Clean(serverFile + ".exe") realFile = filepath.Clean(serverFile + ".exe")
@ -516,7 +508,6 @@ func SymbolicWalk(fs afero.Fs, root string, walker hugofs.WalkFunc) error {
}) })
return w.Walk() return w.Walk()
} }
// LstatIfPossible can be used to call Lstat if possible, else Stat. // LstatIfPossible can be used to call Lstat if possible, else Stat.
@ -555,7 +546,6 @@ func OpenFilesForWriting(fs afero.Fs, filenames ...string) (io.WriteCloser, erro
} }
return hugio.NewMultiWriteCloser(writeClosers...), nil return hugio.NewMultiWriteCloser(writeClosers...), nil
} }
// OpenFileForWriting opens or creates the given file. If the target directory // OpenFileForWriting opens or creates the given file. If the target directory
@ -598,7 +588,6 @@ func GetCacheDir(fs afero.Fs, cfg config.Provider) (string, error) {
// Fall back to a cache in /tmp. // Fall back to a cache in /tmp.
return GetTempDir("hugo_cache", fs), nil return GetTempDir("hugo_cache", fs), nil
} }
func getCacheDir(cfg config.Provider) string { func getCacheDir(cfg config.Provider) string {
@ -614,7 +603,6 @@ func getCacheDir(cfg config.Provider) string {
// is this project: // is this project:
// https://github.com/philhawksworth/content-shards/blob/master/gulpfile.js // https://github.com/philhawksworth/content-shards/blob/master/gulpfile.js
return "/opt/build/cache/hugo_cache/" return "/opt/build/cache/hugo_cache/"
} }
// This will fall back to an hugo_cache folder in the tmp dir, which should work fine for most CI // This will fall back to an hugo_cache folder in the tmp dir, which should work fine for most CI
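
The rewrite above flattens a parenthesized var group holding a single declaration into a plain var statement, keeping its doc comment directly above it. A minimal hypothetical sketch of the resulting shape (names are not from this commit):

// Package errsketch shows a one-element var ( ... ) group flattened into a
// plain declaration, with its doc comment kept directly above it.
package errsketch

import "errors"

// ErrNotConfigured is returned when a required setting has not been provided.
// Before the formatting pass this declaration sat alone inside var ( ... ).
var ErrNotConfigured = errors.New("setting not configured")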

View file

@ -184,7 +184,6 @@ func TestGetDottedRelativePath(t *testing.T) {
for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} { for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
doTestGetDottedRelativePath(f, t) doTestGetDottedRelativePath(f, t)
} }
} }
func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) { func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
@ -422,7 +421,6 @@ func createTempDirWithZeroLengthFiles() (string, error) {
} }
// the dir now has one, zero length file in it // the dir now has one, zero length file in it
return d, nil return d, nil
} }
func createTempDirWithNonZeroLengthFiles() (string, error) { func createTempDirWithNonZeroLengthFiles() (string, error) {
@ -451,7 +449,6 @@ func createTempDirWithNonZeroLengthFiles() (string, error) {
// the dir now has one, zero length file in it // the dir now has one, zero length file in it
return d, nil return d, nil
} }
func deleteTempDir(d string) { func deleteTempDir(d string) {
@ -490,7 +487,6 @@ func TestExists(t *testing.T) {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expectedErr, err) t.Errorf("Test %d failed. Expected %q got %q", i, d.expectedErr, err)
} }
} }
} }
func TestAbsPathify(t *testing.T) { func TestAbsPathify(t *testing.T) {
@ -544,7 +540,6 @@ func TestAbsPathify(t *testing.T) {
} }
} }
} }
} }
func TestExtNoDelimiter(t *testing.T) { func TestExtNoDelimiter(t *testing.T) {
@ -611,15 +606,12 @@ func TestFileAndExt(t *testing.T) {
t.Errorf("Test %d failed. Expected extension %q got %q.", i, d.expectedExt, ext) t.Errorf("Test %d failed. Expected extension %q got %q.", i, d.expectedExt, ext)
} }
} }
} }
func TestPathPrep(t *testing.T) { func TestPathPrep(t *testing.T) {
} }
func TestPrettifyPath(t *testing.T) { func TestPrettifyPath(t *testing.T) {
} }
func TestExtractAndGroupRootPaths(t *testing.T) { func TestExtractAndGroupRootPaths(t *testing.T) {
@ -642,16 +634,19 @@ func TestExtractAndGroupRootPaths(t *testing.T) {
// Make sure the original is preserved // Make sure the original is preserved
c.Assert(in, qt.DeepEquals, inCopy) c.Assert(in, qt.DeepEquals, inCopy)
} }
func TestExtractRootPaths(t *testing.T) { func TestExtractRootPaths(t *testing.T) {
tests := []struct { tests := []struct {
input []string input []string
expected []string expected []string
}{{[]string{filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b", }{{
filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//")}, []string{
[]string{"a", "a", "b", "c", "d", "e"}}} filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b",
filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//"),
},
[]string{"a", "a", "b", "c", "d", "e"},
}}
for _, test := range tests { for _, test := range tests {
output := ExtractRootPaths(test.input) output := ExtractRootPaths(test.input)

View file

@ -45,7 +45,6 @@ func NewPathSpec(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger) (*Pa
// NewPathSpecWithBaseBaseFsProvided creats a new PathSpec from the given filesystems and language. // NewPathSpecWithBaseBaseFsProvided creats a new PathSpec from the given filesystems and language.
// If an existing BaseFs is provided, parts of that is reused. // If an existing BaseFs is provided, parts of that is reused.
func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) { func NewPathSpecWithBaseBaseFsProvided(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger, baseBaseFs *filesystems.BaseFs) (*PathSpec, error) {
p, err := paths.New(fs, cfg) p, err := paths.New(fs, cfg)
if err != nil { if err != nil {
return nil, err return nil, err
@ -85,5 +84,4 @@ func (p *PathSpec) PermalinkForBaseURL(link, baseURL string) string {
baseURL += "/" baseURL += "/"
} }
return baseURL + link return baseURL + link
} }

View file

@ -56,5 +56,4 @@ func TestNewPathSpecFromConfig(t *testing.T) {
c.Assert(p.BaseURL.String(), qt.Equals, "http://base.com") c.Assert(p.BaseURL.String(), qt.Equals, "http://base.com")
c.Assert(p.ThemesDir, qt.Equals, "thethemes") c.Assert(p.ThemesDir, qt.Equals, "thethemes")
c.Assert(p.WorkingDir, qt.Equals, "thework") c.Assert(p.WorkingDir, qt.Equals, "thework")
} }

View file

@ -83,7 +83,6 @@ func (s *ProcessingStats) Table(w io.Writer) {
table.SetHeader([]string{"", s.Name}) table.SetHeader([]string{"", s.Name})
table.SetBorder(false) table.SetBorder(false)
table.Render() table.Render()
} }
// ProcessingStatsTable writes a table-formatted representation of stats to w. // ProcessingStatsTable writes a table-formatted representation of stats to w.
@ -108,7 +107,6 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
} else { } else {
data[j] = append(data[j], strconv.Itoa(int(tv.val))) data[j] = append(data[j], strconv.Itoa(int(tv.val)))
} }
} }
} }
@ -119,5 +117,4 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
table.SetHeader(names) table.SetHeader(names)
table.SetBorder(false) table.SetBorder(false)
table.Render() table.Render()
} }

View file

@ -32,7 +32,6 @@ func newTestCfgFor(fs *hugofs.Fs) *viper.Viper {
v.SetFs(fs.Source) v.SetFs(fs.Source)
return v return v
} }
func newTestCfg() *viper.Viper { func newTestCfg() *viper.Viper {

View file

@ -103,7 +103,6 @@ func SanitizeURLKeepTrailingSlash(in string) string {
// urlize: vim-text-editor // urlize: vim-text-editor
func (p *PathSpec) URLize(uri string) string { func (p *PathSpec) URLize(uri string) string {
return p.URLEscape(p.MakePathSanitized(uri)) return p.URLEscape(p.MakePathSanitized(uri))
} }
// URLizeFilename creates an URL from a filename by esacaping unicode letters // URLizeFilename creates an URL from a filename by esacaping unicode letters
@ -130,7 +129,6 @@ func (p *PathSpec) URLEscape(uri string) string {
// path: post/how-i-blog // path: post/how-i-blog
// result: http://spf13.com/post/how-i-blog // result: http://spf13.com/post/how-i-blog
func MakePermalink(host, plink string) *url.URL { func MakePermalink(host, plink string) *url.URL {
base, err := url.Parse(host) base, err := url.Parse(host)
if err != nil { if err != nil {
panic(err) panic(err)
@ -275,7 +273,6 @@ func (p *PathSpec) RelURL(in string, addLanguage bool) string {
// For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite), // For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set. // relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set.
func AddContextRoot(baseURL, relativePath string) string { func AddContextRoot(baseURL, relativePath string) string {
url, err := url.Parse(baseURL) url, err := url.Parse(baseURL)
if err != nil { if err != nil {
panic(err) panic(err)

View file

@ -23,7 +23,6 @@ import (
) )
func TestURLize(t *testing.T) { func TestURLize(t *testing.T) {
v := newTestCfg() v := newTestCfg()
l := langs.NewDefaultLanguage(v) l := langs.NewDefaultLanguage(v)
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil) p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
@ -113,7 +112,6 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
} else { } else {
expected = strings.Replace(expected, "MULTI", lang+"/", 1) expected = strings.Replace(expected, "MULTI", lang+"/", 1)
} }
} else { } else {
expected = strings.Replace(expected, "MULTI", "", 1) expected = strings.Replace(expected, "MULTI", "", 1)
} }
@ -294,7 +292,6 @@ func TestURLPrep(t *testing.T) {
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output) t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
} }
} }
} }
func TestAddContextRoot(t *testing.T) { func TestAddContextRoot(t *testing.T) {

View file

@ -48,7 +48,6 @@ func BailOut(after time.Duration) {
runtime.Stack(buf, true) runtime.Stack(buf, true)
panic(string(buf)) panic(string(buf))
}) })
} }
// Rnd is used only for testing. // Rnd is used only for testing.

View file

@ -38,11 +38,9 @@ func decorateDirs(fs afero.Fs, meta FileMeta) afero.Fs {
ffs.decorate = decorator ffs.decorate = decorator
return ffs return ffs
} }
func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs { func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs} ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) { decorator := func(fi os.FileInfo, name string) (os.FileInfo, error) {
@ -54,7 +52,6 @@ func decoratePath(fs afero.Fs, createPath func(name string) string) afero.Fs {
ffs.decorate = decorator ffs.decorate = decorator
return ffs return ffs
} }
// DecorateBasePathFs adds Path info to files and directories in the // DecorateBasePathFs adds Path info to files and directories in the
@ -81,7 +78,6 @@ func DecorateBasePathFs(base *afero.BasePathFs) afero.Fs {
// NewBaseFileDecorator decorates the given Fs to provide the real filename // NewBaseFileDecorator decorates the given Fs to provide the real filename
// and an Opener func. // and an Opener func.
func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs { func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero.Fs {
ffs := &baseFileDecoratorFs{Fs: fs} ffs := &baseFileDecoratorFs{Fs: fs}
decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) { decorator := func(fi os.FileInfo, filename string) (os.FileInfo, error) {
@ -128,7 +124,6 @@ func NewBaseFileDecorator(fs afero.Fs, callbacks ...func(fi FileMetaInfo)) afero
} }
return fim, nil return fim, nil
} }
ffs.decorate = decorator ffs.decorate = decorator
@ -161,7 +156,6 @@ func (fs *baseFileDecoratorFs) Stat(name string) (os.FileInfo, error) {
} }
return fs.decorate(fi, name) return fs.decorate(fi, name)
} }
func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { func (fs *baseFileDecoratorFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {

View file

@ -86,6 +86,7 @@ func (f FileMeta) OriginalFilename() string {
func (f FileMeta) SkipDir() bool { func (f FileMeta) SkipDir() bool {
return f.GetBool(metaKeySkipDir) return f.GetBool(metaKeySkipDir)
} }
func (f FileMeta) TranslationBaseName() string { func (f FileMeta) TranslationBaseName() string {
return f.stringV(metaKeyTranslationBaseName) return f.stringV(metaKeyTranslationBaseName)
} }
@ -236,7 +237,6 @@ func (fi *fileInfoMeta) Meta() FileMeta {
} }
func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo { func NewFileMetaInfo(fi os.FileInfo, m FileMeta) FileMetaInfo {
if fim, ok := fi.(FileMetaInfo); ok { if fim, ok := fi.(FileMetaInfo); ok {
mergeFileMeta(fim.Meta(), m) mergeFileMeta(fim.Meta(), m)
} }
@ -312,7 +312,6 @@ func decorateFileInfo(
fi os.FileInfo, fi os.FileInfo,
fs afero.Fs, opener func() (afero.File, error), fs afero.Fs, opener func() (afero.File, error),
filename, filepath string, inMeta FileMeta) FileMetaInfo { filename, filepath string, inMeta FileMeta) FileMetaInfo {
var meta FileMeta var meta FileMeta
var fim FileMetaInfo var fim FileMetaInfo
@ -334,7 +333,6 @@ func decorateFileInfo(
mergeFileMeta(inMeta, meta) mergeFileMeta(inMeta, meta)
return fim return fim
} }
func isSymlink(fi os.FileInfo) bool { func isSymlink(fi os.FileInfo) bool {
@ -379,6 +377,5 @@ func sortFileInfos(fis []os.FileInfo) {
sort.Slice(fis, func(i, j int) bool { sort.Slice(fis, func(i, j int) bool {
fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo) fimi, fimj := fis[i].(FileMetaInfo), fis[j].(FileMetaInfo)
return fimi.Meta().Filename() < fimj.Meta().Filename() return fimi.Meta().Filename() < fimj.Meta().Filename()
}) })
} }

View file

@ -42,12 +42,14 @@ var (
"rest", "rst", "rest", "rst",
"mmark", "mmark",
"org", "org",
"pandoc", "pdc"} "pandoc", "pdc",
}
contentFileExtensionsSet map[string]bool contentFileExtensionsSet map[string]bool
htmlFileExtensions = []string{ htmlFileExtensions = []string{
"html", "htm"} "html", "htm",
}
htmlFileExtensionsSet map[string]bool htmlFileExtensionsSet map[string]bool
) )

View file

@ -39,7 +39,6 @@ func TestIsHTMLContent(t *testing.T) {
c.Assert(isHTMLContent(strings.NewReader(" <!--")), qt.Equals, true) c.Assert(isHTMLContent(strings.NewReader(" <!--")), qt.Equals, true)
c.Assert(isHTMLContent(strings.NewReader(" ---<")), qt.Equals, false) c.Assert(isHTMLContent(strings.NewReader(" ---<")), qt.Equals, false)
c.Assert(isHTMLContent(strings.NewReader(" foo <")), qt.Equals, false) c.Assert(isHTMLContent(strings.NewReader(" foo <")), qt.Equals, false)
} }
func TestComponentFolders(t *testing.T) { func TestComponentFolders(t *testing.T) {
@ -57,5 +56,4 @@ func TestComponentFolders(t *testing.T) {
c.Assert(IsComponentFolder("content"), qt.Equals, true) c.Assert(IsComponentFolder("content"), qt.Equals, true)
c.Assert(IsComponentFolder("foo"), qt.Equals, false) c.Assert(IsComponentFolder("foo"), qt.Equals, false)
c.Assert(IsComponentFolder(""), qt.Equals, false) c.Assert(IsComponentFolder(""), qt.Equals, false)
} }

View file

@ -35,9 +35,7 @@ var (
) )
func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) { func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) { applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis { for i, fi := range fis {
if fi.IsDir() { if fi.IsDir() {
filename := filepath.Join(name, fi.Name()) filename := filepath.Join(name, fi.Name())
@ -104,11 +102,9 @@ func NewLanguageFs(langs map[string]int, fs afero.Fs) (afero.Fs, error) {
applyPerSource: applyMeta, applyPerSource: applyMeta,
applyAll: all, applyAll: all,
}, nil }, nil
} }
func NewFilterFs(fs afero.Fs) (afero.Fs, error) { func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) { applyMeta := func(fs *FilterFs, name string, fis []os.FileInfo) {
for i, fi := range fis { for i, fi := range fis {
if fi.IsDir() { if fi.IsDir() {
@ -123,7 +119,6 @@ func NewFilterFs(fs afero.Fs) (afero.Fs, error) {
} }
return ffs, nil return ffs, nil
} }
// FilterFs is an ordered composite filesystem. // FilterFs is an ordered composite filesystem.
@ -144,7 +139,6 @@ func (fs *FilterFs) Chtimes(n string, a, m time.Time) error {
func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fi, b, err := lstatIfPossible(fs.fs, name) fi, b, err := lstatIfPossible(fs.fs, name)
if err != nil { if err != nil {
return nil, false, err return nil, false, err
} }
@ -157,7 +151,6 @@ func (fs *FilterFs) LstatIfPossible(name string) (os.FileInfo, bool, error) {
fs.applyFilters(parent, -1, fi) fs.applyFilters(parent, -1, fi)
return fi, b, nil return fi, b, nil
} }
func (fs *FilterFs) Mkdir(n string, p os.FileMode) error { func (fs *FilterFs) Mkdir(n string, p os.FileMode) error {
@ -182,7 +175,6 @@ func (fs *FilterFs) Open(name string) (afero.File, error) {
File: f, File: f,
ffs: fs, ffs: fs,
}, nil }, nil
} }
func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) { func (fs *FilterFs) OpenFile(name string, flag int, perm os.FileMode) (afero.File, error) {
@ -255,7 +247,6 @@ func (fs *FilterFs) applyFilters(name string, count int, fis ...os.FileInfo) ([]
} }
return fis, nil return fis, nil
} }
type filterDir struct { type filterDir struct {
@ -313,7 +304,6 @@ func langInfoFrom(languages map[string]int, name string) (string, string, string
} }
return lang, translationBaseName, translationBaseNameWithExt return lang, translationBaseName, translationBaseNameWithExt
} }
func printFs(fs afero.Fs, path string, w io.Writer) { func printFs(fs afero.Fs, path string, w io.Writer) {

View file

@ -21,7 +21,6 @@ import (
) )
func TestLangInfoFrom(t *testing.T) { func TestLangInfoFrom(t *testing.T) {
langs := map[string]int{ langs := map[string]int{
"sv": 10, "sv": 10,
"en": 20, "en": 20,
@ -44,5 +43,4 @@ func TestLangInfoFrom(t *testing.T) {
v1, v2, v3 := langInfoFrom(langs, test.input) v1, v2, v3 := langInfoFrom(langs, test.input)
c.Assert([]string{v1, v2, v3}, qt.DeepEquals, test.expected) c.Assert([]string{v1, v2, v3}, qt.DeepEquals, test.expected)
} }
} }

View file

@ -23,10 +23,8 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
var (
// Os points to the (real) Os filesystem. // Os points to the (real) Os filesystem.
Os = &afero.OsFs{} var Os = &afero.OsFs{}
)
// Fs abstracts the file system to separate source and destination file systems // Fs abstracts the file system to separate source and destination file systems
// and allows both to be mocked for testing. // and allows both to be mocked for testing.

View file

@ -31,7 +31,6 @@ func TestNewDefault(t *testing.T) {
c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs)) c.Assert(f.Source, hqt.IsSameType, new(afero.OsFs))
c.Assert(f.Os, qt.Not(qt.IsNil)) c.Assert(f.Os, qt.Not(qt.IsNil))
c.Assert(f.WorkingDir, qt.IsNil) c.Assert(f.WorkingDir, qt.IsNil)
} }
func TestNewMem(t *testing.T) { func TestNewMem(t *testing.T) {
@ -57,5 +56,4 @@ func TestWorkingDir(t *testing.T) {
c.Assert(f.WorkingDir, qt.Not(qt.IsNil)) c.Assert(f.WorkingDir, qt.Not(qt.IsNil))
c.Assert(f.WorkingDir, hqt.IsSameType, new(afero.BasePathFs)) c.Assert(f.WorkingDir, hqt.IsSameType, new(afero.BasePathFs))
} }

View file

@ -81,5 +81,4 @@ func Glob(fs afero.Fs, pattern string, handle func(fi FileMetaInfo) (bool, error
} }
return nil return nil
} }

View file

@ -53,7 +53,6 @@ func GetGlob(pattern string) (glob.Glob, error) {
globMu.Unlock() globMu.Unlock()
return eg.glob, eg.err return eg.glob, eg.err
} }
func NormalizePath(p string) string { func NormalizePath(p string) string {
@ -98,5 +97,4 @@ func HasGlobChar(s string) bool {
} }
} }
return false return false
} }

View file

@ -32,7 +32,6 @@ func TestResolveRootDir(t *testing.T) {
{"dat?a/foo.json", ""}, {"dat?a/foo.json", ""},
{"a/b[a-c]/foo.json", "a"}, {"a/b[a-c]/foo.json", "a"},
} { } {
c.Assert(ResolveRootDir(test.input), qt.Equals, test.expected) c.Assert(ResolveRootDir(test.input), qt.Equals, test.expected)
} }
} }
@ -46,7 +45,6 @@ func TestFilterGlobParts(t *testing.T) {
}{ }{
{[]string{"a", "*", "c"}, []string{"a", "c"}}, {[]string{"a", "*", "c"}, []string{"a", "c"}},
} { } {
c.Assert(FilterGlobParts(test.input), qt.DeepEquals, test.expected) c.Assert(FilterGlobParts(test.input), qt.DeepEquals, test.expected)
} }
} }
@ -63,7 +61,6 @@ func TestNormalizePath(t *testing.T) {
{filepath.FromSlash("./FOO.json"), "foo.json"}, {filepath.FromSlash("./FOO.json"), "foo.json"},
{"//", ""}, {"//", ""},
} { } {
c.Assert(NormalizePath(test.input), qt.Equals, test.expected) c.Assert(NormalizePath(test.input), qt.Equals, test.expected)
} }
} }
@ -82,5 +79,4 @@ func BenchmarkGetGlob(b *testing.B) {
b.Fatal(err) b.Fatal(err)
} }
} }
} }

View file

@ -57,5 +57,4 @@ func TestGlob(t *testing.T) {
c.Assert(collect("*.json"), qt.HasLen, 1) c.Assert(collect("*.json"), qt.HasLen, 1)
c.Assert(collect("**.xml"), qt.HasLen, 1) c.Assert(collect("**.xml"), qt.HasLen, 1)
c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2) c.Assert(collect(filepath.FromSlash("/jsonfiles/*.json")), qt.HasLen, 2)
} }

View file

@ -22,9 +22,7 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
var ( var _ afero.Fs = (*md5HashingFs)(nil)
_ afero.Fs = (*md5HashingFs)(nil)
)
// FileHashReceiver will receive the filename an the content's MD5 sum on file close. // FileHashReceiver will receive the filename an the content's MD5 sum on file close.
type FileHashReceiver interface { type FileHashReceiver interface {

View file

@ -49,5 +49,4 @@ func TestHashingFs(t *testing.T) {
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(f.Close(), qt.IsNil) c.Assert(f.Close(), qt.IsNil)
c.Assert(observer.sum, qt.Equals, "d41d8cd98f00b204e9800998ecf8427e") c.Assert(observer.sum, qt.Equals, "d41d8cd98f00b204e9800998ecf8427e")
} }

View file

@ -23,9 +23,7 @@ import (
"github.com/spf13/afero" "github.com/spf13/afero"
) )
var ( var ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
ErrPermissionSymlink = errors.New("symlinks not allowed in this filesystem")
)
// NewNoSymlinkFs creates a new filesystem that prevents symlinks. // NewNoSymlinkFs creates a new filesystem that prevents symlinks.
func NewNoSymlinkFs(fs afero.Fs, logger loggers.Logger, allowFiles bool) afero.Fs { func NewNoSymlinkFs(fs afero.Fs, logger loggers.Logger, allowFiles bool) afero.Fs {
@ -79,7 +77,6 @@ func (fs *noSymlinkFs) Stat(name string) (os.FileInfo, error) {
} }
func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) { func (fs *noSymlinkFs) stat(name string) (os.FileInfo, bool, error) {
var ( var (
fi os.FileInfo fi os.FileInfo
wasLstat bool wasLstat bool

Some files were not shown because too many files have changed in this diff.