Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)

all: Fix minor typos

Commit 04b89857e1 (parent 21fa1e86f2)
67 changed files with 124 additions and 124 deletions
@@ -3,7 +3,7 @@
 # allow user to override go executable by running as GOEXE=xxx make ...
 GOEXE="${GOEXE-go}"

-# Send in a regexp mathing the benchmarks you want to run, i.e. './benchSite.sh "YAML"'.
+# Send in a regexp matching the benchmarks you want to run, i.e. './benchSite.sh "YAML"'.
 # Note the quotes, which will be needed for more complex expressions.
 # The above will run all variations, but only for front matter YAML.
cache/filecache/filecache.go (2 changes, vendored)

@@ -221,7 +221,7 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
 return info, b, nil
 }

-// GetBytes gets the file content with the given id from the cahce, nil if none found.
+// GetBytes gets the file content with the given id from the cache, nil if none found.
 func (c *Cache) GetBytes(id string) (ItemInfo, []byte, error) {
 id = cleanID(id)
cache/namedmemcache/named_cache.go (2 changes, vendored)

@@ -54,7 +54,7 @@ func (c *Cache) Clear() {
 // GetOrCreate tries to get the value with the given cache key, if not found
 // create will be called and cached.
 // This method is thread safe. It also guarantees that the create func for a given
-// key is invoced only once for this cache.
+// key is invoked only once for this cache.
 func (c *Cache) GetOrCreate(key string, create func() (interface{}, error)) (interface{}, error) {
 c.mu.RLock()
 entry, found := c.cache[key]
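The comment fixed above documents the GetOrCreate contract: thread safe, with the create func invoked only once per key. A minimal, self-contained Go sketch of that contract is shown below; it is illustrative only and is not the namedmemcache implementation (which, as the diff shows, uses an RWMutex and its own entry type):

    package main

    import (
        "fmt"
        "sync"
    )

    type cache struct {
        mu sync.Mutex
        m  map[string]interface{}
    }

    // GetOrCreate returns the cached value for key; when the key is missing it
    // calls create once and stores the result. The mutex serializes callers, so
    // create is not invoked concurrently for the same key.
    func (c *cache) GetOrCreate(key string, create func() (interface{}, error)) (interface{}, error) {
        c.mu.Lock()
        defer c.mu.Unlock()
        if v, found := c.m[key]; found {
            return v, nil
        }
        v, err := create()
        if err != nil {
            return nil, err
        }
        c.m[key] = v
        return v, nil
    }

    func main() {
        c := &cache{m: make(map[string]interface{})}
        v, _ := c.GetOrCreate("greeting", func() (interface{}, error) { return "hello", nil })
        fmt.Println(v) // hello; a second call with the same key will not invoke create again
    }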
@@ -365,7 +365,7 @@ func (m Methods) Imports() []string {
 }

 // ToMarshalJSON creates a MarshalJSON method for these methods. Any method name
-// matchin any of the regexps in excludes will be ignored.
+// matching any of the regexps in excludes will be ignored.
 func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
 var sb strings.Builder

@@ -32,7 +32,7 @@ type ReadSeekCloser interface {
 }

 // ReadSeekerNoOpCloser implements ReadSeekCloser by doing nothing in Close.
-// TODO(bep) rename this and simila to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
+// TODO(bep) rename this and similar to ReadSeekerNopCloser, naming used in stdlib, which kind of makes sense.
 type ReadSeekerNoOpCloser struct {
 ReadSeeker
 }
@@ -34,7 +34,7 @@ var DefaultBuild = Build{
 WriteStats: false,
 }

-// Build holds some build related condfiguration.
+// Build holds some build related configuration.
 type Build struct {
 UseResourceCacheWhen string // never, fallback, always. Default is fallback

@@ -128,7 +128,7 @@ func ishex(c rune) bool {

 // UnicodeSanitize sanitizes string to be used in Hugo URL's, allowing only
 // a predefined set of special Unicode characters.
-// If RemovePathAccents configuration flag is enabled, Uniccode accents
+// If RemovePathAccents configuration flag is enabled, Unicode accents
 // are also removed.
 // Spaces will be replaced with a single hyphen, and sequential hyphens will be reduced to one.
 func (p *PathSpec) UnicodeSanitize(s string) string {
@@ -212,12 +212,12 @@ func GetDottedRelativePath(inPath string) string {
 return dottedPath
 }

-// ExtNoDelimiter takes a path and returns the extension, excluding the delmiter, i.e. "md".
+// ExtNoDelimiter takes a path and returns the extension, excluding the delimiter, i.e. "md".
 func ExtNoDelimiter(in string) string {
 return strings.TrimPrefix(Ext(in), ".")
 }

-// Ext takes a path and returns the extension, including the delmiter, i.e. ".md".
+// Ext takes a path and returns the extension, including the delimiter, i.e. ".md".
 func Ext(in string) string {
 _, ext := fileAndExt(in, fpb)
 return ext
@@ -229,13 +229,13 @@ func PathAndExt(in string) (string, string) {
 }

 // FileAndExt takes a path and returns the file and extension separated,
-// the extension including the delmiter, i.e. ".md".
+// the extension including the delimiter, i.e. ".md".
 func FileAndExt(in string) (string, string) {
 return fileAndExt(in, fpb)
 }

 // FileAndExtNoDelimiter takes a path and returns the file and extension separated,
-// the extension excluding the delmiter, e.g "md".
+// the extension excluding the delimiter, e.g "md".
 func FileAndExtNoDelimiter(in string) (string, string) {
 file, ext := fileAndExt(in, fpb)
 return file, strings.TrimPrefix(ext, ".")
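For reference, the extension behavior these comments describe can be illustrated with the standard library alone. The sketch below is an assumed stand-in, not Hugo's helpers package:

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    func main() {
        in := "content/post.md"
        ext := filepath.Ext(in)                            // ".md" - like Ext, including the delimiter
        noDelim := strings.TrimPrefix(ext, ".")            // "md"  - like ExtNoDelimiter, excluding it
        file := strings.TrimSuffix(filepath.Base(in), ext) // "post" - the file part, as in FileAndExt
        fmt.Println(file, ext, noDelim)
    }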
@@ -37,7 +37,7 @@ type PathSpec struct {
 Cfg config.Provider
 }

-// NewPathSpec creats a new PathSpec from the given filesystems and language.
+// NewPathSpec creates a new PathSpec from the given filesystems and language.
 func NewPathSpec(fs *hugofs.Fs, cfg config.Provider, logger loggers.Logger) (*PathSpec, error) {
 return NewPathSpecWithBaseBaseFsProvided(fs, cfg, logger, nil)
 }
@@ -105,7 +105,7 @@ func (p *PathSpec) URLize(uri string) string {
 return p.URLEscape(p.MakePathSanitized(uri))
 }

-// URLizeFilename creates an URL from a filename by esacaping unicode letters
+// URLizeFilename creates an URL from a filename by escaping unicode letters
 // and turn any filepath separator into forward slashes.
 func (p *PathSpec) URLizeFilename(filename string) string {
 return p.URLEscape(filepath.ToSlash(filename))
@@ -280,7 +280,7 @@ func AddContextRoot(baseURL, relativePath string) string {

 newPath := path.Join(url.Path, relativePath)

-// path strips traling slash, ignore root path.
+// path strips trailing slash, ignore root path.
 if newPath != "/" && strings.HasSuffix(relativePath, "/") {
 newPath += "/"
 }
@@ -276,7 +276,7 @@ func (f *filterDir) Readdirnames(count int) ([]string, error) {
 }

 // Try to extract the language from the given filename.
-// Any valid language identificator in the name will win over the
+// Any valid language identifier in the name will win over the
 // language set on the file system, e.g. "mypost.en.md".
 func langInfoFrom(languages map[string]int, name string) (string, string, string) {
 var lang string
@@ -135,7 +135,7 @@ func TestNoSymlinkFs(t *testing.T) {
 // Check readdir
 f, err = fs.Open(workDir)
 c.Assert(err, qt.IsNil)
-// There is at least one unsported symlink inside workDir
+// There is at least one unsupported symlink inside workDir
 _, err = f.Readdir(-1)
 c.Assert(err, qt.IsNil)
 f.Close()
@@ -270,7 +270,7 @@ func TestRootMappingFsMount(t *testing.T) {
 c.Assert(err, qt.IsNil)
 c.Assert(string(b), qt.Equals, "some no content")

-// Ambigous
+// Ambiguous
 _, err = rfs.Stat(filepath.FromSlash("content/singles/p1.md"))
 c.Assert(err, qt.Not(qt.IsNil))

@@ -21,7 +21,7 @@ import (
 )

 func Test073(t *testing.T) {
-asertDisabledTaxonomyAndTerm := func(b *sitesBuilder, taxonomy, term bool) {
+assertDisabledTaxonomyAndTerm := func(b *sitesBuilder, taxonomy, term bool) {
 b.Assert(b.CheckExists("public/tags/index.html"), qt.Equals, taxonomy)
 b.Assert(b.CheckExists("public/tags/tag1/index.html"), qt.Equals, term)
 }
@@ -88,7 +88,7 @@ taxonomy = ["JSON"]
 `disableKinds = ["taxonomy", "taxonomyTerm"]`,
 func(err error, out string, b *sitesBuilder) {
 b.Assert(err, qt.IsNil)
-asertDisabledTaxonomyAndTerm(b, false, false)
+assertDisabledTaxonomyAndTerm(b, false, false)
 },
 },
 {
@@ -96,7 +96,7 @@ taxonomy = ["JSON"]
 `disableKinds = ["taxonomyTerm"]`,
 func(err error, out string, b *sitesBuilder) {
 b.Assert(err, qt.IsNil)
-asertDisabledTaxonomyAndTerm(b, false, true)
+assertDisabledTaxonomyAndTerm(b, false, true)
 },
 },
 {
@@ -113,7 +113,7 @@ taxonomy = ["JSON"]
 ignoreErrors = ["error-disable-taxonomy"]`,
 func(err error, out string, b *sitesBuilder) {
 b.Assert(err, qt.IsNil)
-asertDisabledTaxonomyAndTerm(b, false, true)
+assertDisabledTaxonomyAndTerm(b, false, true)
 },
 },
 } {
@@ -58,8 +58,8 @@ const (
 cmLeafSeparator = "__hl_"
 )

-// Used to mark ambigous keys in reverse index lookups.
-var ambigousContentNode = &contentNode{}
+// Used to mark ambiguous keys in reverse index lookups.
+var ambiguousContentNode = &contentNode{}

 func newContentMap(cfg contentMapConfig) *contentMap {
 m := &contentMap{
@@ -86,8 +86,8 @@ func newContentMap(cfg contentMapConfig) *contentMap {
 addToReverseMap := func(k string, n *contentNode, m map[interface{}]*contentNode) {
 k = strings.ToLower(k)
 existing, found := m[k]
-if found && existing != ambigousContentNode {
-m[k] = ambigousContentNode
+if found && existing != ambiguousContentNode {
+m[k] = ambiguousContentNode
 } else if !found {
 m[k] = n
 }
@@ -232,7 +232,7 @@ func (m *pageMap) newResource(fim hugofs.FileMetaInfo, owner *pageState) (resour
 outputFormats := owner.m.outputFormats()
 seen := make(map[string]bool)
 var targetBasePaths []string
-// Make sure bundled resources are published to all of the ouptput formats'
+// Make sure bundled resources are published to all of the output formats'
 // sub paths.
 for _, f := range outputFormats {
 p := f.Path
@@ -185,7 +185,7 @@ func TestShortcodeYoutube(t *testing.T) {
 `{{< youtube id="w7Ft2ymGmfc" class="video" autoplay="true" >}}`,
 "(?s)\n<div class=\"video\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\\?autoplay=1\".*?allowfullscreen title=\"YouTube Video\">.*?</iframe>.*?</div>",
 },
-// set custom title for accessability)
+// set custom title for accessibility)
 {
 `{{< youtube id="w7Ft2ymGmfc" title="A New Hugo Site in Under Two Minutes" >}}`,
 "(?s)\n<div style=\".*?\">.*?<iframe src=\"https://www.youtube.com/embed/w7Ft2ymGmfc\" style=\".*?\" allowfullscreen title=\"A New Hugo Site in Under Two Minutes\">.*?</iframe>.*?</div>",
@@ -375,7 +375,7 @@ module github.com/gohugoio/tests/testHugoModules
 }

 func createChildModMatchers(m *mods.Md, ignoreVendor, vendored bool) []string {
-// Child depdendencies are one behind.
+// Child dependencies are one behind.
 expectMinorVersion := 3

 if !ignoreVendor && vendored {
@@ -584,7 +584,7 @@ type renderStringOpts struct {
 Markup string
 }

-var defualtRenderStringOpts = renderStringOpts{
+var defaultRenderStringOpts = renderStringOpts{
 Display: "inline",
 Markup: "", // Will inherit the page's value when not set.
 }
@@ -595,7 +595,7 @@ func (p *pageState) RenderString(args ...interface{}) (template.HTML, error) {
 }

 var s string
-opts := defualtRenderStringOpts
+opts := defaultRenderStringOpts
 sidx := 1

 if len(args) == 1 {
@@ -978,7 +978,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
 //
 // For pages that have a source file, it is returns the path to this file as an
 // absolute path rooted in this site's content dir.
-// For pages that do not (sections witout content page etc.), it returns the
+// For pages that do not (sections without content page etc.), it returns the
 // virtual path, consistent with where you would add a source file.
 func (p *pageState) sourceRef() string {
 if !p.File().IsZero() {
@@ -57,7 +57,7 @@ type pageCommon struct {
 bucket *pagesMapBucket
 treeRef *contentTreeRef

-// Laziliy initialized dependencies.
+// Lazily initialized dependencies.
 init *lazy.Init

 // All of these represents the common parts of a page.Page
@@ -302,7 +302,7 @@ func TestPageBundlerSiteMultilingual(t *testing.T) {
 // A bundle in a/b/index.en.md
 // a/b/index.en.md => OK
 // a/b/index => OK
-// index.en.md => ambigous, but OK.
+// index.en.md => ambiguous, but OK.
 // With bundles, the file name has little meaning, the folder it lives in does. So this should also work:
 // a/b
 // and probably also just b (aka "my-bundle")
@@ -98,7 +98,7 @@ func newPageCollections(m *pageMap) *PageCollections {

 // This is an adapter func for the old API with Kind as first argument.
 // This is invoked when you do .Site.GetPage. We drop the Kind and fails
-// if there are more than 2 arguments, which would be ambigous.
+// if there are more than 2 arguments, which would be ambiguous.
 func (c *PageCollections) getPageOldVersion(ref ...string) (page.Page, error) {
 var refs []string
 for _, r := range ref {
@@ -291,7 +291,7 @@ func (c *PageCollections) getContentNode(context page.Page, isReflink bool, ref
 getByName := func(s string) (*contentNode, error) {
 n := m.pageReverseIndex.Get(s)
 if n != nil {
-if n == ambigousContentNode {
+if n == ambiguousContentNode {
 return nil, fmt.Errorf("page reference %q is ambiguous", ref)
 }
 return n, nil
@@ -278,7 +278,7 @@ func (c *pagesCollector) addToBundle(info hugofs.FileMetaInfo, btyp bundleDirTyp
 if btyp == bundleBranch {
 // No special logic for branch bundles.
 // Every language needs its own _index.md file.
-// Also, we only clone bundle headers for lonsesome, bundled,
+// Also, we only clone bundle headers for lonesome, bundled,
 // content files.
 return c.handleFiles(info)
 }
@@ -268,7 +268,7 @@ func (p *Paths) RelPathify(filename string) string {
 return strings.TrimPrefix(strings.TrimPrefix(filename, p.WorkingDir), FilePathSeparator)
 }

-// AbsPathify creates an absolute path if given a working dir and arelative path.
+// AbsPathify creates an absolute path if given a working dir and a relative path.
 // If already absolute, the path is just cleaned.
 func AbsPathify(workingDir, inPath string) string {
 if filepath.IsAbs(inPath) {
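The AbsPathify comment corrected above describes a simple join-or-clean rule. A standalone sketch of that rule follows (an illustration under Unix-style path assumptions, not the helpers implementation):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    // absPathify mirrors the documented behavior: join a relative path onto the
    // working dir, or just clean the path when it is already absolute.
    func absPathify(workingDir, inPath string) string {
        if filepath.IsAbs(inPath) {
            return filepath.Clean(inPath)
        }
        return filepath.Join(workingDir, inPath) // Join also cleans the result
    }

    func main() {
        fmt.Println(absPathify("/site", "content/../data"))  // /site/data
        fmt.Println(absPathify("/site", "/already/abs/./p")) // /already/abs/p
    }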
@@ -956,7 +956,7 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
 eventMap := make(map[string][]fsnotify.Event)

 // We often get a Remove etc. followed by a Create, a Create followed by a Write.
-// Remove the superflous events to mage the update logic simpler.
+// Remove the superfluous events to mage the update logic simpler.
 for _, ev := range events {
 eventMap[ev.Name] = append(eventMap[ev.Name], ev)
 }
@@ -999,7 +999,7 @@ var (
 )

 // reBuild partially rebuilds a site given the filesystem events.
-// It returns whetever the content source was changed.
+// It returns whatever the content source was changed.
 // TODO(bep) clean up/rewrite this method.
 func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
 events = s.filterFileEvents(events)
@@ -1513,7 +1513,7 @@ func (s *Site) assembleMenus() {
 }
 }

-// get any lanaguagecode to prefix the target file path with.
+// get any language code to prefix the target file path with.
 func (s *Site) getLanguageTargetPathLang(alwaysInSubDir bool) string {
 if s.h.IsMultihost() {
 return s.Language().Lang
@@ -454,7 +454,7 @@ categories: ["funny"]
 `,
 "categories/_index.md", "---\ntitle: Categories Page\n---",
 "categories/data.json", "Category data",
-"categories/funny/_index.md", "---\ntitle: Funnny Category\n---",
+"categories/funny/_index.md", "---\ntitle: Funny Category\n---",
 "categories/funny/funnydata.json", "Category funny data",
 )

@@ -928,13 +928,13 @@ func buildSingleSite(t testing.TB, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Sit
 return buildSingleSiteExpected(t, false, false, depsCfg, buildCfg)
 }

-func buildSingleSiteExpected(t testing.TB, expectSiteInitEror, expectBuildError bool, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
+func buildSingleSiteExpected(t testing.TB, expectSiteInitError, expectBuildError bool, depsCfg deps.DepsCfg, buildCfg BuildCfg) *Site {
 t.Helper()
 b := newTestSitesBuilderFromDepsCfg(t, depsCfg).WithNothingAdded()

 err := b.CreateSitesE()

-if expectSiteInitEror {
+if expectSiteInitError {
 b.Assert(err, qt.Not(qt.IsNil))
 return nil
 } else {
@@ -126,7 +126,7 @@ func (l Languages) Less(i, j int) bool {

 func (l Languages) Swap(i, j int) { l[i], l[j] = l[j], l[i] }

-// Params retunrs language-specific params merged with the global params.
+// Params returns language-specific params merged with the global params.
 func (l *Language) Params() maps.Params {
 // TODO(bep) this construct should not be needed. Create the
 // language params in one go.
@@ -155,7 +155,7 @@ func parseOptions(in string) (map[string]interface{}, error) {
 return opts, nil
 }

-// startLine compansates for https://github.com/alecthomas/chroma/issues/30
+// startLine compensates for https://github.com/alecthomas/chroma/issues/30
 func hlLinesToRanges(startLine int, s string) ([][2]int, error) {
 var ranges [][2]int
 s = strings.TrimSpace(s)
@@ -40,7 +40,7 @@ type Type struct {
 SubType string `json:"subType"` // i.e. html

 // This is the optional suffix after the "+" in the MIME type,
-// e.g. "xml" in "applicatiion/rss+xml".
+// e.g. "xml" in "application/rss+xml".
 mimeSuffix string

 Delimiter string `json:"delimiter"` // e.g. "."
@@ -107,7 +107,7 @@ func (m Type) FullSuffix() string {
 return m.Delimiter + m.Suffix()
 }

-// Suffix returns the file suffix without any delmiter prepended.
+// Suffix returns the file suffix without any delimiter prepended.
 func (m Type) Suffix() string {
 if m.fileSuffix != "" {
 return m.fileSuffix
@@ -95,7 +95,7 @@ func TestConfigureMinify(t *testing.T) {
 }{
 {media.HTMLType, "<hello> Hugo! </hello>", "<hello> Hugo! </hello>", false}, // configured minifier
 {media.CSSType, " body { color: blue; } ", "body{color:blue}", false}, // default minifier
-{media.XMLType, " <hello> Hugo! </hello> ", "", true}, // disable Xml minificatin
+{media.XMLType, " <hello> Hugo! </hello> ", "", true}, // disable Xml minification
 } {
 var b bytes.Buffer
 if !test.errorExpected {
@@ -559,7 +559,7 @@ func (c *collector) mountCommonJSConfig(owner *moduleAdapter, mounts []Mount) ([
 for _, m := range mounts {
 if strings.HasPrefix(m.Target, files.JsConfigFolderMountPrefix) {
 // This follows the convention of the other component types (assets, content, etc.),
-// if one or more is specificed by the user, we skip the defaults.
+// if one or more is specified by the user, we skip the defaults.
 // These mounts were added to Hugo in 0.75.
 return mounts, nil
 }
@@ -105,7 +105,7 @@ func (d Decoder) UnmarshalStringTo(data string, typ interface{}) (interface{}, e
 case float64:
 return cast.ToFloat64E(data)
 default:
-return nil, errors.Errorf("unmarshal: %T not supportedd", typ)
+return nil, errors.Errorf("unmarshal: %T not supported", typ)
 }
 }

@@ -426,7 +426,7 @@ func lexMainSection(l *pageLexer) stateFunc {
 }

 if l.isInHTMLComment {
-return lexEndFromtMatterHTMLComment
+return lexEndFrontMatterHTMLComment
 }

 // Fast forward as far as possible.
@@ -56,7 +56,7 @@ LOOP:
 return lexMainSection
 }

-func lexEndFromtMatterHTMLComment(l *pageLexer) stateFunc {
+func lexEndFrontMatterHTMLComment(l *pageLexer) stateFunc {
 l.isInHTMLComment = false
 right := l.index(htmlCommentEnd)
 if right == -1 {
@@ -185,7 +185,7 @@ Loop:
 l.backup()
 break Loop
 } else if openQuoteFound {
-// the coming quoute is inside
+// the coming quote is inside
 escapedInnerQuoteFound = true
 escapedQuoteState = 1
 }
@@ -172,7 +172,7 @@ func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
 }
 }

-// Consume is a convencience method to consume the next n tokens,
+// Consume is a convenience method to consume the next n tokens,
 // but back off Errors and EOF.
 func (t *Iterator) Consume(cnt int) {
 for i := 0; i < cnt; i++ {
@@ -140,7 +140,7 @@ func (w *cssClassCollectorWriter) Write(p []byte) (n int, err error) {
 return
 }

-// The net/html parser does not handle single table elemnts as input, e.g. tbody.
+// The net/html parser does not handle single table elements as input, e.g. tbody.
 // We only care about the element/class/ids, so just store away the original tag name
 // and pretend it's a <div>.
 func (c *cssClassCollectorWriter) insertStandinHTMLElement(el string) (string, string) {
@@ -131,12 +131,12 @@ func (r *ReleaseHandler) Run() error {
 return err
 }

-prepareRelaseNotes := isPatch || relNotesState == releaseNotesNone
+prepareReleaseNotes := isPatch || relNotesState == releaseNotesNone
 shouldRelease := isPatch || relNotesState == releaseNotesReady

 defer r.gitPush() // TODO(bep)

-if prepareRelaseNotes || shouldRelease {
+if prepareReleaseNotes || shouldRelease {
 gitCommits, err = getGitInfos(changeLogFromTag, "hugo", "", !r.try)
 if err != nil {
 return err
@@ -150,11 +150,11 @@ func (r *ReleaseHandler) Run() error {
 }

 if relNotesState == releaseNotesCreated {
-fmt.Println("Release notes created, but not ready. Reneame to *-ready.md to continue ...")
+fmt.Println("Release notes created, but not ready. Rename to *-ready.md to continue ...")
 return nil
 }

-if prepareRelaseNotes {
+if prepareReleaseNotes {
 releaseNotesFile, err := r.writeReleaseNotesToTemp(version, isPatch, gitCommits, gitCommitsDocs)
 if err != nil {
 return err
@@ -152,7 +152,7 @@ func (i *imageResource) cloneWithUpdates(u *transformationUpdate) (baseResource,

 var img *images.Image

-if u.isContenChanged() {
+if u.isContentChanged() {
 img = i.WithSpec(base)
 } else {
 img = i.Image
@@ -46,7 +46,7 @@ func (c *imageCache) deleteIfContains(s string) {
 }
 }

-// The cache key is a lowecase path with Unix style slashes and it always starts with
+// The cache key is a lowercase path with Unix style slashes and it always starts with
 // a leading slash.
 func (c *imageCache) normalizeKey(key string) string {
 return "/" + c.normalizeKeyBase(key)
@@ -354,7 +354,7 @@ func TestImageResizeInSubPath(t *testing.T) {
 assertImageFile(c, spec.BaseFs.PublishFs, publishedImageFilename, 101, 101)
 c.Assert(spec.BaseFs.PublishFs.Remove(publishedImageFilename), qt.IsNil)

-// Cleare mem cache to simulate reading from the file cache.
+// Clear mem cache to simulate reading from the file cache.
 spec.imageCache.clear()

 resizedAgain, err := image.Resize("101x101")
@@ -81,7 +81,7 @@ func generateMarshalJSON(c *codegen.Inspector) error {

 // Exclude these methods
 excludes := []reflect.Type{
-// We need to eveluate the deprecated vs JSON in the future,
+// We need to evaluate the deprecated vs JSON in the future,
 // but leave them out for now.
 pageInterfaceDeprecated,

@@ -27,7 +27,7 @@ type OutputFormats []OutputFormat

 // OutputFormat links to a representation of a resource.
 type OutputFormat struct {
-// Rel constains a value that can be used to construct a rel link.
+// Rel contains a value that can be used to construct a rel link.
 // This is value is fetched from the output format definition.
 // Note that for pages with only one output format,
 // this method will always return "canonical".
@@ -343,7 +343,7 @@ func (p Pages) GroupByParamDate(key string, format string, order ...string) (Pag
 return p.groupByDateField(sorter, formatter, order...)
 }

-// ProbablyEq wraps comare.ProbablyEqer
+// ProbablyEq wraps compare.ProbablyEqer
 func (p PageGroup) ProbablyEq(other interface{}) bool {
 otherP, ok := other.(PageGroup)
 if !ok {
@@ -387,7 +387,7 @@ func (psg PagesGroup) Len() int {
 return l
 }

-// ProbablyEq wraps comare.ProbablyEqer
+// ProbablyEq wraps compare.ProbablyEqer
 func (psg PagesGroup) ProbablyEq(other interface{}) bool {
 otherPsg, ok := other.(PagesGroup)
 if !ok {
@@ -104,7 +104,7 @@ func (p Pages) Len() int {
 return len(p)
 }

-// ProbablyEq wraps comare.ProbablyEqer
+// ProbablyEq wraps compare.ProbablyEqer
 func (pages Pages) ProbablyEq(other interface{}) bool {
 otherPages, ok := other.(Pages)
 if !ok {
@@ -13,7 +13,7 @@

 package page

-// Next returns the next page reletive to the given
+// Next returns the next page relative to the given
 func (p Pages) Next(cur Page) Page {
 x := searchPage(cur, p)
 if x <= 0 {
@@ -108,7 +108,7 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela

 d, ok := p[0].(InternalDependencies)
 if !ok {
-return nil, errors.Errorf("invalid type %T in related serch", p[0])
+return nil, errors.Errorf("invalid type %T in related search", p[0])
 }

 cache := d.GetRelatedDocsHandler()
@@ -440,7 +440,7 @@ func (l *genericResource) openDestinationsForWriting() (w io.WriteCloser, err er

 // Fast path:
 // This is a processed version of the original;
-// check if it already existis at the destination.
+// check if it already exists at the destination.
 for _, targetFilename := range targetFilenames {
 if _, err := l.getSpec().BaseFs.PublishFs.Stat(targetFilename); err == nil {
 continue
@@ -662,7 +662,7 @@ type resourcePathDescriptor struct {
 targetPathBuilder func() page.TargetPaths

 // This will normally be the same as above, but this will only apply to publishing
-// of resources. It may be mulltiple values when in multihost mode.
+// of resources. It may be multiple values when in multihost mode.
 baseTargetPathDirs []string

 // baseOffset is set when the output format's path has a offset, e.g. for AMP.
@@ -274,7 +274,7 @@ func (r *Spec) newResource(sourceFs afero.Fs, fd ResourceSourceDescriptor) (reso

 ext := strings.ToLower(filepath.Ext(fd.RelTargetFilename))
 mimeType, found := r.MediaTypes.GetFirstBySuffix(strings.TrimPrefix(ext, "."))
-// TODO(bep) we need to handle these ambigous types better, but in this context
+// TODO(bep) we need to handle these ambiguous types better, but in this context
 // we most likely want the application/xml type.
 if mimeType.Suffix() == "xml" && mimeType.SubType == "rss" {
 mimeType, found = r.MediaTypes.GetByType("application/xml")
@@ -41,7 +41,7 @@ func TestGenericResource(t *testing.T) {
 c.Assert(r.ResourceType(), qt.Equals, "css")
 }

-func TestGenericResourceWithLinkFacory(t *testing.T) {
+func TestGenericResourceWithLinkFactory(t *testing.T) {
 c := qt.New(t)
 spec := newTestResourceSpec(specDescriptor{c: c})

@@ -131,11 +131,11 @@ func (t *babelTransformation) Transform(ctx *resources.ResourceTransformationCtx

 configFile = filepath.Clean(configFile)

-// We need an abolute filename to the config file.
+// We need an absolute filename to the config file.
 if !filepath.IsAbs(configFile) {
 configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
 if configFile == "" && t.options.Config != "" {
-// Only fail if the user specificed config file is not found.
+// Only fail if the user specified config file is not found.
 return errors.Errorf("babel config %q not found:", configFile)
 }
 }
@@ -20,7 +20,7 @@ import (
 "github.com/gohugoio/hugo/resources/resource"
 )

-// Client for minification of Resource objects. Supported minfiers are:
+// Client for minification of Resource objects. Supported minifiers are:
 // css, html, js, json, svg and xml.
 type Client struct {
 rs *resources.Spec
@@ -166,11 +166,11 @@ func (t *postcssTransformation) Transform(ctx *resources.ResourceTransformationC

 configFile = filepath.Clean(configFile)

-// We need an abolute filename to the config file.
+// We need an absolute filename to the config file.
 if !filepath.IsAbs(configFile) {
 configFile = t.rs.BaseFs.ResolveJSConfigFile(configFile)
 if configFile == "" && t.options.Config != "" {
-// Only fail if the user specificed config file is not found.
+// Only fail if the user specified config file is not found.
 return errors.Errorf("postcss config %q not found:", configFile)
 }
 }
@@ -105,7 +105,7 @@ type ResourceTransformationCtx struct {
 // to be simple types, as it needs to be serialized to JSON and back.
 Data map[string]interface{}

-// This is used to publis additional artifacts, e.g. source maps.
+// This is used to publish additional artifacts, e.g. source maps.
 // We may improve this.
 OpenResourcePublisher func(relTargetPath string) (io.WriteCloser, error)
 }
@@ -479,7 +479,7 @@ func (r *resourceAdapter) transform(publish, setContent bool) error {
 publishwriters = append(publishwriters, metaw)
 }

-// Any transofrmations reading from From must also write to To.
+// Any transformations reading from From must also write to To.
 // This means that if the target buffer is empty, we can just reuse
 // the original reader.
 if b, ok := tctx.To.(*bytes.Buffer); ok && b.Len() > 0 {
@@ -587,7 +587,7 @@ type transformationUpdate struct {
 startCtx ResourceTransformationCtx
 }

-func (u *transformationUpdate) isContenChanged() bool {
+func (u *transformationUpdate) isContentChanged() bool {
 return u.content != nil || u.sourceFilename != nil
 }

@@ -152,7 +152,7 @@ func (fi *FileInfo) LogicalName() string { return fi.name }
 func (fi *FileInfo) BaseFileName() string { return fi.baseName }

 // TranslationBaseName returns a file's translation base name without the
-// language segement (ie. "page").
+// language segment (ie. "page").
 func (fi *FileInfo) TranslationBaseName() string { return fi.translationBaseName }

 // ContentBaseName is a either TranslationBaseName or name of containing folder
@@ -266,7 +266,7 @@ func (sp *SourceSpec) NewFileInfo(fi hugofs.FileMetaInfo) (*FileInfo, error) {
 baseName := helpers.Filename(name)

 if translationBaseName == "" {
-// This is usyally provided by the filesystem. But this FileInfo is also
+// This is usually provided by the filesystem. But this FileInfo is also
 // created in a standalone context when doing "hugo new". This is
 // an approximate implementation, which is "good enough" in that case.
 fileLangExt := filepath.Ext(baseName)
@@ -673,7 +673,7 @@ func TestShuffleRandomising(t *testing.T) {

 // Note that this test can fail with false negative result if the shuffle
 // of the sequence happens to be the same as the original sequence. However
-// the propability of the event is 10^-158 which is negligible.
+// the probability of the event is 10^-158 which is negligible.
 seqLen := 100
 rand.Seed(time.Now().UTC().UnixNano())

@@ -835,7 +835,7 @@ func TestUniq(t *testing.T) {
 // Structs
 {pagesVals{p3v, p2v, p3v, p2v}, pagesVals{p3v, p2v}, false},

-// not Comparable(), use hashstruscture
+// not Comparable(), use hashstructure
 {[]map[string]int{
 {"K1": 1}, {"K2": 2}, {"K1": 1}, {"K2": 1},
 }, []map[string]int{
@@ -23,7 +23,7 @@ import (
 "github.com/pkg/errors"
 )

-// Merge creates a copy of the final parameter and merges the preceeding
+// Merge creates a copy of the final parameter and merges the preceding
 // parameters into it in reverse order.
 // Currently only maps are supported. Key handling is case insensitive.
 func (ns *Namespace) Merge(params ...interface{}) (interface{}, error) {
@@ -53,7 +53,7 @@ func TestSort(t *testing.T) {

 {[]int{1, 2, 3, 4, 5}, nil, "asc", []int{1, 2, 3, 4, 5}},
 {[]int{5, 4, 3, 1, 2}, nil, "asc", []int{1, 2, 3, 4, 5}},
-// test sort key parameter is focibly set empty
+// test sort key parameter is forcibly set empty
 {[]string{"class3", "class1", "class2"}, map[int]string{1: "a"}, "asc", []string{"class1", "class2", "class3"}},
 // test map sorting by keys
 {map[string]int{"1": 10, "2": 20, "3": 30, "4": 40, "5": 50}, nil, "asc", []int{10, 20, 30, 40, 50}},
@@ -43,7 +43,7 @@ type Namespace struct {
 func (*Namespace) Default(dflt interface{}, given ...interface{}) (interface{}, error) {
 // given is variadic because the following construct will not pass a piped
 // argument when the key is missing: {{ index . "key" | default "foo" }}
-// The Go template will complain that we got 1 argument when we expectd 2.
+// The Go template will complain that we got 1 argument when we expected 2.

 if len(given) == 0 {
 return dflt, nil
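The comment fixed above explains why Default takes a variadic given: a pipeline such as {{ index . "key" | default "foo" }} passes only one argument when the key is missing. The sketch below shows just that arity trick in plain Go; it is illustrative and not the tpl implementation:

    package main

    import "fmt"

    // defaultOf returns dflt when no value was piped in, otherwise the first
    // given value. The variadic parameter is what lets a one-argument call compile.
    func defaultOf(dflt interface{}, given ...interface{}) interface{} {
        if len(given) == 0 {
            return dflt
        }
        return given[0]
    }

    func main() {
        fmt.Println(defaultOf("foo"))        // foo - mimics `| default "foo"` with a missing key
        fmt.Println(defaultOf("foo", "bar")) // bar
    }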
@@ -94,7 +94,7 @@ func (t *Template) executeWithState(state *state, value reflect.Value) (err erro
 return
 }

-// Below are modifed structs etc. The changes are marked with "Added for Hugo."
+// Below are modified structs etc. The changes are marked with "Added for Hugo."

 // state represents the state of an execution. It's not part of the
 // template so that multiple executions of the same template
@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-// Package math provides template functions for mathmatical operations.
+// Package math provides template functions for mathematical operations.
 package math

 import (
@@ -43,7 +43,7 @@ type Namespace struct {
 deps *deps.Deps
 }

-// CountRunes returns the number of runes in s, excluding whitepace.
+// CountRunes returns the number of runes in s, excluding whitespace.
 func (ns *Namespace) CountRunes(s interface{}) (int, error) {
 ss, err := cast.ToStringE(s)
 if err != nil {
@@ -87,7 +87,7 @@ type TemplateParseFinder interface {
 TemplateFinder
 }

-// TemplateDebugger prints some debug info to stdoud.
+// TemplateDebugger prints some debug info to stdout.
 type TemplateDebugger interface {
 Debug()
 }
@@ -27,7 +27,7 @@ import (
 func main() {
 templateFolder := filepath.Join("..", "templates")

-temlatePath := filepath.Join(".", templateFolder)
+templatePath := filepath.Join(".", templateFolder)

 file, err := os.Create("../templates.autogen.go")
 if err != nil {
@@ -37,7 +37,7 @@ func main() {

 var nameValues []string

-err = filepath.Walk(temlatePath, func(path string, info os.FileInfo, err error) error {
+err = filepath.Walk(templatePath, func(path string, info os.FileInfo, err error) error {
 if err != nil {
 return err
 }
@@ -141,7 +141,7 @@ func (ns *Namespace) refArgsToMap(args interface{}) (map[string]interface{}, err
 return m, nil
 case []string:
 if len(v) == 0 || len(v) > 2 {
-return nil, fmt.Errorf("invalid numer of arguments to ref")
+return nil, fmt.Errorf("invalid number of arguments to ref")
 }
 // These where the options before we introduced the map type:
 s = v[0]
@@ -30,7 +30,7 @@ func TestChainZeroTransformers(t *testing.T) {
 }
 }

-func TestChaingMultipleTransformers(t *testing.T) {
+func TestChainingMultipleTransformers(t *testing.T) {
 f1 := func(ct FromTo) error {
 _, err := ct.To().Write(bytes.Replace(ct.From().Bytes(), []byte("f1"), []byte("f1r"), -1))
 return err
@@ -28,10 +28,10 @@ const (
 h5JsContentSingleQuote = "<!DOCTYPE html><html><head><script src='foobar.js'></script><script src='/barfoo.js'></script></head><body><nav><h1>title</h1></nav><article>content <a href='foobar'>foobar</a>. <a href='/foobar'>Follow up</a></article></body></html>"
 h5JsContentAbsURL = "<!DOCTYPE html><html><head><script src=\"http://user@host:10234/foobar.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"https://host/foobar\">foobar</a>. Follow up</article></body></html>"
 h5JsContentAbsURLSchemaless = "<!DOCTYPE html><html><head><script src=\"//host/foobar.js\"></script><script src='//host2/barfoo.js'></head><body><nav><h1>title</h1></nav><article>content <a href=\"//host/foobar\">foobar</a>. <a href='//host2/foobar'>Follow up</a></article></body></html>"
-corectOutputSrcHrefDq = "<!DOCTYPE html><html><head><script src=\"foobar.js\"></script><script src=\"http://base/barfoo.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"foobar\">foobar</a>. <a href=\"http://base/foobar\">Follow up</a></article></body></html>"
-corectOutputSrcHrefSq = "<!DOCTYPE html><html><head><script src='foobar.js'></script><script src='http://base/barfoo.js'></script></head><body><nav><h1>title</h1></nav><article>content <a href='foobar'>foobar</a>. <a href='http://base/foobar'>Follow up</a></article></body></html>"
+correctOutputSrcHrefDq = "<!DOCTYPE html><html><head><script src=\"foobar.js\"></script><script src=\"http://base/barfoo.js\"></script></head><body><nav><h1>title</h1></nav><article>content <a href=\"foobar\">foobar</a>. <a href=\"http://base/foobar\">Follow up</a></article></body></html>"
+correctOutputSrcHrefSq = "<!DOCTYPE html><html><head><script src='foobar.js'></script><script src='http://base/barfoo.js'></script></head><body><nav><h1>title</h1></nav><article>content <a href='foobar'>foobar</a>. <a href='http://base/foobar'>Follow up</a></article></body></html>"

-h5XMLXontentAbsURL = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\"><p><a href="/foobar">foobar</a></p> <p>A video: <iframe src='/foo'></iframe></p></content></entry></feed>"
+h5XMLContentAbsURL = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\"><p><a href="/foobar">foobar</a></p> <p>A video: <iframe src='/foo'></iframe></p></content></entry></feed>"
 correctOutputSrcHrefInXML = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\"><p><a href="http://base/foobar">foobar</a></p> <p>A video: <iframe src='http://base/foo'></iframe></p></content></entry></feed>"
 h5XMLContentGuarded = "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?><feed xmlns=\"http://www.w3.org/2005/Atom\"><entry><content type=\"html\"><p><a href="//foobar">foobar</a></p> <p>A video: <iframe src='//foo'></iframe></p></content></entry></feed>"
 )
@@ -96,14 +96,14 @@ schemaless2: <img srcset="//img.jpg" src="//basic.jpg2> POS

 var (
 absURLlBenchTests = []test{
-{h5JsContentDoubleQuote, corectOutputSrcHrefDq},
-{h5JsContentSingleQuote, corectOutputSrcHrefSq},
+{h5JsContentDoubleQuote, correctOutputSrcHrefDq},
+{h5JsContentSingleQuote, correctOutputSrcHrefSq},
 {h5JsContentAbsURL, h5JsContentAbsURL},
 {h5JsContentAbsURLSchemaless, h5JsContentAbsURLSchemaless},
 }

 xmlAbsURLBenchTests = []test{
-{h5XMLXontentAbsURL, correctOutputSrcHrefInXML},
+{h5XMLContentAbsURL, correctOutputSrcHrefInXML},
 {h5XMLContentGuarded, h5XMLContentGuarded},
 }

@@ -155,7 +155,7 @@ func TestAbsURL(t *testing.T) {
 apply(t.Errorf, tr, absURLTests)
 }

-func TestAbsURLUnqoted(t *testing.T) {
+func TestAbsURLUnquoted(t *testing.T) {
 tr := transform.New(NewAbsURLTransformer(testBaseURL))

 apply(t.Errorf, tr, []test{