Mirror of https://github.com/gohugoio/hugo.git, synced 2025-03-23 08:23:35 +00:00
More initialism corrections (golint)
Thanks to @bep's new, brilliant helpers.Deprecated() function, the following functions or variables are transitioned to their new names, preserving backward compatibility for v0.14 and warning the user of upcoming obsolescence in v0.15:

* .Url → .URL (for node, menu and paginator)
* .Site.BaseUrl → .Site.BaseURL
* .Site.Indexes → .Site.Taxonomies
* .Site.Recent → .Site.Pages
* getJson → getJSON
* getCsv → getCSV
* safeHtml → safeHTML
* safeCss → safeCSS
* safeUrl → safeURL

Also fix related initialisms in strings and comments.

Continued effort in fixing #959.
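Every rename in this commit follows the same shape: the data moves to the new, golint-clean name, and a method with the old name stays behind for one release cycle, calling helpers.Deprecated() and forwarding to the new name (the template functions getJson, getCsv, safeHtml, safeCss and safeUrl get the same treatment as thin wrappers in the template func map). A minimal, self-contained sketch of that pattern follows; the local Deprecated function, its message text and the sample values are illustrative stand-ins for the real helpers package, not its actual implementation.

package main

import "log"

// Deprecated plays the role of helpers.Deprecated in the diff below:
// warn that an old name still works but is scheduled for removal.
func Deprecated(object, item, alternative string) {
	log.Printf("%s's %s is deprecated and will be removed in Hugo 0.15. Use %s instead.", object, item, alternative)
}

// MenuEntry carries the renamed, exported field.
type MenuEntry struct {
	URL  string
	Name string
}

// Url is the backward-compatible accessor kept around for v0.14.
func (me *MenuEntry) Url() string {
	Deprecated("MenuEntry", ".Url", ".URL")
	return me.URL
}

func main() {
	me := MenuEntry{URL: "/docs/", Name: "Docs"}
	log.Println(me.Url()) // logs the deprecation warning, then prints "/docs/"
}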
This commit is contained in:
parent ca69cad8aa
commit 8b8fb417ae
20 changed files with 184 additions and 129 deletions
@@ -135,7 +135,7 @@ func serve(port int) {
 }
 }

-// fixUrl massages the BaseUrl into a form needed for serving
+// fixURL massages the BaseURL into a form needed for serving
 // all pages correctly.
 func fixURL(s string) (string, error) {
 useLocalhost := false
@@ -164,7 +164,7 @@ func fixURL(s string) (string, error) {
 if strings.Contains(host, ":") {
 host, _, err = net.SplitHostPort(u.Host)
 if err != nil {
-return "", fmt.Errorf("Failed to split BaseUrl hostpost: %s", err)
+return "", fmt.Errorf("Failed to split BaseURL hostpost: %s", err)
 }
 }
 u.Host = fmt.Sprintf("%s:%d", host, serverPort)
@@ -330,7 +330,7 @@ func PathPrep(ugly bool, in string) string {
 return PrettifyPath(in)
 }

-// Same as PrettifyUrlPath() but for file paths.
+// Same as PrettifyURLPath() but for file paths.
 // /section/name.html becomes /section/name/index.html
 // /section/name/ becomes /section/name/index.html
 // /section/name/index.html becomes /section/name/index.html
@@ -69,7 +69,7 @@ func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string {
 // in issues #157, #622, etc., without forcing
 // relative URLs to begin with '/'.
 // Once the fixes are in, let's remove this kludge
-// and restore SanitizeUrl() to the way it was.
+// and restore SanitizeURL() to the way it was.
 // -- @anthonyfok, 2015-02-16
 //
 // Begin temporary kludge
@@ -87,12 +87,12 @@ func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string {

 }

-// SanitizeUrl sanitizes the input URL string.
+// SanitizeURL sanitizes the input URL string.
 func SanitizeURL(in string) string {
 return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }

-// SanitizeUrlKeepTrailingSlash is the same as SanitizeUrl, but will keep any trailing slash.
+// SanitizeURLKeepTrailingSlash is the same as SanitizeURL, but will keep any trailing slash.
 func SanitizeURLKeepTrailingSlash(in string) string {
 return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
 }
@@ -147,7 +147,7 @@ func MakePermalink(host, plink string) *url.URL {

 // AddContextRoot adds the context root to an URL if it's not already set.
 // For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
-// relative URLs must not include the context root if canonifyUrls is enabled. But if it's disabled, it must be set.
+// relative URLs must not include the context root if canonifyURLs is enabled. But if it's disabled, it must be set.
 func AddContextRoot(baseURL, relativePath string) string {

 url, err := url.Parse(baseURL)
@@ -185,7 +185,7 @@ func URLPrep(ugly bool, in string) string {
 return url
 }

-// PrettifyUrl takes a URL string and returns a semantic, clean URL.
+// PrettifyURL takes a URL string and returns a semantic, clean URL.
 func PrettifyURL(in string) string {
 x := PrettifyURLPath(in)

@@ -200,7 +200,7 @@ func PrettifyURL(in string) string {
 return x
 }

-// PrettifyUrlPath takes a URL path to a content and converts it
+// PrettifyURLPath takes a URL path to a content and converts it
 // to enable pretty URLs.
 // /section/name.html becomes /section/name/index.html
 // /section/name/ becomes /section/name/index.html
@@ -209,7 +209,7 @@ func PrettifyURLPath(in string) string {
 return PrettiyPath(in, pathBridge)
 }

-// Uglify does the opposite of PrettifyUrlPath().
+// Uglify does the opposite of PrettifyURLPath().
 // /section/name/index.html becomes /section/name.html
 // /section/name/ becomes /section/name.html
 // /section/name.html becomes /section/name.html
@@ -6,7 +6,7 @@ import (
 "testing"
 )

-func TestUrlize(t *testing.T) {
+func TestURLize(t *testing.T) {
 tests := []struct {
 input string
 expected string
@@ -26,7 +26,7 @@ func TestUrlize(t *testing.T) {
 }
 }

-func TestSanitizeUrl(t *testing.T) {
+func TestSanitizeURL(t *testing.T) {
 tests := []struct {
 input string
 expected string
@@ -76,7 +76,7 @@ func TestMakePermalink(t *testing.T) {
 }
 }

-func TestUrlPrep(t *testing.T) {
+func TestURLPrep(t *testing.T) {
 type test struct {
 ugly bool
 input string
@@ -19,10 +19,11 @@ import (
 "strings"

 "github.com/spf13/cast"
+"github.com/spf13/hugo/helpers"
 )

 type MenuEntry struct {
-Url string
+URL string
 Name string
 Menu string
 Identifier string
@@ -37,6 +38,12 @@ type Menu []*MenuEntry
 type Menus map[string]*Menu
 type PageMenus map[string]*MenuEntry

+// Url is deprecated. Will be removed in 0.15.
+func (me *MenuEntry) Url() string {
+helpers.Deprecated("MenuEntry", ".Url", ".URL")
+return me.URL
+}
+
 func (me *MenuEntry) AddChild(child *MenuEntry) {
 me.Children = append(me.Children, child)
 me.Children.Sort()
@@ -53,22 +60,22 @@ func (me *MenuEntry) KeyName() string {
 return me.Name
 }

-func (me *MenuEntry) hopefullyUniqueId() string {
+func (me *MenuEntry) hopefullyUniqueID() string {
 if me.Identifier != "" {
 return me.Identifier
-} else if me.Url != "" {
-return me.Url
+} else if me.URL != "" {
+return me.URL
 } else {
 return me.Name
 }
 }

 func (me *MenuEntry) IsEqual(inme *MenuEntry) bool {
-return me.hopefullyUniqueId() == inme.hopefullyUniqueId() && me.Parent == inme.Parent
+return me.hopefullyUniqueID() == inme.hopefullyUniqueID() && me.Parent == inme.Parent
 }

 func (me *MenuEntry) IsSameResource(inme *MenuEntry) bool {
-return me.Url != "" && inme.Url != "" && me.Url == inme.Url
+return me.URL != "" && inme.URL != "" && me.URL == inme.URL
 }

 func (me *MenuEntry) MarshallMap(ime map[string]interface{}) {
@@ -76,7 +83,7 @@ func (me *MenuEntry) MarshallMap(ime map[string]interface{}) {
 loki := strings.ToLower(k)
 switch loki {
 case "url":
-me.Url = cast.ToString(v)
+me.URL = cast.ToString(v)
 case "weight":
 me.Weight = cast.ToInt(v)
 case "name":
@@ -180,8 +180,8 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou
 assert.NotNil(t, me1)
 assert.NotNil(t, me2)

-assert.True(t, strings.Contains(me1.Url, "doc1"))
-assert.True(t, strings.Contains(me2.Url, "doc2"))
+assert.True(t, strings.Contains(me1.URL, "doc1"))
+assert.True(t, strings.Contains(me2.URL, "doc2"))

 }

@@ -216,8 +216,8 @@ func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.Byte
 assert.NotNil(t, me1)
 assert.NotNil(t, me2)

-assert.True(t, strings.Contains(me1.Url, "doc1"))
-assert.True(t, strings.Contains(me2.Url, "doc2"))
+assert.True(t, strings.Contains(me1.URL, "doc1"))
+assert.True(t, strings.Contains(me2.URL, "doc2"))

 }

@@ -275,7 +275,7 @@ func TestMenuWithHashInURL(t *testing.T) {

 assert.NotNil(t, me)

-assert.Equal(t, "/Zoo/resource/#anchor", me.Url)
+assert.Equal(t, "/Zoo/resource/#anchor", me.URL)
 }

 // issue #719
@@ -309,7 +309,7 @@ func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) {
 expected = expectedBase + "/"
 }

-assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs)
+assert.Equal(t, expected, unicodeRussian.URL, "uglyURLs[%t]", uglyURLs)
 }

 func TestTaxonomyNodeMenu(t *testing.T) {
@@ -329,7 +329,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
 {"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
 ts.findTestMenuEntryByID("tax", "2"), true, false},
 {"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-&MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+&MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
 } {

 n, _ := ts.site.newTaxonomyNode(this.taxInfo)
@@ -349,7 +349,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {

 menuEntryXML := ts.findTestMenuEntryByID("tax", "xml")

-if strings.HasSuffix(menuEntryXML.Url, "/") {
+if strings.HasSuffix(menuEntryXML.URL, "/") {
 t.Error("RSS menu item should not be padded with trailing slash")
 }
 }
@@ -359,7 +359,7 @@ func TestHomeNodeMenu(t *testing.T) {
 defer resetMenuTestState(ts)

 home := ts.site.newHomeNode()
-homeMenuEntry := &MenuEntry{Name: home.Title, Url: home.Url}
+homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL}

 for i, this := range []struct {
 menu string
@@ -369,7 +369,7 @@ func TestHomeNodeMenu(t *testing.T) {
 }{
 {"main", homeMenuEntry, true, false},
 {"doesnotexist", homeMenuEntry, false, false},
-{"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
+{"main", &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
 {"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false},
 {"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true},
 {"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false},
@@ -17,6 +17,7 @@ import (
 "html/template"
 "sync"
 "time"
+"github.com/spf13/hugo/helpers"
 )

 type Node struct {
@@ -30,7 +31,7 @@ type Node struct {
 Params map[string]interface{}
 Date time.Time
 Sitemap Sitemap
-UrlPath
+URLPath
 paginator *pager
 paginatorInit sync.Once
 scratch *Scratch
@@ -42,7 +43,7 @@ func (n *Node) Now() time.Time {

 func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
 if inme.HasChildren() {
-me := MenuEntry{Name: n.Title, Url: n.Url}
+me := MenuEntry{Name: n.Title, URL: n.URL}

 for _, child := range inme.Children {
 if me.IsSameResource(child) {
@@ -56,7 +57,7 @@ func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {

 func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {

-me := MenuEntry{Name: n.Title, Url: n.Url}
+me := MenuEntry{Name: n.Title, URL: n.URL}
 if !me.IsSameResource(inme) {
 return false
 }
@@ -119,13 +120,19 @@ func (n *Node) RelRef(ref string) (string, error) {
 return n.Site.RelRef(ref, nil)
 }

-type UrlPath struct {
-Url string
+type URLPath struct {
+URL string
 Permalink template.HTML
 Slug string
 Section string
 }

+// Url is deprecated. Will be removed in 0.15.
+func (n *Node) Url() string {
+helpers.Deprecated("Node", ".Url", ".URL")
+return n.URL
+}
+
 // Scratch returns the writable context associated with this Node.
 func (n *Node) Scratch() *Scratch {
 if n.scratch == nil {
@@ -341,10 +341,10 @@ func (p *Page) analyzePage() {
 }

 func (p *Page) permalink() (*url.URL, error) {
-baseURL := string(p.Site.BaseUrl)
+baseURL := string(p.Site.BaseURL)
 dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
 pSlug := strings.TrimSpace(p.Slug)
-pURL := strings.TrimSpace(p.Url)
+pURL := strings.TrimSpace(p.URL)
 var permalink string
 var err error

@@ -420,9 +420,9 @@ func (p *Page) RelPermalink() (string, error) {
 }

 if viper.GetBool("CanonifyURLs") {
-// replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on
-// have to return the Url relative from baseUrl
-relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl))
+// replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on
+// have to return the URL relative from baseURL
+relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL))
 if err != nil {
 return "", err
 }
@@ -455,9 +455,9 @@ func (p *Page) update(f interface{}) error {
 p.Slug = helpers.URLize(cast.ToString(v))
 case "url":
 if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
-return fmt.Errorf("Only relative urls are supported, %v provided", url)
+return fmt.Errorf("Only relative URLs are supported, %v provided", url)
 }
-p.Url = helpers.URLize(cast.ToString(v))
+p.URL = helpers.URLize(cast.ToString(v))
 case "type":
 p.contentType = cast.ToString(v)
 case "extension", "ext":
@@ -588,7 +588,7 @@ func (p *Page) Menus() PageMenus {
 if ms, ok := p.Params["menu"]; ok {
 link, _ := p.RelPermalink()

-me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, Url: link}
+me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link}

 // Could be the name of the menu to attach it to
 mname, err := cast.ToStringE(ms)
@@ -618,7 +618,7 @@ func (p *Page) Menus() PageMenus {
 }

 for name, menu := range menus {
-menuEntry := MenuEntry{Name: p.LinkTitle(), Url: link, Weight: p.Weight, Menu: name}
+menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
 jww.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)

 ime, err := cast.ToStringMapE(menu)
@@ -785,9 +785,9 @@ func (p *Page) FullFilePath() string {

 func (p *Page) TargetPath() (outfile string) {

-// Always use Url if it's specified
-if len(strings.TrimSpace(p.Url)) > 2 {
-outfile = strings.TrimSpace(p.Url)
+// Always use URL if it's specified
+if len(strings.TrimSpace(p.URL)) > 2 {
+outfile = strings.TrimSpace(p.URL)

 if strings.HasSuffix(outfile, "/") {
 outfile = outfile + "index.html"
@@ -35,7 +35,7 @@ func TestPermalink(t *testing.T) {
 {"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
 {"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},

-// test url overrides
+// test URL overrides
 {"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
 }

@@ -46,12 +46,12 @@ func TestPermalink(t *testing.T) {
 viper.Set("canonifyurls", test.canonifyURLs)
 p := &Page{
 Node: Node{
-UrlPath: UrlPath{
+URLPath: URLPath{
 Section: "z",
-Url: test.url,
+URL: test.url,
 },
 Site: &SiteInfo{
-BaseUrl: test.base,
+BaseURL: test.base,
 },
 },
 Source: Source{File: *source.NewFile(filepath.FromSlash(test.file))},
@@ -47,11 +47,17 @@ func (p *pager) PageNumber() int {
 return p.number
 }

-// Url returns the url to the current page.
-func (p *pager) Url() template.HTML {
+// URL returns the URL to the current page.
+func (p *pager) URL() template.HTML {
 return template.HTML(p.paginationURLFactory(p.PageNumber()))
 }

+// Url is deprecated. Will be removed in 0.15.
+func (p *pager) Url() template.HTML {
+helpers.Deprecated("Paginator", ".Url", ".URL")
+return p.URL()
+}
+
 // Pages returns the elements on this page.
 func (p *pager) Pages() Pages {
 if len(p.paginatedPages) == 0 {
@@ -142,7 +148,7 @@ func (n *Node) Paginator() (*pager, error) {
 return
 }

-pagers, err := paginatePages(n.Data["Pages"], n.Url)
+pagers, err := paginatePages(n.Data["Pages"], n.URL)

 if err != nil {
 initError = err
@@ -184,7 +190,7 @@ func (n *Node) Paginate(seq interface{}) (*pager, error) {
 if n.paginator != nil {
 return
 }
-pagers, err := paginatePages(seq, n.Url)
+pagers, err := paginatePages(seq, n.URL)

 if err != nil {
 initError = err
@@ -43,7 +43,7 @@ func TestPager(t *testing.T) {
 assert.Equal(t, 5, paginator.TotalPages())

 first := paginatorPages[0]
-assert.Equal(t, "page/1/", first.Url())
+assert.Equal(t, "page/1/", first.URL())
 assert.Equal(t, first, first.First())
 assert.True(t, first.HasNext())
 assert.Equal(t, paginatorPages[1], first.Next())
@@ -58,7 +58,7 @@ func TestPager(t *testing.T) {
 assert.Equal(t, paginatorPages[1], third.Prev())

 last := paginatorPages[4]
-assert.Equal(t, "page/5/", last.Url())
+assert.Equal(t, "page/5/", last.URL())
 assert.Equal(t, last, last.Last())
 assert.False(t, last.HasNext())
 assert.Nil(t, last.Next())
@@ -97,7 +97,7 @@ func TestPagerNoPages(t *testing.T) {

 }

-func TestPaginationUrlFactory(t *testing.T) {
+func TestPaginationURLFactory(t *testing.T) {
 viper.Set("PaginatePath", "zoo")
 unicode := newPaginationURLFactory("новости проекта")
 fooBar := newPaginationURLFactory("foo", "bar")
@@ -197,12 +197,12 @@ func createTestPages(num int) Pages {
 for i := 0; i < num; i++ {
 pages[i] = &Page{
 Node: Node{
-UrlPath: UrlPath{
+URLPath: URLPath{
 Section: "z",
-Url: fmt.Sprintf("http://base/x/y/p%d.html", num),
+URL: fmt.Sprintf("http://base/x/y/p%d.html", num),
 },
 Site: &SiteInfo{
-BaseUrl: "http://base/",
+BaseURL: "http://base/",
 },
 },
 Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", num)))},
@@ -138,7 +138,7 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) {
 // pageToPermalinkTitle returns the URL-safe form of the title
 func pageToPermalinkTitle(p *Page, _ string) (string, error) {
 // Page contains Node which has Title
-// (also contains UrlPath which has Slug, sometimes)
+// (also contains URLPath which has Slug, sometimes)
 return helpers.URLize(p.Title), nil
 }

@@ -166,7 +166,7 @@ func pageToPermalinkSlugElseTitle(p *Page, a string) (string, error) {
 }

 func pageToPermalinkSection(p *Page, _ string) (string, error) {
-// Page contains Node contains UrlPath which has Section
+// Page contains Node contains URLPath which has Section
 return p.Section(), nil
 }
@@ -93,15 +93,13 @@ type targetList struct {
 }

 type SiteInfo struct {
-BaseUrl template.URL
+BaseURL template.URL
 Taxonomies TaxonomyList
 Authors AuthorList
 Social SiteSocial
-Indexes *TaxonomyList // legacy, should be identical to Taxonomies
 Sections Taxonomy
 Pages *Pages
 Files []*source.File
-Recent *Pages // legacy, should be identical to Pages
 Menus *Menus
 Hugo *HugoInfo
 Title string
@@ -133,6 +131,24 @@ type SiteInfo struct {
 // linkedin
 type SiteSocial map[string]string

+// BaseUrl is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) BaseUrl() template.URL {
+helpers.Deprecated("Site", ".BaseUrl", ".BaseURL")
+return s.BaseURL
+}
+
+// Recent is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Recent() *Pages {
+helpers.Deprecated("Site", ".Recent", ".Pages")
+return s.Pages
+}
+
+// Indexes is deprecated. Will be removed in 0.15.
+func (s *SiteInfo) Indexes() *TaxonomyList {
+helpers.Deprecated("Site", ".Indexes", ".Taxonomies")
+return &s.Taxonomies
+}
+
 func (s *SiteInfo) GetParam(key string) interface{} {
 v := s.Params[strings.ToLower(key)]

@@ -445,7 +461,7 @@ func (s *Site) initializeSiteInfo() {
 }

 s.Info = SiteInfo{
-BaseUrl: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
+BaseURL: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
 Title: viper.GetString("Title"),
 Author: viper.GetStringMap("author"),
 LanguageCode: viper.GetString("languagecode"),
@@ -454,7 +470,6 @@ func (s *Site) initializeSiteInfo() {
 BuildDrafts: viper.GetBool("BuildDrafts"),
 canonifyURLs: viper.GetBool("CanonifyURLs"),
 Pages: &s.Pages,
-Recent: &s.Pages,
 Menus: &s.Menus,
 Params: params,
 Permalinks: permalinks,
@@ -705,14 +720,14 @@ func (s *Site) getMenusFromConfig() Menus {

 menuEntry.MarshallMap(ime)

-if strings.HasPrefix(menuEntry.Url, "/") {
+if strings.HasPrefix(menuEntry.URL, "/") {
 // make it match the nodes
-menuEntryURL := menuEntry.Url
+menuEntryURL := menuEntry.URL
 menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
 if !s.Info.canonifyURLs {
-menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
+menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseURL), menuEntryURL)
 }
-menuEntry.Url = menuEntryURL
+menuEntry.URL = menuEntryURL
 }

 if ret[name] == nil {
@@ -764,8 +779,8 @@ func (s *Site) assembleMenus() {
 for p, childmenu := range children {
 _, ok := flat[twoD{p.MenuName, p.EntryName}]
 if !ok {
-// if parent does not exist, create one without a url
-flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""}
+// if parent does not exist, create one without a URL
+flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
 }
 flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
 }
@@ -818,7 +833,6 @@ func (s *Site) assembleTaxonomies() {
 }

 s.Info.Taxonomies = s.Taxonomies
-s.Info.Indexes = &s.Taxonomies
 s.Info.Sections = s.Sections
 }

@@ -1021,7 +1035,7 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
 base := t.plural + "/" + t.key
 n := s.NewNode()
 n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
-s.setUrls(n, base)
+s.setURLs(n, base)
 if len(t.pages) > 0 {
 n.Date = t.pages[0].Page.Date
 }
@@ -1081,7 +1095,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,

 if !viper.GetBool("DisableRSS") {
 // XML Feed
-n.Url = s.permalinkStr(base + "/index.xml")
+n.URL = s.permalinkStr(base + "/index.xml")
 n.Permalink = s.permalink(base)
 rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}

@@ -1099,7 +1113,7 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
 for singular, plural := range taxonomies {
 n := s.NewNode()
 n.Title = strings.Title(plural)
-s.setUrls(n, plural)
+s.setURLs(n, plural)
 n.Data["Singular"] = singular
 n.Data["Plural"] = plural
 n.Data["Terms"] = s.Taxonomies[plural]
@@ -1125,7 +1139,7 @@ func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
 } else {
 n.Title = strings.Title(section)
 }
-s.setUrls(n, section)
+s.setURLs(n, section)
 n.Date = data[0].Page.Date
 n.Data["Pages"] = data.Pages()

@@ -1175,7 +1189,7 @@ func (s *Site) RenderSectionLists() error {

 if !viper.GetBool("DisableRSS") && section != "" {
 // XML Feed
-n.Url = s.permalinkStr(section + "/index.xml")
+n.URL = s.permalinkStr(section + "/index.xml")
 n.Permalink = s.permalink(section)
 rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
 if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
@@ -1189,7 +1203,7 @@ func (s *Site) RenderSectionLists() error {
 func (s *Site) newHomeNode() *Node {
 n := s.NewNode()
 n.Title = n.Site.Title
-s.setUrls(n, "/")
+s.setURLs(n, "/")
 n.Data["Pages"] = s.Pages
 return n
 }
@@ -1232,7 +1246,7 @@ func (s *Site) RenderHomePage() error {

 if !viper.GetBool("DisableRSS") {
 // XML Feed
-n.Url = s.permalinkStr("index.xml")
+n.URL = s.permalinkStr("index.xml")
 n.Title = ""
 high := 50
 if len(s.Pages) < high {
@@ -1250,7 +1264,7 @@ func (s *Site) RenderHomePage() error {
 }
 }

-n.Url = helpers.URLize("404.html")
+n.URL = helpers.URLize("404.html")
 n.Title = "404 Page not found"
 n.Permalink = s.permalink("404.html")

@@ -1277,7 +1291,7 @@ func (s *Site) RenderSitemap() error {
 page := &Page{}
 page.Date = s.Info.LastChange
 page.Site = &s.Info
-page.Url = "/"
+page.URL = "/"

 pages = append(pages, page)
 pages = append(pages, s.Pages...)
@@ -1315,9 +1329,9 @@ func (s *Site) Stats() {
 }
 }

-func (s *Site) setUrls(n *Node, in string) {
-n.Url = helpers.URLizeAndPrep(in)
-n.Permalink = s.permalink(n.Url)
+func (s *Site) setURLs(n *Node, in string) {
+n.URL = helpers.URLizeAndPrep(in)
+n.Permalink = s.permalink(n.URL)
 n.RSSLink = s.permalink(in + ".xml")
 }
@@ -62,7 +62,7 @@ func TestPageTranslatorBase(t *testing.T) {
 }
 }

-func TestTranslateUglyUrls(t *testing.T) {
+func TestTranslateUglyURLs(t *testing.T) {
 tests := []struct {
 content string
 expected string
@@ -938,7 +938,7 @@ func SafeHTML(text string) template.HTML {
 return template.HTML(text)
 }

-// "safeHtmlAttr" is currently disabled, pending further discussion
+// "safeHTMLAttr" is currently disabled, pending further discussion
 // on its use case. 2015-01-19
 func SafeHTMLAttr(text string) template.HTMLAttr {
 return template.HTMLAttr(text)
@@ -1308,11 +1308,8 @@ func init() {
 "isset": IsSet,
 "echoParam": ReturnWhenSet,
 "safeHTML": SafeHTML,
-"safeHtml": SafeHTML,
 "safeCSS": SafeCSS,
-"safeCss": SafeCSS,
 "safeURL": SafeURL,
-"safeUrl": SafeURL,
 "markdownify": Markdownify,
 "first": First,
 "where": Where,
@@ -1337,11 +1334,35 @@ func init() {
 "trim": Trim,
 "dateFormat": DateFormat,
 "getJSON": GetJSON,
-"getJson": GetJSON,
 "getCSV": GetCSV,
-"getCsv": GetCSV,
 "seq": helpers.Seq,
 "getenv": func(varName string) string { return os.Getenv(varName) },
+
+// "getJson" is deprecated. Will be removed in 0.15.
+"getJson": func(urlParts ...string) interface{} {
+helpers.Deprecated("Template", "getJson", "getJSON")
+return GetJSON(urlParts...)
+},
+// "getJson" is deprecated. Will be removed in 0.15.
+"getCsv": func(sep string, urlParts ...string) [][]string {
+helpers.Deprecated("Template", "getCsv", "getCSV")
+return GetCSV(sep, urlParts...)
+},
+// "safeHtml" is deprecated. Will be removed in 0.15.
+"safeHtml": func(text string) template.HTML {
+helpers.Deprecated("Template", "safeHtml", "safeHTML")
+return SafeHTML(text)
+},
+// "safeCss" is deprecated. Will be removed in 0.15.
+"safeCss": func(text string) template.CSS {
+helpers.Deprecated("Template", "safeCss", "safeCSS")
+return SafeCSS(text)
+},
+// "safeUrl" is deprecated. Will be removed in 0.15.
+"safeUrl": func(text string) template.URL {
+helpers.Deprecated("Template", "safeUrl", "safeURL")
+return SafeURL(text)
+},
 }

 }
@@ -55,13 +55,13 @@ func (t *GoHTMLTemplate) EmbedTemplates() {
 <managingEditor>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</managingEditor>{{end}}{{ with .Site.Author.email }}
 <webMaster>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</webMaster>{{end}}{{ with .Site.Copyright }}
 <copyright>{{.}}</copyright>{{end}}{{ if not .Date.IsZero }}
-<lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</lastBuildDate>{{ end }}
-<atom:link href="{{.Url}}" rel="self" type="application/rss+xml" />
+<lastBuildDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</lastBuildDate>{{ end }}
+<atom:link href="{{.URL}}" rel="self" type="application/rss+xml" />
 {{ range first 15 .Data.Pages }}
 <item>
 <title>{{ .Title }}</title>
 <link>{{ .Permalink }}</link>
-<pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHtml }}</pubDate>
+<pubDate>{{ .Date.Format "Mon, 02 Jan 2006 15:04:05 -0700" | safeHTML }}</pubDate>
 {{ with .Site.Author.email }}<author>{{.}}{{ with $.Site.Author.name }} ({{.}}){{end}}</author>{{end}}
 <guid>{{ .Permalink }}</guid>
 <description>{{ .Content | html }}</description>
@@ -74,7 +74,7 @@ func (t *GoHTMLTemplate) EmbedTemplates() {
 {{ range .Data.Pages }}
 <url>
 <loc>{{ .Permalink }}</loc>{{ if not .Date.IsZero }}
-<lastmod>{{ safeHtml ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
+<lastmod>{{ safeHTML ( .Date.Format "2006-01-02T15:04:05-07:00" ) }}</lastmod>{{ end }}{{ with .Sitemap.ChangeFreq }}
 <changefreq>{{ . }}</changefreq>{{ end }}{{ if ge .Sitemap.Priority 0.0 }}
 <priority>{{ .Sitemap.Priority }}</priority>{{ end }}
 </url>
@@ -86,24 +86,24 @@ func (t *GoHTMLTemplate) EmbedTemplates() {
 <ul class="pagination">
 {{ with $pag.First }}
 <li>
-<a href="{{ .Url }}" aria-label="First"><span aria-hidden="true">««</span></a>
+<a href="{{ .URL }}" aria-label="First"><span aria-hidden="true">««</span></a>
 </li>
 {{ end }}
 <li
 {{ if not $pag.HasPrev }}class="disabled"{{ end }}>
-<a href="{{ if $pag.HasPrev }}{{ $pag.Prev.Url }}{{ end }}" aria-label="Previous"><span aria-hidden="true">«</span></a>
+<a href="{{ if $pag.HasPrev }}{{ $pag.Prev.URL }}{{ end }}" aria-label="Previous"><span aria-hidden="true">«</span></a>
 </li>
 {{ range $pag.Pagers }}
 <li
-{{ if eq . $pag }}class="active"{{ end }}><a href="{{ .Url }}">{{ .PageNumber }}</a></li>
+{{ if eq . $pag }}class="active"{{ end }}><a href="{{ .URL }}">{{ .PageNumber }}</a></li>
 {{ end }}
 <li
 {{ if not $pag.HasNext }}class="disabled"{{ end }}>
-<a href="{{ if $pag.HasNext }}{{ $pag.Next.Url }}{{ end }}" aria-label="Next"><span aria-hidden="true">»</span></a>
+<a href="{{ if $pag.HasNext }}{{ $pag.Next.URL }}{{ end }}" aria-label="Next"><span aria-hidden="true">»</span></a>
 </li>
 {{ with $pag.Last }}
 <li>
-<a href="{{ .Url }}" aria-label="Last"><span aria-hidden="true">»»</span></a>
+<a href="{{ .URL }}" aria-label="Last"><span aria-hidden="true">»»</span></a>
 </li>
 {{ end }}
 </ul>
@@ -134,7 +134,7 @@ func (t *GoHTMLTemplate) EmbedTemplates() {
 <meta property="og:image" content="{{ . }}" />
 {{ end }}{{ end }}

-{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHtml }}"/>{{ end }}{{ with .Params.audio }}
+{{ if not .Date.IsZero }}<meta property="og:updated_time" content="{{ .Date.Format "2006-01-02T15:04:05-07:00" | safeHTML }}"/>{{ end }}{{ with .Params.audio }}
 <meta property="og:audio" content="{{ . }}" />{{ end }}{{ with .Params.locale }}
 <meta property="og:locale" content="{{ . }}" />{{ end }}{{ with .Site.Params.title }}
 <meta property="og:site_name" content="{{ . }}" />{{ end }}{{ with .Params.videos }}
@@ -193,8 +193,8 @@ func (t *GoHTMLTemplate) EmbedTemplates() {
 <meta itemprop="description" content="{{ with .Description }}{{ . }}{{ else }}{{if .IsPage}}{{ .Summary }}{{ else }}{{ with .Site.Params.description }}{{ . }}{{ end }}{{ end }}{{ end }}">

 {{if .IsPage}}{{ $ISO8601 := "2006-01-02T15:04:05-07:00" }}{{ if not .PublishDate.IsZero }}
-<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHtml }}" />{{ end }}
-{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHtml }}" />{{ end }}
+<meta itemprop="datePublished" content="{{ .PublishDate.Format $ISO8601 | safeHTML }}" />{{ end }}
+{{ if not .Date.IsZero }}<meta itemprop="dateModified" content="{{ .Date.Format $ISO8601 | safeHTML }}" />{{ end }}
 <meta itemprop="wordCount" content="{{ .WordCount }}">
 {{ with .Params.images }}{{ range first 6 . }}
 <meta itemprop="image" content="{{ . }}">
@@ -38,7 +38,7 @@ type remoteLock struct {
 m map[string]*sync.Mutex
 }

-// resLock locks an URL during download
+// URLLock locks an URL during download
 func (l *remoteLock) URLLock(url string) {
 l.Lock()
 if _, ok := l.m[url]; !ok {
@@ -48,7 +48,7 @@ func (l *remoteLock) URLLock(url string) {
 l.m[url].Lock()
 }

-// resUnlock unlocks an URL when the download has been finished. Use only in defer calls.
+// URLUnlock unlocks an URL when the download has been finished. Use only in defer calls.
 func (l *remoteLock) URLUnlock(url string) {
 l.RLock()
 defer l.RUnlock()
@@ -57,7 +57,7 @@ func (l *remoteLock) URLUnlock(url string) {
 }
 }

-// getFileID returns the cache ID for a string
+// getCacheFileID returns the cache ID for a string
 func getCacheFileID(id string) string {
 return viper.GetString("CacheDir") + url.QueryEscape(id)
 }
@@ -173,9 +173,9 @@ func resGetResource(url string) ([]byte, error) {
 return resGetLocal(url, hugofs.SourceFs)
 }

-// GetJson expects one or n-parts of a URL to a resource which can either be a local or a remote one.
+// GetJSON expects one or n-parts of a URL to a resource which can either be a local or a remote one.
 // If you provide multiple parts they will be joined together to the final URL.
-// GetJson returns nil or parsed JSON to use in a short code.
+// GetJSON returns nil or parsed JSON to use in a short code.
 func GetJSON(urlParts ...string) interface{} {
 url := strings.Join(urlParts, "")
 c, err := resGetResource(url)
@@ -193,7 +193,7 @@ func GetJSON(urlParts ...string) interface{} {
 return v
 }

-// parseCsv parses bytes of csv data into a slice slice string or an error
+// parseCSV parses bytes of CSV data into a slice slice string or an error
 func parseCSV(c []byte, sep string) ([][]string, error) {
 if len(sep) != 1 {
 return nil, errors.New("Incorrect length of csv separator: " + sep)
@@ -206,11 +206,11 @@ func parseCSV(c []byte, sep string) ([][]string, error) {
 return r.ReadAll()
 }

-// GetCsv expects a data separator and one or n-parts of a URL to a resource which
+// GetCSV expects a data separator and one or n-parts of a URL to a resource which
 // can either be a local or a remote one.
 // The data separator can be a comma, semi-colon, pipe, etc, but only one character.
 // If you provide multiple parts for the URL they will be joined together to the final URL.
-// GetCsv returns nil or a slice slice to use in a short code.
+// GetCSV returns nil or a slice slice to use in a short code.
 func GetCSV(sep string, urlParts ...string) [][]string {
 url := strings.Join(urlParts, "")
 c, err := resGetResource(url)
@@ -999,10 +999,10 @@ func TestSafeHTML(t *testing.T) {
 buf.Reset()
 err = tmpl.Execute(buf, SafeHTML(this.str))
 if err != nil {
-t.Errorf("[%d] execute template with an escaped string value by SafeHtml returns unexpected error: %s", i, err)
+t.Errorf("[%d] execute template with an escaped string value by SafeHTML returns unexpected error: %s", i, err)
 }
 if buf.String() != this.expectWithEscape {
-t.Errorf("[%d] execute template with an escaped string value by SafeHtml, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+t.Errorf("[%d] execute template with an escaped string value by SafeHTML, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 }
 }
 }
@@ -1034,10 +1034,10 @@ func TestSafeHTMLAttr(t *testing.T) {
 buf.Reset()
 err = tmpl.Execute(buf, SafeHTMLAttr(this.str))
 if err != nil {
-t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr returns unexpected error: %s", i, err)
+t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr returns unexpected error: %s", i, err)
 }
 if buf.String() != this.expectWithEscape {
-t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+t.Errorf("[%d] execute template with an escaped string value by SafeHTMLAttr, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 }
 }
 }
@@ -1069,10 +1069,10 @@ func TestSafeCSS(t *testing.T) {
 buf.Reset()
 err = tmpl.Execute(buf, SafeCSS(this.str))
 if err != nil {
-t.Errorf("[%d] execute template with an escaped string value by SafeCss returns unexpected error: %s", i, err)
+t.Errorf("[%d] execute template with an escaped string value by SafeCSS returns unexpected error: %s", i, err)
 }
 if buf.String() != this.expectWithEscape {
-t.Errorf("[%d] execute template with an escaped string value by SafeCss, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+t.Errorf("[%d] execute template with an escaped string value by SafeCSS, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 }
 }
 }
@@ -1104,10 +1104,10 @@ func TestSafeURL(t *testing.T) {
 buf.Reset()
 err = tmpl.Execute(buf, SafeURL(this.str))
 if err != nil {
-t.Errorf("[%d] execute template with an escaped string value by SafeUrl returns unexpected error: %s", i, err)
+t.Errorf("[%d] execute template with an escaped string value by SafeURL returns unexpected error: %s", i, err)
 }
 if buf.String() != this.expectWithEscape {
-t.Errorf("[%d] execute template with an escaped string value by SafeUrl, got %v but expected %v", i, buf.String(), this.expectWithEscape)
+t.Errorf("[%d] execute template with an escaped string value by SafeURL, got %v but expected %v", i, buf.String(), this.expectWithEscape)
 }
 }
 }
@@ -7,15 +7,15 @@ import (
 var absURLInit sync.Once
 var ar *absURLReplacer

-// for performance reasons, we reuse the first baseUrl given
-func initAbsurlReplacer(baseURL string) {
+// for performance reasons, we reuse the first baseURL given
+func initAbsURLReplacer(baseURL string) {
 absURLInit.Do(func() {
-ar = newAbsurlReplacer(baseURL)
+ar = newAbsURLReplacer(baseURL)
 })
 }

 func AbsURL(absURL string) (trs []link, err error) {
-initAbsurlReplacer(absURL)
+initAbsURLReplacer(absURL)

 trs = append(trs, func(content []byte) []byte {
 return ar.replaceInHTML(content)
@@ -24,7 +24,7 @@ func AbsURL(absURL string) (trs []link, err error) {
 }

 func AbsURLInXML(absURL string) (trs []link, err error) {
-initAbsurlReplacer(absURL)
+initAbsURLReplacer(absURL)

 trs = append(trs, func(content []byte) []byte {
 return ar.replaceInXML(content)
@@ -120,7 +120,7 @@ func checkCandidate(l *contentlexer) {
 }

 if bytes.HasPrefix(l.content[l.pos:], m.match) {
-// check for schemaless urls
+// check for schemaless URLs
 posAfter := l.pos + len(m.match)
 if int(posAfter) >= len(l.content) {
 return
@@ -196,7 +196,7 @@ type absURLReplacer struct {
 xmlMatchers []absURLMatcher
 }

-func newAbsurlReplacer(baseURL string) *absURLReplacer {
+func newAbsURLReplacer(baseURL string) *absURLReplacer {
 u, _ := url.Parse(baseURL)
 base := strings.TrimRight(u.String(), "/")