Correct initialisms as suggested by golint
First step to use initialisms that golint suggests, for example: Line 116: func GetHtmlRenderer should be GetHTMLRenderer, as seen on http://goreportcard.com/report/spf13/hugo. Thanks to @bep for the idea!

Note that command-line flags (cobra and pflag), as well as struct fields like .BaseUrl and .Url that are used in Go HTML templates, need more work to maintain backward compatibility, and thus are NOT yet dealt with in this commit.

First step in fixing #959.
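For context, the sketch below (illustrative only, not code from this commit) shows the naming rule golint enforces: initialisms such as HTML, URL, and ID keep a consistent upper case in exported identifiers. The package name and the helper at the end are assumptions for the example; the helper only mirrors the kind of BaseURL trailing-slash handling touched in commands/hugo.go below.

```go
// Illustrative only — not part of this commit.
package naming

import "strings"

// Before: golint reports "func GetHtmlRenderer should be GetHTMLRenderer".
// func GetHtmlRenderer() blackfriday.Renderer { ... }

// After: the initialism is upper-cased throughout the exported name.
func GetHTMLRenderer() string { return "renderer" }

// The same rule applies to variables and struct fields.
type serverFlags struct {
	BaseURL  string // was BaseUrl
	UglyURLs bool   // was UglyUrls
}

// normalizeBaseURL is a hypothetical helper shown only to mirror the
// trailing-slash handling of BaseURL in commands/hugo.go.
func normalizeBaseURL(baseURL string) string {
	if baseURL != "" && !strings.HasSuffix(baseURL, "/") {
		baseURL += "/"
	}
	return baseURL
}
```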
This commit is contained in:
parent 00f07c5374
commit 67df33f500
33 changed files with 310 additions and 303 deletions
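The commit message explains why template-facing fields such as .BaseUrl and .Url are left alone for now: Go's html/template resolves names by their exact exported spelling, so renaming a field would silently break every site template that uses it. The snippet below is a hedged sketch of that constraint; the siteInfo type and the method alias are assumptions for illustration, not part of this change.

```go
// Illustrative sketch, not part of this commit.
package main

import (
	"html/template"
	"os"
)

type siteInfo struct {
	BaseUrl string // kept with the old spelling so {{ .BaseUrl }} keeps resolving
}

// BaseURL is a hypothetical forward-compatible accessor: templates may call
// {{ .BaseURL }} as a method while the field itself keeps its old name.
func (s siteInfo) BaseURL() string { return s.BaseUrl }

func main() {
	tmpl := template.Must(template.New("t").Parse("old: {{ .BaseUrl }} new: {{ .BaseURL }}\n"))
	_ = tmpl.Execute(os.Stdout, siteInfo{BaseUrl: "http://spf13.com/"})
}
```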
@@ -56,8 +56,8 @@ Complete documentation is available at http://gohugo.io`,
var hugoCmdV *cobra.Command

//Flags that are to be added to commands.
-var BuildWatch, IgnoreCache, Draft, Future, UglyUrls, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, NoTimes bool
-var Source, CacheDir, Destination, Theme, BaseUrl, CfgFile, LogFile, Editor string
+var BuildWatch, IgnoreCache, Draft, Future, UglyURLs, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, NoTimes bool
+var Source, CacheDir, Destination, Theme, BaseURL, CfgFile, LogFile, Editor string

//Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
func Execute() {
@@ -89,8 +89,8 @@ func init() {
HugoCmd.PersistentFlags().StringVarP(&Destination, "destination", "d", "", "filesystem path to write files to")
HugoCmd.PersistentFlags().StringVarP(&Theme, "theme", "t", "", "theme to use (located in /themes/THEMENAME/)")
HugoCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output")
-HugoCmd.PersistentFlags().BoolVar(&UglyUrls, "uglyUrls", false, "if true, use /filename.html instead of /filename/")
-HugoCmd.PersistentFlags().StringVarP(&BaseUrl, "baseUrl", "b", "", "hostname (and path) to the root eg. http://spf13.com/")
+HugoCmd.PersistentFlags().BoolVar(&UglyURLs, "uglyUrls", false, "if true, use /filename.html instead of /filename/")
+HugoCmd.PersistentFlags().StringVarP(&BaseURL, "baseUrl", "b", "", "hostname (and path) to the root eg. http://spf13.com/")
HugoCmd.PersistentFlags().StringVar(&CfgFile, "config", "", "config file (default is path/config.yaml|json|toml)")
HugoCmd.PersistentFlags().StringVar(&Editor, "editor", "", "edit new content with this editor, if provided")
HugoCmd.PersistentFlags().BoolVar(&Logging, "log", false, "Enable Logging")
@@ -127,10 +127,10 @@ func InitializeConfig() {
viper.SetDefault("DefaultLayout", "post")
viper.SetDefault("BuildDrafts", false)
viper.SetDefault("BuildFuture", false)
-viper.SetDefault("UglyUrls", false)
+viper.SetDefault("UglyURLs", false)
viper.SetDefault("Verbose", false)
viper.SetDefault("IgnoreCache", false)
-viper.SetDefault("CanonifyUrls", false)
+viper.SetDefault("CanonifyURLs", false)
viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})
@@ -155,7 +155,7 @@ func InitializeConfig() {
}

if hugoCmdV.PersistentFlags().Lookup("uglyUrls").Changed {
-viper.Set("UglyUrls", UglyUrls)
+viper.Set("UglyURLs", UglyURLs)
}

if hugoCmdV.PersistentFlags().Lookup("disableRSS").Changed {
@@ -181,14 +181,14 @@ func InitializeConfig() {
if hugoCmdV.PersistentFlags().Lookup("logFile").Changed {
viper.Set("LogFile", LogFile)
}
-if BaseUrl != "" {
-if !strings.HasSuffix(BaseUrl, "/") {
-BaseUrl = BaseUrl + "/"
+if BaseURL != "" {
+if !strings.HasSuffix(BaseURL, "/") {
+BaseURL = BaseURL + "/"
}
-viper.Set("BaseUrl", BaseUrl)
+viper.Set("BaseURL", BaseURL)
}

-if viper.GetString("BaseUrl") == "" {
+if viper.GetString("BaseURL") == "" {
jww.ERROR.Println("No 'baseurl' set in configuration or as a flag. Features like page menus will not work without one.")
}
@@ -84,11 +84,11 @@ func server(cmd *cobra.Command, args []string) {

viper.Set("port", serverPort)

-BaseUrl, err := fixUrl(BaseUrl)
+BaseURL, err := fixURL(BaseURL)
if err != nil {
jww.ERROR.Fatal(err)
}
-viper.Set("BaseUrl", BaseUrl)
+viper.Set("BaseURL", BaseURL)

if err := memStats(); err != nil {
jww.ERROR.Println("memstats error:", err)
@@ -114,9 +114,9 @@ func serve(port int) {
httpFs := &afero.HttpFs{SourceFs: hugofs.DestinationFS}
fileserver := http.FileServer(httpFs.Dir(helpers.AbsPathify(viper.GetString("PublishDir"))))

-u, err := url.Parse(viper.GetString("BaseUrl"))
+u, err := url.Parse(viper.GetString("BaseURL"))
if err != nil {
-jww.ERROR.Fatalf("Invalid BaseUrl: %s", err)
+jww.ERROR.Fatalf("Invalid BaseURL: %s", err)
}
if u.Path == "" || u.Path == "/" {
http.Handle("/", fileserver)
@@ -137,10 +137,10 @@ func serve(port int) {

// fixUrl massages the BaseUrl into a form needed for serving
// all pages correctly.
-func fixUrl(s string) (string, error) {
+func fixURL(s string) (string, error) {
useLocalhost := false
if s == "" {
-s = viper.GetString("BaseUrl")
+s = viper.GetString("BaseURL")
useLocalhost = true
}
if !strings.HasPrefix(s, "http://") && !strings.HasPrefix(s, "https://") {
@@ -6,11 +6,11 @@ import (
"github.com/spf13/viper"
)

-func TestFixUrl(t *testing.T) {
+func TestFixURL(t *testing.T) {
type data struct {
TestName string
-CliBaseUrl string
-CfgBaseUrl string
+CLIBaseURL string
+CfgBaseURL string
AppendPort bool
Port int
Result string
@@ -28,11 +28,11 @@ func TestFixUrl(t *testing.T) {
}

for i, test := range tests {
-BaseUrl = test.CliBaseUrl
-viper.Set("BaseUrl", test.CfgBaseUrl)
+BaseURL = test.CLIBaseURL
+viper.Set("BaseURL", test.CfgBaseURL)
serverAppend = test.AppendPort
serverPort = test.Port
-result, err := fixUrl(BaseUrl)
+result, err := fixURL(BaseURL)
if err != nil {
t.Errorf("Test #%d %s: unexpected error %s", i, test.TestName, err)
}
@@ -41,7 +41,7 @@ var SummaryDivider = []byte("<!--more-->")
type Blackfriday struct {
AngledQuotes bool
Fractions bool
-PlainIdAnchors bool
+PlainIDAnchors bool
Extensions []string
}

@@ -50,7 +50,7 @@ func NewBlackfriday() *Blackfriday {
return &Blackfriday{
AngledQuotes: false,
Fractions: true,
-PlainIdAnchors: false,
+PlainIDAnchors: false,
}
}

@@ -113,17 +113,17 @@ func BytesToHTML(b []byte) template.HTML {
}

// GetHtmlRenderer creates a new Renderer with the given configuration.
-func GetHtmlRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer {
+func GetHTMLRenderer(defaultFlags int, ctx *RenderingContext) blackfriday.Renderer {
renderParameters := blackfriday.HtmlRendererParameters{
FootnoteAnchorPrefix: viper.GetString("FootnoteAnchorPrefix"),
FootnoteReturnLinkContents: viper.GetString("FootnoteReturnLinkContents"),
}

-b := len(ctx.DocumentId) != 0
+b := len(ctx.DocumentID) != 0

-if b && !ctx.getConfig().PlainIdAnchors {
-renderParameters.FootnoteAnchorPrefix = ctx.DocumentId + ":" + renderParameters.FootnoteAnchorPrefix
-renderParameters.HeaderIDSuffix = ":" + ctx.DocumentId
+if b && !ctx.getConfig().PlainIDAnchors {
+renderParameters.FootnoteAnchorPrefix = ctx.DocumentID + ":" + renderParameters.FootnoteAnchorPrefix
+renderParameters.HeaderIDSuffix = ":" + ctx.DocumentID
}

htmlFlags := defaultFlags
@@ -158,13 +158,13 @@ func getMarkdownExtensions(ctx *RenderingContext) int {
}

func markdownRender(ctx *RenderingContext) []byte {
-return blackfriday.Markdown(ctx.Content, GetHtmlRenderer(0, ctx),
+return blackfriday.Markdown(ctx.Content, GetHTMLRenderer(0, ctx),
getMarkdownExtensions(ctx))
}

func markdownRenderWithTOC(ctx *RenderingContext) []byte {
return blackfriday.Markdown(ctx.Content,
-GetHtmlRenderer(blackfriday.HTML_TOC, ctx),
+GetHTMLRenderer(blackfriday.HTML_TOC, ctx),
getMarkdownExtensions(ctx))
}

@@ -209,7 +209,7 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
type RenderingContext struct {
Content []byte
PageFmt string
-DocumentId string
+DocumentID string
Config *Blackfriday
configInit sync.Once
}
@@ -7,7 +7,7 @@ import (
"testing"
)

-const tstHtmlContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"
+const tstHTMLContent = "<!DOCTYPE html><html><head><script src=\"http://two/foobar.js\"></script></head><body><nav><ul><li hugo-nav=\"section_0\"></li><li hugo-nav=\"section_1\"></li></ul></nav><article>content <a href=\"http://two/foobar\">foobar</a>. Follow up</article><p>This is some text.<br>And some more.</p></body></html>"

func TestStripHTML(t *testing.T) {
type test struct {
@@ -31,7 +31,7 @@ func TestStripHTML(t *testing.T) {
func BenchmarkStripHTML(b *testing.B) {
b.ResetTimer()
for i := 0; i < b.N; i++ {
-StripHTML(tstHtmlContent)
+StripHTML(tstHTMLContent)
}
}
@@ -52,7 +52,7 @@ func (PathBridge) Separator() string {

var pathBridge PathBridge

-func sanitizeUrlWithFlags(in string, f purell.NormalizationFlags) string {
+func sanitizeURLWithFlags(in string, f purell.NormalizationFlags) string {
s, err := purell.NormalizeURLString(in, f)
if err != nil {
return in
@@ -88,20 +88,20 @@ func sanitizeUrlWithFlags(in string, f purell.NormalizationFlags) string {
}

// SanitizeUrl sanitizes the input URL string.
-func SanitizeUrl(in string) string {
-return sanitizeUrlWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+func SanitizeURL(in string) string {
+return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveTrailingSlash|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
}

// SanitizeUrlKeepTrailingSlash is the same as SanitizeUrl, but will keep any trailing slash.
-func SanitizeUrlKeepTrailingSlash(in string) string {
-return sanitizeUrlWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
+func SanitizeURLKeepTrailingSlash(in string) string {
+return sanitizeURLWithFlags(in, purell.FlagsSafe|purell.FlagRemoveDotSegments|purell.FlagRemoveDuplicateSlashes|purell.FlagRemoveUnnecessaryHostDots|purell.FlagRemoveEmptyPortSeparator)
}

// Similar to MakePath, but with Unicode handling
// Example:
// uri: Vim (text editor)
// urlize: vim-text-editor
-func Urlize(uri string) string {
+func URLize(uri string) string {
sanitized := MakePathToLower(uri)

// escape unicode letters
@@ -148,9 +148,9 @@ func MakePermalink(host, plink string) *url.URL {
// AddContextRoot adds the context root to an URL if it's not already set.
// For relative URL entries on sites with a base url with a context root set (i.e. http://example.com/mysite),
// relative URLs must not include the context root if canonifyUrls is enabled. But if it's disabled, it must be set.
-func AddContextRoot(baseUrl, relativePath string) string {
+func AddContextRoot(baseURL, relativePath string) string {

-url, err := url.Parse(baseUrl)
+url, err := url.Parse(baseURL)
if err != nil {
panic(err)
}
@@ -164,16 +164,16 @@ func AddContextRoot(baseUrl, relativePath string) string {
return newPath
}

-func UrlizeAndPrep(in string) string {
-return UrlPrep(viper.GetBool("UglyUrls"), Urlize(in))
+func URLizeAndPrep(in string) string {
+return URLPrep(viper.GetBool("UglyURLs"), URLize(in))
}

-func UrlPrep(ugly bool, in string) string {
+func URLPrep(ugly bool, in string) string {
if ugly {
-x := Uglify(SanitizeUrl(in))
+x := Uglify(SanitizeURL(in))
return x
}
-x := PrettifyUrl(SanitizeUrl(in))
+x := PrettifyURL(SanitizeURL(in))
if path.Ext(x) == ".xml" {
return x
}
@@ -186,8 +186,8 @@ func UrlPrep(ugly bool, in string) string {
}

// PrettifyUrl takes a URL string and returns a semantic, clean URL.
-func PrettifyUrl(in string) string {
-x := PrettifyUrlPath(in)
+func PrettifyURL(in string) string {
+x := PrettifyURLPath(in)

if path.Base(x) == "index.html" {
return path.Dir(x)
@@ -205,7 +205,7 @@ func PrettifyUrl(in string) string {
// /section/name.html becomes /section/name/index.html
// /section/name/ becomes /section/name/index.html
// /section/name/index.html becomes /section/name/index.html
-func PrettifyUrlPath(in string) string {
+func PrettifyURLPath(in string) string {
return PrettiyPath(in, pathBridge)
}
@@ -19,7 +19,7 @@ func TestUrlize(t *testing.T) {
}

for _, test := range tests {
-output := Urlize(test.input)
+output := URLize(test.input)
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
@@ -36,8 +36,8 @@ func TestSanitizeUrl(t *testing.T) {
}

for _, test := range tests {
-o1 := SanitizeUrl(test.input)
-o2 := SanitizeUrlKeepTrailingSlash(test.input)
+o1 := SanitizeURL(test.input)
+o2 := SanitizeURLKeepTrailingSlash(test.input)

expected2 := test.expected

@@ -88,7 +88,7 @@ func TestUrlPrep(t *testing.T) {
{true, "/section/name/index.html", "/section/name.html"},
}
for i, d := range data {
-output := UrlPrep(d.ugly, d.input)
+output := URLPrep(d.ugly, d.input)
if d.output != output {
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
}
@@ -98,7 +98,7 @@ func TestUrlPrep(t *testing.T) {

func TestAddContextRoot(t *testing.T) {
tests := []struct {
-baseUrl string
+baseURL string
url string
expected string
}{
@@ -114,7 +114,7 @@ func TestAddContextRoot(t *testing.T) {
}

for _, test := range tests {
-output := AddContextRoot(test.baseUrl, test.url)
+output := AddContextRoot(test.baseURL, test.url)
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
@@ -122,22 +122,22 @@ func TestAddContextRoot(t *testing.T) {
}

func TestPretty(t *testing.T) {
-assert.Equal(t, PrettifyUrlPath("/section/name.html"), "/section/name/index.html")
-assert.Equal(t, PrettifyUrlPath("/section/sub/name.html"), "/section/sub/name/index.html")
-assert.Equal(t, PrettifyUrlPath("/section/name/"), "/section/name/index.html")
-assert.Equal(t, PrettifyUrlPath("/section/name/index.html"), "/section/name/index.html")
-assert.Equal(t, PrettifyUrlPath("/index.html"), "/index.html")
-assert.Equal(t, PrettifyUrlPath("/name.xml"), "/name/index.xml")
-assert.Equal(t, PrettifyUrlPath("/"), "/")
-assert.Equal(t, PrettifyUrlPath(""), "/")
-assert.Equal(t, PrettifyUrl("/section/name.html"), "/section/name")
-assert.Equal(t, PrettifyUrl("/section/sub/name.html"), "/section/sub/name")
-assert.Equal(t, PrettifyUrl("/section/name/"), "/section/name")
-assert.Equal(t, PrettifyUrl("/section/name/index.html"), "/section/name")
-assert.Equal(t, PrettifyUrl("/index.html"), "/")
-assert.Equal(t, PrettifyUrl("/name.xml"), "/name/index.xml")
-assert.Equal(t, PrettifyUrl("/"), "/")
-assert.Equal(t, PrettifyUrl(""), "/")
+assert.Equal(t, PrettifyURLPath("/section/name.html"), "/section/name/index.html")
+assert.Equal(t, PrettifyURLPath("/section/sub/name.html"), "/section/sub/name/index.html")
+assert.Equal(t, PrettifyURLPath("/section/name/"), "/section/name/index.html")
+assert.Equal(t, PrettifyURLPath("/section/name/index.html"), "/section/name/index.html")
+assert.Equal(t, PrettifyURLPath("/index.html"), "/index.html")
+assert.Equal(t, PrettifyURLPath("/name.xml"), "/name/index.xml")
+assert.Equal(t, PrettifyURLPath("/"), "/")
+assert.Equal(t, PrettifyURLPath(""), "/")
+assert.Equal(t, PrettifyURL("/section/name.html"), "/section/name")
+assert.Equal(t, PrettifyURL("/section/sub/name.html"), "/section/sub/name")
+assert.Equal(t, PrettifyURL("/section/name/"), "/section/name")
+assert.Equal(t, PrettifyURL("/section/name/index.html"), "/section/name")
+assert.Equal(t, PrettifyURL("/index.html"), "/")
+assert.Equal(t, PrettifyURL("/name.xml"), "/name/index.xml")
+assert.Equal(t, PrettifyURL("/"), "/")
+assert.Equal(t, PrettifyURL(""), "/")
}

func TestUgly(t *testing.T) {
@@ -95,7 +95,7 @@ var MENU_PAGE_SOURCES = []source.ByteSource{
{"sect/doc3.md", MENU_PAGE_3},
}

-func tstCreateMenuPageWithNameToml(title, menu, name string) []byte {
+func tstCreateMenuPageWithNameTOML(title, menu, name string) []byte {
return []byte(fmt.Sprintf(`+++
title = "%s"
weight = 1
@@ -106,7 +106,7 @@ weight = 1
Front Matter with Menu with Name`, title, menu, name))
}

-func tstCreateMenuPageWithIdentifierToml(title, menu, identifier string) []byte {
+func tstCreateMenuPageWithIdentifierTOML(title, menu, identifier string) []byte {
return []byte(fmt.Sprintf(`+++
title = "%s"
weight = 1
@@ -118,7 +118,7 @@ weight = 1
Front Matter with Menu with Identifier`, title, menu, identifier))
}

-func tstCreateMenuPageWithNameYaml(title, menu, name string) []byte {
+func tstCreateMenuPageWithNameYAML(title, menu, name string) []byte {
return []byte(fmt.Sprintf(`---
title: "%s"
weight: 1
@@ -129,7 +129,7 @@ menu:
Front Matter with Menu with Name`, title, menu, name))
}

-func tstCreateMenuPageWithIdentifierYaml(title, menu, identifier string) []byte {
+func tstCreateMenuPageWithIdentifierYAML(title, menu, identifier string) []byte {
return []byte(fmt.Sprintf(`---
title: "%s"
weight: 1
@@ -144,22 +144,22 @@ Front Matter with Menu with Identifier`, title, menu, identifier))
type testMenuState struct {
site *Site
oldMenu interface{}
-oldBaseUrl interface{}
+oldBaseURL interface{}
}

// Issue 817 - identifier should trump everything
func TestPageMenuWithIdentifier(t *testing.T) {

toml := []source.ByteSource{
-{"sect/doc1.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i1")},
-{"sect/doc2.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i2")},
-{"sect/doc3.md", tstCreateMenuPageWithIdentifierToml("t1", "m1", "i2")}, // duplicate
+{"sect/doc1.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i1")},
+{"sect/doc2.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")},
+{"sect/doc3.md", tstCreateMenuPageWithIdentifierTOML("t1", "m1", "i2")}, // duplicate
}

yaml := []source.ByteSource{
-{"sect/doc1.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i1")},
-{"sect/doc2.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i2")},
-{"sect/doc3.md", tstCreateMenuPageWithIdentifierYaml("t1", "m1", "i2")}, // duplicate
+{"sect/doc1.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i1")},
+{"sect/doc2.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")},
+{"sect/doc3.md", tstCreateMenuPageWithIdentifierYAML("t1", "m1", "i2")}, // duplicate
}

doTestPageMenuWithIdentifier(t, toml)
@@ -174,8 +174,8 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou

assert.Equal(t, 3, len(ts.site.Pages), "Not enough pages")

-me1 := ts.findTestMenuEntryById("m1", "i1")
-me2 := ts.findTestMenuEntryById("m1", "i2")
+me1 := ts.findTestMenuEntryByID("m1", "i1")
+me2 := ts.findTestMenuEntryByID("m1", "i2")

assert.NotNil(t, me1)
assert.NotNil(t, me2)
@@ -188,15 +188,15 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou
// Issue 817 contd - name should be second identifier in
func TestPageMenuWithDuplicateName(t *testing.T) {
toml := []source.ByteSource{
-{"sect/doc1.md", tstCreateMenuPageWithNameToml("t1", "m1", "n1")},
-{"sect/doc2.md", tstCreateMenuPageWithNameToml("t1", "m1", "n2")},
-{"sect/doc3.md", tstCreateMenuPageWithNameToml("t1", "m1", "n2")}, // duplicate
+{"sect/doc1.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n1")},
+{"sect/doc2.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n2")},
+{"sect/doc3.md", tstCreateMenuPageWithNameTOML("t1", "m1", "n2")}, // duplicate
}

yaml := []source.ByteSource{
-{"sect/doc1.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n1")},
-{"sect/doc2.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n2")},
-{"sect/doc3.md", tstCreateMenuPageWithNameYaml("t1", "m1", "n2")}, // duplicate
+{"sect/doc1.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n1")},
+{"sect/doc2.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n2")},
+{"sect/doc3.md", tstCreateMenuPageWithNameYAML("t1", "m1", "n2")}, // duplicate
}

doTestPageMenuWithDuplicateName(t, toml)
@@ -234,7 +234,7 @@ func TestPageMenu(t *testing.T) {
third := ts.site.Pages[2]

pOne := ts.findTestMenuEntryByName("p_one", "One")
-pTwo := ts.findTestMenuEntryById("p_two", "Two")
+pTwo := ts.findTestMenuEntryByID("p_two", "Two")

for i, this := range []struct {
menu string
@@ -267,11 +267,11 @@ func TestPageMenu(t *testing.T) {
}

// issue #888
-func TestMenuWithHashInUrl(t *testing.T) {
+func TestMenuWithHashInURL(t *testing.T) {
ts := setupMenuTests(t, MENU_PAGE_SOURCES)
defer resetMenuTestState(ts)

-me := ts.findTestMenuEntryById("hash", "hash")
+me := ts.findTestMenuEntryByID("hash", "hash")

assert.NotNil(t, me)

@@ -279,41 +279,41 @@ func TestMenuWithHashInUrl(t *testing.T) {
}

// issue #719
-func TestMenuWithUnicodeUrls(t *testing.T) {
-for _, uglyUrls := range []bool{true, false} {
-for _, canonifyUrls := range []bool{true, false} {
-doTestMenuWithUnicodeUrls(t, canonifyUrls, uglyUrls)
+func TestMenuWithUnicodeURLs(t *testing.T) {
+for _, uglyURLs := range []bool{true, false} {
+for _, canonifyURLs := range []bool{true, false} {
+doTestMenuWithUnicodeURLs(t, canonifyURLs, uglyURLs)
}
}
}

-func doTestMenuWithUnicodeUrls(t *testing.T, canonifyUrls, uglyUrls bool) {
-viper.Set("CanonifyUrls", canonifyUrls)
-viper.Set("UglyUrls", uglyUrls)
+func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) {
+viper.Set("CanonifyURLs", canonifyURLs)
+viper.Set("UglyURLs", uglyURLs)

ts := setupMenuTests(t, MENU_PAGE_SOURCES)
defer resetMenuTestState(ts)

-unicodeRussian := ts.findTestMenuEntryById("unicode", "unicode-russian")
+unicodeRussian := ts.findTestMenuEntryByID("unicode", "unicode-russian")

expectedBase := "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0"

-if !canonifyUrls {
+if !canonifyURLs {
expectedBase = "/Zoo" + expectedBase
}

var expected string
-if uglyUrls {
+if uglyURLs {
expected = expectedBase + ".html"
} else {
expected = expectedBase + "/"
}

-assert.Equal(t, expected, unicodeRussian.Url, "uglyUrls[%t]", uglyUrls)
+assert.Equal(t, expected, unicodeRussian.Url, "uglyURLs[%t]", uglyURLs)
}

func TestTaxonomyNodeMenu(t *testing.T) {
-viper.Set("CanonifyUrls", true)
+viper.Set("CanonifyURLs", true)
ts := setupMenuTests(t, MENU_PAGE_SOURCES)
defer resetMenuTestState(ts)

@@ -325,9 +325,9 @@ func TestTaxonomyNodeMenu(t *testing.T) {
hasMenuCurrent bool
}{
{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-ts.findTestMenuEntryById("tax", "1"), true, false},
+ts.findTestMenuEntryByID("tax", "1"), true, false},
{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
-ts.findTestMenuEntryById("tax", "2"), true, false},
+ts.findTestMenuEntryByID("tax", "2"), true, false},
{"tax", taxRenderInfo{key: "key", singular: "one", plural: "two"},
&MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
} {
@@ -347,9 +347,9 @@ func TestTaxonomyNodeMenu(t *testing.T) {

}

-menuEntryXml := ts.findTestMenuEntryById("tax", "xml")
+menuEntryXML := ts.findTestMenuEntryByID("tax", "xml")

-if strings.HasSuffix(menuEntryXml.Url, "/") {
+if strings.HasSuffix(menuEntryXML.Url, "/") {
t.Error("RSS menu item should not be padded with trailing slash")
}
}
@@ -370,9 +370,9 @@ func TestHomeNodeMenu(t *testing.T) {
{"main", homeMenuEntry, true, false},
{"doesnotexist", homeMenuEntry, false, false},
{"main", &MenuEntry{Name: "Somewhere else", Url: "/somewhereelse"}, false, false},
-{"grandparent", ts.findTestMenuEntryById("grandparent", "grandparentId"), false, false},
-{"grandparent", ts.findTestMenuEntryById("grandparent", "parentId"), false, true},
-{"grandparent", ts.findTestMenuEntryById("grandparent", "grandchildId"), true, false},
+{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandparentId"), false, false},
+{"grandparent", ts.findTestMenuEntryByID("grandparent", "parentId"), false, true},
+{"grandparent", ts.findTestMenuEntryByID("grandparent", "grandchildId"), true, false},
} {

isMenuCurrent := home.IsMenuCurrent(this.menu, this.menuItem)
@@ -391,7 +391,7 @@ func TestHomeNodeMenu(t *testing.T) {
var testMenuIdentityMatcher = func(me *MenuEntry, id string) bool { return me.Identifier == id }
var testMenuNameMatcher = func(me *MenuEntry, id string) bool { return me.Name == id }

-func (ts testMenuState) findTestMenuEntryById(mn string, id string) *MenuEntry {
+func (ts testMenuState) findTestMenuEntryByID(mn string, id string) *MenuEntry {
return ts.findTestMenuEntry(mn, id, testMenuIdentityMatcher)
}
func (ts testMenuState) findTestMenuEntryByName(mn string, id string) *MenuEntry {
@@ -447,7 +447,7 @@ func (ts testMenuState) findDescendantTestMenuEntry(parent *MenuEntry, id string
}

func getTestMenuState(s *Site, t *testing.T) *testMenuState {
-menuState := &testMenuState{site: s, oldBaseUrl: viper.Get("baseurl"), oldMenu: viper.Get("menu")}
+menuState := &testMenuState{site: s, oldBaseURL: viper.Get("baseurl"), oldMenu: viper.Get("menu")}

menus, err := tomlToMap(CONF_MENU1)

@@ -471,7 +471,7 @@ func setupMenuTests(t *testing.T, pageSources []source.ByteSource) *testMenuStat

func resetMenuTestState(state *testMenuState) {
viper.Set("menu", state.oldMenu)
-viper.Set("baseurl", state.oldBaseUrl)
+viper.Set("baseurl", state.oldBaseURL)
}

func createTestSite(pageSources []source.ByteSource) *Site {
@@ -40,7 +40,7 @@ func (n *Node) Now() time.Time {
return time.Now()
}

-func (n *Node) HasMenuCurrent(menuId string, inme *MenuEntry) bool {
+func (n *Node) HasMenuCurrent(menuID string, inme *MenuEntry) bool {
if inme.HasChildren() {
me := MenuEntry{Name: n.Title, Url: n.Url}

@@ -54,7 +54,7 @@ func (n *Node) HasMenuCurrent(menuId string, inme *MenuEntry) bool {
return false
}

-func (n *Node) IsMenuCurrent(menuId string, inme *MenuEntry) bool {
+func (n *Node) IsMenuCurrent(menuID string, inme *MenuEntry) bool {

me := MenuEntry{Name: n.Title, Url: n.Url}
if !me.IsSameResource(inme) {
@@ -63,7 +63,7 @@ func (n *Node) IsMenuCurrent(menuId string, inme *MenuEntry) bool {

// this resource may be included in several menus
// search for it to make sure that it is in the menu with the given menuId
-if menu, ok := (*n.Site.Menus)[menuId]; ok {
+if menu, ok := (*n.Site.Menus)[menuID]; ok {
for _, menuEntry := range *menu {
if menuEntry.IsSameResource(inme) {
return true
@@ -149,8 +149,8 @@ func (p *Page) Authors() AuthorList {
return al
}

-func (p *Page) UniqueId() string {
-return p.Source.UniqueId()
+func (p *Page) UniqueID() string {
+return p.Source.UniqueID()
}

func (p *Page) Ref(ref string) (string, error) {
@@ -200,12 +200,12 @@ func (p *Page) setSummary() {
func (p *Page) renderBytes(content []byte) []byte {
return helpers.RenderBytes(
&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
}

func (p *Page) renderContent(content []byte) []byte {
return helpers.RenderBytesWithTOC(&helpers.RenderingContext{Content: content, PageFmt: p.guessMarkupType(),
-DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})
}

func (p *Page) getRenderingConfig() *helpers.Blackfriday {
@@ -341,15 +341,15 @@ func (p *Page) analyzePage() {
}

func (p *Page) permalink() (*url.URL, error) {
-baseUrl := string(p.Site.BaseUrl)
+baseURL := string(p.Site.BaseUrl)
dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
pSlug := strings.TrimSpace(p.Slug)
-pUrl := strings.TrimSpace(p.Url)
+pURL := strings.TrimSpace(p.Url)
var permalink string
var err error

-if len(pUrl) > 0 {
-return helpers.MakePermalink(baseUrl, pUrl), nil
+if len(pURL) > 0 {
+return helpers.MakePermalink(baseURL, pURL), nil
}

if override, ok := p.Site.Permalinks[p.Section()]; ok {
@@ -361,14 +361,14 @@ func (p *Page) permalink() (*url.URL, error) {
// fmt.Printf("have a section override for %q in section %s → %s\n", p.Title, p.Section, permalink)
} else {
if len(pSlug) > 0 {
-permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, p.Slug+"."+p.Extension()))
+permalink = helpers.URLPrep(viper.GetBool("UglyURLs"), path.Join(dir, p.Slug+"."+p.Extension()))
} else {
_, t := filepath.Split(p.Source.LogicalName())
-permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension())))
+permalink = helpers.URLPrep(viper.GetBool("UglyURLs"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension())))
}
}

-return helpers.MakePermalink(baseUrl, permalink), nil
+return helpers.MakePermalink(baseURL, permalink), nil
}

func (p *Page) Extension() string {
@@ -419,7 +419,7 @@ func (p *Page) RelPermalink() (string, error) {
return "", err
}

-if viper.GetBool("CanonifyUrls") {
+if viper.GetBool("CanonifyURLs") {
// replacements for relpermalink with baseUrl on the form http://myhost.com/sub/ will fail later on
// have to return the Url relative from baseUrl
relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseUrl))
@@ -452,12 +452,12 @@ func (p *Page) update(f interface{}) error {
case "description":
p.Description = cast.ToString(v)
case "slug":
-p.Slug = helpers.Urlize(cast.ToString(v))
+p.Slug = helpers.URLize(cast.ToString(v))
case "url":
if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
return fmt.Errorf("Only relative urls are supported, %v provided", url)
}
-p.Url = helpers.Urlize(cast.ToString(v))
+p.Url = helpers.URLize(cast.ToString(v))
case "type":
p.contentType = cast.ToString(v)
case "extension", "ext":
@@ -16,8 +16,8 @@ func TestPermalink(t *testing.T) {
base template.URL
slug string
url string
-uglyUrls bool
-canonifyUrls bool
+uglyURLs bool
+canonifyURLs bool
expectedAbs string
expectedRel string
}{
@@ -42,8 +42,8 @@ func TestPermalink(t *testing.T) {
viper.Set("DefaultExtension", "html")

for i, test := range tests {
-viper.Set("uglyurls", test.uglyUrls)
-viper.Set("canonifyurls", test.canonifyUrls)
+viper.Set("uglyurls", test.uglyURLs)
+viper.Set("canonifyurls", test.canonifyURLs)
p := &Page{
Node: Node{
UrlPath: UrlPath{
@@ -15,7 +15,7 @@ package hugolib

func (p Pages) Prev(cur *Page) *Page {
for x, c := range p {
-if c.UniqueId() == cur.UniqueId() {
+if c.UniqueID() == cur.UniqueID() {
if x == 0 {
return p[len(p)-1]
}
@@ -27,7 +27,7 @@ func (p Pages) Prev(cur *Page) *Page {

func (p Pages) Next(cur *Page) *Page {
for x, c := range p {
-if c.UniqueId() == cur.UniqueId() {
+if c.UniqueID() == cur.UniqueID() {
if x < len(p)-1 {
return p[x+1]
}
@@ -35,12 +35,12 @@ var paginatorEmptyPages Pages
type paginator struct {
paginatedPages []Pages
pagers
-paginationUrlFactory
+paginationURLFactory
total int
size int
}

-type paginationUrlFactory func(int) string
+type paginationURLFactory func(int) string

// PageNumber returns the current page's number in the pager sequence.
func (p *pager) PageNumber() int {
@@ -49,7 +49,7 @@ func (p *pager) PageNumber() int {

// Url returns the url to the current page.
func (p *pager) Url() template.HTML {
-return template.HTML(p.paginationUrlFactory(p.PageNumber()))
+return template.HTML(p.paginationURLFactory(p.PageNumber()))
}

// Pages returns the elements on this page.
@@ -225,14 +225,14 @@ func paginatePages(seq interface{}, section string) (pagers, error) {
return nil, errors.New(fmt.Sprintf("unsupported type in paginate, got %T", seq))
}

-urlFactory := newPaginationUrlFactory(section)
+urlFactory := newPaginationURLFactory(section)
paginator, _ := newPaginator(pages, paginateSize, urlFactory)
pagers := paginator.Pagers()

return pagers, nil
}

-func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) (*paginator, error) {
+func newPaginator(pages Pages, size int, urlFactory paginationURLFactory) (*paginator, error) {

if size <= 0 {
return nil, errors.New("Paginator size must be positive")
@@ -240,7 +240,7 @@ func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) (*pagi

split := splitPages(pages, size)

-p := &paginator{total: len(pages), paginatedPages: split, size: size, paginationUrlFactory: urlFactory}
+p := &paginator{total: len(pages), paginatedPages: split, size: size, paginationURLFactory: urlFactory}

var ps pagers

@@ -259,7 +259,7 @@ func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) (*pagi
return p, nil
}

-func newPaginationUrlFactory(pathElements ...string) paginationUrlFactory {
+func newPaginationURLFactory(pathElements ...string) paginationURLFactory {
paginatePath := viper.GetString("paginatePath")

return func(page int) string {
@@ -270,6 +270,6 @@ func newPaginationUrlFactory(pathElements ...string) paginationUrlFactory {
rel = fmt.Sprintf("/%s/%s/%d/", path.Join(pathElements...), paginatePath, page)
}

-return helpers.UrlizeAndPrep(rel)
+return helpers.URLizeAndPrep(rel)
}
}
@@ -99,8 +99,8 @@ func TestPagerNoPages(t *testing.T) {

func TestPaginationUrlFactory(t *testing.T) {
viper.Set("PaginatePath", "zoo")
-unicode := newPaginationUrlFactory("новости проекта")
-fooBar := newPaginationUrlFactory("foo", "bar")
+unicode := newPaginationURLFactory("новости проекта")
+fooBar := newPaginationURLFactory("foo", "bar")

assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/", unicode(1))
assert.Equal(t, "/foo/bar/", fooBar(1))
@@ -11,7 +11,7 @@ const (
)

func TestTemplatePathSeparator(t *testing.T) {
-tmpl := new(tpl.GoHtmlTemplate)
+tmpl := new(tpl.GoHTMLTemplate)
if name := tmpl.GenerateTemplateNameFrom(win_base, win_path); name != "sub1/index.html" {
t.Fatalf("Template name incorrect. Expected: %s, Got: %s", "sub1/index.html", name)
}
@@ -139,14 +139,14 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) {
func pageToPermalinkTitle(p *Page, _ string) (string, error) {
// Page contains Node which has Title
// (also contains UrlPath which has Slug, sometimes)
-return helpers.Urlize(p.Title), nil
+return helpers.URLize(p.Title), nil
}

// pageToPermalinkFilename returns the URL-safe form of the filename
func pageToPermalinkFilename(p *Page, _ string) (string, error) {
//var extension = p.Source.Ext
//var name = p.Source.Path()[0 : len(p.Source.Path())-len(extension)]
-return helpers.Urlize(p.Source.BaseFileName()), nil
+return helpers.URLize(p.Source.BaseFileName()), nil
}

// if the page has a slug, return the slug, else return the title
@@ -203,7 +203,7 @@ func renderShortcode(sc shortcode, p *Page, t tpl.Template) string {
if sc.doMarkup {
newInner := helpers.RenderBytes(&helpers.RenderingContext{
Content: []byte(inner), PageFmt: p.guessMarkupType(),
-DocumentId: p.UniqueId(), Config: p.getRenderingConfig()})
+DocumentID: p.UniqueID(), Config: p.getRenderingConfig()})

// If the type is “unknown” or “markdown”, we assume the markdown
// generation has been performed. Given the input: `a line`, markdown
@@ -113,7 +113,7 @@ type SiteInfo struct {
Permalinks PermalinkOverrides
Params map[string]interface{}
BuildDrafts bool
-canonifyUrls bool
+canonifyURLs bool
paginationPageCount uint64
Data *map[string]interface{}
}
@@ -158,10 +158,10 @@ func (s *SiteInfo) GetParam(key string) interface{} {
}

func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error) {
-var refUrl *url.URL
+var refURL *url.URL
var err error

-refUrl, err = url.Parse(ref)
+refURL, err = url.Parse(ref)

if err != nil {
return "", err
@@ -170,16 +170,16 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error
var target *Page
var link string

-if refUrl.Path != "" {
+if refURL.Path != "" {
for _, page := range []*Page(*s.Pages) {
-if page.Source.Path() == refUrl.Path || page.Source.LogicalName() == refUrl.Path {
+if page.Source.Path() == refURL.Path || page.Source.LogicalName() == refURL.Path {
target = page
break
}
}

if target == nil {
-return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refUrl.Path)
+return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refURL.Path)
}

if relative {
@@ -193,13 +193,13 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error
}
}

-if refUrl.Fragment != "" {
-link = link + "#" + refUrl.Fragment
+if refURL.Fragment != "" {
+link = link + "#" + refURL.Fragment

-if refUrl.Path != "" && target != nil && !target.getRenderingConfig().PlainIdAnchors {
-link = link + ":" + target.UniqueId()
-} else if page != nil && !page.getRenderingConfig().PlainIdAnchors {
-link = link + ":" + page.UniqueId()
+if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
+link = link + ":" + target.UniqueID()
+} else if page != nil && !page.getRenderingConfig().PlainIDAnchors {
+link = link + ":" + page.UniqueID()
}
}

@@ -316,11 +316,11 @@ func (s *Site) loadData(sources []source.Input) (err error) {
func readData(f *source.File) (interface{}, error) {
switch f.Extension() {
case "yaml", "yml":
-return parser.HandleYamlMetaData(f.Bytes())
+return parser.HandleYAMLMetaData(f.Bytes())
case "json":
-return parser.HandleJsonMetaData(f.Bytes())
+return parser.HandleJSONMetaData(f.Bytes())
case "toml":
-return parser.HandleTomlMetaData(f.Bytes())
+return parser.HandleTOMLMetaData(f.Bytes())
default:
return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension())
}
@@ -444,14 +444,14 @@ func (s *Site) initializeSiteInfo() {
}

s.Info = SiteInfo{
-BaseUrl: template.URL(helpers.SanitizeUrlKeepTrailingSlash(viper.GetString("BaseUrl"))),
+BaseUrl: template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
Title: viper.GetString("Title"),
Author: viper.GetStringMap("author"),
LanguageCode: viper.GetString("languagecode"),
Copyright: viper.GetString("copyright"),
DisqusShortname: viper.GetString("DisqusShortname"),
BuildDrafts: viper.GetBool("BuildDrafts"),
-canonifyUrls: viper.GetBool("CanonifyUrls"),
+canonifyURLs: viper.GetBool("CanonifyURLs"),
Pages: &s.Pages,
Recent: &s.Pages,
Menus: &s.Menus,
@@ -706,12 +706,12 @@ func (s *Site) getMenusFromConfig() Menus {

if strings.HasPrefix(menuEntry.Url, "/") {
// make it match the nodes
-menuEntryUrl := menuEntry.Url
-menuEntryUrl = helpers.UrlizeAndPrep(menuEntryUrl)
-if !s.Info.canonifyUrls {
-menuEntryUrl = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryUrl)
+menuEntryURL := menuEntry.Url
+menuEntryURL = helpers.URLizeAndPrep(menuEntryURL)
+if !s.Info.canonifyURLs {
+menuEntryURL = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryURL)
}
-menuEntry.Url = menuEntryUrl
+menuEntry.Url = menuEntryURL
}

if ret[name] == nil {
@@ -1249,7 +1249,7 @@ func (s *Site) RenderHomePage() error {
}
}

-n.Url = helpers.Urlize("404.html")
+n.Url = helpers.URLize("404.html")
n.Title = "404 Page not found"
n.Permalink = s.permalink("404.html")

@@ -1315,7 +1315,7 @@ func (s *Site) Stats() {
}

func (s *Site) setUrls(n *Node, in string) {
-n.Url = helpers.UrlizeAndPrep(in)
+n.Url = helpers.URLizeAndPrep(in)
n.Permalink = s.permalink(n.Url)
n.RSSLink = s.permalink(in + ".xml")
}
@@ -1325,7 +1325,7 @@ func (s *Site) permalink(plink string) template.HTML {
}

func (s *Site) permalinkStr(plink string) string {
-return helpers.MakePermalink(string(viper.GetString("BaseUrl")), helpers.UrlizeAndPrep(plink)).String()
+return helpers.MakePermalink(string(viper.GetString("BaseURL")), helpers.URLizeAndPrep(plink)).String()
}

func (s *Site) NewNode() *Node {
@@ -1348,7 +1348,7 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout

err := s.render(name, d, renderBuffer, layouts...)

-absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseUrl"))
+absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseURL"))
if err != nil {
return err
}
@@ -1377,8 +1377,8 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou

transformLinks := transform.NewEmptyTransforms()

-if viper.GetBool("CanonifyUrls") {
-absURL, err := transform.AbsURL(viper.GetString("BaseUrl"))
+if viper.GetBool("CanonifyURLs") {
+absURL, err := transform.AbsURL(viper.GetString("BaseURL"))
if err != nil {
return err
}
@@ -1460,7 +1460,7 @@ func (s *Site) initTargetList() {
if s.Targets.Page == nil {
s.Targets.Page = &target.PagePub{
PublishDir: s.absPublishDir(),
-UglyUrls: viper.GetBool("UglyUrls"),
+UglyURLs: viper.GetBool("UglyURLs"),
}
}
if s.Targets.File == nil {
@@ -92,7 +92,7 @@ func TestFileTarget(t *testing.T) {

func TestPageTargetUgly(t *testing.T) {
s := &Site{
-Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
Source: &source.InMemorySource{ByteSource: fakeSource},
}
s.AliasTarget()
@@ -309,18 +309,18 @@ func TestDraftAndFutureRender(t *testing.T) {

// Issue #939
func Test404ShouldAlwaysHaveUglyUrls(t *testing.T) {
-for _, uglyUrls := range []bool{true, false} {
-doTest404ShouldAlwaysHaveUglyUrls(t, uglyUrls)
+for _, uglyURLs := range []bool{true, false} {
+doTest404ShouldAlwaysHaveUglyUrls(t, uglyURLs)
}
}

-func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyUrls bool) {
+func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyURLs bool) {
viper.Set("verbose", true)
viper.Set("baseurl", "http://auth/bub")
viper.Set("DisableSitemap", false)
viper.Set("DisableRSS", false)

-viper.Set("UglyUrls", uglyUrls)
+viper.Set("UglyURLs", uglyURLs)

sources := []source.ByteSource{
{filepath.FromSlash("sect/doc1.html"), []byte("---\nmarkup: markdown\n---\n# title\nsome *content*")},
@@ -328,7 +328,7 @@ func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyUrls bool) {

s := &Site{
Source: &source.InMemorySource{ByteSource: sources},
-Targets: targetList{Page: &target.PagePub{UglyUrls: uglyUrls}},
+Targets: targetList{Page: &target.PagePub{UglyURLs: uglyURLs}},
}

s.initializeSiteInfo()
@@ -347,7 +347,7 @@ func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyUrls bool) {
s.RenderSitemap()

var expectedPagePath string
-if uglyUrls {
+if uglyURLs {
expectedPagePath = "sect/doc1.html"
} else {
expectedPagePath = "sect/doc1/index.html"
@@ -392,11 +392,11 @@ func TestSkipRender(t *testing.T) {
}

viper.Set("verbose", true)
-viper.Set("CanonifyUrls", true)
+viper.Set("CanonifyURLs", true)
viper.Set("baseurl", "http://auth/bub")
s := &Site{
Source: &source.InMemorySource{ByteSource: sources},
-Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
}

s.initializeSiteInfo()
@@ -442,13 +442,13 @@ func TestAbsUrlify(t *testing.T) {
{filepath.FromSlash("content/blue/doc2.html"), []byte("---\nf: t\n---\n<!doctype html><html><body>more content</body></html>")},
}
for _, canonify := range []bool{true, false} {
-viper.Set("CanonifyUrls", canonify)
-viper.Set("BaseUrl", "http://auth/bub")
+viper.Set("CanonifyURLs", canonify)
+viper.Set("BaseURL", "http://auth/bub")
s := &Site{
Source: &source.InMemorySource{ByteSource: sources},
-Targets: targetList{Page: &target.PagePub{UglyUrls: true}},
+Targets: targetList{Page: &target.PagePub{UglyURLs: true}},
}
-t.Logf("Rendering with BaseUrl %q and CanonifyUrls set %v", viper.GetString("baseUrl"), canonify)
+t.Logf("Rendering with BaseURL %q and CanonifyURLs set %v", viper.GetString("baseURL"), canonify)
s.initializeSiteInfo()
templatePrep(s)
must(s.addTemplate("blue/single.html", TEMPLATE_WITH_URL_ABS))
@@ -823,13 +823,13 @@ func TestWeightedTaxonomies(t *testing.T) {
}
}

-func TestDataDirJson(t *testing.T) {
+func TestDataDirJSON(t *testing.T) {
sources := []source.ByteSource{
{filepath.FromSlash("test/foo.json"), []byte(`{ "bar": "foofoo" }`)},
{filepath.FromSlash("test.json"), []byte(`{ "hello": [ { "world": "foo" } ] }`)},
}

-expected, err := parser.HandleJsonMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`))
+expected, err := parser.HandleJSONMetaData([]byte(`{ "test": { "hello": [{ "world": "foo" }] , "foo": { "bar":"foofoo" } } }`))

if err != nil {
t.Fatalf("Error %s", err)
@@ -843,7 +843,7 @@ func TestDataDirToml(t *testing.T) {
{filepath.FromSlash("test/kung.toml"), []byte("[foo]\nbar = 1")},
}

-expected, err := parser.HandleTomlMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))
+expected, err := parser.HandleTOMLMetaData([]byte("[test]\n[test.kung]\n[test.kung.foo]\nbar = 1"))

if err != nil {
t.Fatalf("Error %s", err)
@@ -852,7 +852,7 @@ func TestDataDirToml(t *testing.T) {
doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: sources}})
}

-func TestDataDirYamlWithOverridenValue(t *testing.T) {
+func TestDataDirYAMLWithOverridenValue(t *testing.T) {
sources := []source.ByteSource{
// filepath.Walk walks the files in lexical order, '/' comes before '.'. Simulate this:
{filepath.FromSlash("a.yaml"), []byte("a: 1")},
@@ -878,7 +878,7 @@ func TestDataDirMultipleSources(t *testing.T) {
{filepath.FromSlash("test/second.toml"), []byte("tender = 2")},
}

-expected, _ := parser.HandleTomlMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))
+expected, _ := parser.HandleTOMLMetaData([]byte("[test.first]\nbar = 1\n[test.second]\ntender=2"))

doTestDataDir(t, expected, []source.Input{&source.InMemorySource{ByteSource: s1}, &source.InMemorySource{ByteSource: s2}})
@@ -162,7 +162,7 @@ func (wp WeightedPages) Pages() Pages {

func (wp WeightedPages) Prev(cur *Page) *Page {
for x, c := range wp {
-if c.Page.UniqueId() == cur.UniqueId() {
+if c.Page.UniqueID() == cur.UniqueID() {
if x == 0 {
return wp[len(wp)-1].Page
}
@@ -174,7 +174,7 @@ func (wp WeightedPages) Prev(cur *Page) *Page {

func (wp WeightedPages) Next(cur *Page) *Page {
for x, c := range wp {
-if c.Page.UniqueId() == cur.UniqueId() {
+if c.Page.UniqueID() == cur.UniqueID() {
if x < len(wp)-1 {
return wp[x+1].Page
}
@@ -151,30 +151,30 @@ func FormatSanitize(kind string) string {
func DetectFrontMatter(mark rune) (f *FrontmatterType) {
switch mark {
case '-':
-return &FrontmatterType{[]byte(YAML_DELIM), []byte(YAML_DELIM), HandleYamlMetaData, false}
+return &FrontmatterType{[]byte(YAML_DELIM), []byte(YAML_DELIM), HandleYAMLMetaData, false}
case '+':
-return &FrontmatterType{[]byte(TOML_DELIM), []byte(TOML_DELIM), HandleTomlMetaData, false}
+return &FrontmatterType{[]byte(TOML_DELIM), []byte(TOML_DELIM), HandleTOMLMetaData, false}
case '{':
-return &FrontmatterType{[]byte{'{'}, []byte{'}'}, HandleJsonMetaData, true}
+return &FrontmatterType{[]byte{'{'}, []byte{'}'}, HandleJSONMetaData, true}
default:
return nil
}
}

-func HandleTomlMetaData(datum []byte) (interface{}, error) {
+func HandleTOMLMetaData(datum []byte) (interface{}, error) {
m := map[string]interface{}{}
-datum = removeTomlIdentifier(datum)
+datum = removeTOMLIdentifier(datum)
if _, err := toml.Decode(string(datum), &m); err != nil {
return m, err
}
return m, nil
}

-func removeTomlIdentifier(datum []byte) []byte {
+func removeTOMLIdentifier(datum []byte) []byte {
return bytes.Replace(datum, []byte(TOML_DELIM), []byte(""), -1)
}

-func HandleYamlMetaData(datum []byte) (interface{}, error) {
+func HandleYAMLMetaData(datum []byte) (interface{}, error) {
m := map[string]interface{}{}
if err := yaml.Unmarshal(datum, &m); err != nil {
return m, err
@@ -182,7 +182,7 @@ func HandleYamlMetaData(datum []byte) (interface{}, error) {
return m, nil
}

-func HandleJsonMetaData(datum []byte) (interface{}, error) {
+func HandleJSONMetaData(datum []byte) (interface{}, error) {
var f interface{}
if err := json.Unmarshal(datum, &f); err != nil {
return f, err
@@ -27,14 +27,14 @@ type File struct {
section string // The first directory
dir string // The full directory Path (minus file name)
ext string // Just the ext (eg txt)
-uniqueId string // MD5 of the filename
+uniqueID string // MD5 of the filename
}

-func (f *File) UniqueId() string {
-if f.uniqueId == "" {
-f.uniqueId = helpers.Md5String(f.LogicalName())
+func (f *File) UniqueID() string {
+if f.uniqueID == "" {
+f.uniqueID = helpers.Md5String(f.LogicalName())
}
-return f.uniqueId
+return f.uniqueID
}

func (f *File) String() string {
@ -16,7 +16,7 @@ type PagePublisher interface {

}

type PagePub struct {
UglyUrls bool
UglyURLs bool
DefaultExtension string
PublishDir string
}

@ -46,7 +46,7 @@ func (pp *PagePub) Translate(src string) (dest string, err error) {

dir = filepath.Join(pp.PublishDir, dir)
}

if pp.UglyUrls || file == "index.html" || file == "404.html" {
if pp.UglyURLs || file == "index.html" || file == "404.html" {
return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil
}
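The rename above is mechanical, but the field it touches is the switch between /filename.html and /filename/ output paths. A minimal sketch of exercising it from outside the package, assuming the github.com/spf13/hugo/target import path and an invented source path:

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/spf13/hugo/target"
)

func main() {
	pub := &target.PagePub{UglyURLs: true}

	dest, err := pub.Translate(filepath.FromSlash("section/content.html"))
	if err != nil {
		fmt.Println(err)
		return
	}
	// With UglyURLs set the path keeps its .html name; with the default
	// ("pretty") URLs it would be rewritten to end in /index.html instead.
	fmt.Println(dest)
}
```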
@ -74,7 +74,7 @@ func TestTranslateUglyUrls(t *testing.T) {

}

for _, test := range tests {
f := &PagePub{UglyUrls: true}
f := &PagePub{UglyURLs: true}
dest, err := f.Translate(filepath.FromSlash(test.content))
if err != nil {
t.Fatalf("Translate returned an unexpected err: %s", err)
@ -58,7 +58,7 @@ type templateErr struct {

err error
}

type GoHtmlTemplate struct {
type GoHTMLTemplate struct {
template.Template
errors []*templateErr
}

@ -81,7 +81,7 @@ func InitializeT() Template {

// Return a new Hugo Template System
// With all the additional features, templates & functions
func New() Template {
var templates = &GoHtmlTemplate{
var templates = &GoHTMLTemplate{
Template: *template.New(""),
errors: make([]*templateErr, 0),
}

@ -934,21 +934,21 @@ func DateFormat(layout string, v interface{}) (string, error) {

return t.Format(layout), nil
}

func SafeHtml(text string) template.HTML {
func SafeHTML(text string) template.HTML {
return template.HTML(text)
}

// "safeHtmlAttr" is currently disabled, pending further discussion
// on its use case. 2015-01-19
func SafeHtmlAttr(text string) template.HTMLAttr {
func SafeHTMLAttr(text string) template.HTMLAttr {
return template.HTMLAttr(text)
}

func SafeCss(text string) template.CSS {
func SafeCSS(text string) template.CSS {
return template.CSS(text)
}

func SafeUrl(text string) template.URL {
func SafeURL(text string) template.URL {
return template.URL(text)
}

@ -1151,12 +1151,12 @@ func ExecuteTemplateToHTML(context interface{}, layouts ...string) template.HTML

return template.HTML(b.String())
}

func (t *GoHtmlTemplate) LoadEmbedded() {
func (t *GoHTMLTemplate) LoadEmbedded() {
t.EmbedShortcodes()
t.EmbedTemplates()
}

func (t *GoHtmlTemplate) AddInternalTemplate(prefix, name, tpl string) error {
func (t *GoHTMLTemplate) AddInternalTemplate(prefix, name, tpl string) error {
if prefix != "" {
return t.AddTemplate("_internal/"+prefix+"/"+name, tpl)
} else {

@ -1164,11 +1164,11 @@ func (t *GoHtmlTemplate) AddInternalTemplate(prefix, name, tpl string) error {

}
}

func (t *GoHtmlTemplate) AddInternalShortcode(name, content string) error {
func (t *GoHTMLTemplate) AddInternalShortcode(name, content string) error {
return t.AddInternalTemplate("shortcodes", name, content)
}

func (t *GoHtmlTemplate) AddTemplate(name, tpl string) error {
func (t *GoHTMLTemplate) AddTemplate(name, tpl string) error {
_, err := t.New(name).Parse(tpl)
if err != nil {
t.errors = append(t.errors, &templateErr{name: name, err: err})

@ -1176,7 +1176,7 @@ func (t *GoHtmlTemplate) AddTemplate(name, tpl string) error {

return err
}

func (t *GoHtmlTemplate) AddTemplateFile(name, path string) error {
func (t *GoHTMLTemplate) AddTemplateFile(name, path string) error {
// get the suffix and switch on that
ext := filepath.Ext(path)
switch ext {

@ -1221,7 +1221,7 @@ func (t *GoHtmlTemplate) AddTemplateFile(name, path string) error {

}

func (t *GoHtmlTemplate) GenerateTemplateNameFrom(base, path string) string {
func (t *GoHTMLTemplate) GenerateTemplateNameFrom(base, path string) string {
name, _ := filepath.Rel(base, path)
return filepath.ToSlash(name)
}

@ -1234,7 +1234,7 @@ func isBackupFile(path string) bool {

return path[len(path)-1] == '~'
}

func (t *GoHtmlTemplate) loadTemplates(absPath string, prefix string) {
func (t *GoHTMLTemplate) loadTemplates(absPath string, prefix string) {
walker := func(path string, fi os.FileInfo, err error) error {
if err != nil {
return nil

@ -1277,15 +1277,15 @@ func (t *GoHtmlTemplate) loadTemplates(absPath string, prefix string) {

filepath.Walk(absPath, walker)
}

func (t *GoHtmlTemplate) LoadTemplatesWithPrefix(absPath string, prefix string) {
func (t *GoHTMLTemplate) LoadTemplatesWithPrefix(absPath string, prefix string) {
t.loadTemplates(absPath, prefix)
}

func (t *GoHtmlTemplate) LoadTemplates(absPath string) {
func (t *GoHTMLTemplate) LoadTemplates(absPath string) {
t.loadTemplates(absPath, "")
}

func (t *GoHtmlTemplate) PrintErrors() {
func (t *GoHTMLTemplate) PrintErrors() {
for _, e := range t.errors {
jww.ERROR.Println(e.err)
}

@ -1293,8 +1293,9 @@ func (t *GoHtmlTemplate) PrintErrors() {

func init() {
funcMap = template.FuncMap{
"urlize": helpers.Urlize,
"sanitizeurl": helpers.SanitizeUrl,
"urlize": helpers.URLize,
"sanitizeURL": helpers.SanitizeURL,
"sanitizeurl": helpers.SanitizeURL,
"eq": Eq,
"ne": Ne,
"gt": Gt,

@ -1303,11 +1304,15 @@ func init() {

"le": Le,
"in": In,
"intersect": Intersect,
"isSet": IsSet,
"isset": IsSet,
"echoParam": ReturnWhenSet,
"safeHtml": SafeHtml,
"safeCss": SafeCss,
"safeUrl": SafeUrl,
"safeHTML": SafeHTML,
"safeHtml": SafeHTML,
"safeCSS": SafeCSS,
"safeCss": SafeCSS,
"safeURL": SafeURL,
"safeUrl": SafeURL,
"markdownify": Markdownify,
"first": First,
"where": Where,

@ -1331,8 +1336,10 @@ func init() {

"replace": Replace,
"trim": Trim,
"dateFormat": DateFormat,
"getJson": GetJson,
"getCsv": GetCsv,
"getJSON": GetJSON,
"getJson": GetJSON,
"getCSV": GetCSV,
"getCsv": GetCSV,
}

}
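The funcMap hunks above are the backward-compatibility part of the rename: each template function is registered under both the old lower-case alias and the new initialism, and both keys point at the same Go function. A self-contained sketch of that aliasing pattern using only the standard library (the helper function and the template text are invented for illustration):

```go
package main

import (
	"html/template"
	"os"
)

func safeHTML(s string) template.HTML { return template.HTML(s) }

func main() {
	funcs := template.FuncMap{
		"safeHTML": safeHTML, // new, golint-friendly name
		"safeHtml": safeHTML, // old spelling kept so existing templates keep working
	}

	tmpl := template.Must(template.New("demo").Funcs(funcs).
		Parse(`{{ safeHtml "<b>old</b>" }} {{ safeHTML "<b>new</b>" }}`))

	// Both spellings resolve to the same function, so the output is identical
	// whichever one a template happens to use.
	if err := tmpl.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
}
```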
@ -18,7 +18,7 @@ type Tmpl struct {

Data string
}

func (t *GoHtmlTemplate) EmbedShortcodes() {
func (t *GoHTMLTemplate) EmbedShortcodes() {
t.AddInternalShortcode("ref.html", `{{ .Get 0 | ref .Page }}`)
t.AddInternalShortcode("relref.html", `{{ .Get 0 | relref .Page }}`)
t.AddInternalShortcode("highlight.html", `{{ .Get 0 | highlight .Inner }}`)

@ -43,7 +43,7 @@ func (t *GoHtmlTemplate) EmbedShortcodes() {

<!-- image -->`)
}

func (t *GoHtmlTemplate) EmbedTemplates() {
func (t *GoHTMLTemplate) EmbedTemplates() {

t.AddInternalTemplate("_default", "rss.xml", `<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">
<channel>
@ -31,7 +31,7 @@ import (

"github.com/spf13/viper"
)

var remoteUrlLock = &remoteLock{m: make(map[string]*sync.Mutex)}
var remoteURLLock = &remoteLock{m: make(map[string]*sync.Mutex)}

type remoteLock struct {
sync.RWMutex

@ -39,7 +39,7 @@ type remoteLock struct {

}

// resLock locks an URL during download
func (l *remoteLock) UrlLock(url string) {
func (l *remoteLock) URLLock(url string) {
l.Lock()
if _, ok := l.m[url]; !ok {
l.m[url] = &sync.Mutex{}

@ -49,7 +49,7 @@ func (l *remoteLock) UrlLock(url string) {

}

// resUnlock unlocks an URL when the download has been finished. Use only in defer calls.
func (l *remoteLock) UrlUnlock(url string) {
func (l *remoteLock) URLUnlock(url string) {
l.RLock()
defer l.RUnlock()
if um, ok := l.m[url]; ok {

@ -111,8 +111,8 @@ func resGetRemote(url string, fs afero.Fs, hc *http.Client) ([]byte, error) {

}

// avoid race condition with locks, block other goroutines if the current url is processing
remoteUrlLock.UrlLock(url)
defer func() { remoteUrlLock.UrlUnlock(url) }()
remoteURLLock.URLLock(url)
defer func() { remoteURLLock.URLUnlock(url) }()

// avoid multiple locks due to calling resGetCache twice
c, err = resGetCache(url, fs, viper.GetBool("IgnoreCache"))
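Only the names change above, but the hunks show the shape of the per-URL locking that guards remote downloads: a map of one mutex per URL, protected by an outer RWMutex. A standalone sketch of that pattern with simplified names and no Hugo dependencies; the full bodies of the Hugo methods are not shown in the diff, so treat this as an approximation rather than the exact implementation:

```go
package main

import "sync"

// keyedLock hands out one mutex per key so that concurrent fetches of the
// same URL wait for each other while different URLs proceed in parallel.
type keyedLock struct {
	sync.RWMutex
	m map[string]*sync.Mutex
}

func (l *keyedLock) lock(key string) {
	l.Lock()
	mu, ok := l.m[key]
	if !ok {
		mu = &sync.Mutex{}
		l.m[key] = mu
	}
	l.Unlock()
	mu.Lock()
}

func (l *keyedLock) unlock(key string) {
	l.RLock()
	defer l.RUnlock()
	if mu, ok := l.m[key]; ok {
		mu.Unlock()
	}
}

func main() {
	locks := &keyedLock{m: make(map[string]*sync.Mutex)}

	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			locks.lock("http://example.com/data.json")
			defer locks.unlock("http://example.com/data.json")
			// download and cache the resource here; other goroutines asking
			// for the same URL block until this one finishes
		}()
	}
	wg.Wait()
}
```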
@ -176,7 +176,7 @@ func resGetResource(url string) ([]byte, error) {

// GetJson expects one or n-parts of a URL to a resource which can either be a local or a remote one.
// If you provide multiple parts they will be joined together to the final URL.
// GetJson returns nil or parsed JSON to use in a short code.
func GetJson(urlParts ...string) interface{} {
func GetJSON(urlParts ...string) interface{} {
url := strings.Join(urlParts, "")
c, err := resGetResource(url)
if err != nil {

@ -194,7 +194,7 @@ func GetJson(urlParts ...string) interface{} {

}

// parseCsv parses bytes of csv data into a slice slice string or an error
func parseCsv(c []byte, sep string) ([][]string, error) {
func parseCSV(c []byte, sep string) ([][]string, error) {
if len(sep) != 1 {
return nil, errors.New("Incorrect length of csv separator: " + sep)
}

@ -211,14 +211,14 @@ func parseCsv(c []byte, sep string) ([][]string, error) {

// The data separator can be a comma, semi-colon, pipe, etc, but only one character.
// If you provide multiple parts for the URL they will be joined together to the final URL.
// GetCsv returns nil or a slice slice to use in a short code.
func GetCsv(sep string, urlParts ...string) [][]string {
func GetCSV(sep string, urlParts ...string) [][]string {
url := strings.Join(urlParts, "")
c, err := resGetResource(url)
if err != nil {
jww.ERROR.Printf("Failed to get csv resource %s with error message %s", url, err)
return nil
}
d, err := parseCsv(c, sep)
d, err := parseCSV(c, sep)
if err != nil {
jww.ERROR.Printf("Failed to read csv resource %s with error message %s", url, err)
return nil
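Per the comments above, GetJSON and GetCSV join their variadic parts into one URL, fetch the resource (local or remote, with caching), and return the parsed result for use in a shortcode. A rough sketch of calling the renamed functions directly from Go; the import path and URLs are assumptions, and in templates the same functions remain reachable as getJSON/getCSV as well as the old getJson/getCsv aliases:

```go
package main

import (
	"fmt"

	"github.com/spf13/hugo/tpl"
)

func main() {
	// The variadic parts are concatenated into a single URL before the fetch.
	data := tpl.GetJSON("https://example.com/api/", "repos.json")
	fmt.Printf("%#v\n", data) // nil on error, otherwise the decoded JSON value

	rows := tpl.GetCSV(";", "https://example.com/data/", "stats.csv")
	for _, row := range rows {
		fmt.Println(row)
	}
}
```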
@ -157,7 +157,7 @@ func TestScpGetRemote(t *testing.T) {

}
}

func TestParseCsv(t *testing.T) {
func TestParseCSV(t *testing.T) {

tests := []struct {
csv []byte

@ -173,7 +173,7 @@ func TestParseCsv(t *testing.T) {

{[]byte("z|y|c\nd|e|f"), "|", "zycdef", false},
}
for _, test := range tests {
csv, err := parseCsv(test.csv, test.sep)
csv, err := parseCSV(test.csv, test.sep)
if test.err && err == nil {
t.Error("Expecting an error")
}
@ -972,7 +972,7 @@ func TestDateFormat(t *testing.T) {

}
}

func TestSafeHtml(t *testing.T) {
func TestSafeHTML(t *testing.T) {
for i, this := range []struct {
str string
tmplStr string

@ -997,7 +997,7 @@ func TestSafeHtml(t *testing.T) {

}

buf.Reset()
err = tmpl.Execute(buf, SafeHtml(this.str))
err = tmpl.Execute(buf, SafeHTML(this.str))
if err != nil {
t.Errorf("[%d] execute template with an escaped string value by SafeHtml returns unexpected error: %s", i, err)
}

@ -1007,7 +1007,7 @@ func TestSafeHtml(t *testing.T) {

}
}

func TestSafeHtmlAttr(t *testing.T) {
func TestSafeHTMLAttr(t *testing.T) {
for i, this := range []struct {
str string
tmplStr string

@ -1032,7 +1032,7 @@ func TestSafeHtmlAttr(t *testing.T) {

}

buf.Reset()
err = tmpl.Execute(buf, SafeHtmlAttr(this.str))
err = tmpl.Execute(buf, SafeHTMLAttr(this.str))
if err != nil {
t.Errorf("[%d] execute template with an escaped string value by SafeHtmlAttr returns unexpected error: %s", i, err)
}

@ -1042,7 +1042,7 @@ func TestSafeHtmlAttr(t *testing.T) {

}
}

func TestSafeCss(t *testing.T) {
func TestSafeCSS(t *testing.T) {
for i, this := range []struct {
str string
tmplStr string

@ -1067,7 +1067,7 @@ func TestSafeCss(t *testing.T) {

}

buf.Reset()
err = tmpl.Execute(buf, SafeCss(this.str))
err = tmpl.Execute(buf, SafeCSS(this.str))
if err != nil {
t.Errorf("[%d] execute template with an escaped string value by SafeCss returns unexpected error: %s", i, err)
}

@ -1077,7 +1077,7 @@ func TestSafeCss(t *testing.T) {

}
}

func TestSafeUrl(t *testing.T) {
func TestSafeURL(t *testing.T) {
for i, this := range []struct {
str string
tmplStr string

@ -1102,7 +1102,7 @@ func TestSafeUrl(t *testing.T) {

}

buf.Reset()
err = tmpl.Execute(buf, SafeUrl(this.str))
err = tmpl.Execute(buf, SafeURL(this.str))
if err != nil {
t.Errorf("[%d] execute template with an escaped string value by SafeUrl returns unexpected error: %s", i, err)
}
@ -4,12 +4,12 @@ import (

"sync"
)

var absUrlInit sync.Once
var ar *absurlReplacer
var absURLInit sync.Once
var ar *absURLReplacer

// for performance reasons, we reuse the first baseUrl given
func initAbsurlReplacer(baseURL string) {
absUrlInit.Do(func() {
absURLInit.Do(func() {
ar = newAbsurlReplacer(baseURL)
})
}

@ -18,7 +18,7 @@ func AbsURL(absURL string) (trs []link, err error) {

initAbsurlReplacer(absURL)

trs = append(trs, func(content []byte) []byte {
return ar.replaceInHtml(content)
return ar.replaceInHTML(content)
})
return
}

@ -27,7 +27,7 @@ func AbsURLInXML(absURL string) (trs []link, err error) {

initAbsurlReplacer(absURL)

trs = append(trs, func(content []byte) []byte {
return ar.replaceInXml(content)
return ar.replaceInXML(content)
})
return
}
@ -29,7 +29,7 @@ type contentlexer struct {

start int // item start position
width int // width of last element

matchers []absurlMatcher
matchers []absURLMatcher
state stateFunc
prefixLookup *prefixes

@ -101,13 +101,13 @@ func (l *contentlexer) emit() {

var mainPrefixRunes = []prefixRunes{{'s', 'r', 'c', '='}, {'h', 'r', 'e', 'f', '='}}

type absurlMatcher struct {
type absURLMatcher struct {
prefix int
match []byte
replacement []byte
}

func (a absurlMatcher) isSourceType() bool {
func (a absURLMatcher) isSourceType() bool {
return a.prefix == matchPrefixSrc
}

@ -177,7 +177,7 @@ func (l *contentlexer) replace() {

}
}

func doReplace(content []byte, matchers []absurlMatcher) []byte {
func doReplace(content []byte, matchers []absURLMatcher) []byte {
b := bp.GetBuffer()
defer bp.PutBuffer(b)

@ -191,48 +191,48 @@ func doReplace(content []byte, matchers []absurlMatcher) []byte {

return b.Bytes()
}

type absurlReplacer struct {
htmlMatchers []absurlMatcher
xmlMatchers []absurlMatcher
type absURLReplacer struct {
htmlMatchers []absURLMatcher
xmlMatchers []absURLMatcher
}

func newAbsurlReplacer(baseUrl string) *absurlReplacer {
u, _ := url.Parse(baseUrl)
func newAbsurlReplacer(baseURL string) *absURLReplacer {
u, _ := url.Parse(baseURL)
base := strings.TrimRight(u.String(), "/")

// HTML
dqHtmlMatch := []byte("\"/")
sqHtmlMatch := []byte("'/")
dqHTMLMatch := []byte("\"/")
sqHTMLMatch := []byte("'/")

// XML
dqXmlMatch := []byte(""/")
sqXmlMatch := []byte("'/")
dqXMLMatch := []byte(""/")
sqXMLMatch := []byte("'/")

dqHtml := []byte("\"" + base + "/")
sqHtml := []byte("'" + base + "/")
dqHTML := []byte("\"" + base + "/")
sqHTML := []byte("'" + base + "/")

dqXml := []byte(""" + base + "/")
sqXml := []byte("'" + base + "/")
dqXML := []byte(""" + base + "/")
sqXML := []byte("'" + base + "/")

return &absurlReplacer{
htmlMatchers: []absurlMatcher{
{matchPrefixSrc, dqHtmlMatch, dqHtml},
{matchPrefixSrc, sqHtmlMatch, sqHtml},
{matchPrefixHref, dqHtmlMatch, dqHtml},
{matchPrefixHref, sqHtmlMatch, sqHtml}},
xmlMatchers: []absurlMatcher{
{matchPrefixSrc, dqXmlMatch, dqXml},
{matchPrefixSrc, sqXmlMatch, sqXml},
{matchPrefixHref, dqXmlMatch, dqXml},
{matchPrefixHref, sqXmlMatch, sqXml},
return &absURLReplacer{
htmlMatchers: []absURLMatcher{
{matchPrefixSrc, dqHTMLMatch, dqHTML},
{matchPrefixSrc, sqHTMLMatch, sqHTML},
{matchPrefixHref, dqHTMLMatch, dqHTML},
{matchPrefixHref, sqHTMLMatch, sqHTML}},
xmlMatchers: []absURLMatcher{
{matchPrefixSrc, dqXMLMatch, dqXML},
{matchPrefixSrc, sqXMLMatch, sqXML},
{matchPrefixHref, dqXMLMatch, dqXML},
{matchPrefixHref, sqXMLMatch, sqXML},
}}

}

func (au *absurlReplacer) replaceInHtml(content []byte) []byte {
func (au *absURLReplacer) replaceInHTML(content []byte) []byte {
return doReplace(content, au.htmlMatchers)
}

func (au *absurlReplacer) replaceInXml(content []byte) []byte {
func (au *absURLReplacer) replaceInXML(content []byte) []byte {
return doReplace(content, au.xmlMatchers)
}
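Stripped of the lexer machinery, the matchers above describe a simple effect: root-relative src and href values are rewritten to absolute ones using the configured base URL. An illustrative standalone approximation, not the lexer-based implementation, covering only the double-quoted HTML case with an invented base URL and content:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	base := "http://base"
	content := `<a href="/docs/">docs</a> <img src="/logo.png">`

	// Roughly the effect of the dqHTMLMatch/dqHTML pair above: a leading "/
	// after src= or href= gains the base URL.
	replacer := strings.NewReplacer(
		`href="/`, `href="`+base+`/`,
		`src="/`, `src="`+base+`/`,
	)

	fmt.Println(replacer.Replace(content))
	// <a href="http://base/docs/">docs</a> <img src="http://base/logo.png">
}
```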
@ -54,7 +54,7 @@ func TestChainZeroTransformers(t *testing.T) {

}
}

func BenchmarkAbsUrl(b *testing.B) {
func BenchmarkAbsURL(b *testing.B) {
absURL, _ := AbsURL("http://base")
tr := NewChain(absURL...)

@ -64,7 +64,7 @@ func BenchmarkAbsUrl(b *testing.B) {

}
}

func TestAbsUrl(t *testing.T) {
func TestAbsURL(t *testing.T) {
absURL, _ := AbsURL("http://base")
tr := NewChain(absURL...)

@ -72,7 +72,7 @@ func TestAbsUrl(t *testing.T) {

}

func BenchmarkXmlAbsUrl(b *testing.B) {
func BenchmarkXMLAbsURL(b *testing.B) {
absURLInXML, _ := AbsURLInXML("http://base")
tr := NewChain(absURLInXML...)

@ -82,7 +82,7 @@ func BenchmarkXmlAbsUrl(b *testing.B) {

}
}

func TestXMLAbsUrl(t *testing.T) {
func TestXMLAbsURL(t *testing.T) {
absURLInXML, _ := AbsURLInXML("http://base")
tr := NewChain(absURLInXML...)
apply(t.Errorf, tr, xml_abs_url_tests)