Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-21 20:46:30 -05:00)
resource: Add front matter metadata to Resource

This commit expands the Resource interface with 3 new methods:

* Name
* Title
* Params

All of these can be set in the Page front matter. `Name` will get its default value from the base filename, and is the value used in the ByPrefix and GetByPrefix lookup methods.

Fixes #4244
parent f8a119b606
commit 20c9b6ec81

27 changed files with 627 additions and 187 deletions
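A usage sketch, adapted from the bundle test fixtures added in this commit (the file glob, resource names, and param values below are just the test data, not defaults or required values). Resource metadata is declared under `resources` in the page front matter:

    resources:
    - src: "*.jpg"
      name: "my-sunset-:counter"
      title: "Sunset Galore :counter"
      params:
        myParam: "My Sunny Param"

and read back in templates through the new methods:

    {{ $sunset := .Resources.GetByPrefix "my-sunset-1" }}
    {{ with $sunset }}
    Name: {{ .Name }} / Title: {{ .Title }} / myParam: {{ .Params.myparam }}
    {{ end }}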
@@ -225,7 +225,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 gp1 := sites.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md"))
 require.NotNil(t, gp1)
-require.Equal(t, "doc1", gp1.Title)
+require.Equal(t, "doc1", gp1.title)
 gp2 := sites.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md"))
 require.Nil(t, gp2)

@@ -317,9 +317,9 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 require.Len(t, homeEn.Translations(), 3)
 require.Equal(t, "fr", homeEn.Translations()[0].Lang())
 require.Equal(t, "nn", homeEn.Translations()[1].Lang())
-require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title)
+require.Equal(t, "På nynorsk", homeEn.Translations()[1].title)
 require.Equal(t, "nb", homeEn.Translations()[2].Lang())
-require.Equal(t, "På bokmål", homeEn.Translations()[2].Title, configSuffix)
+require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix)
 require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)

 sectFr := frSite.getPage(KindSection, "sect")

@@ -328,7 +328,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 require.Equal(t, "fr", sectFr.Lang())
 require.Len(t, sectFr.Translations(), 1)
 require.Equal(t, "en", sectFr.Translations()[0].Lang())
-require.Equal(t, "Sects", sectFr.Translations()[0].Title)
+require.Equal(t, "Sects", sectFr.Translations()[0].title)

 nnSite := sites.Sites[2]
 require.Equal(t, "nn", nnSite.Language.Lang)

@@ -495,9 +495,9 @@ func TestMultiSitesRebuild(t *testing.T) {
 require.Len(t, enSite.RegularPages, 6)
 require.Len(t, enSite.AllPages, 34)
 require.Len(t, frSite.RegularPages, 5)
-require.Equal(t, "new_fr_1", frSite.RegularPages[3].Title)
+require.Equal(t, "new_fr_1", frSite.RegularPages[3].title)
-require.Equal(t, "new_en_2", enSite.RegularPages[0].Title)
+require.Equal(t, "new_en_2", enSite.RegularPages[0].title)
-require.Equal(t, "new_en_1", enSite.RegularPages[1].Title)
+require.Equal(t, "new_en_1", enSite.RegularPages[1].title)

 rendered := readDestination(t, fs, "public/en/new1/index.html")
 require.True(t, strings.Contains(rendered, "new_en_1"), rendered)

@@ -531,7 +531,7 @@ func TestMultiSitesRebuild(t *testing.T) {
 },
 func(t *testing.T) {
 require.Len(t, enSite.RegularPages, 6, "Rename")
-require.Equal(t, "new_en_1", enSite.RegularPages[1].Title)
+require.Equal(t, "new_en_1", enSite.RegularPages[1].title)
 rendered := readDestination(t, fs, "public/en/new1renamed/index.html")
 require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
 }},

@@ -683,7 +683,7 @@ title = "Svenska"
 // Veriy Swedish site
 require.Len(t, svSite.RegularPages, 1)
 svPage := svSite.RegularPages[0]
-require.Equal(t, "Swedish Contentfile", svPage.Title)
+require.Equal(t, "Swedish Contentfile", svPage.title)
 require.Equal(t, "sv", svPage.Lang())
 require.Len(t, svPage.Translations(), 2)
 require.Len(t, svPage.AllTranslations(), 3)
@@ -104,7 +104,7 @@ func doTestNodeAsPage(t *testing.T, ugly, preserveTaxonomyNames bool) {
 require.True(t, home.Path() != "")

 section2 := nodes[5]
-require.Equal(t, "Section2", section2.Title)
+require.Equal(t, "Section2", section2.title)

 pages := sites.findAllPagesByKind(KindPage)
 require.Len(t, pages, 4)

@@ -252,9 +252,9 @@ func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
 for _, p := range pages {
 var want string
 if ugly {
-want = "/" + p.s.PathSpec.URLize(p.Title) + ".html"
+want = "/" + p.s.PathSpec.URLize(p.title) + ".html"
 } else {
-want = "/" + p.s.PathSpec.URLize(p.Title) + "/"
+want = "/" + p.s.PathSpec.URLize(p.title) + "/"
 }
 if p.URL() != want {
 t.Errorf("Taxonomy term URL mismatch: want %q, got %q", want, p.URL())
hugolib/page.go (166)
@@ -111,6 +111,10 @@ type Page struct {
 // provided by the Resource object.
 Resources resource.Resources

+// This is the raw front matter metadata that is going to be assigned to
+// the Resources above.
+resourcesMetadata []map[string]interface{}
+
 // translations will contain references to this page in other language
 // if available.
 translations Pages

@@ -120,7 +124,7 @@ type Page struct {
 translationKey string

 // Params contains configuration defined in the params section of page frontmatter.
-Params map[string]interface{}
+params map[string]interface{}

 // Content sections
 Content template.HTML

@@ -214,7 +218,7 @@ type Page struct {
 Site *SiteInfo `json:"-"`

-Title string
+title string
 Description string
 Keywords []string
 Data map[string]interface{}
@@ -468,7 +472,7 @@ func (p *Page) Param(key interface{}) (interface{}, error) {

 func (p *Page) traverseDirect(key string) (interface{}, error) {
 keyStr := strings.ToLower(key)
-if val, ok := p.Params[keyStr]; ok {
+if val, ok := p.params[keyStr]; ok {
 return val, nil
 }

@@ -476,7 +480,7 @@ func (p *Page) traverseDirect(key string) (interface{}, error) {
 }

 func (p *Page) traverseNested(keySegments []string) (interface{}, error) {
-result := traverse(keySegments, p.Params)
+result := traverse(keySegments, p.params)
 if result != nil {
 return result, nil
 }

@@ -519,7 +523,7 @@ func (p *Page) Author() Author {
 }

 func (p *Page) Authors() AuthorList {
-authorKeys, ok := p.Params["authors"]
+authorKeys, ok := p.params["authors"]
 if !ok {
 return AuthorList{}
 }

@@ -757,7 +761,7 @@ func (s *Site) newPageFromFile(fi *fileInfo) *Page {
 contentType: "",
 Source: Source{File: fi},
 Keywords: []string{}, Sitemap: Sitemap{Priority: -1},
-Params: make(map[string]interface{}),
+params: make(map[string]interface{}),
 translations: make(Pages, 0),
 sections: sectionsFromDir(fi.Dir()),
 Site: &s.Info,

@@ -927,7 +931,7 @@ func (p *Page) LinkTitle() string {
 if len(p.linkTitle) > 0 {
 return p.linkTitle
 }
-return p.Title
+return p.title
 }

 func (p *Page) shouldBuild() bool {
@@ -988,6 +992,22 @@ func (p *Page) RelPermalink() string {
 return p.relPermalink
 }

+// See resource.Resource
+func (p *Page) Name() string {
+if p.File != nil {
+return p.File.BaseFileName()
+}
+return p.title
+}
+
+func (p *Page) Title() string {
+return p.title
+}
+
+func (p *Page) Params() map[string]interface{} {
+return p.params
+}
+
 func (p *Page) subResourceTargetPathFactory(base string) string {
 return path.Join(p.relTargetPathBase, base)
 }
@@ -1094,39 +1114,39 @@ func (p *Page) update(f interface{}) error {
 loki := strings.ToLower(k)
 switch loki {
 case "title":
-p.Title = cast.ToString(v)
+p.title = cast.ToString(v)
-p.Params[loki] = p.Title
+p.params[loki] = p.title
 case "linktitle":
 p.linkTitle = cast.ToString(v)
-p.Params[loki] = p.linkTitle
+p.params[loki] = p.linkTitle
 case "description":
 p.Description = cast.ToString(v)
-p.Params[loki] = p.Description
+p.params[loki] = p.Description
 case "slug":
 p.Slug = cast.ToString(v)
-p.Params[loki] = p.Slug
+p.params[loki] = p.Slug
 case "url":
 if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
 return fmt.Errorf("Only relative URLs are supported, %v provided", url)
 }
 p.URLPath.URL = cast.ToString(v)
 p.URLPath.frontMatterURL = p.URLPath.URL
-p.Params[loki] = p.URLPath.URL
+p.params[loki] = p.URLPath.URL
 case "type":
 p.contentType = cast.ToString(v)
-p.Params[loki] = p.contentType
+p.params[loki] = p.contentType
 case "extension", "ext":
 p.extension = cast.ToString(v)
-p.Params[loki] = p.extension
+p.params[loki] = p.extension
 case "keywords":
 p.Keywords = cast.ToStringSlice(v)
-p.Params[loki] = p.Keywords
+p.params[loki] = p.Keywords
 case "date":
 p.Date, err = cast.ToTimeE(v)
 if err != nil {
 p.s.Log.ERROR.Printf("Failed to parse date '%v' in page %s", v, p.File.Path())
 }
-p.Params[loki] = p.Date
+p.params[loki] = p.Date
 case "lastmod":
 p.Lastmod, err = cast.ToTimeE(v)
 if err != nil {

@@ -1135,10 +1155,10 @@ func (p *Page) update(f interface{}) error {
 case "modified":
 vv, err := cast.ToTimeE(v)
 if err == nil {
-p.Params[loki] = vv
+p.params[loki] = vv
 modified = vv
 } else {
-p.Params[loki] = cast.ToString(v)
+p.params[loki] = cast.ToString(v)
 }
 case "outputs":
 o := cast.ToStringSlice(v)

@@ -1150,17 +1170,16 @@ func (p *Page) update(f interface{}) error {
 p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
 } else {
 p.outputFormats = outFormats
-p.Params[loki] = outFormats
+p.params[loki] = outFormats
 }

 }
-//p.Params[loki] = p.Keywords
 case "publishdate", "pubdate":
 p.PublishDate, err = cast.ToTimeE(v)
 if err != nil {
 p.s.Log.ERROR.Printf("Failed to parse publishdate '%v' in page %s", v, p.File.Path())
 }
-p.Params[loki] = p.PublishDate
+p.params[loki] = p.PublishDate
 case "expirydate", "unpublishdate":
 p.ExpiryDate, err = cast.ToTimeE(v)
 if err != nil {

@@ -1178,20 +1197,20 @@ func (p *Page) update(f interface{}) error {
 vv, err := cast.ToTimeE(v)
 if err == nil {
 p.PublishDate = vv
-p.Params[loki] = p.PublishDate
+p.params[loki] = p.PublishDate
 } else {
-p.Params[loki] = cast.ToString(v)
+p.params[loki] = cast.ToString(v)
 }
 }
 case "layout":
 p.Layout = cast.ToString(v)
-p.Params[loki] = p.Layout
+p.params[loki] = p.Layout
 case "markup":
 p.Markup = cast.ToString(v)
-p.Params[loki] = p.Markup
+p.params[loki] = p.Markup
 case "weight":
 p.Weight = cast.ToInt(v)
-p.Params[loki] = p.Weight
+p.params[loki] = p.Weight
 case "aliases":
 p.Aliases = cast.ToStringSlice(v)
 for _, alias := range p.Aliases {
@@ -1199,56 +1218,89 @@ func (p *Page) update(f interface{}) error {
 return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
 }
 }
-p.Params[loki] = p.Aliases
+p.params[loki] = p.Aliases
 case "status":
 p.Status = cast.ToString(v)
-p.Params[loki] = p.Status
+p.params[loki] = p.Status
 case "sitemap":
 p.Sitemap = parseSitemap(cast.ToStringMap(v))
-p.Params[loki] = p.Sitemap
+p.params[loki] = p.Sitemap
 case "iscjklanguage":
 isCJKLanguage = new(bool)
 *isCJKLanguage = cast.ToBool(v)
 case "translationkey":
 p.translationKey = cast.ToString(v)
-p.Params[loki] = p.translationKey
+p.params[loki] = p.translationKey
+case "resources":
+var resources []map[string]interface{}
+handled := true
+
+switch vv := v.(type) {
+case []map[interface{}]interface{}:
+for _, vvv := range vv {
+resources = append(resources, cast.ToStringMap(vvv))
+}
+case []map[string]interface{}:
+for _, vvv := range vv {
+resources = append(resources, vvv)
+}
+case []interface{}:
+for _, vvv := range vv {
+switch vvvv := vvv.(type) {
+case map[interface{}]interface{}:
+resources = append(resources, cast.ToStringMap(vvvv))
+case map[string]interface{}:
+resources = append(resources, vvvv)
+}
+}
+default:
+handled = false
+}
+
+if handled {
+p.params[loki] = resources
+p.resourcesMetadata = resources
+break
+}
+fallthrough
+
 default:
 // If not one of the explicit values, store in Params
 switch vv := v.(type) {
 case bool:
-p.Params[loki] = vv
+p.params[loki] = vv
 case string:
-p.Params[loki] = vv
+p.params[loki] = vv
 case int64, int32, int16, int8, int:
-p.Params[loki] = vv
+p.params[loki] = vv
 case float64, float32:
-p.Params[loki] = vv
+p.params[loki] = vv
 case time.Time:
-p.Params[loki] = vv
+p.params[loki] = vv
 default: // handle array of strings as well
 switch vvv := vv.(type) {
 case []interface{}:
 if len(vvv) > 0 {
 switch vvv[0].(type) {
 case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
-p.Params[loki] = vvv
+p.params[loki] = vvv
 case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
-p.Params[loki] = vvv
+p.params[loki] = vvv
 case []interface{}:
-p.Params[loki] = vvv
+p.params[loki] = vvv
 default:
 a := make([]string, len(vvv))
 for i, u := range vvv {
 a[i] = cast.ToString(u)
 }

-p.Params[loki] = a
+p.params[loki] = a
 }
 } else {
-p.Params[loki] = []string{}
+p.params[loki] = []string{}
 }
 default:
-p.Params[loki] = vv
+p.params[loki] = vv
 }
 }
 }
@@ -1263,7 +1315,7 @@ func (p *Page) update(f interface{}) error {
 } else if published != nil {
 p.Draft = !*published
 }
-p.Params["draft"] = p.Draft
+p.params["draft"] = p.Draft

 if p.Date.IsZero() {
 p.Date = p.PublishDate

@@ -1277,7 +1329,7 @@ func (p *Page) update(f interface{}) error {
 fi, err := p.s.Fs.Source.Stat(filepath.Join(p.s.PathSpec.AbsPathify(p.s.Cfg.GetString("contentDir")), p.File.Path()))
 if err == nil {
 p.Date = fi.ModTime()
-p.Params["date"] = p.Date
+p.params["date"] = p.Date
 }
 }

@@ -1289,9 +1341,9 @@ func (p *Page) update(f interface{}) error {
 }

 }
-p.Params["lastmod"] = p.Lastmod
+p.params["lastmod"] = p.Lastmod
-p.Params["publishdate"] = p.PublishDate
+p.params["publishdate"] = p.PublishDate
-p.Params["expirydate"] = p.ExpiryDate
+p.params["expirydate"] = p.ExpiryDate

 if isCJKLanguage != nil {
 p.isCJKLanguage = *isCJKLanguage

@@ -1302,7 +1354,7 @@ func (p *Page) update(f interface{}) error {
 p.isCJKLanguage = false
 }
 }
-p.Params["iscjklanguage"] = p.isCJKLanguage
+p.params["iscjklanguage"] = p.isCJKLanguage

 return nil

@@ -1317,7 +1369,7 @@ func (p *Page) getParamToLower(key string) interface{} {
 }

 func (p *Page) getParam(key string, stringToLower bool) interface{} {
-v := p.Params[strings.ToLower(key)]
+v := p.params[strings.ToLower(key)]

 if v == nil {
 return nil

@@ -1390,7 +1442,7 @@ func (p *Page) HasMenuCurrent(menuID string, me *MenuEntry) bool {

 // The following logic is kept from back when Hugo had both Page and Node types.
 // TODO(bep) consolidate / clean
-nme := MenuEntry{Page: p, Name: p.Title, URL: p.URL()}
+nme := MenuEntry{Page: p, Name: p.title, URL: p.URL()}

 for _, child := range me.Children {
 if nme.IsSameResource(child) {

@@ -1421,7 +1473,7 @@ func (p *Page) IsMenuCurrent(menuID string, inme *MenuEntry) bool {

 // The following logic is kept from back when Hugo had both Page and Node types.
 // TODO(bep) consolidate / clean
-me := MenuEntry{Page: p, Name: p.Title, URL: p.URL()}
+me := MenuEntry{Page: p, Name: p.title, URL: p.URL()}

 if !me.IsSameResource(inme) {
 return false

@@ -1465,7 +1517,7 @@ func (p *Page) Menus() PageMenus {
 p.pageMenusInit.Do(func() {
 p.pageMenus = PageMenus{}

-if ms, ok := p.Params["menu"]; ok {
+if ms, ok := p.params["menu"]; ok {
 link := p.RelPermalink()

 me := MenuEntry{Page: p, Name: p.LinkTitle(), Weight: p.Weight, URL: link}

@@ -1494,16 +1546,16 @@ func (p *Page) Menus() PageMenus {
 menus, err := cast.ToStringMapE(ms)

 if err != nil {
-p.s.Log.ERROR.Printf("unable to process menus for %q\n", p.Title)
+p.s.Log.ERROR.Printf("unable to process menus for %q\n", p.title)
 }

 for name, menu := range menus {
 menuEntry := MenuEntry{Page: p, Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
 if menu != nil {
-p.s.Log.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)
+p.s.Log.DEBUG.Printf("found menu: %q, in %q\n", name, p.title)
 ime, err := cast.ToStringMapE(menu)
 if err != nil {
-p.s.Log.ERROR.Printf("unable to process menus for %q: %s", p.Title, err)
+p.s.Log.ERROR.Printf("unable to process menus for %q: %s", p.title, err)
 }

 menuEntry.marshallMap(ime)

@@ -1805,7 +1857,7 @@ func (p *Page) RelRef(refs ...string) (string, error) {
 }

 func (p *Page) String() string {
-return fmt.Sprintf("Page(%q)", p.Title)
+return fmt.Sprintf("Page(%q)", p.title)
 }

 type URLPath struct {

@@ -2003,5 +2055,5 @@ func (p *Page) pathOrTitle() string {
 if p.Path() != "" {
 return p.Path()
 }
-return p.Title
+return p.title
 }
@@ -49,8 +49,8 @@ func preparePageGroupTestPages(t *testing.T) Pages {
 p.Date = cast.ToTime(src.date)
 p.PublishDate = cast.ToTime(src.date)
 p.ExpiryDate = cast.ToTime(src.date)
-p.Params["custom_param"] = src.param
+p.params["custom_param"] = src.param
-p.Params["custom_date"] = cast.ToTime(src.date)
+p.params["custom_date"] = cast.ToTime(src.date)
 pages = append(pages, p)
 }
 return pages

@@ -253,7 +253,7 @@ func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
 if err != nil {
 t.Fatalf("failed to prepare test page %s", f)
 }
-p.Params["custom_param"] = testStr
+p.params["custom_param"] = testStr
 pages := Pages{p}

 groups, err := pages.GroupByParam("custom_param")

@@ -268,9 +268,9 @@ func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
 func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
 t.Parallel()
 pages := preparePageGroupTestPages(t)
-delete(pages[1].Params, "custom_param")
+delete(pages[1].params, "custom_param")
-delete(pages[3].Params, "custom_param")
+delete(pages[3].params, "custom_param")
-delete(pages[4].Params, "custom_param")
+delete(pages[4].params, "custom_param")

 expect := PagesGroup{
 {Key: "foo", Pages: Pages{pages[0], pages[2]}},
@@ -129,7 +129,7 @@ func (p Pages) ByTitle() Pages {
 key := "pageSort.ByTitle"

 title := func(p1, p2 *Page) bool {
-return p1.Title < p2.Title
+return p1.title < p2.title
 }

 pages, _ := spc.get(key, p, pageBy(title).Sort)
@@ -74,7 +74,7 @@ func TestSortByN(t *testing.T) {
 assertFunc func(p Pages) bool
 }{
 {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight == 1 }},
-{(Pages).ByTitle, func(p Pages) bool { return p[0].Title == "ab" }},
+{(Pages).ByTitle, func(p Pages) bool { return p[0].title == "ab" }},
 {(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }},
 {(Pages).ByDate, func(p Pages) bool { return p[0].Date == d4 }},
 {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate == d4 }},

@@ -124,7 +124,7 @@ func TestPageSortByParam(t *testing.T) {
 s := newTestSite(t)

 unsorted := createSortTestPages(s, 10)
-delete(unsorted[9].Params, "arbitrarily")
+delete(unsorted[9].params, "arbitrarily")

 firstSetValue, _ := unsorted[0].Param(k)
 secondSetValue, _ := unsorted[1].Param(k)

@@ -163,9 +163,9 @@ func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) {
 pages[i].Date = dates[i]
 pages[i].Lastmod = dates[i]
 pages[i].Weight = weights[i]
-pages[i].Title = titles[i]
+pages[i].title = titles[i]
 // make sure we compare apples and ... apples ...
-pages[len(dates)-1-i].linkTitle = pages[i].Title + "l"
+pages[len(dates)-1-i].linkTitle = pages[i].title + "l"
 pages[len(dates)-1-i].PublishDate = dates[i]
 pages[len(dates)-1-i].ExpiryDate = dates[i]
 pages[len(dates)-1-i].Content = template.HTML(titles[i] + "_content")

@@ -180,7 +180,7 @@ func createSortTestPages(s *Site, num int) Pages {

 for i := 0; i < num; i++ {
 p := s.newPage(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))
-p.Params = map[string]interface{}{
+p.params = map[string]interface{}{
 "arbitrarily": map[string]interface{}{
 "nested": ("xyz" + fmt.Sprintf("%v", 100-i)),
 },
@@ -254,6 +254,12 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler {

 return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink()
 })

+// Assign metadata from front matter if set
+if len(p.resourcesMetadata) > 0 {
+resource.AssignMetadata(p.resourcesMetadata, p.Resources...)
+}
+
 }

 return h(ctx)
@@ -136,8 +136,14 @@ func TestPageBundlerSite(t *testing.T) {
 "TheContent",
 "Sunset RelPermalink: /2017/pageslug/sunset1.jpg",
 "Thumb Width: 123",
+"Thumb Name: my-sunset-1",
 "Short Sunset RelPermalink: /2017/pageslug/sunset2.jpg",
 "Short Thumb Width: 56",
+"1: Image Title: Sunset Galore 1",
+"1: Image Params: map[myparam:My Sunny Param]",
+"2: Image Title: Sunset Galore 2",
+"2: Image Params: map[myparam:My Sunny Param]",
+"1: Image myParam: Lower: My Sunny Param Caps: My Sunny Param",
 )
 th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug.html"), "TheContent")

@@ -205,10 +211,16 @@ date: 2017-10-09
 TheContent.
 `

-pageWithImageShortcodeContent := `---
+pageWithImageShortcodeAndResourceMetadataContent := `---
 title: "Bundle Galore"
 slug: pageslug
 date: 2017-10-09
+resources:
+- src: "*.jpg"
+  name: "my-sunset-:counter"
+  title: "Sunset Galore :counter"
+  params:
+    myParam: "My Sunny Param"
 ---

 TheContent.

@@ -227,17 +239,25 @@ TheContent.
 singleLayout := `
 Title: {{ .Title }}
 Content: {{ .Content }}
-{{ $sunset := .Resources.GetByPrefix "sunset1" }}
+{{ $sunset := .Resources.GetByPrefix "my-sunset-1" }}
 {{ with $sunset }}
 Sunset RelPermalink: {{ .RelPermalink }}
 {{ $thumb := .Fill "123x123" }}
 Thumb Width: {{ $thumb.Width }}
+Thumb Name: {{ $thumb.Name }}
+Thumb Title: {{ $thumb.Title }}
+Thumb RelPermalink: {{ $thumb.RelPermalink }}
+{{ end }}
+{{ range $i, $e := .Resources.ByType "image" }}
+{{ $i }}: Image Title: {{ .Title }}
+{{ $i }}: Image Name: {{ .Name }}
+{{ $i }}: Image Params: {{ printf "%v" .Params }}
+{{ $i }}: Image myParam: Lower: {{ .Params.myparam }} Caps: {{ .Params.MYPARAM }}
 {{ end }}

 `

 myShort := `
-{{ $sunset := .Page.Resources.GetByPrefix "sunset2" }}
+{{ $sunset := .Page.Resources.GetByPrefix "my-sunset-2" }}
 {{ with $sunset }}
 Short Sunset RelPermalink: {{ .RelPermalink }}
 {{ $thumb := .Fill "56x56" }}

@@ -268,7 +288,7 @@ Short Thumb Width: {{ $thumb.Width }}
 writeSource(t, fs, filepath.Join(workDir, "base", "assets", "pages", "mypage.md"), pageContent)

 // Bundle
-writeSource(t, fs, filepath.Join(workDir, "base", "b", "index.md"), pageWithImageShortcodeContent)
+writeSource(t, fs, filepath.Join(workDir, "base", "b", "index.md"), pageWithImageShortcodeAndResourceMetadataContent)
 writeSource(t, fs, filepath.Join(workDir, "base", "b", "1.md"), pageContent)
 writeSource(t, fs, filepath.Join(workDir, "base", "b", "2.md"), pageContent)
 writeSource(t, fs, filepath.Join(workDir, "base", "b", "custom-mime.bep"), "bepsays")
@@ -134,7 +134,7 @@ func TestGetPage(t *testing.T) {
 page := s.getPage(test.kind, test.path...)
 assert.NotNil(page, errorMsg)
 assert.Equal(test.kind, page.Kind, errorMsg)
-assert.Equal(test.expectedTitle, page.Title)
+assert.Equal(test.expectedTitle, page.title)
 }

 }
@@ -74,7 +74,7 @@ type targetPathDescriptor struct {
 // and URLs for this Page.
 func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor, error) {
 if p.targetPathDescriptorPrototype == nil {
-panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.Title, p.Kind))
+panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.title, p.Kind))
 }
 d := *p.targetPathDescriptorPrototype
 d.Type = t

@@ -271,9 +271,9 @@ func (p *Page) createRelativeTargetPath() string {

 if len(p.outputFormats) == 0 {
 if p.Kind == kindUnknown {
-panic(fmt.Sprintf("Page %q has unknown kind", p.Title))
+panic(fmt.Sprintf("Page %q has unknown kind", p.title))
 }
-panic(fmt.Sprintf("Page %q missing output format(s)", p.Title))
+panic(fmt.Sprintf("Page %q missing output format(s)", p.title))
 }

 // Choose the main output format. In most cases, this will be HTML.
@@ -468,8 +468,8 @@ func TestDegenerateEmptyPage(t *testing.T) {
 }

 func checkPageTitle(t *testing.T, page *Page, title string) {
-if page.Title != title {
+if page.title != title {
-t.Fatalf("Page title is: %s. Expected %s", page.Title, title)
+t.Fatalf("Page title is: %s. Expected %s", page.title, title)
 }
 }

@@ -1066,8 +1066,8 @@ func TestCalendarParamsVariants(t *testing.T) {
 pageTOML, _ := s.NewPage("test/fileTOML.md")
 _, _ = pageTOML.ReadFrom(strings.NewReader(pageWithCalendarTOMLFrontmatter))

-assert.True(t, compareObjects(pageJSON.Params, pageYAML.Params))
+assert.True(t, compareObjects(pageJSON.params, pageYAML.params))
-assert.True(t, compareObjects(pageJSON.Params, pageTOML.Params))
+assert.True(t, compareObjects(pageJSON.params, pageTOML.params))

 }

@@ -1095,10 +1095,10 @@ func TestDifferentFrontMatterVarTypes(t *testing.T) {
 }
 param := page.getParamToLower("a_table")
 if param == nil {
-t.Errorf("frontmatter not handling tables correctly should be type of %v, got: type of %v", reflect.TypeOf(page.Params["a_table"]), reflect.TypeOf(param))
+t.Errorf("frontmatter not handling tables correctly should be type of %v, got: type of %v", reflect.TypeOf(page.params["a_table"]), reflect.TypeOf(param))
 }
 if cast.ToStringMap(param)["a_key"] != "a_value" {
-t.Errorf("frontmatter not handling values inside a table correctly should be %s, got: %s", "a_value", cast.ToStringMap(page.Params["a_table"])["a_key"])
+t.Errorf("frontmatter not handling values inside a table correctly should be %s, got: %s", "a_value", cast.ToStringMap(page.params["a_table"])["a_key"])
 }
 }

@@ -1370,7 +1370,7 @@ func TestPageParams(t *testing.T) {
 p, err := s.NewPageFrom(strings.NewReader(c), "content/post/params.md")
 require.NoError(t, err, "err during parse", "#%d", i)
 for key := range wantedMap {
-assert.Equal(t, wantedMap[key], p.Params[key], "#%d", key)
+assert.Equal(t, wantedMap[key], p.params[key], "#%d", key)
 }
 }
 }
@@ -54,22 +54,22 @@ Content

 assert.NoError(err)
 assert.Len(result, 2)
-assert.Equal("Page 2", result[0].Title)
+assert.Equal("Page 2", result[0].title)
-assert.Equal("Page 1", result[1].Title)
+assert.Equal("Page 1", result[1].title)

 result, err = s.RegularPages.Related(s.RegularPages[0])
 assert.Len(result, 2)
-assert.Equal("Page 2", result[0].Title)
+assert.Equal("Page 2", result[0].title)
-assert.Equal("Page 3", result[1].Title)
+assert.Equal("Page 3", result[1].title)

 result, err = s.RegularPages.RelatedIndices(s.RegularPages[0], "keywords")
 assert.Len(result, 2)
-assert.Equal("Page 2", result[0].Title)
+assert.Equal("Page 2", result[0].title)
-assert.Equal("Page 3", result[1].Title)
+assert.Equal("Page 3", result[1].title)

 result, err = s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
 assert.NoError(err)
 assert.Len(result, 2)
-assert.Equal("Page 2", result[0].Title)
+assert.Equal("Page 2", result[0].title)
-assert.Equal("Page 3", result[1].Title)
+assert.Equal("Page 3", result[1].title)
 }
@@ -270,7 +270,7 @@ func (p *Page) Paginator(options ...interface{}) (*Pager, error) {
 // If it's not, one will be created with all pages in Data["Pages"].
 func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) {
 if !p.IsNode() {
-return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title)
+return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title)
 }
 pagerSize, err := resolvePagerSize(p.s.Cfg, options...)

@@ -321,7 +321,7 @@ func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error)
 // Note that repeated calls will return the same result, even if the sequence is different.
 func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
 if !p.IsNode() {
-return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.Title)
+return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title)
 }

 pagerSize, err := resolvePagerSize(p.s.Cfg, options...)
@@ -154,7 +154,7 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) {
 func pageToPermalinkTitle(p *Page, _ string) (string, error) {
 // Page contains Node which has Title
 // (also contains URLPath which has Slug, sometimes)
-return p.s.PathSpec.URLize(p.Title), nil
+return p.s.PathSpec.URLize(p.title), nil
 }

 // pageToPermalinkFilename returns the URL-safe form of the filename
@@ -1877,7 +1877,7 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page {

 func (s *Site) newHomePage() *Page {
 p := s.newNodePage(KindHome)
-p.Title = s.Info.Title
+p.title = s.Info.Title
 pages := Pages{}
 p.Data["Pages"] = pages
 p.Pages = pages

@@ -1892,10 +1892,10 @@ func (s *Site) newTaxonomyPage(plural, key string) *Page {
 // Keep (mostly) as is in the title
 // We make the first character upper case, mostly because
 // it is easier to reason about in the tests.
-p.Title = helpers.FirstUpper(key)
+p.title = helpers.FirstUpper(key)
 key = s.PathSpec.MakePathSanitized(key)
 } else {
-p.Title = strings.Replace(s.titleFunc(key), "-", " ", -1)
+p.title = strings.Replace(s.titleFunc(key), "-", " ", -1)
 }

 return p

@@ -1906,15 +1906,15 @@ func (s *Site) newSectionPage(name string) *Page {

 sectionName := helpers.FirstUpper(name)
 if s.Cfg.GetBool("pluralizeListTitles") {
-p.Title = inflect.Pluralize(sectionName)
+p.title = inflect.Pluralize(sectionName)
 } else {
-p.Title = sectionName
+p.title = sectionName
 }
 return p
 }

 func (s *Site) newTaxonomyTermsPage(plural string) *Page {
 p := s.newNodePage(KindTaxonomyTerm, plural)
-p.Title = s.titleFunc(plural)
+p.title = s.titleFunc(plural)
 return p
 }
@@ -212,7 +212,7 @@ func (s *Site) renderPaginator(p *PageOutput) error {

 if err := s.renderAndWritePage(
 &s.PathSpec.ProcessingStats.PaginatorPages,
-pagerNode.Title,
+pagerNode.title,
 targetPath, pagerNode, layouts...); err != nil {
 return err
 }

@@ -252,7 +252,7 @@ func (s *Site) renderRSS(p *PageOutput) error {
 return err
 }

-return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.Title,
+return s.renderAndWriteXML(&s.PathSpec.ProcessingStats.Pages, p.title,
 targetPath, p, layouts...)
 }

@@ -267,7 +267,7 @@ func (s *Site) render404() error {

 p := s.newNodePage(kind404)

-p.Title = "404 Page not found"
+p.title = "404 Page not found"
 p.Data["Pages"] = s.Pages
 p.Pages = s.Pages
 p.URLPath.URL = "404.html"
@@ -143,13 +143,13 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 // > b,c,d where b and d have content files.
 b := p.s.getPage(KindSection, "empty2", "b")
 assert.NotNil(b)
-assert.Equal("T40_-1", b.Title)
+assert.Equal("T40_-1", b.title)
 c := p.s.getPage(KindSection, "empty2", "b", "c")
 assert.NotNil(c)
-assert.Equal("Cs", c.Title)
+assert.Equal("Cs", c.title)
 d := p.s.getPage(KindSection, "empty2", "b", "c", "d")
 assert.NotNil(d)
-assert.Equal("T41_-1", d.Title)
+assert.Equal("T41_-1", d.title)

 assert.False(c.Eq(d))
 assert.True(c.Eq(c))

@@ -165,7 +165,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}

 }},
 {"top", func(p *Page) {
-assert.Equal("Tops", p.Title)
+assert.Equal("Tops", p.title)
 assert.Len(p.Pages, 2)
 assert.Equal("mypage2.md", p.Pages[0].LogicalName())
 assert.Equal("mypage3.md", p.Pages[1].LogicalName())

@@ -178,16 +178,16 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 assert.True(active)
 }},
 {"l1", func(p *Page) {
-assert.Equal("L1s", p.Title)
+assert.Equal("L1s", p.title)
 assert.Len(p.Pages, 2)
 assert.True(p.Parent().IsHome())
 assert.Len(p.Sections(), 2)
 }},
 {"l1,l2", func(p *Page) {
-assert.Equal("T2_-1", p.Title)
+assert.Equal("T2_-1", p.title)
 assert.Len(p.Pages, 3)
 assert.Equal(p, p.Pages[0].Parent())
-assert.Equal("L1s", p.Parent().Title)
+assert.Equal("L1s", p.Parent().title)
 assert.Equal("/l1/l2/", p.URLPath.URL)
 assert.Equal("/l1/l2/", p.RelPermalink())
 assert.Len(p.Sections(), 1)

@@ -223,16 +223,16 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}

 }},
 {"l1,l2_2", func(p *Page) {
-assert.Equal("T22_-1", p.Title)
+assert.Equal("T22_-1", p.title)
 assert.Len(p.Pages, 2)
 assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].Path())
-assert.Equal("L1s", p.Parent().Title)
+assert.Equal("L1s", p.Parent().title)
 assert.Len(p.Sections(), 0)
 }},
 {"l1,l2,l3", func(p *Page) {
-assert.Equal("T3_-1", p.Title)
+assert.Equal("T3_-1", p.title)
 assert.Len(p.Pages, 2)
-assert.Equal("T2_-1", p.Parent().Title)
+assert.Equal("T2_-1", p.Parent().title)
 assert.Len(p.Sections(), 0)

 l1 := p.s.getPage(KindSection, "l1")

@@ -252,7 +252,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}

 }},
 {"perm a,link", func(p *Page) {
-assert.Equal("T9_-1", p.Title)
+assert.Equal("T9_-1", p.title)
 assert.Equal("/perm-a/link/", p.RelPermalink())
 assert.Len(p.Pages, 4)
 first := p.Pages[0]
@ -160,7 +160,7 @@ func TestFutureExpirationRender(t *testing.T) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if s.AllPages[0].Title == "doc2" {
|
if s.AllPages[0].title == "doc2" {
|
||||||
t.Fatal("Expired content published unexpectedly")
|
t.Fatal("Expired content published unexpectedly")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -642,40 +642,40 @@ func TestOrderedPages(t *testing.T) {
|
||||||
|
|
||||||
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
|
s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
|
||||||
|
|
||||||
if s.getPage(KindSection, "sect").Pages[1].Title != "Three" || s.getPage(KindSection, "sect").Pages[2].Title != "Four" {
|
if s.getPage(KindSection, "sect").Pages[1].title != "Three" || s.getPage(KindSection, "sect").Pages[2].title != "Four" {
|
||||||
t.Error("Pages in unexpected order.")
|
t.Error("Pages in unexpected order.")
|
||||||
}
|
}
|
||||||
|
|
||||||
bydate := s.RegularPages.ByDate()
|
bydate := s.RegularPages.ByDate()
|
||||||
|
|
||||||
if bydate[0].Title != "One" {
|
if bydate[0].title != "One" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].title)
|
||||||
}
|
}
|
||||||
|
|
||||||
rev := bydate.Reverse()
|
rev := bydate.Reverse()
|
||||||
if rev[0].Title != "Three" {
|
if rev[0].title != "Three" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].title)
|
||||||
}
|
}
|
||||||
|
|
||||||
bypubdate := s.RegularPages.ByPublishDate()
|
bypubdate := s.RegularPages.ByPublishDate()
|
||||||
|
|
||||||
if bypubdate[0].Title != "One" {
|
if bypubdate[0].title != "One" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].title)
|
||||||
}
|
}
|
||||||
|
|
||||||
rbypubdate := bypubdate.Reverse()
|
rbypubdate := bypubdate.Reverse()
|
||||||
if rbypubdate[0].Title != "Three" {
|
if rbypubdate[0].title != "Three" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].title)
|
||||||
}
|
}
|
||||||
|
|
||||||
bylength := s.RegularPages.ByLength()
|
bylength := s.RegularPages.ByLength()
|
||||||
if bylength[0].Title != "One" {
|
if bylength[0].title != "One" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].title)
|
||||||
}
|
}
|
||||||
|
|
||||||
rbylength := bylength.Reverse()
|
rbylength := bylength.Reverse()
|
||||||
if rbylength[0].Title != "Four" {
|
if rbylength[0].title != "Four" {
|
||||||
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].Title)
|
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].title)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -714,8 +714,8 @@ func TestGroupedPages(t *testing.T) {
 	if rbysection[2].Key != "sect1" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect1", rbysection[2].Key)
 	}
-	if rbysection[0].Pages[0].Title != "Four" {
-		t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].Title)
+	if rbysection[0].Pages[0].title != "Four" {
+		t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].title)
 	}
 	if len(rbysection[2].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))

@@ -734,8 +734,8 @@ func TestGroupedPages(t *testing.T) {
 	if bytype[2].Key != "sect3" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect3", bytype[2].Key)
 	}
-	if bytype[2].Pages[0].Title != "Four" {
-		t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].Title)
+	if bytype[2].Pages[0].title != "Four" {
+		t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].title)
 	}
 	if len(bytype[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages))

@@ -762,8 +762,8 @@ func TestGroupedPages(t *testing.T) {
 	if bypubdate[1].Key != "0001" {
 		t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "0001", bypubdate[1].Key)
 	}
-	if bypubdate[0].Pages[0].Title != "Three" {
-		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].Title)
+	if bypubdate[0].Pages[0].title != "Three" {
+		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].title)
 	}
 	if len(bypubdate[0].Pages) != 3 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))

@@ -782,8 +782,8 @@ func TestGroupedPages(t *testing.T) {
 	if byparam[2].Key != "bar" {
 		t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "bar", byparam[2].Key)
 	}
-	if byparam[2].Pages[0].Title != "Three" {
-		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].Title)
+	if byparam[2].Pages[0].title != "Three" {
+		t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].title)
 	}
 	if len(byparam[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))

@@ -815,8 +815,8 @@ func TestGroupedPages(t *testing.T) {
 	if byParamDate[1].Key != "1979-05" {
 		t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "1979-05", byParamDate[1].Key)
 	}
-	if byParamDate[1].Pages[0].Title != "One" {
-		t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].Title)
+	if byParamDate[1].Pages[0].title != "One" {
+		t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].title)
 	}
 	if len(byParamDate[0].Pages) != 2 {
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byParamDate[2].Pages))

@@ -872,16 +872,16 @@ func TestWeightedTaxonomies(t *testing.T) {
 	writeSourcesToSource(t, "content", fs, sources...)
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
-	if s.Taxonomies["tags"]["a"][0].Page.Title != "foo" {
-		t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title)
+	if s.Taxonomies["tags"]["a"][0].Page.title != "foo" {
+		t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.title)
 	}
 
-	if s.Taxonomies["categories"]["d"][0].Page.Title != "bar" {
-		t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.Title)
+	if s.Taxonomies["categories"]["d"][0].Page.title != "bar" {
+		t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.title)
 	}
 
-	if s.Taxonomies["categories"]["e"][0].Page.Title != "bza" {
-		t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.Title)
+	if s.Taxonomies["categories"]["e"][0].Page.title != "bza" {
+		t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.title)
 	}
 }

@@ -43,7 +43,7 @@ type WeightedPage struct {
 }
 
 func (w WeightedPage) String() string {
-	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title)
+	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.title)
 }
 
 // OrderedTaxonomy is another representation of an Taxonomy using an array rather than a map.

@@ -214,7 +214,7 @@ func (wp WeightedPages) Count() int { return len(wp) }
 func (wp WeightedPages) Less(i, j int) bool {
 	if wp[i].Weight == wp[j].Weight {
 		if wp[i].Page.Date.Equal(wp[j].Page.Date) {
-			return wp[i].Page.Title < wp[j].Page.Title
+			return wp[i].Page.title < wp[j].Page.title
 		}
 		return wp[i].Page.Date.After(wp[i].Page.Date)
 	}

@@ -200,11 +200,11 @@ permalinkeds:
 	if preserveTaxonomyNames {
 		helloWorld := s.getPage(KindTaxonomy, "others", "Hello Hugo world")
 		require.NotNil(t, helloWorld)
-		require.Equal(t, "Hello Hugo world", helloWorld.Title)
+		require.Equal(t, "Hello Hugo world", helloWorld.title)
 	} else {
 		helloWorld := s.getPage(KindTaxonomy, "others", "hello-hugo-world")
 		require.NotNil(t, helloWorld)
-		require.Equal(t, "Hello Hugo World", helloWorld.Title)
+		require.Equal(t, "Hello Hugo World", helloWorld.title)
 	}
 
 	// Issue #2977

@@ -217,7 +217,7 @@ func dumpPages(pages ...*Page) {
 	for i, p := range pages {
 		fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n",
 			i+1,
-			p.Kind, p.Title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections()))
+			p.Kind, p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections()))
 	}
 }

@@ -231,6 +231,9 @@ func TestCoverHTML() error {
 		}
 		b, err := ioutil.ReadFile(cover)
 		if err != nil {
+			if os.IsNotExist(err) {
+				continue
+			}
 			return err
 		}
 		idx := bytes.Index(b, []byte{'\n'})

@@ -208,7 +208,7 @@ func (i *Image) doWithImageConfig(action, spec string, f func(src image.Image, c
 
 	key := i.relTargetPathForRel(i.filenameFromConfig(conf), false)
 
-	return i.spec.imageCache.getOrCreate(i.spec, key, func(resourceCacheFilename string) (*Image, error) {
+	return i.spec.imageCache.getOrCreate(i, key, func(resourceCacheFilename string) (*Image, error) {
 		ci := i.clone()
 
 		ci.setBasePath(conf)

@@ -15,7 +15,6 @@ package resource
 
 import (
 	"fmt"
-	"os"
 	"path/filepath"
 	"strings"
 	"sync"

@@ -50,7 +49,7 @@ func (c *imageCache) deleteByPrefix(prefix string) {
 }
 
 func (c *imageCache) getOrCreate(
-	spec *Spec, key string, create func(resourceCacheFilename string) (*Image, error)) (*Image, error) {
+	parent *Image, key string, create func(resourceCacheFilename string) (*Image, error)) (*Image, error) {
 
 	relTargetFilename := key
 

@@ -77,19 +76,20 @@ func (c *imageCache) getOrCreate(
 	// but the count of processed image variations for this site.
 	c.pathSpec.ProcessingStats.Incr(&c.pathSpec.ProcessingStats.ProcessedImages)
 
-	r, err := spec.NewResourceFromFilename(nil, c.absPublishDir, cacheFilename, relTargetFilename)
-	notFound := err != nil && os.IsNotExist(err)
-	if err != nil && !os.IsNotExist(err) {
+	exists, err := helpers.Exists(cacheFilename, c.pathSpec.Fs.Source)
+	if err != nil {
 		return nil, err
 	}
 
-	if notFound {
+	if exists {
+		img = parent.clone()
+		img.relTargetPath = relTargetFilename
+		img.absSourceFilename = cacheFilename
+	} else {
 		img, err = create(cacheFilename)
 		if err != nil {
 			return nil, err
 		}
-	} else {
-		img = r.(*Image)
 	}
 
 	c.mu.Lock()

@@ -102,7 +102,7 @@ func (c *imageCache) getOrCreate(
 
 	c.mu.Unlock()
 
-	if notFound {
+	if !exists {
 		// File already written to destination
 		return img, nil
 	}

@@ -147,3 +147,25 @@ func TestDecodeImaging(t *testing.T) {
 	assert.Equal(42, imaging.Quality)
 	assert.Equal("nearestneighbor", imaging.ResampleFilter)
 }
+
+func TestImageWithMetadata(t *testing.T) {
+	assert := require.New(t)
+
+	image := fetchSunset(assert)
+
+	var meta = []map[string]interface{}{
+		map[string]interface{}{
+			"title": "My Sunset",
+			"name": "Sunset #:counter",
+			"src": "*.jpg",
+		},
+	}
+
+	assert.NoError(AssignMetadata(meta, image))
+	assert.Equal("Sunset #1", image.Name())
+
+	resized, err := image.Resize("200x")
+	assert.NoError(err)
+	assert.Equal("Sunset #1", resized.Name())
+
+}

@@ -19,8 +19,11 @@ import (
 	"os"
 	"path"
 	"path/filepath"
+	"strconv"
 	"strings"
 
+	"github.com/spf13/cast"
+
 	"github.com/gohugoio/hugo/media"
 	"github.com/gohugoio/hugo/source"
 

@@ -29,6 +32,7 @@ import (
 
 var (
 	_ Resource = (*genericResource)(nil)
+	_ metaAssigner = (*genericResource)(nil)
 	_ Source = (*genericResource)(nil)
 	_ Cloner = (*genericResource)(nil)
 )

@@ -48,11 +52,38 @@ type Cloner interface {
 	WithNewBase(base string) Resource
 }
 
+type metaAssigner interface {
+	setTitle(title string)
+	setName(name string)
+	setParams(params map[string]interface{})
+}
+
 // Resource represents a linkable resource, i.e. a content page, image etc.
 type Resource interface {
+	// Permalink represents the absolute link to this resource.
 	Permalink() string
+
+	// RelPermalink represents the host relative link to this resource.
 	RelPermalink() string
+
+	// ResourceType is the resource type. For most file types, this is the main
+	// part of the MIME type, e.g. "image", "application", "text" etc.
+	// For content pages, this value is "page".
 	ResourceType() string
+
+	// Name is the logical name of this resource. This can be set in the front matter
+	// metadata for this resource. If not set, Hugo will assign a value.
+	// This will in most cases be the base filename.
+	// So, for the image "/some/path/sunset.jpg" this will be "sunset.jpg".
+	// The value returned by this method will be used in the GetByPrefix and ByPrefix methods
+	// on Resources.
+	Name() string
+
+	// Title returns the title if set in front matter. For content pages, this will be the expected value.
+	Title() string
+
+	// Params set in front matter for this resource.
+	Params() map[string]interface{}
 }
 
 // Resources represents a slice of resources, which can be a mix of different types.

@@ -97,16 +128,7 @@ func (r Resources) ByPrefix(prefix string) Resources {
 }
 
 func matchesPrefix(r Resource, prefix string) bool {
-	var name string
-	f, ok := r.(source.File)
-	if ok {
-		name = f.BaseFileName()
-	} else {
-		_, name = filepath.Split(r.RelPermalink())
-	}
-	name = strings.ToLower(name)
-
-	return strings.HasPrefix(name, prefix)
+	return strings.HasPrefix(strings.ToLower(r.Name()), prefix)
 }
 
 type Spec struct {

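Note (illustrative, not part of this commit): with the simplification above, prefix lookups are driven by the resource's Name() rather than its source filename. A minimal stand-alone version of the comparison, assuming the prefix is already lower-cased by the caller:

package main

import (
	"fmt"
	"strings"
)

// matchesPrefix mirrors the simplified helper above: compare the
// lower-cased resource name against a (lower-cased) prefix.
func matchesPrefix(name, prefix string) bool {
	return strings.HasPrefix(strings.ToLower(name), prefix)
}

func main() {
	fmt.Println(matchesPrefix("Logo2.png", "logo"))  // true
	fmt.Println(matchesPrefix("sunset.jpg", "logo")) // false
}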
@@ -238,6 +260,10 @@ type genericResource struct {
 	// Base is set when the output format's path has a offset, e.g. for AMP.
 	base string
 
+	title string
+	name string
+	params map[string]interface{}
+
 	// Absolute filename to the source, including any content folder path.
 	absSourceFilename string
 	absPublishDir string

@@ -256,6 +282,30 @@ func (l *genericResource) RelPermalink() string {
 	return l.relPermalinkForRel(l.relTargetPath, true)
 }
 
+func (l *genericResource) Name() string {
+	return l.name
+}
+
+func (l *genericResource) Title() string {
+	return l.title
+}
+
+func (l *genericResource) Params() map[string]interface{} {
+	return l.params
+}
+
+func (l *genericResource) setTitle(title string) {
+	l.title = title
+}
+
+func (l *genericResource) setName(name string) {
+	l.name = name
+}
+
+func (l *genericResource) setParams(params map[string]interface{}) {
+	l.params = params
+}
+
 // Implement the Cloner interface.
 func (l genericResource) WithNewBase(base string) Resource {
 	l.base = base

@@ -306,6 +356,98 @@ func (l *genericResource) Publish() error {
 	return helpers.WriteToDisk(target, f, l.spec.Fs.Destination)
 }
 
+// AssignMetadata assigns the given metadata to those resources that supports updates
+// and matching by wildcard given in `src` using `filepath.Match` with lower cased values.
+// This assignment is additive, but the most specific match needs to be first.
+// The `name` and `title` metadata field support shell-matched collection it got a match in.
+// See https://golang.org/pkg/path/filepath/#Match
+func AssignMetadata(metadata []map[string]interface{}, resources ...Resource) error {
+
+	counters := make(map[string]int)
+
+	for _, r := range resources {
+		if _, ok := r.(metaAssigner); !ok {
+			continue
+		}
+
+		var (
+			nameSet, titleSet, paramsSet bool
+			currentCounter = 0
+			resourceSrcKey = strings.ToLower(r.Name())
+		)
+
+		ma := r.(metaAssigner)
+		for _, meta := range metadata {
+			if nameSet && titleSet && paramsSet {
+				// No need to look further
+				break
+			}
+
+			src, found := meta["src"]
+			if !found {
+				return fmt.Errorf("missing 'src' in metadata for resource")
+			}
+
+			srcKey := strings.ToLower(cast.ToString(src))
+
+			match, err := filepath.Match(srcKey, resourceSrcKey)
+			if err != nil {
+				return fmt.Errorf("failed to match resource with metadata: %s", err)
+			}
+
+			if match {
+				if !nameSet {
+					name, found := meta["name"]
+					if found {
+						if currentCounter == 0 {
+							currentCounter = counters[srcKey] + 1
+							counters[srcKey] = currentCounter
+						}
+
+						ma.setName(replaceResourcePlaceholders(cast.ToString(name), currentCounter))
+						nameSet = true
+					}
+				}
+
+				if !titleSet {
+					title, found := meta["title"]
+					if found {
+						if currentCounter == 0 {
+							currentCounter = counters[srcKey] + 1
+							counters[srcKey] = currentCounter
+						}
+						ma.setTitle((replaceResourcePlaceholders(cast.ToString(title), currentCounter)))
+						titleSet = true
+					}
+				}
+
+				if !paramsSet {
+					params, found := meta["params"]
+					if found {
+						m := cast.ToStringMap(params)
+						// Needed for case insensitive fetching of params values
+						helpers.ToLowerMap(m)
+						ma.setParams(m)
+
+						if currentCounter == 0 {
+							currentCounter = counters[srcKey] + 1
+							counters[srcKey] = currentCounter
+						}
+
+						paramsSet = true
+					}
+				}
+			}
+		}
+	}
+
+	return nil
+}
+
+func replaceResourcePlaceholders(in string, counter int) string {
+	return strings.Replace(in, ":counter", strconv.Itoa(counter), -1)
+}
+
 func (l *genericResource) target() string {
 	target := l.relTargetPathForRel(l.relTargetPath, false)
 	if l.spec.PathSpec.Languages.IsMultihost() {

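Note (illustrative, not part of this commit): a self-contained sketch of the two matching mechanisms AssignMetadata relies on, using only the standard library: filepath.Match against the lower-cased resource name, and the ":counter" placeholder substitution performed by replaceResourcePlaceholders. The resource names used here are made up for the example.

package main

import (
	"fmt"
	"path/filepath"
	"strconv"
	"strings"
)

func main() {
	// Wildcard matching: the `src` pattern and the resource name are both
	// compared in lower case, as AssignMetadata does above.
	for _, name := range []string{"Logo1.png", "sunset.jpg"} {
		match, err := filepath.Match("*logo*", strings.ToLower(name))
		if err != nil {
			panic(err) // an invalid pattern such as "[]" would end up here
		}
		fmt.Printf("%s matches *logo*: %v\n", name, match)
	}

	// Placeholder substitution: ":counter" is replaced with a per-pattern
	// counter, mirroring replaceResourcePlaceholders.
	for counter := 1; counter <= 2; counter++ {
		fmt.Println(strings.Replace("Logo Name #:counter", ":counter", strconv.Itoa(counter), -1))
	}
}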
@@ -330,5 +472,8 @@ func (r *Spec) newGenericResource(
 		relTargetPath: baseFilename,
 		resourceType: resourceType,
 		spec: r,
+		params: make(map[string]interface{}),
+		name: baseFilename,
+		title: baseFilename,
 	}
 }

@@ -14,6 +14,7 @@
 package resource
 
 import (
+	"fmt"
 	"path"
 	"path/filepath"
 	"testing"

@@ -129,4 +130,195 @@ func TestResourcesGetByPrefix(t *testing.T) {
 	assert.Equal(2, len(resources.ByPrefix("logo")))
 	assert.Equal(1, len(resources.ByPrefix("logo2")))
+
+	logo := resources.GetByPrefix("logo")
+	assert.NotNil(logo.Params())
+	assert.Equal("logo1.png", logo.Name())
+	assert.Equal("logo1.png", logo.Title())
+
+}
+
+func TestAssignMetadata(t *testing.T) {
+	assert := require.New(t)
+	spec := newTestResourceSpec(assert)
+
+	var foo1, foo2, foo3, logo1, logo2, logo3 Resource
+	var resources Resources
+
+	for _, this := range []struct {
+		metaData []map[string]interface{}
+		assertFunc func(err error)
+	}{
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "My Resource",
+				"name": "My Name",
+				"src": "*",
+			},
+		}, func(err error) {
+			assert.Equal("My Resource", logo1.Title())
+			assert.Equal("My Name", logo1.Name())
+			assert.Equal("My Name", foo2.Name())
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "My Logo",
+				"src": "*loGo*",
+			},
+			map[string]interface{}{
+				"title": "My Resource",
+				"name": "My Name",
+				"src": "*",
+			},
+		}, func(err error) {
+			assert.Equal("My Logo", logo1.Title())
+			assert.Equal("My Logo", logo2.Title())
+			assert.Equal("My Name", logo1.Name())
+			assert.Equal("My Name", foo2.Name())
+			assert.Equal("My Name", foo3.Name())
+			assert.Equal("My Resource", foo3.Title())
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "My Logo",
+				"src": "*loGo*",
+				"params": map[string]interface{}{
+					"Param1": true,
+				},
+			},
+			map[string]interface{}{
+				"title": "My Resource",
+				"src": "*",
+				"params": map[string]interface{}{
+					"Param2": true,
+				},
+			},
+		}, func(err error) {
+			assert.NoError(err)
+			assert.Equal("My Logo", logo1.Title())
+			assert.Equal("My Resource", foo3.Title())
+			_, p1 := logo2.Params()["param1"]
+			_, p2 := foo2.Params()["param2"]
+			assert.True(p1)
+			assert.True(p2)
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"name": "Logo Name #:counter",
+				"src": "*logo*",
+			},
+			map[string]interface{}{
+				"title": "Resource #:counter",
+				"name": "Name #:counter",
+				"src": "*",
+			},
+		}, func(err error) {
+			assert.NoError(err)
+			assert.Equal("Resource #1", logo2.Title())
+			assert.Equal("Logo Name #1", logo2.Name())
+			assert.Equal("Resource #2", logo1.Title())
+			assert.Equal("Logo Name #2", logo1.Name())
+			assert.Equal("Resource #1", foo2.Title())
+			assert.Equal("Resource #2", foo1.Title())
+			assert.Equal("Name #2", foo1.Name())
+			assert.Equal("Resource #3", foo3.Title())
+
+			assert.Equal(logo2, resources.GetByPrefix("logo name #1"))
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "Third Logo #:counter",
+				"src": "logo3.png",
+			},
+			map[string]interface{}{
+				"title": "Other Logo #:counter",
+				"name": "Name #:counter",
+				"src": "logo*",
+			},
+		}, func(err error) {
+			assert.NoError(err)
+			assert.Equal("Third Logo #1", logo3.Title())
+			assert.Equal("Name #1", logo3.Name())
+			assert.Equal("Other Logo #1", logo2.Title())
+			assert.Equal("Name #1", logo2.Name())
+			assert.Equal("Other Logo #2", logo1.Title())
+			assert.Equal("Name #2", logo1.Name())
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "Third Logo #:counter",
+			},
+		}, func(err error) {
+			// Missing src
+			assert.Error(err)
+
+		}},
+		{[]map[string]interface{}{
+			map[string]interface{}{
+				"title": "Title",
+				"src": "[]",
+			},
+		}, func(err error) {
+			// Invalid pattern
+			assert.Error(err)
+
+		}},
+	} {
+
+		foo2 = spec.newGenericResource(nil, nil, "/public", "/b/foo2.css", "foo2.css", "css")
+		logo2 = spec.newGenericResource(nil, nil, "/public", "/b/Logo2.png", "Logo2.png", "image")
+		foo1 = spec.newGenericResource(nil, nil, "/public", "/a/foo1.css", "foo1.css", "css")
+		logo1 = spec.newGenericResource(nil, nil, "/public", "/a/logo1.png", "logo1.png", "image")
+		foo3 = spec.newGenericResource(nil, nil, "/public", "/b/foo3.css", "foo3.css", "css")
+		logo3 = spec.newGenericResource(nil, nil, "/public", "/b/logo3.png", "logo3.png", "image")
+
+		resources = Resources{
+			foo2,
+			logo2,
+			foo1,
+			logo1,
+			foo3,
+			logo3,
+		}
+
+		this.assertFunc(AssignMetadata(this.metaData, resources...))
+	}
+
+}
+
+func BenchmarkAssignMetadata(b *testing.B) {
+	assert := require.New(b)
+	spec := newTestResourceSpec(assert)
+
+	for i := 0; i < b.N; i++ {
+		b.StopTimer()
+		var resources Resources
+		var meta = []map[string]interface{}{
+			map[string]interface{}{
+				"title": "Foo #:counter",
+				"name": "Foo Name #:counter",
+				"src": "foo1*",
+			},
+			map[string]interface{}{
+				"title": "Rest #:counter",
+				"name": "Rest Name #:counter",
+				"src": "*",
+			},
+		}
+		for i := 0; i < 20; i++ {
+			name := fmt.Sprintf("foo%d_%d.css", i%5, i)
+			resources = append(resources, spec.newGenericResource(nil, nil, "/public", "/a/"+name, name, "css"))
+		}
+		b.StartTimer()
+
+		if err := AssignMetadata(meta, resources...); err != nil {
+			b.Fatal(err)
+		}
+
+	}
 
 }