resources/page: Allow section and taxonomy pages to have a permalink configuration
Allows using permalink configuration for sections (branch bundles) and also for taxonomy pages. Extends the current permalink configuration so that patterns can be specified per page kind while staying backward compatible: all permalink patterns not dedicated to a certain kind are automatically added for both regular pages and term pages. Fixes #8523
This commit is contained in:
parent
e3308a0bbc
commit
cc14c6a52c
9 changed files with 132 additions and 35 deletions
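To make the per-kind configuration and the backward-compatibility rule concrete, here is a minimal, self-contained sketch (not Hugo code; the input map stands in for what a TOML/YAML decoder would hand to the config loader, and the kind names come from the diff below): top-level string patterns are applied to both regular pages and term pages, while nested tables target one kind explicitly.

package main

import "fmt"

func main() {
	// Stand-in for the decoded site configuration, e.g.:
	//
	//   [permalinks]
	//   posts = '/:year/:slug/'
	//   [permalinks.section]
	//   docs = '/manuals/:title/'
	raw := map[string]any{
		"posts": "/:year/:slug/",
		"section": map[string]any{
			"docs": "/manuals/:title/",
		},
	}

	perKind := map[string]map[string]string{
		"page": {}, "section": {}, "taxonomy": {}, "term": {},
	}

	for k, v := range raw {
		switch v := v.(type) {
		case string:
			// Flat pattern: keep the old behaviour by applying it to
			// both regular pages and term pages.
			perKind["page"][k] = v
			perKind["term"][k] = v
		case map[string]any:
			// Per-kind table such as [permalinks.section].
			for k2, v2 := range v {
				if s, ok := v2.(string); ok {
					perKind[k][k2] = s
				}
			}
		}
	}

	fmt.Println(perKind)
	// map[page:map[posts:/:year/:slug/] section:map[docs:/manuals/:title/] taxonomy:map[] term:map[posts:/:year/:slug/]]
}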
@@ -150,7 +150,7 @@ type Config struct {
 	Minify minifiers.MinifyConfig `mapstructure:"-"`

 	// Permalink configuration.
-	Permalinks map[string]string `mapstructure:"-"`
+	Permalinks map[string]map[string]string `mapstructure:"-"`

 	// Taxonomy configuration.
 	Taxonomies map[string]string `mapstructure:"-"`
@@ -206,7 +206,48 @@ var allDecoderSetups = map[string]decodeWeight{
 	"permalinks": {
 		key: "permalinks",
 		decode: func(d decodeWeight, p decodeConfig) error {
-			p.c.Permalinks = maps.CleanConfigStringMapString(p.p.GetStringMapString(d.key))
+			p.c.Permalinks = make(map[string]map[string]string)
+
+			p.c.Permalinks["page"] = make(map[string]string)
+			p.c.Permalinks["section"] = make(map[string]string)
+			p.c.Permalinks["taxonomy"] = make(map[string]string)
+			p.c.Permalinks["term"] = make(map[string]string)
+
+			config := maps.CleanConfigStringMap(p.p.GetStringMap(d.key))
+			for k, v := range config {
+				switch v := v.(type) {
+				case string:
+					// [permalinks]
+					// key = '...'
+
+					// To successfully stay backward compatible, "default" patterns need to be set for both page and term.
+					p.c.Permalinks["page"][k] = v
+					p.c.Permalinks["term"][k] = v
+
+				case maps.Params:
+					// [permalinks.key]
+					// xyz = ???
+
+					if (k == "page") || (k == "section") || (k == "taxonomy") || (k == "term") {
+						// TODO: warn if we overwrite an already set value
+						for k2, v2 := range v {
+							switch v2 := v2.(type) {
+							case string:
+								p.c.Permalinks[k][k2] = v2
+
+							default:
+								return fmt.Errorf("permalinks configuration invalid: unknown value %q for key %q for kind %q", v2, k2, k)
+							}
+						}
+					} else {
+						return fmt.Errorf("permalinks configuration only allows per-kind configuration 'page', 'section', 'taxonomy' and 'term'; unknown kind: %q", k)
+					}
+
+				default:
+					return fmt.Errorf("permalinks configuration invalid: unknown value %q for key %q", v, k)
+				}
+			}
+
 			return nil
 		},
 	},
@@ -93,6 +93,7 @@ const (
 	ContentClassBranch  ContentClass = "branch"
 	ContentClassFile    ContentClass = "zfile"    // Sort below
 	ContentClassContent ContentClass = "zcontent"
+	ContentClassZero    ContentClass = "zero"     // Special value for zeroFile
 )

 func (c ContentClass) IsBundle() bool {
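For orientation, these classes roughly correspond to how Hugo classifies content filenames: index.* files are leaf bundles, _index.* files are branch bundles (sections, taxonomies, home), other content files are ordinary content, and anything else is a plain file; ContentClassZero is the new sentinel used when a page has no backing file. A rough, hypothetical sketch of that mapping (not the actual classifier in hugofs/files):

package main

import (
	"fmt"
	"strings"
)

type ContentClass string

const (
	ContentClassLeaf    ContentClass = "leaf"
	ContentClassBranch  ContentClass = "branch"
	ContentClassFile    ContentClass = "zfile"
	ContentClassContent ContentClass = "zcontent"
	ContentClassZero    ContentClass = "zero" // no backing file
)

// classify is a simplified approximation of how content filenames map to classes.
func classify(filename string, isContentFile bool) ContentClass {
	base := strings.TrimSuffix(filename, ".md") // simplification: only handles .md
	switch {
	case base == "index":
		return ContentClassLeaf
	case base == "_index":
		return ContentClassBranch
	case isContentFile:
		return ContentClassContent
	default:
		return ContentClassFile
	}
}

func main() {
	fmt.Println(classify("index.md", true))   // leaf
	fmt.Println(classify("_index.md", true))  // branch
	fmt.Println(classify("post.md", true))    // zcontent
	fmt.Println(classify("data.json", false)) // zfile
}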
@@ -18,6 +18,7 @@ import (
 	"strings"

 	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/hugofs/files"

 	"github.com/gohugoio/hugo/resources/page"
 )
@@ -108,14 +109,16 @@ func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.Target
 	)

 	d := s.Deps
+	classifier := files.ContentClassZero

 	if !p.File().IsZero() {
 		dir = p.File().Dir()
 		baseName = p.File().TranslationBaseName()
 		contentBaseName = p.File().ContentBaseName()
+		classifier = p.File().Classifier()
 	}

-	if baseName != contentBaseName {
+	if classifier == files.ContentClassLeaf {
 		// See https://github.com/gohugoio/hugo/issues/4870
 		// A leaf bundle
 		dir = strings.TrimSuffix(dir, contentBaseName+helpers.FilePathSeparator)
@@ -143,22 +146,26 @@ func createTargetPathDescriptor(s *Site, p page.Page, pm *pageMeta) (page.Target
 	desc.PrefixFilePath = s.getLanguageTargetPathLang(alwaysInSubDir)
 	desc.PrefixLink = s.getLanguagePermalinkLang(alwaysInSubDir)

-	// Expand only page.KindPage and page.KindTaxonomy; don't expand other Kinds of Pages
-	// like page.KindSection or page.KindTaxonomyTerm because they are "shallower" and
-	// the permalink configuration values are likely to be redundant, e.g.
-	// naively expanding /category/:slug/ would give /category/categories/ for
-	// the "categories" page.KindTaxonomyTerm.
-	if p.Kind() == page.KindPage || p.Kind() == page.KindTerm {
-		opath, err := d.ResourceSpec.Permalinks.Expand(p.Section(), p)
-		if err != nil {
-			return desc, err
-		}
+	opath, err := d.ResourceSpec.Permalinks.Expand(p.Section(), p)
+	if err != nil {
+		return desc, err
+	}

-		if opath != "" {
-			opath, _ = url.QueryUnescape(opath)
-			desc.ExpandedPermalink = opath
-		}
+	if opath != "" {
+		opath, _ = url.QueryUnescape(opath)
+		if strings.HasSuffix(opath, "//") {
+			// When rewriting the _index of the section the permalink config is applied to,
+			// we sometimes get double slashes at the end; clean them up here.
+			opath = strings.TrimSuffix(opath, "/")
+		}
+
+		desc.ExpandedPermalink = opath
+
+		if !p.File().IsZero() {
+			s.Log.Debugf("Set expanded permalink path for %s %s to %#v", p.Kind(), p.File().Path(), opath)
+		} else {
+			s.Log.Debugf("Set expanded permalink path for %s in %v to %#v", p.Kind(), desc.Sections, opath)
+		}
 	}

 	return desc, nil
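The trailing-slash cleanup matters because section and taxonomy pages are typically backed by an _index file, for which tokens such as :title or :filename now expand to an empty string (see the expander changes further down); a pattern like /:section/:title/ then produces a trailing "//". A small standalone illustration of the trim, assuming that expansion behaviour:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// For a section's _index page, ":title" expands to "" under the new
	// rules, so "/:section/:title/" can come out as "/docs//".
	opath := "/docs//"

	if strings.HasSuffix(opath, "//") {
		opath = strings.TrimSuffix(opath, "/")
	}

	fmt.Println(opath) // /docs/
}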
@@ -221,7 +221,7 @@ func generateFileIsZeroWrappers(c *codegen.Inspector) error {
 	methods := c.MethodsFromTypes([]reflect.Type{reflect.TypeOf((*source.File)(nil)).Elem()}, nil)

 	for _, m := range methods {
-		if m.Name == "IsZero" {
+		if m.Name == "IsZero" || m.Name == "Classifier" {
 			continue
 		}
 		fmt.Fprint(&buff, m.DeclarationNamed("zeroFile"))
@@ -255,6 +255,11 @@ func (zeroFile) IsZero() bool {
 	return true
 }

+func (z zeroFile) Classifier() files.ContentClass {
+	z.log.Warnln(".File.Classifier on zero object. Wrap it in if or with: {{ with .File }}{{ .Classifier }}{{ end }}")
+	return files.ContentClassZero
+}
+
 %s
 `, header, importsString(pkgImports), buff.String())
@@ -26,6 +26,7 @@ import (
 	"errors"

 	"github.com/gohugoio/hugo/helpers"
 )

 // PermalinkExpander holds permalink mappings per section.
@@ -35,7 +36,7 @@ type PermalinkExpander struct {
 	// to be used to replace that tag.
 	knownPermalinkAttributes map[string]pageToPermaAttribute

-	expanders map[string]func(Page) (string, error)
+	expanders map[string]map[string]func(Page) (string, error)

 	urlize func(uri string) string
 }
@@ -68,7 +69,7 @@ func (p PermalinkExpander) callback(attr string) (pageToPermaAttribute, bool) {

 // NewPermalinkExpander creates a new PermalinkExpander configured by the given
 // urlize func.
-func NewPermalinkExpander(urlize func(uri string) string, patterns map[string]string) (PermalinkExpander, error) {
+func NewPermalinkExpander(urlize func(uri string) string, patterns map[string]map[string]string) (PermalinkExpander, error) {
 	p := PermalinkExpander{urlize: urlize}

 	p.knownPermalinkAttributes = map[string]pageToPermaAttribute{
@@ -87,12 +88,15 @@ func NewPermalinkExpander(urlize func(uri string) string, patterns map[string]st
 		"filename": p.pageToPermalinkFilename,
 	}

-	e, err := p.parse(patterns)
-	if err != nil {
-		return p, err
-	}
+	p.expanders = make(map[string]map[string]func(Page) (string, error))

-	p.expanders = e
+	for kind, patterns := range patterns {
+		e, err := p.parse(patterns)
+		if err != nil {
+			return p, err
+		}
+		p.expanders[kind] = e
+	}

 	return p, nil
 }
@@ -100,7 +104,13 @@ func NewPermalinkExpander(urlize func(uri string) string, patterns map[string]st
 // Expand expands the path in p according to the rules defined for the given key.
 // If no rules are found for the given key, an empty string is returned.
 func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
-	expand, found := l.expanders[key]
+	expanders, found := l.expanders[p.Kind()]
+
+	if !found {
+		return "", nil
+	}
+
+	expand, found := expanders[key]

 	if !found {
 		return "", nil
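The lookup is now two-staged: first by page kind, then by section key, and a miss at either level yields an empty string so the caller falls back to the default path logic. A generic sketch of that comma-ok pattern on nested maps (plain strings stand in for the expansion functions used in the real code):

package main

import "fmt"

// expandFor mimics the two-level dispatch: kind first, then section key.
// A miss at either level returns "" rather than an error.
func expandFor(expanders map[string]map[string]string, kind, key string) string {
	byKey, found := expanders[kind]
	if !found {
		return ""
	}
	pattern, found := byKey[key]
	if !found {
		return ""
	}
	return pattern
}

func main() {
	expanders := map[string]map[string]string{
		"page":    {"posts": "/:year/:slug/"},
		"section": {"docs": "/manuals/:title/"},
	}

	fmt.Println(expandFor(expanders, "page", "posts"))   // /:year/:slug/
	fmt.Println(expandFor(expanders, "section", "blog")) // (empty)
	fmt.Println(expandFor(expanders, "term", "tags"))    // (empty)
}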
@@ -242,6 +252,10 @@ func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string

 // pageToPermalinkTitle returns the URL-safe form of the title
 func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) {
+	if p.File().TranslationBaseName() == "_index" {
+		return "", nil
+	}
+
 	return l.urlize(p.Title()), nil
 }
|
@ -252,6 +266,8 @@ func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, er
|
||||||
// Page bundles; the directory name will hopefully have a better name.
|
// Page bundles; the directory name will hopefully have a better name.
|
||||||
dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
|
dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
|
||||||
_, name = filepath.Split(dir)
|
_, name = filepath.Split(dir)
|
||||||
|
} else if name == "_index" {
|
||||||
|
return "", nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return l.urlize(name), nil
|
return l.urlize(name), nil
|
||||||
|
|
|
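Returning an empty string for _index keeps section and taxonomy URLs from ending in a literal "_index" segment: for a branch bundle the :title and :filename tokens contribute nothing, and the resulting double slash is trimmed as shown earlier. A small, hedged sketch of that interaction (the token substitution here is a toy stand-in, not Hugo's expander):

package main

import (
	"fmt"
	"strings"
)

// filenameToken approximates the new :filename behaviour: a branch bundle's
// "_index" base name contributes nothing to the permalink.
func filenameToken(translationBaseName string) string {
	if translationBaseName == "_index" {
		return ""
	}
	return strings.ToLower(translationBaseName) // stand-in for urlize
}

func main() {
	pattern := "/:section/:filename/"
	expanded := strings.NewReplacer(
		":section", "docs",
		":filename", filenameToken("_index"),
	).Replace(pattern)

	fmt.Println(expanded) // /docs//  (then trimmed to /docs/ as shown earlier)
}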
@@ -69,6 +69,7 @@ func TestPermalinkExpansion(t *testing.T) {
 	page.date = d
 	page.section = "blue"
 	page.slug = "The Slug"
+	page.kind = "page"

 	for _, item := range testdataPermalinks {
 		if !item.valid {
@@ -79,8 +80,10 @@ func TestPermalinkExpansion(t *testing.T) {
 		name := specNameCleaner.ReplaceAllString(item.spec, "")

 		c.Run(name, func(c *qt.C) {
-			patterns := map[string]string{
-				"posts": item.spec,
+			patterns := map[string]map[string]string{
+				"page": {
+					"posts": item.spec,
+				},
 			}
 			expander, err := NewPermalinkExpander(urlize, patterns)
 			c.Assert(err, qt.IsNil)
@@ -103,14 +106,18 @@ func TestPermalinkExpansionMultiSection(t *testing.T) {
 	page.date = d
 	page.section = "blue"
 	page.slug = "The Slug"
+	page.kind = "page"

 	page_slug_fallback := newTestPageWithFile("/page-filename/index.md")
 	page_slug_fallback.title = "Page Title"
+	page_slug_fallback.kind = "page"

-	permalinksConfig := map[string]string{
-		"posts":   "/:slug",
-		"blog":    "/:section/:year",
-		"recipes": "/:slugorfilename",
+	permalinksConfig := map[string]map[string]string{
+		"page": {
+			"posts":   "/:slug",
+			"blog":    "/:section/:year",
+			"recipes": "/:slugorfilename",
+		},
 	}
 	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
 	c.Assert(err, qt.IsNil)
@@ -137,8 +144,10 @@ func TestPermalinkExpansionConcurrent(t *testing.T) {

 	c := qt.New(t)

-	permalinksConfig := map[string]string{
-		"posts": "/:slug/",
+	permalinksConfig := map[string]map[string]string{
+		"page": {
+			"posts": "/:slug/",
+		},
 	}

 	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
|
@ -151,6 +160,7 @@ func TestPermalinkExpansionConcurrent(t *testing.T) {
|
||||||
go func(i int) {
|
go func(i int) {
|
||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
page := newTestPage()
|
page := newTestPage()
|
||||||
|
page.kind = "page"
|
||||||
for j := 1; j < 20; j++ {
|
for j := 1; j < 20; j++ {
|
||||||
page.slug = fmt.Sprintf("slug%d", i+j)
|
page.slug = fmt.Sprintf("slug%d", i+j)
|
||||||
expanded, err := expander.Expand("posts", page)
|
expanded, err := expander.Expand("posts", page)
|
||||||
|
@@ -209,9 +219,12 @@ func BenchmarkPermalinkExpand(b *testing.B) {
 	page.title = "Hugo Rocks"
 	d, _ := time.Parse("2006-01-02", "2019-02-28")
 	page.date = d
+	page.kind = "page"

-	permalinksConfig := map[string]string{
-		"posts": "/:year-:month-:title",
+	permalinksConfig := map[string]map[string]string{
+		"page": {
+			"posts": "/:year-:month-:title",
+		},
 	}
 	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
 	if err != nil {
@@ -18,6 +18,7 @@ package page
 import (
 	"github.com/gohugoio/hugo/common/loggers"
 	"github.com/gohugoio/hugo/hugofs"
+	"github.com/gohugoio/hugo/hugofs/files"
 	"github.com/gohugoio/hugo/source"
 )
|
@ -34,6 +35,11 @@ func (zeroFile) IsZero() bool {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (z zeroFile) Classifier() files.ContentClass {
|
||||||
|
z.log.Warnln(".File.Classifier on zero object. Wrap it in if or with: {{ with .File }}{{ .Classifier }}{{ end }}")
|
||||||
|
return files.ContentClassZero
|
||||||
|
}
|
||||||
|
|
||||||
func (z zeroFile) Path() (o0 string) {
|
func (z zeroFile) Path() (o0 string) {
|
||||||
z.log.Warnln(".File.Path on zero object. Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}")
|
z.log.Warnln(".File.Path on zero object. Wrap it in if or with: {{ with .File }}{{ .Path }}{{ end }}")
|
||||||
return
|
return
|
||||||
|
|
|
@@ -92,6 +92,9 @@ type FileWithoutOverlap interface {
 	// if file is a leaf bundle.
 	ContentBaseName() string

+	// Classifier is the ContentClass of the file.
+	Classifier() files.ContentClass
+
 	// UniqueID is the MD5 hash of the file's path and is for most practical applications,
 	// Hugo content files being one of them, considered to be unique.
 	UniqueID() string
|
||||||
return fi.contentBaseName
|
return fi.contentBaseName
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Classifier is the ContentClass of the file
|
||||||
|
func (fi *FileInfo) Classifier() files.ContentClass {
|
||||||
|
return fi.classifier;
|
||||||
|
}
|
||||||
|
|
||||||
// Section returns a file's section.
|
// Section returns a file's section.
|
||||||
func (fi *FileInfo) Section() string {
|
func (fi *FileInfo) Section() string {
|
||||||
fi.init()
|
fi.init()
|
||||||
|
|