Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)

parent 1f6e0de361
commit c38bfda43b

9 changed files with 101 additions and 31 deletions

@@ -292,12 +292,12 @@ func AddContextRoot(baseURL, relativePath string) string {
 // URLizeAndPrep applies misc sanitation to the given URL to get it in line
 // with the Hugo standard.
 func (p *PathSpec) URLizeAndPrep(in string) string {
-    return URLPrep(p.uglyURLs, p.URLize(in))
+    return p.URLPrep(p.URLize(in))
 }
 
 // URLPrep applies misc sanitation to the given URL.
-func URLPrep(ugly bool, in string) string {
-    if ugly {
+func (p *PathSpec) URLPrep(in string) string {
+    if p.uglyURLs {
         x := Uglify(SanitizeURL(in))
         return x
     }

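Note: the hunk above turns URLPrep from a package-level helper that takes an explicit ugly flag into a method on *PathSpec that reads p.uglyURLs from the site configuration. Below is a minimal sketch of the new call pattern, modeled on the updated TestURLPrep in the next hunk; the package placement and the NewPathSpecFromConfig constructor are taken from that test hunk and assumed to be usable this way.

    package helpers

    import (
        "fmt"

        "github.com/spf13/viper"
    )

    // Sketch only: the uglyURLs flag now travels with the PathSpec instead of
    // being passed to every URLPrep call.
    func ExamplePathSpec_URLPrep() {
        viper.Set("uglyURLs", true)
        p := NewPathSpecFromConfig(viper.GetViper())
        fmt.Println(p.URLPrep("/section/name/index.html"))
        // Output: /section/name.html
    }
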
@@ -248,7 +248,10 @@ func TestURLPrep(t *testing.T) {
         {true, "/section/name/index.html", "/section/name.html"},
     }
     for i, d := range data {
-        output := URLPrep(d.ugly, d.input)
+        viper.Set("uglyURLs", d.ugly)
+        p := NewPathSpecFromConfig(viper.GetViper())
+
+        output := p.URLPrep(d.input)
         if d.output != output {
             t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
         }

@@ -372,7 +372,7 @@ func TestShortcodeInstagram(t *testing.T) {
         }
 
         if this.expected != output {
-            t.Errorf("[%d] unexpected rendering, got %d expected: %d", i, output, this.expected)
+            t.Errorf("[%d] unexpected rendering, got %s expected: %s", i, output, this.expected)
         }
     }
 }

@@ -555,7 +555,7 @@ func TestHomeNodeMenu(t *testing.T) {
     testCommonResetState()
 
     viper.Set("canonifyURLs", true)
-    viper.Set("uglyURLs", true)
+    viper.Set("uglyURLs", false)
 
     s := setupMenuTests(t, menuPageSources)
 

@@ -16,6 +16,7 @@ package hugolib
 import (
     "fmt"
     "path/filepath"
+    "strings"
     "testing"
     "time"
 

@@ -32,6 +33,12 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
+    for _, ugly := range []bool{false, true} {
+        doTestNodeAsPage(t, ugly)
+    }
+}
+
+func doTestNodeAsPage(t *testing.T, ugly bool) {
     //jww.SetStdoutThreshold(jww.LevelDebug)
     jww.SetStdoutThreshold(jww.LevelFatal)
 

@@ -47,6 +54,8 @@ func TestNodesAsPage(t *testing.T) {
 
     testCommonResetState()
 
+    viper.Set("uglyURLs", ugly)
+
     writeLayoutsForNodeAsPageTests(t)
     writeNodePagesForNodeAsPageTests("", t)
 

@@ -73,7 +82,7 @@ func TestNodesAsPage(t *testing.T) {
         "GetPage: Section1 ",
     )
 
-    assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "regular1"), false, "Single Title: Page 01", "Content Page 01")
 
     h := s.owner
     nodes := h.findAllPagesByKindNotIn(KindPage)

@@ -99,24 +108,24 @@ func TestNodesAsPage(t *testing.T) {
     require.True(t, first.IsPage())
 
     // Check Home paginator
-    assertFileContent(t, filepath.Join("public", "page", "2", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "page", "2"), false,
         "Pag: Page 02")
 
     // Check Sections
-    assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
         "Section Title: Section", "Section1 <strong>Content!</strong>",
         "Date: 2009-01-04",
         "Lastmod: 2009-01-05",
     )
 
-    assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
         "Section Title: Section", "Section2 <strong>Content!</strong>",
         "Date: 2009-01-06",
         "Lastmod: 2009-01-07",
     )
 
     // Check Sections paginator
-    assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "page", "2"), false,
         "Pag: Page 02")
 
     sections := h.findAllPagesByKind(KindSection)

@@ -124,13 +133,13 @@ func TestNodesAsPage(t *testing.T) {
     require.Len(t, sections, 2)
 
     // Check taxonomy lists
-    assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
         "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>",
         "Date: 2009-01-08",
         "Lastmod: 2009-01-09",
     )
 
-    assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories", "web"), false,
         "Taxonomy Title: Taxonomy Web",
         "Taxonomy Web <strong>Content!</strong>",
         "Date: 2009-01-10",

@@ -138,12 +147,12 @@ func TestNodesAsPage(t *testing.T) {
     )
 
     // Check taxonomy list paginator
-    assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo", "page", "2"), false,
         "Taxonomy Title: Taxonomy Hugo",
         "Pag: Page 02")
 
     // Check taxonomy terms
-    assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
         "Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo",
         "Date: 2009-01-12",
         "Lastmod: 2009-01-13",

@@ -161,6 +170,12 @@ func TestNodesAsPage(t *testing.T) {
 }
 
 func TestNodesWithNoContentFile(t *testing.T) {
+    for _, ugly := range []bool{false, true} {
+        doTestNodesWithNoContentFile(t, ugly)
+    }
+}
+
+func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
     //jww.SetStdoutThreshold(jww.LevelDebug)
     jww.SetStdoutThreshold(jww.LevelFatal)
 

@@ -169,6 +184,7 @@ func TestNodesWithNoContentFile(t *testing.T) {
     writeLayoutsForNodeAsPageTests(t)
     writeRegularPagesForNodeAsPageTests(t)
 
+    viper.Set("uglyURLs", ugly)
     viper.Set("paginate", 1)
     viper.Set("title", "Hugo Rocks!")
     viper.Set("rssURI", "customrss.xml")

@@ -195,25 +211,25 @@ func TestNodesWithNoContentFile(t *testing.T) {
     )
 
     // Taxonomy list
-    assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
         "Taxonomy Title: Hugo",
         "Date: 2010-06-12",
         "Lastmod: 2010-06-13",
     )
 
     // Taxonomy terms
-    assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
         "Taxonomy Terms Title: Categories",
     )
 
     // Sections
-    assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
         "Section Title: Sect1s",
         "Date: 2010-06-12",
         "Lastmod: 2010-06-13",
     )
 
-    assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+    assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
         "Section Title: Sect2s",
         "Date: 2008-07-06",
         "Lastmod: 2008-07-09",

@@ -229,9 +245,17 @@ func TestNodesWithNoContentFile(t *testing.T) {
 }
 
 func TestNodesAsPageMultilingual(t *testing.T) {
+    for _, ugly := range []bool{true, false} {
+        doTestNodesAsPageMultilingual(t, ugly)
+    }
+}
+
+func doTestNodesAsPageMultilingual(t *testing.T, ugly bool) {
 
     testCommonResetState()
 
+    viper.Set("uglyURLs", ugly)
+
     writeLayoutsForNodeAsPageTests(t)
 
     writeSource(t, "config.toml",

@@ -302,6 +326,7 @@ title = "Deutsche Hugo"
     require.Len(t, deHome.Translations(), 2, deHome.Translations()[0].Language().Lang)
     require.Equal(t, "en", deHome.Translations()[1].Language().Lang)
     require.Equal(t, "nn", deHome.Translations()[0].Language().Lang)
+    require.Equal(t, expetedPermalink(ugly, "/de/"), deHome.Permalink())
 
     enSect := sites.Sites[1].getPage("section", "sect1")
     require.NotNil(t, enSect)

@@ -310,6 +335,8 @@ title = "Deutsche Hugo"
     require.Equal(t, "de", enSect.Translations()[1].Language().Lang)
     require.Equal(t, "nn", enSect.Translations()[0].Language().Lang)
 
+    require.Equal(t, expetedPermalink(ugly, "/en/sect1/"), enSect.Permalink())
+
     assertFileContent(t, filepath.Join("public", "nn", "index.html"), true,
         "Index Title: Hugo på norsk")
     assertFileContent(t, filepath.Join("public", "en", "index.html"), true,

@@ -318,27 +345,33 @@ title = "Deutsche Hugo"
         "Index Title: Home Sweet Home!", "<strong>Content!</strong>")
 
     // Taxonomy list
-    assertFileContent(t, filepath.Join("public", "nn", "categories", "hugo", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories", "hugo"), true,
         "Taxonomy Title: Hugo")
-    assertFileContent(t, filepath.Join("public", "en", "categories", "hugo", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories", "hugo"), true,
         "Taxonomy Title: Taxonomy Hugo")
 
     // Taxonomy terms
-    assertFileContent(t, filepath.Join("public", "nn", "categories", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories"), true,
         "Taxonomy Terms Title: Categories")
-    assertFileContent(t, filepath.Join("public", "en", "categories", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories"), true,
         "Taxonomy Terms Title: Taxonomy Term Categories")
 
     // Sections
-    assertFileContent(t, filepath.Join("public", "nn", "sect1", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1"), true,
         "Section Title: Sect1s")
-    assertFileContent(t, filepath.Join("public", "nn", "sect2", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect2"), true,
         "Section Title: Sect2s")
-    assertFileContent(t, filepath.Join("public", "en", "sect1", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1"), true,
         "Section Title: Section1")
-    assertFileContent(t, filepath.Join("public", "en", "sect2", "index.html"), true,
+    assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect2"), true,
         "Section Title: Section2")
 
+    // Regular pages
+    assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1", "regular1"), true,
+        "Single Title: Page 01")
+    assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1", "regular2"), true,
+        "Single Title: Page 02")
+
     // RSS
     assertFileContent(t, filepath.Join("public", "nn", "customrss.xml"), true, "Hugo på norsk", "<rss")
     assertFileContent(t, filepath.Join("public", "nn", "sect1", "customrss.xml"), true, "Recent content in Sect1s on Hugo på norsk", "<rss")

@@ -660,3 +693,17 @@ Date: {{ .Date.Format "2006-01-02" }}
 Lastmod: {{ .Lastmod.Format "2006-01-02" }}
 `)
 }
+
+func expectedFilePath(ugly bool, path ...string) string {
+    if ugly {
+        return filepath.Join(append(path[0:len(path)-1], path[len(path)-1]+".html")...)
+    }
+    return filepath.Join(append(path, "index.html")...)
+}
+
+func expetedPermalink(ugly bool, path string) string {
+    if ugly {
+        return strings.TrimSuffix(path, "/") + ".html"
+    }
+    return path
+}

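For reference, here is what the two new test helpers resolve to (Unix-style separators assumed for the filepath examples; expetedPermalink keeps its spelling exactly as in the diff):

    // expectedFilePath(false, "public", "sect1", "regular1") -> public/sect1/regular1/index.html
    // expectedFilePath(true,  "public", "sect1", "regular1") -> public/sect1/regular1.html
    // expetedPermalink(false, "/en/sect1/")                  -> /en/sect1/
    // expetedPermalink(true,  "/en/sect1/")                  -> /en/sect1.html
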
@@ -733,6 +733,7 @@ func (p *Page) permalink() (*url.URL, error) {
         // No permalink config for nodes (currently)
         pURL := strings.TrimSpace(p.Site.pathSpec.URLize(p.URLPath.URL))
         pURL = p.addLangPathPrefix(pURL)
+        pURL = p.Site.pathSpec.URLPrep(path.Join(pURL, "index."+p.Extension()))
         url := helpers.MakePermalink(baseURL, pURL)
         return url, nil
     }

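The added line above is what gives node pages (home, sections, taxonomies) ugly permalinks when uglyURLs is enabled: the node URL is joined with an explicit index filename and then run through the PathSpec's URLPrep. A rough standalone trace with hypothetical values for a section node:

    package main

    import (
        "fmt"
        "path"
    )

    func main() {
        pURL, ext := "sect1", "html" // hypothetical node URL and extension
        fmt.Println(path.Join(pURL, "index."+ext)) // sect1/index.html
        // With uglyURLs on, URLPrep (first hunk) then rewrites such paths the
        // same way as the {true, "/section/name/index.html", "/section/name.html"}
        // case in the URLPrep test table.
    }
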
@@ -755,10 +756,10 @@ func (p *Page) permalink() (*url.URL, error) {
         }
     } else {
         if len(pSlug) > 0 {
-            permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, p.Slug+"."+p.Extension()))
+            permalink = p.Site.pathSpec.URLPrep(path.Join(dir, p.Slug+"."+p.Extension()))
         } else {
             t := p.Source.TranslationBaseName()
-            permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, (strings.TrimSpace(t)+"."+p.Extension())))
+            permalink = p.Site.pathSpec.URLPrep(path.Join(dir, (strings.TrimSpace(t) + "." + p.Extension())))
         }
     }
 

@@ -1865,16 +1865,22 @@ func (s *Site) languageAliasTarget() target.AliasPublisher {
 
 func (s *Site) initTargetList() {
     s.targetListInit.Do(func() {
+        langDir := ""
+        if s.Language.Lang != s.Info.multilingual.DefaultLang.Lang || s.Info.defaultContentLanguageInSubdir {
+            langDir = s.Language.Lang
+        }
         if s.targets.page == nil {
             s.targets.page = &target.PagePub{
                 PublishDir: s.absPublishDir(),
                 UglyURLs:   viper.GetBool("uglyURLs"),
+                LangDir:    langDir,
             }
         }
         if s.targets.pageUgly == nil {
             s.targets.pageUgly = &target.PagePub{
                 PublishDir: s.absPublishDir(),
                 UglyURLs:   true,
+                LangDir:    langDir,
             }
         }
         if s.targets.file == nil {

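The site now computes the per-language output subdirectory once and attaches it to both page publishers: the regular target, which follows the uglyURLs setting, and the pageUgly target, which always renders ugly paths. langDir stays empty for the default content language unless defaultContentLanguageInSubdir is set, so single-language sites keep publishing at the root. A standalone restatement of that decision, with hypothetical inputs:

    package main

    import "fmt"

    // Sketch of the langDir condition in the hunk above, lifted out of the Site
    // type so it can run on its own.
    func langDir(siteLang, defaultLang string, defaultInSubdir bool) string {
        if siteLang != defaultLang || defaultInSubdir {
            return siteLang
        }
        return ""
    }

    func main() {
        fmt.Println(langDir("en", "en", false)) // "" -> default language publishes at the root
        fmt.Println(langDir("de", "en", false)) // "de"
        fmt.Println(langDir("en", "en", true))  // "en" -> even the default gets a subdir
    }
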
@@ -118,7 +118,7 @@ func (s *Site) renderPaginator(p *Page) error {
         }
 
         pageNumber := i + 1
-        htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
+        htmlBase := path.Join(append(p.sections, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))...)
         htmlBase = p.addLangPathPrefix(htmlBase)
 
         if err := s.renderAndWritePage(pagerNode.Title,

@@ -156,7 +156,8 @@ func (s *Site) renderRSS(p *Page) error {
         rssPage.Data["Pages"] = rssPage.Pages
     }
     rssURI := s.Language.GetString("rssURI")
-    rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+
+    rssPath := path.Join(append(rssPage.sections, rssURI)...)
     s.setPageURLs(rssPage, rssPath)
 
     return s.renderAndWriteXML(rssPage.Title,

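Both render paths above now build the output path from the page's sections slice instead of URLPath.URL. A small self-contained sketch of the resulting joins, using hypothetical values that match the tests earlier in this diff (a "sect1" section, a paginate path of "page", page number 2, rssURI "customrss.xml"):

    package main

    import (
        "fmt"
        "path"
    )

    func main() {
        sections := []string{"sect1"} // hypothetical p.sections / rssPage.sections value
        fmt.Println(path.Join(append(sections, fmt.Sprintf("/%s/%d", "page", 2))...)) // sect1/page/2
        fmt.Println(path.Join(append(sections, "customrss.xml")...))                  // sect1/customrss.xml
    }
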
@@ -31,6 +31,10 @@ type PagePub struct {
     UglyURLs         bool
     DefaultExtension string
     PublishDir       string
+
+    // LangDir will contain the subdir for the language, i.e. "en", "de" etc.
+    // It will be empty if the site is rendered in root.
+    LangDir string
 }
 
 func (pp *PagePub) Publish(path string, r io.Reader) (err error) {

@@ -64,6 +68,14 @@ func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
     ext := pp.extension(filepath.Ext(file))
     name := filename(file)
 
+    // TODO(bep) Having all of this path logic here seems wrong, but I guess
+    // we'll clean this up when we redo the output files.
+    // This catches the home page in a language sub path. They should never
+    // have any ugly URLs.
+    if pp.LangDir != "" && dir == helpers.FilePathSeparator && name == pp.LangDir {
+        return filepath.Join(dir, name, "index"+ext), nil
+    }
+
     if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
         return filepath.Join(dir, name+ext), nil
     }

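A worked example of the new special case in TranslateRelative, under the assumption that the home page of a language subsite reaches the publisher as a source path like "/en.html" (so dir is the path separator and the file's base name equals LangDir): the pretty form is forced even when UglyURLs is on.

    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        // Hypothetical inputs mirroring the condition above:
        // dir == FilePathSeparator, name == LangDir ("en"), ext == ".html".
        dir, name, ext := string(filepath.Separator), "en", ".html"
        fmt.Println(filepath.Join(dir, name, "index"+ext)) // /en/index.html, never /en.html
    }
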