diff --git a/helpers/url.go b/helpers/url.go
index c7697b092..68079ce20 100644
--- a/helpers/url.go
+++ b/helpers/url.go
@@ -292,12 +292,12 @@ func AddContextRoot(baseURL, relativePath string) string {
// URLizeAndPrep applies misc sanitation to the given URL to get it in line
// with the Hugo standard.
func (p *PathSpec) URLizeAndPrep(in string) string {
- return URLPrep(p.uglyURLs, p.URLize(in))
+ return p.URLPrep(p.URLize(in))
}
// URLPrep applies misc sanitation to the given URL.
-func URLPrep(ugly bool, in string) string {
- if ugly {
+func (p *PathSpec) URLPrep(in string) string {
+ if p.uglyURLs {
x := Uglify(SanitizeURL(in))
return x
}
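Note: URLPrep is now a method on PathSpec, so the uglyURLs decision travels with the path spec instead of being threaded through every call as a bool. A minimal usage sketch mirroring the updated test below (the spf13/hugo import path and the exported NewPathSpecFromConfig constructor are assumptions taken from the test code, not verified here):

```go
package main

import (
	"fmt"

	"github.com/spf13/hugo/helpers"
	"github.com/spf13/viper"
)

func main() {
	// The ugly/pretty decision is read from configuration once, when the
	// PathSpec is built, rather than passed on every URLPrep call.
	viper.Set("uglyURLs", true)
	p := helpers.NewPathSpecFromConfig(viper.GetViper())

	// Matches the test table entry below:
	// {true, "/section/name/index.html", "/section/name.html"}
	fmt.Println(p.URLPrep("/section/name/index.html")) // /section/name.html
}
```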
diff --git a/helpers/url_test.go b/helpers/url_test.go
index 4072b40e2..8dbec3f7c 100644
--- a/helpers/url_test.go
+++ b/helpers/url_test.go
@@ -248,7 +248,10 @@ func TestURLPrep(t *testing.T) {
{true, "/section/name/index.html", "/section/name.html"},
}
for i, d := range data {
- output := URLPrep(d.ugly, d.input)
+ viper.Set("uglyURLs", d.ugly)
+ p := NewPathSpecFromConfig(viper.GetViper())
+
+ output := p.URLPrep(d.input)
if d.output != output {
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
}
diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go
index 0576ca02c..5ca2be5c6 100644
--- a/hugolib/embedded_shortcodes_test.go
+++ b/hugolib/embedded_shortcodes_test.go
@@ -372,7 +372,7 @@ func TestShortcodeInstagram(t *testing.T) {
}
if this.expected != output {
- t.Errorf("[%d] unexpected rendering, got %d expected: %d", i, output, this.expected)
+ t.Errorf("[%d] unexpected rendering, got %s expected: %s", i, output, this.expected)
}
}
}
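Note: output and this.expected in the Instagram shortcode test are strings, so the old %d verbs print fmt's bad-verb marker instead of the rendered content. A standalone illustration of the difference:

```go
package main

import "fmt"

func main() {
	output := "<blockquote>rendered shortcode</blockquote>"

	// %d applied to a string prints a bad-verb marker, hiding the real value:
	fmt.Printf("got %d\n", output) // got %!d(string=<blockquote>rendered shortcode</blockquote>)

	// %s prints the string itself, which is what the assertion message needs:
	fmt.Printf("got %s\n", output) // got <blockquote>rendered shortcode</blockquote>
}
```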
diff --git a/hugolib/menu_test.go b/hugolib/menu_test.go
index 2fd2e43ab..af698799d 100644
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -555,7 +555,7 @@ func TestHomeNodeMenu(t *testing.T) {
testCommonResetState()
viper.Set("canonifyURLs", true)
- viper.Set("uglyURLs", true)
+ viper.Set("uglyURLs", false)
s := setupMenuTests(t, menuPageSources)
diff --git a/hugolib/node_as_page_test.go b/hugolib/node_as_page_test.go
index a152ba303..e40b2874a 100644
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -16,6 +16,7 @@ package hugolib
import (
"fmt"
"path/filepath"
+ "strings"
"testing"
"time"
@@ -32,6 +33,12 @@ import (
*/
func TestNodesAsPage(t *testing.T) {
+ for _, ugly := range []bool{false, true} {
+ doTestNodeAsPage(t, ugly)
+ }
+}
+
+func doTestNodeAsPage(t *testing.T, ugly bool) {
//jww.SetStdoutThreshold(jww.LevelDebug)
jww.SetStdoutThreshold(jww.LevelFatal)
@@ -47,6 +54,8 @@ func TestNodesAsPage(t *testing.T) {
testCommonResetState()
+ viper.Set("uglyURLs", ugly)
+
writeLayoutsForNodeAsPageTests(t)
writeNodePagesForNodeAsPageTests("", t)
@@ -73,7 +82,7 @@ func TestNodesAsPage(t *testing.T) {
"GetPage: Section1 ",
)
- assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "regular1"), false, "Single Title: Page 01", "Content Page 01")
h := s.owner
nodes := h.findAllPagesByKindNotIn(KindPage)
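Note: the expectedFilePath helper used in these assertions is defined elsewhere in this change set and is not part of this excerpt. A hypothetical reconstruction based on how it is called, assuming pretty URLs render to .../index.html and ugly URLs to <name>.html:

```go
package hugolib

import "path/filepath"

// expectedFilePath is a sketch of the helper used by the assertions above;
// the real definition lives outside this excerpt. Pretty URLs are expected
// at .../<name>/index.html, ugly URLs at .../<name>.html.
func expectedFilePath(ugly bool, parts ...string) string {
	if ugly {
		last := len(parts) - 1
		parts[last] += ".html"
		return filepath.Join(parts...)
	}
	return filepath.Join(append(parts, "index.html")...)
}
```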
@@ -99,24 +108,24 @@ func TestNodesAsPage(t *testing.T) {
require.True(t, first.IsPage())
// Check Home paginator
- assertFileContent(t, filepath.Join("public", "page", "2", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "page", "2"), false,
"Pag: Page 02")
// Check Sections
- assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
"Section Title: Section", "Section1 Content!",
"Date: 2009-01-04",
"Lastmod: 2009-01-05",
)
- assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
"Section Title: Section", "Section2 Content!",
"Date: 2009-01-06",
"Lastmod: 2009-01-07",
)
// Check Sections paginator
- assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "page", "2"), false,
"Pag: Page 02")
sections := h.findAllPagesByKind(KindSection)
@@ -124,13 +133,13 @@ func TestNodesAsPage(t *testing.T) {
require.Len(t, sections, 2)
// Check taxonomy lists
- assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
"Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo Content!",
"Date: 2009-01-08",
"Lastmod: 2009-01-09",
)
- assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories", "web"), false,
"Taxonomy Title: Taxonomy Web",
"Taxonomy Web Content!",
"Date: 2009-01-10",
@@ -138,12 +147,12 @@ func TestNodesAsPage(t *testing.T) {
)
// Check taxonomy list paginator
- assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo", "page", "2"), false,
"Taxonomy Title: Taxonomy Hugo",
"Pag: Page 02")
// Check taxonomy terms
- assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
"Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories Content!", "k/v: hugo",
"Date: 2009-01-12",
"Lastmod: 2009-01-13",
@@ -161,6 +170,12 @@ func TestNodesAsPage(t *testing.T) {
}
func TestNodesWithNoContentFile(t *testing.T) {
+ for _, ugly := range []bool{false, true} {
+ doTestNodesWithNoContentFile(t, ugly)
+ }
+}
+
+func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
//jww.SetStdoutThreshold(jww.LevelDebug)
jww.SetStdoutThreshold(jww.LevelFatal)
@@ -169,6 +184,7 @@ func TestNodesWithNoContentFile(t *testing.T) {
writeLayoutsForNodeAsPageTests(t)
writeRegularPagesForNodeAsPageTests(t)
+ viper.Set("uglyURLs", ugly)
viper.Set("paginate", 1)
viper.Set("title", "Hugo Rocks!")
viper.Set("rssURI", "customrss.xml")
@@ -195,25 +211,25 @@ func TestNodesWithNoContentFile(t *testing.T) {
)
// Taxonomy list
- assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
"Taxonomy Title: Hugo",
"Date: 2010-06-12",
"Lastmod: 2010-06-13",
)
// Taxonomy terms
- assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
"Taxonomy Terms Title: Categories",
)
// Sections
- assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
"Section Title: Sect1s",
"Date: 2010-06-12",
"Lastmod: 2010-06-13",
)
- assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+ assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
"Section Title: Sect2s",
"Date: 2008-07-06",
"Lastmod: 2008-07-09",
@@ -229,9 +245,17 @@ func TestNodesWithNoContentFile(t *testing.T) {
}
func TestNodesAsPageMultilingual(t *testing.T) {
+ for _, ugly := range []bool{true, false} {
+ doTestNodesAsPageMultilingual(t, ugly)
+ }
+}
+
+func doTestNodesAsPageMultilingual(t *testing.T, ugly bool) {
testCommonResetState()
+ viper.Set("uglyURLs", ugly)
+
writeLayoutsForNodeAsPageTests(t)
writeSource(t, "config.toml",
@@ -302,6 +326,7 @@ title = "Deutsche Hugo"
require.Len(t, deHome.Translations(), 2, deHome.Translations()[0].Language().Lang)
require.Equal(t, "en", deHome.Translations()[1].Language().Lang)
require.Equal(t, "nn", deHome.Translations()[0].Language().Lang)
+ require.Equal(t, expectedPermalink(ugly, "/de/"), deHome.Permalink())
enSect := sites.Sites[1].getPage("section", "sect1")
require.NotNil(t, enSect)
@@ -310,6 +335,8 @@ title = "Deutsche Hugo"
require.Equal(t, "de", enSect.Translations()[1].Language().Lang)
require.Equal(t, "nn", enSect.Translations()[0].Language().Lang)
+ require.Equal(t, expectedPermalink(ugly, "/en/sect1/"), enSect.Permalink())
+
assertFileContent(t, filepath.Join("public", "nn", "index.html"), true,
"Index Title: Hugo på norsk")
assertFileContent(t, filepath.Join("public", "en", "index.html"), true,
@@ -318,27 +345,33 @@ title = "Deutsche Hugo"
"Index Title: Home Sweet Home!", "Content!")
// Taxonomy list
- assertFileContent(t, filepath.Join("public", "nn", "categories", "hugo", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories", "hugo"), true,
"Taxonomy Title: Hugo")
- assertFileContent(t, filepath.Join("public", "en", "categories", "hugo", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories", "hugo"), true,
"Taxonomy Title: Taxonomy Hugo")
// Taxonomy terms
- assertFileContent(t, filepath.Join("public", "nn", "categories", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories"), true,
"Taxonomy Terms Title: Categories")
- assertFileContent(t, filepath.Join("public", "en", "categories", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories"), true,
"Taxonomy Terms Title: Taxonomy Term Categories")
// Sections
- assertFileContent(t, filepath.Join("public", "nn", "sect1", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1"), true,
"Section Title: Sect1s")
- assertFileContent(t, filepath.Join("public", "nn", "sect2", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect2"), true,
"Section Title: Sect2s")
- assertFileContent(t, filepath.Join("public", "en", "sect1", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1"), true,
"Section Title: Section1")
- assertFileContent(t, filepath.Join("public", "en", "sect2", "index.html"), true,
+ assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect2"), true,
"Section Title: Section2")
+ // Regular pages
+ assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1", "regular1"), true,
+ "Single Title: Page 01")
+ assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1", "regular2"), true,
+ "Single Title: Page 02")
+
// RSS
assertFileContent(t, filepath.Join("public", "nn", "customrss.xml"), true, "Hugo på norsk", " 0 {
- permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, p.Slug+"."+p.Extension()))
+ permalink = p.Site.pathSpec.URLPrep(path.Join(dir, p.Slug+"."+p.Extension()))
} else {
t := p.Source.TranslationBaseName()
- permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, (strings.TrimSpace(t)+"."+p.Extension())))
+ permalink = p.Site.pathSpec.URLPrep(path.Join(dir, (strings.TrimSpace(t) + "." + p.Extension())))
}
}
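Note: the permalink is now prepped through the owning site's PathSpec rather than a global viper lookup, which matters once sites and languages can carry different configuration. Roughly what the joined path looks like before prepping (the pretty-URL output assumes URLPrep's prettify branch, which is outside this excerpt):

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	// What page.go hands to URLPrep for a page with slug "regular1" in sect1:
	dir, slug, ext := "/sect1/", "regular1", "html"
	in := path.Join(dir, slug+"."+ext)
	fmt.Println(in) // /sect1/regular1.html

	// With uglyURLs=true the prepped permalink keeps the .html form; with
	// uglyURLs=false URLPrep's prettify branch yields /sect1/regular1/ instead.
	// The flag now comes from p.Site.pathSpec, not the global viper config.
}
```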
diff --git a/hugolib/site.go b/hugolib/site.go
index 01539bccc..87ab050b0 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1865,16 +1865,22 @@ func (s *Site) languageAliasTarget() target.AliasPublisher {
func (s *Site) initTargetList() {
s.targetListInit.Do(func() {
+ langDir := ""
+ if s.Language.Lang != s.Info.multilingual.DefaultLang.Lang || s.Info.defaultContentLanguageInSubdir {
+ langDir = s.Language.Lang
+ }
if s.targets.page == nil {
s.targets.page = &target.PagePub{
PublishDir: s.absPublishDir(),
UglyURLs: viper.GetBool("uglyURLs"),
+ LangDir: langDir,
}
}
if s.targets.pageUgly == nil {
s.targets.pageUgly = &target.PagePub{
PublishDir: s.absPublishDir(),
UglyURLs: true,
+ LangDir: langDir,
}
}
if s.targets.file == nil {
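Note: the language subdirectory is only set when the site is not the default language rendered at the root. A small sketch of that decision, with plain parameters standing in for the Site internals shown above:

```go
package main

import "fmt"

// langDir mirrors the condition added to initTargetList: a language gets its
// own publish subdirectory unless it is the default language and
// defaultContentLanguageInSubdir is off. Parameter names are illustrative.
func langDir(lang, defaultLang string, defaultInSubdir bool) string {
	if lang != defaultLang || defaultInSubdir {
		return lang
	}
	return ""
}

func main() {
	fmt.Println(langDir("en", "en", false)) // "" – default language published in root
	fmt.Println(langDir("de", "en", false)) // "de"
	fmt.Println(langDir("en", "en", true))  // "en" – default language forced into a subdir
}
```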
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index 82523e88f..94fccd950 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -118,7 +118,7 @@ func (s *Site) renderPaginator(p *Page) error {
}
pageNumber := i + 1
- htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
+ htmlBase := path.Join(append(p.sections, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))...)
htmlBase = p.addLangPathPrefix(htmlBase)
if err := s.renderAndWritePage(pagerNode.Title,
@@ -156,7 +156,8 @@ func (s *Site) renderRSS(p *Page) error {
rssPage.Data["Pages"] = rssPage.Pages
}
rssURI := s.Language.GetString("rssURI")
- rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+
+ rssPath := path.Join(append(rssPage.sections, rssURI)...)
s.setPageURLs(rssPage, rssPath)
return s.renderAndWriteXML(rssPage.Title,
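Note: paginator and RSS paths are now assembled from the page's section segments rather than URLPath.URL, which may already have been uglified. A quick illustration of the path.Join/append pattern used above (section values are illustrative):

```go
package main

import (
	"fmt"
	"path"
)

func main() {
	sections := []string{"sect1"} // stand-in for p.sections of a page under /sect1/

	paginatePath, pageNumber := "page", 2
	htmlBase := path.Join(append(sections, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))...)
	fmt.Println(htmlBase) // sect1/page/2

	rssPath := path.Join(append(sections, "customrss.xml")...)
	fmt.Println(rssPath) // sect1/customrss.xml
}
```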
diff --git a/target/page.go b/target/page.go
index eb7bb5943..ab38ded58 100644
--- a/target/page.go
+++ b/target/page.go
@@ -31,6 +31,10 @@ type PagePub struct {
UglyURLs bool
DefaultExtension string
PublishDir string
+
+ // LangDir will contain the subdir for the language, e.g. "en", "de" etc.
+ // It will be empty if the site is rendered in root.
+ LangDir string
}
func (pp *PagePub) Publish(path string, r io.Reader) (err error) {
@@ -64,6 +68,14 @@ func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
ext := pp.extension(filepath.Ext(file))
name := filename(file)
+ // TODO(bep) Having all of this path logic here seems wrong, but I guess
+ // we'll clean this up when we redo the output files.
+ // This catches the home page in a language sub path. It should never
+ // have an ugly URL.
+ if pp.LangDir != "" && dir == helpers.FilePathSeparator && name == pp.LangDir {
+ return filepath.Join(dir, name, "index"+ext), nil
+ }
+
if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
return filepath.Join(dir, name+ext), nil
}
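Note: with the new LangDir check, the per-language home page keeps its index.html even when ugly URLs are on. A stripped-down sketch of TranslateRelative covering only the branches shown above, with illustrative source paths:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// translate sketches the LangDir and UglyURLs branches of PagePub.TranslateRelative.
func translate(src, langDir string, ugly bool) string {
	dir, file := filepath.Split(src)
	ext := filepath.Ext(file)
	name := strings.TrimSuffix(file, ext)

	// Language home pages always land in <lang>/index.html, never <lang>.html.
	if langDir != "" && dir == string(filepath.Separator) && name == langDir {
		return filepath.Join(dir, name, "index"+ext)
	}
	if ugly {
		return filepath.Join(dir, name+ext)
	}
	return filepath.Join(dir, name, "index"+ext)
}

func main() {
	fmt.Println(translate("/en.html", "en", true))           // /en/index.html – the new special case
	fmt.Println(translate("/sect1/page1.html", "en", true))  // /sect1/page1.html
	fmt.Println(translate("/sect1/page1.html", "en", false)) // /sect1/page1/index.html
}
```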