From a118cb4138bc903566ade49a7789d91657898a21 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?=
Date: Tue, 20 Feb 2024 10:02:42 +0100
Subject: [PATCH] Fix regression in robots.txt when using the built-in template

resolvePageTemplate matched the page kind as "robotsTXT", but the kind
is the lowercase "robotstxt", so the built-in
_internal/_default/robots.txt template was never added to the layout
candidates.

Fixes #12071
---
 hugolib/robotstxt_test.go | 13 +++++++++++++
 output/layouts/layout.go  |  2 +-
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/hugolib/robotstxt_test.go b/hugolib/robotstxt_test.go
index 2035c235f..c901ce662 100644
--- a/hugolib/robotstxt_test.go
+++ b/hugolib/robotstxt_test.go
@@ -39,3 +39,16 @@ func TestRobotsTXTOutput(t *testing.T) {
 
 	b.AssertFileContent("public/robots.txt", "User-agent: Googlebot")
 }
+
+func TestRobotsTXTDefaultTemplate(t *testing.T) {
+	t.Parallel()
+	files := `
+-- hugo.toml --
+baseURL = "http://auth/bub/"
+enableRobotsTXT = true
+`
+
+	b := Test(t, files)
+
+	b.AssertFileContent("public/robots.txt", "User-agent: *")
+}
diff --git a/output/layouts/layout.go b/output/layouts/layout.go
index c05841ae3..b903e09ea 100644
--- a/output/layouts/layout.go
+++ b/output/layouts/layout.go
@@ -222,7 +222,7 @@ func resolvePageTemplate(d LayoutDescriptor) []string {
 	}
 
 	switch d.Kind {
-	case "robotsTXT":
+	case "robotstxt":
 		layouts = append(layouts, "_internal/_default/robots.txt")
 	case "sitemap":
 		layouts = append(layouts, "_internal/_default/sitemap.xml")
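
A minimal usage sketch (not part of the patch; the baseURL value is
illustrative): the configuration below exercises the code path fixed above,
mirroring the new TestRobotsTXTDefaultTemplate. With enableRobotsTXT set and
no user-provided layouts/robots.txt, Hugo falls back to the built-in
_internal/_default/robots.txt template.

    # hugo.toml -- assumed minimal site config (illustrative baseURL)
    baseURL = "https://example.org/"
    # Generate robots.txt; without a custom layouts/robots.txt this uses
    # the built-in template, which emits "User-agent: *".
    enableRobotsTXT = true

Building the site then writes public/robots.txt containing "User-agent: *",
which is exactly what the new test asserts.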