// hugo/hugolib/site.go

// Copyright © 2013-14 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib

import (
"bytes"
"errors"
"fmt"
"html/template"
"io"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"

"bitbucket.org/pkg/inflect"
"github.com/spf13/cast"
bp "github.com/spf13/hugo/bufferpool"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
"github.com/spf13/hugo/parser"
"github.com/spf13/hugo/source"
"github.com/spf13/hugo/target"
"github.com/spf13/hugo/tpl"
"github.com/spf13/hugo/transform"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/nitro"
"github.com/spf13/viper"
)
var _ = transform.AbsURL
var DefaultTimer *nitro.B
// Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows:
//
// 1. A list of Files is parsed and then converted into Pages.
//
// 2. Pages contain sections (based on the file they were generated from),
//    aliases and slugs (included in a page's frontmatter) which are the
//    various targets that will get generated. There will always be a
//    canonical listing; the canonical path can be overruled based on a
//    pattern.
//
// 3. Taxonomies are created via configuration and will present some aspect
//    of the final page, typically a permalink.
//
// 4. All Pages are passed through a template based on their desired
//    layout, which is determined by numerous different elements.
//
// 5. The entire collection of files is written to disk.
type Site struct {
Pages Pages
Files []*source.File
Tmpl tpl.Template
Taxonomies TaxonomyList
Source source.Input
Sections Taxonomy
Info SiteInfo
Shortcodes map[string]ShortcodeFunc
Menus Menus
timer *nitro.B
Targets targetList
targetListInit sync.Once
Completed chan bool
RunMode runmode
params map[string]interface{}
draftCount int
futureCount int
Data map[string]interface{}
}
type targetList struct {
Page target.Output
File target.Output
Alias target.AliasPublisher
}
type SiteInfo struct {
BaseUrl template.URL
Taxonomies TaxonomyList
Authors AuthorList
Social SiteSocial
Indexes *TaxonomyList // legacy, should be identical to Taxonomies
Sections Taxonomy
Pages *Pages
Files []*source.File
Recent *Pages // legacy, should be identical to Pages
Menus *Menus
Hugo *HugoInfo
Title string
Author map[string]interface{}
LanguageCode string
DisqusShortname string
Copyright string
LastChange time.Time
Permalinks PermalinkOverrides
Params map[string]interface{}
BuildDrafts bool
canonifyUrls bool
paginationPageCount uint64
Data *map[string]interface{}
}
// SiteSocial is a place to put social details on a site level. These are the
// standard keys that themes will expect to have available, but can be
// expanded to any others on a per site basis
// github
// facebook
// facebook_admin
// twitter
// twitter_domain
// googleplus
// pinterest
// instagram
// youtube
// linkedin
type SiteSocial map[string]string
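// GetParam returns the site-level param for key (looked up
// case-insensitively), cast to its underlying type. Missing keys and
// unsupported types return nil.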
func (s *SiteInfo) GetParam(key string) interface{} {
v := s.Params[strings.ToLower(key)]
if v == nil {
return nil
}
switch v.(type) {
case bool:
return cast.ToBool(v)
case string:
return cast.ToString(v)
case int64, int32, int16, int8, int:
return cast.ToInt(v)
case float64, float32:
return cast.ToFloat64(v)
case time.Time:
return cast.ToTime(v)
case []string:
return v
}
return nil
}
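// refLink resolves a reference such as "about.md" or "about.md#who" to the
// permalink (or relative permalink, when relative is true) of the page whose
// source path or logical name matches it. A fragment is appended to the link
// and, unless PlainIdAnchors is configured, suffixed with the unique ID of
// the target page (or of the calling page when the reference is a bare
// fragment).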
func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error) {
var refUrl *url.URL
var err error
refUrl, err = url.Parse(ref)
if err != nil {
return "", err
}
var target *Page
var link string
if refUrl.Path != "" {
for _, page := range []*Page(*s.Pages) {
if page.Source.Path() == refUrl.Path || page.Source.LogicalName() == refUrl.Path {
target = page
break
}
}
if target == nil {
return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refUrl.Path)
}
if relative {
link, err = target.RelPermalink()
} else {
link, err = target.Permalink()
}
if err != nil {
return "", err
}
}
if refUrl.Fragment != "" {
link = link + "#" + refUrl.Fragment
if refUrl.Path != "" && target != nil && !target.getRenderingConfig().PlainIdAnchors {
link = link + ":" + target.UniqueId()
} else if page != nil && !page.getRenderingConfig().PlainIdAnchors {
link = link + ":" + page.UniqueId()
}
}
return link, nil
}
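// Ref returns the absolute permalink to the page referenced by ref
// (e.g. "about.md" or "about.md#who"). It is used by the `ref` template
// func and shortcode, e.g. `{{ ref . "about.md" }}` in a template or
// `{{% ref about.md %}}` in content.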
func (s *SiteInfo) Ref(ref string, page *Page) (string, error) {
return s.refLink(ref, page, false)
}
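// RelRef returns the relative permalink to the page referenced by ref,
// e.g. `{{ relref . "about.md" }}` in a template or `{{% relref about.md %}}`
// in content.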
func (s *SiteInfo) RelRef(ref string, page *Page) (string, error) {
return s.refLink(ref, page, true)
}
func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
atomic.AddUint64(&s.paginationPageCount, cnt)
}
type runmode struct {
Watching bool
}
func (s *Site) Running() bool {
return s.RunMode.Watching
}
func init() {
DefaultTimer = nitro.Initalize()
}
func (s *Site) timerStep(step string) {
if s.timer == nil {
s.timer = DefaultTimer
}
s.timer.Step(step)
}
func (s *Site) Build() (err error) {
if err = s.Process(); err != nil {
return
}
if err = s.Render(); err != nil {
jww.ERROR.Printf("Error rendering site: %s\nAvailable templates:\n", err)
for _, template := range s.Tmpl.Templates() {
jww.ERROR.Printf("\t%s\n", template.Name())
}
return
}
return nil
}
func (s *Site) Analyze() {
s.Process()
s.ShowPlan(os.Stdout)
}
func (s *Site) prepTemplates() {
s.Tmpl = tpl.InitializeT()
s.Tmpl.LoadTemplates(s.absLayoutDir())
if s.hasTheme() {
s.Tmpl.LoadTemplatesWithPrefix(s.absThemeDir()+"/layouts", "theme")
}
}
func (s *Site) addTemplate(name, data string) error {
return s.Tmpl.AddTemplate(name, data)
}
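// loadData walks the given data sources (the project data dir first, then the
// theme's) and builds s.Data as a nested map keyed by sub-directory and base
// file name. When the same key appears more than once, the value loaded first
// wins and a warning is logged for the conflict.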
func (s *Site) loadData(sources []source.Input) (err error) {
s.Data = make(map[string]interface{})
var current map[string]interface{}
for _, currentSource := range sources {
for _, r := range currentSource.Files() {
// Crawl in data tree to insert data
current = s.Data
for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) {
if key != "" {
if _, ok := current[key]; !ok {
current[key] = make(map[string]interface{})
}
current = current[key].(map[string]interface{})
}
}
data, err := readData(r)
if err != nil {
return fmt.Errorf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err)
}
// Copy content from current to data when needed
if _, ok := current[r.BaseFileName()]; ok {
data := data.(map[string]interface{})
for key, value := range current[r.BaseFileName()].(map[string]interface{}) {
if _, override := data[key]; override {
// filepath.Walk walks the files in lexical order, '/' comes before '.'
// this warning could happen if
// 1. A theme uses the same key; the main data folder wins
// 2. A sub folder uses the same key: the sub folder wins
jww.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path())
}
data[key] = value
}
}
// Insert data
current[r.BaseFileName()] = data
}
}
return
}
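// readData parses a single data file as YAML, JSON or TOML based on its
// extension.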
func readData(f *source.File) (interface{}, error) {
switch f.Extension() {
case "yaml", "yml":
return parser.HandleYamlMetaData(f.Bytes())
case "json":
return parser.HandleJsonMetaData(f.Bytes())
case "toml":
return parser.HandleTomlMetaData(f.Bytes())
default:
return nil, fmt.Errorf("Data not supported for extension '%s'", f.Extension())
}
}
func (s *Site) Process() (err error) {
if err = s.initialize(); err != nil {
return
}
s.prepTemplates()
s.Tmpl.PrintErrors()
s.timerStep("initialize & template prep")
dataSources := make([]source.Input, 0, 2)
dataSources = append(dataSources, &source.Filesystem{Base: s.absDataDir()})
// have to be last - duplicate keys in earlier entries will win
themeDataDir, err := helpers.GetThemeDataDirPath()
if err == nil {
dataSources = append(dataSources, &source.Filesystem{Base: themeDataDir})
}
if err = s.loadData(dataSources); err != nil {
return
}
s.timerStep("load data")
if err = s.CreatePages(); err != nil {
return
}
s.setupPrevNext()
s.timerStep("import pages")
if err = s.BuildSiteMeta(); err != nil {
return
}
s.timerStep("build taxonomies")
return
}
func (s *Site) setupPrevNext() {
for i, page := range s.Pages {
if i < len(s.Pages)-1 {
page.Next = s.Pages[i+1]
}
if i > 0 {
page.Prev = s.Pages[i-1]
}
}
}
func (s *Site) Render() (err error) {
if err = s.RenderAliases(); err != nil {
return
}
s.timerStep("render and write aliases")
if err = s.RenderTaxonomiesLists(); err != nil {
return
}
s.timerStep("render and write taxonomies")
s.RenderListsOfTaxonomyTerms()
s.timerStep("render & write taxonomy lists")
if err = s.RenderSectionLists(); err != nil {
return
}
s.timerStep("render and write lists")
if err = s.RenderPages(); err != nil {
return
}
s.timerStep("render and write pages")
if err = s.RenderHomePage(); err != nil {
return
}
s.timerStep("render and write homepage")
if err = s.RenderSitemap(); err != nil {
return
}
s.timerStep("render and write Sitemap")
return
}
func (s *Site) checkDescriptions() {
for _, p := range s.Pages {
if len(p.Description) < 60 {
jww.FEEDBACK.Println(p.Source.Path() + " ")
}
}
}
func (s *Site) Initialise() (err error) {
return s.initialize()
}
func (s *Site) initialize() (err error) {
if err = s.checkDirectories(); err != nil {
return err
}
staticDir := helpers.AbsPathify(viper.GetString("StaticDir") + "/")
s.Source = &source.Filesystem{
AvoidPaths: []string{staticDir},
Base: s.absContentDir(),
}
s.Menus = Menus{}
s.initializeSiteInfo()
s.Shortcodes = make(map[string]ShortcodeFunc)
return
}
func (s *Site) initializeSiteInfo() {
params := viper.GetStringMap("Params")
permalinks := make(PermalinkOverrides)
for k, v := range viper.GetStringMapString("Permalinks") {
permalinks[k] = PathPattern(v)
}
s.Info = SiteInfo{
BaseUrl: template.URL(helpers.SanitizeUrlKeepTrailingSlash(viper.GetString("BaseUrl"))),
Title: viper.GetString("Title"),
Author: viper.GetStringMap("author"),
LanguageCode: viper.GetString("languagecode"),
Copyright: viper.GetString("copyright"),
DisqusShortname: viper.GetString("DisqusShortname"),
BuildDrafts: viper.GetBool("BuildDrafts"),
canonifyUrls: viper.GetBool("CanonifyUrls"),
Pages: &s.Pages,
Recent: &s.Pages,
Menus: &s.Menus,
Params: params,
Permalinks: permalinks,
Data: &s.Data,
}
}
func (s *Site) hasTheme() bool {
return viper.GetString("theme") != ""
}
func (s *Site) absDataDir() string {
return helpers.AbsPathify(viper.GetString("DataDir"))
}
func (s *Site) absThemeDir() string {
return helpers.AbsPathify("themes/" + viper.GetString("theme"))
}
func (s *Site) absLayoutDir() string {
return helpers.AbsPathify(viper.GetString("LayoutDir"))
}
func (s *Site) absContentDir() string {
return helpers.AbsPathify(viper.GetString("ContentDir"))
}
func (s *Site) absPublishDir() string {
return helpers.AbsPathify(viper.GetString("PublishDir"))
}
func (s *Site) checkDirectories() (err error) {
if b, _ := helpers.DirExists(s.absContentDir(), hugofs.SourceFs); !b {
return fmt.Errorf("No source directory found, expecting to find it at " + s.absContentDir())
}
return
}
type pageResult struct {
page *Page
err error
}
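// CreatePages reads all source files and converts them into Pages (and plain
// Files) in two concurrent passes: a pool of sourceReader goroutines parses
// the raw files, then pools of pageConverter/fileConverter goroutines convert
// their content. Each pass has exactly one collator gathering the results.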
func (s *Site) CreatePages() error {
if s.Source == nil {
panic(fmt.Sprintf("s.Source not set %s", s.absContentDir()))
}
if len(s.Source.Files()) < 1 {
return nil
}
files := s.Source.Files()
results := make(chan HandledResult)
filechan := make(chan *source.File)
procs := getGoMaxProcs()
wg := &sync.WaitGroup{}
wg.Add(procs * 4)
for i := 0; i < procs*4; i++ {
go sourceReader(s, filechan, results, wg)
}
errs := make(chan error)
// we can only have exactly one result collator, since it makes changes that
// must be synchronized.
go readCollator(s, results, errs)
for _, file := range files {
filechan <- file
}
close(filechan)
wg.Wait()
close(results)
readErrs := <-errs
results = make(chan HandledResult)
pageChan := make(chan *Page)
fileConvChan := make(chan *source.File)
wg = &sync.WaitGroup{}
wg.Add(2 * procs * 4)
for i := 0; i < procs*4; i++ {
go fileConverter(s, fileConvChan, results, wg)
go pageConverter(s, pageChan, results, wg)
}
go converterCollator(s, results, errs)
for _, p := range s.Pages {
pageChan <- p
}
for _, f := range s.Files {
fileConvChan <- f
}
close(pageChan)
close(fileConvChan)
wg.Wait()
close(results)
renderErrs := <-errs
if renderErrs == nil && readErrs == nil {
return nil
}
if renderErrs == nil {
return readErrs
}
if readErrs == nil {
return renderErrs
}
return fmt.Errorf("%s\n%s", readErrs, renderErrs)
}
func sourceReader(s *Site, files <-chan *source.File, results chan<- HandledResult, wg *sync.WaitGroup) {
defer wg.Done()
for file := range files {
h := NewMetaHandler(file.Extension())
if h != nil {
h.Read(file, s, results)
} else {
jww.ERROR.Println("Unsupported File Type", file.Path())
}
}
}
func pageConverter(s *Site, pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) {
defer wg.Done()
for page := range pages {
var h *MetaHandle
if page.Markup != "" {
h = NewMetaHandler(page.Markup)
} else {
h = NewMetaHandler(page.File.Extension())
}
if h != nil {
h.Convert(page, s, results)
}
}
}
func fileConverter(s *Site, files <-chan *source.File, results HandleResults, wg *sync.WaitGroup) {
defer wg.Done()
for file := range files {
h := NewMetaHandler(file.Extension())
if h != nil {
h.Convert(file, s, results)
}
}
}
func converterCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
errMsgs := []string{}
for r := range results {
if r.err != nil {
errMsgs = append(errMsgs, r.err.Error())
continue
}
}
if len(errMsgs) == 0 {
errs <- nil
return
}
errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
}
func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
errMsgs := []string{}
for r := range results {
if r.err != nil {
errMsgs = append(errMsgs, r.Error())
continue
}
// a result without a page is a plain source file
if r.page == nil {
s.Files = append(s.Files, r.file)
} else {
if r.page.ShouldBuild() {
s.Pages = append(s.Pages, r.page)
}
if r.page.IsDraft() {
s.draftCount++
}
if r.page.IsFuture() {
s.futureCount++
}
}
}
s.Pages.Sort()
if len(errMsgs) == 0 {
errs <- nil
return
}
errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
2013-07-04 11:32:55 -04:00
}
func (s *Site) BuildSiteMeta() (err error) {
s.assembleMenus()
if len(s.Pages) == 0 {
return
}
s.assembleTaxonomies()
s.assembleSections()
s.Info.LastChange = s.Pages[0].Date
return
}
func (s *Site) getMenusFromConfig() Menus {
ret := Menus{}
if menus := viper.GetStringMap("menu"); menus != nil {
for name, menu := range menus {
m, err := cast.ToSliceE(menu)
if err != nil {
jww.ERROR.Printf("unable to process menus in site config\n")
jww.ERROR.Println(err)
} else {
for _, entry := range m {
jww.DEBUG.Printf("found menu: %q, in site config\n", name)
menuEntry := MenuEntry{Menu: name}
ime, err := cast.ToStringMapE(entry)
if err != nil {
jww.ERROR.Printf("unable to process menus in site config\n")
jww.ERROR.Println(err)
}
menuEntry.MarshallMap(ime)
if strings.HasPrefix(menuEntry.Url, "/") {
// make it match the nodes
menuEntryUrl := menuEntry.Url
menuEntryUrl = helpers.UrlizeAndPrep(menuEntryUrl)
if !s.Info.canonifyUrls {
menuEntryUrl = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryUrl)
}
menuEntry.Url = menuEntryUrl
}
if ret[name] == nil {
ret[name] = &Menu{}
}
*ret[name] = ret[name].Add(&menuEntry)
}
}
}
return ret
}
return ret
}
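// assembleMenus builds s.Menus from the menu entries defined in the site
// config and in page front matter: entries are first collected into a flat
// map, children are attached to their parents (creating missing parents
// without a URL), and the parentless entries form the top level of each menu.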
func (s *Site) assembleMenus() {
type twoD struct {
MenuName, EntryName string
}
flat := map[twoD]*MenuEntry{}
children := map[twoD]Menu{}
menuConfig := s.getMenusFromConfig()
for name, menu := range menuConfig {
for _, me := range *menu {
flat[twoD{name, me.KeyName()}] = me
}
}
// add menu entries from the pages to the flat hash
for _, p := range s.Pages {
for name, me := range p.Menus() {
if _, ok := flat[twoD{name, me.KeyName()}]; ok {
jww.ERROR.Printf("Two or more menu items have the same name/identifier in %q Menu. Identified as %q.\n Rename or set a unique identifier. \n", name, me.KeyName())
continue
}
flat[twoD{name, me.KeyName()}] = me
}
}
// Create Children Menus First
for _, e := range flat {
if e.Parent != "" {
children[twoD{e.Menu, e.Parent}] = children[twoD{e.Menu, e.Parent}].Add(e)
}
}
// Placing Children in Parents (in flat)
for p, childmenu := range children {
_, ok := flat[twoD{p.MenuName, p.EntryName}]
if !ok {
// if parent does not exist, create one without a url
flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, Url: ""}
}
flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
}
// Assembling Top Level of Tree
for menu, e := range flat {
if e.Parent == "" {
_, ok := s.Menus[menu.MenuName]
if !ok {
s.Menus[menu.MenuName] = &Menu{}
}
*s.Menus[menu.MenuName] = s.Menus[menu.MenuName].Add(e)
}
}
}
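// assembleTaxonomies groups the pages into the taxonomies configured under
// "Taxonomies", using each page's params (and the optional "<plural>_weight"
// param) to build sorted, weighted page lists per term.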
func (s *Site) assembleTaxonomies() {
s.Taxonomies = make(TaxonomyList)
s.Sections = make(Taxonomy)
taxonomies := viper.GetStringMapString("Taxonomies")
jww.INFO.Printf("found taxonomies: %#v\n", taxonomies)
for _, plural := range taxonomies {
s.Taxonomies[plural] = make(Taxonomy)
for _, p := range s.Pages {
vals := p.GetParam(plural)
weight := p.GetParam(plural + "_weight")
if weight == nil {
weight = 0
}
if vals != nil {
if v, ok := vals.([]string); ok {
for _, idx := range v {
x := WeightedPage{weight.(int), p}
s.Taxonomies[plural].Add(idx, x)
}
} else if v, ok := vals.(string); ok {
x := WeightedPage{weight.(int), p}
s.Taxonomies[plural].Add(v, x)
} else {
jww.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path())
}
}
}
for k := range s.Taxonomies[plural] {
s.Taxonomies[plural][k].Sort()
}
}
s.Info.Taxonomies = s.Taxonomies
s.Info.Indexes = &s.Taxonomies
s.Info.Sections = s.Sections
}
func (s *Site) assembleSections() {
for i, p := range s.Pages {
s.Sections.Add(p.Section(), WeightedPage{s.Pages[i].Weight, s.Pages[i]})
}
for k := range s.Sections {
s.Sections[k].Sort()
for i, wp := range s.Sections[k] {
if i > 0 {
wp.Page.NextInSection = s.Sections[k][i-1].Page
}
if i < len(s.Sections[k])-1 {
wp.Page.PrevInSection = s.Sections[k][i+1].Page
}
}
}
}
func (s *Site) possibleTaxonomies() (taxonomies []string) {
for _, p := range s.Pages {
for k := range p.Params {
if !helpers.InStringArray(taxonomies, k) {
taxonomies = append(taxonomies, k)
}
}
}
return
}
// RenderAliases renders shell pages that simply have a redirect in the header
func (s *Site) RenderAliases() error {
for _, p := range s.Pages {
for _, a := range p.Aliases {
plink, err := p.Permalink()
if err != nil {
return err
}
if err := s.WriteDestAlias(a, template.HTML(plink)); err != nil {
return err
}
}
}
return nil
}
// RenderPages renders pages each corresponding to a markdown file
func (s *Site) RenderPages() error {
results := make(chan error)
pages := make(chan *Page)
procs := getGoMaxProcs()
wg := &sync.WaitGroup{}
for i := 0; i < procs*4; i++ {
wg.Add(1)
go pageRenderer(s, pages, results, wg)
}
errs := make(chan error)
go errorCollator(results, errs)
for _, page := range s.Pages {
pages <- page
}
close(pages)
wg.Wait()
close(results)
err := <-errs
if err != nil {
return fmt.Errorf("Error(s) rendering pages: %s", err)
}
return nil
}
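// pageRenderer renders the pages received on the channel, falling back to a
// template parsed from the page's own content when the page is not renderable.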
func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done()
for p := range pages {
var layouts []string
if !p.IsRenderable() {
self := "__" + p.TargetPath()
_, err := s.Tmpl.New(self).Parse(string(p.Content))
if err != nil {
results <- err
continue
}
layouts = append(layouts, self)
} else {
layouts = append(layouts, p.Layout()...)
layouts = append(layouts, "_default/single.html")
}
err := s.renderAndWritePage("page "+p.FullFilePath(), p.TargetPath(), p, s.appendThemeTemplates(layouts)...)
if err != nil {
results <- err
}
}
}
func errorCollator(results <-chan error, errs chan<- error) {
errMsgs := []string{}
for err := range results {
if err != nil {
errMsgs = append(errMsgs, err.Error())
}
}
if len(errMsgs) == 0 {
errs <- nil
} else {
errs <- errors.New(strings.Join(errMsgs, "\n"))
}
close(errs)
}
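// appendThemeTemplates orders candidate layouts when a theme is in use:
// project templates first, then the theme's copies of the same templates,
// and Hugo's internal templates last.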
func (s *Site) appendThemeTemplates(in []string) []string {
if s.hasTheme() {
out := []string{}
// First place all non internal templates
for _, t := range in {
if !strings.HasPrefix(t, "_internal/") {
out = append(out, t)
}
}
// Then place theme templates with the same names
for _, t := range in {
if !strings.HasPrefix(t, "_internal/") {
out = append(out, "theme/"+t)
}
}
// Lastly place internal templates
for _, t := range in {
if strings.HasPrefix(t, "_internal/") {
out = append(out, t)
}
}
return out
}
return in
}
type taxRenderInfo struct {
key string
pages WeightedPages
singular string
plural string
}
// RenderTaxonomiesLists renders the listing pages based on the meta data;
// each unique term within a taxonomy will have a page created.
func (s *Site) RenderTaxonomiesLists() error {
wg := &sync.WaitGroup{}
taxes := make(chan taxRenderInfo)
results := make(chan error)
procs := getGoMaxProcs()
for i := 0; i < procs*4; i++ {
wg.Add(1)
go taxonomyRenderer(s, taxes, results, wg)
}
errs := make(chan error)
go errorCollator(results, errs)
taxonomies := viper.GetStringMapString("Taxonomies")
for singular, plural := range taxonomies {
for key, pages := range s.Taxonomies[plural] {
taxes <- taxRenderInfo{key, pages, singular, plural}
}
}
close(taxes)
wg.Wait()
close(results)
err := <-errs
if err != nil {
return fmt.Errorf("Error(s) rendering taxonomies: %s", err)
}
return nil
}
func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
base := t.plural + "/" + t.key
n := s.NewNode()
n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
s.setUrls(n, base)
if len(t.pages) > 0 {
n.Date = t.pages[0].Page.Date
}
n.Data[t.singular] = t.pages
n.Data["Singular"] = t.singular
n.Data["Plural"] = t.plural
n.Data["Pages"] = t.pages.Pages()
return n, base
}
func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done()
var n *Node
for t := range taxes {
var base string
layouts := s.appendThemeTemplates(
[]string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
n, base = s.newTaxonomyNode(t)
if err := s.renderAndWritePage("taxononomy "+t.singular, base, n, layouts...); err != nil {
results <- err
continue
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(fmt.Sprintf("%s/%s/%d/index.html", base, paginatePath, 1), s.permalink(base))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
taxonomyPagerNode, _ := s.newTaxonomyNode(t)
taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 {
taxonomyPagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", base, paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("taxonomy_%s_%d", t.singular, pageNumber), htmlBase, taxonomyPagerNode, layouts...); err != nil {
results <- err
continue
}
}
}
if !viper.GetBool("DisableRSS") {
// XML Feed
n.Url = s.permalinkStr(base + "/index.xml")
n.Permalink = s.permalink(base)
rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", base+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
results <- err
continue
}
}
}
}
// RenderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
taxonomies := viper.GetStringMapString("Taxonomies")
for singular, plural := range taxonomies {
n := s.NewNode()
n.Title = strings.Title(plural)
s.setUrls(n, plural)
n.Data["Singular"] = singular
n.Data["Plural"] = plural
n.Data["Terms"] = s.Taxonomies[plural]
// keep the following just for legacy reasons
n.Data["OrderedIndex"] = n.Data["Terms"]
n.Data["Index"] = n.Data["Terms"]
layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
layouts = s.appendThemeTemplates(layouts)
if s.layoutExists(layouts...) {
if err := s.renderAndWritePage("taxonomy terms for "+singular, plural+"/index.html", n, layouts...); err != nil {
return err
}
}
}
return
}
func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
n := s.NewNode()
if viper.GetBool("PluralizeListTitles") {
n.Title = strings.Title(inflect.Pluralize(section))
} else {
n.Title = strings.Title(section)
}
s.setUrls(n, section)
n.Date = data[0].Page.Date
n.Data["Pages"] = data.Pages()
return n
}
// RenderSectionLists renders a page for each section
func (s *Site) RenderSectionLists() error {
for section, data := range s.Sections {
layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
n := s.newSectionListNode(section, data)
if err := s.renderAndWritePage(fmt.Sprintf("section_%s_%d", section, 1), fmt.Sprintf("/%s", section), n, layouts...); err != nil {
return err
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%s/%d", section, paginatePath, 1)), s.permalink(section))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
sectionPagerNode := s.newSectionListNode(section, data)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
sectionPagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", section, paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("section_%s_%d", section, pageNumber), filepath.FromSlash(htmlBase), sectionPagerNode, layouts...); err != nil {
return err
}
}
}
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
n.Url = s.permalinkStr(section + "/index.xml")
n.Permalink = s.permalink(section)
rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("section "+section+" rss", section+"/index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
return err
}
}
}
return nil
}
func (s *Site) newHomeNode() *Node {
n := s.NewNode()
n.Title = n.Site.Title
s.setUrls(n, "/")
n.Data["Pages"] = s.Pages
return n
}
func (s *Site) RenderHomePage() error {
n := s.newHomeNode()
layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html", "_default/single.html"})
if err := s.renderAndWritePage("homepage", "/", n, layouts...); err != nil {
return err
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%d", paginatePath, 1)), s.permalink("/"))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
homePagerNode := s.newHomeNode()
homePagerNode.paginator = pager
if pager.TotalPages() > 0 {
homePagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("homepage_%d", pageNumber), filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
return err
}
}
}
if !viper.GetBool("DisableRSS") {
// XML Feed
n.Url = s.permalinkStr("index.xml")
n.Title = ""
high := 50
if len(s.Pages) < high {
high = len(s.Pages)
}
n.Data["Pages"] = s.Pages[:high]
if len(s.Pages) > 0 {
n.Date = s.Pages[0].Date
}
rssLayouts := []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("homepage rss", "index.xml", n, s.appendThemeTemplates(rssLayouts)...); err != nil {
return err
}
}
// Force the `UglyUrls` option so the 404 page is always written as `404.html`
if !s.PageTarget().(*target.PagePub).UglyUrls {
s.PageTarget().(*target.PagePub).UglyUrls = true
defer func() { s.PageTarget().(*target.PagePub).UglyUrls = false }()
}
n.Url = helpers.Urlize("404.html")
n.Title = "404 Page not found"
n.Permalink = s.permalink("404.html")
nfLayouts := []string{"404.html"}
if nfErr := s.renderAndWritePage("404 page", "404.html", n, s.appendThemeTemplates(nfLayouts)...); nfErr != nil {
return nfErr
}
return nil
}
func (s *Site) RenderSitemap() error {
if viper.GetBool("DisableSitemap") {
return nil
}
sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap"))
n := s.NewNode()
// Prepend homepage to the list of pages
pages := make(Pages, 0)
page := &Page{}
page.Date = s.Info.LastChange
page.Site = &s.Info
page.Url = "/"
pages = append(pages, page)
pages = append(pages, s.Pages...)
n.Data["Pages"] = pages
for _, page := range pages {
if page.Sitemap.ChangeFreq == "" {
page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
}
if page.Sitemap.Priority == -1 {
page.Sitemap.Priority = sitemapDefault.Priority
}
}
smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
if err := s.renderAndWriteXML("sitemap", "sitemap.xml", n, s.appendThemeTemplates(smLayouts)...); err != nil {
return err
}
return nil
}
func (s *Site) Stats() {
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
jww.FEEDBACK.Printf("%d pages created \n", len(s.Pages))
jww.FEEDBACK.Printf("%d paginator pages created \n", s.Info.paginationPageCount)
taxonomies := viper.GetStringMapString("Taxonomies")
for _, pl := range taxonomies {
jww.FEEDBACK.Printf("%d %s created\n", len(s.Taxonomies[pl]), pl)
}
}
func (s *Site) setUrls(n *Node, in string) {
n.Url = helpers.UrlizeAndPrep(in)
n.Permalink = s.permalink(n.Url)
n.RSSLink = s.permalink(in + ".xml")
}
func (s *Site) permalink(plink string) template.HTML {
return template.HTML(s.permalinkStr(plink))
}
func (s *Site) permalinkStr(plink string) string {
return helpers.MakePermalink(string(viper.GetString("BaseUrl")), helpers.UrlizeAndPrep(plink)).String()
}
func (s *Site) NewNode() *Node {
return &Node{
Data: make(map[string]interface{}),
Site: &s.Info,
}
}
func (s *Site) layoutExists(layouts ...string) bool {
_, found := s.findFirstLayout(layouts...)
return found
}
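// renderAndWriteXML renders d with the first matching layout into a buffer
// prefixed with the XML declaration, makes URLs absolute, and writes the
// result to dest.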
func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error {
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")
err := s.render(name, d, renderBuffer, layouts...)
absURLInXML, err := transform.AbsURLInXML(viper.GetString("BaseUrl"))
if err != nil {
return err
}
outBuffer := bp.GetBuffer()
defer bp.PutBuffer(outBuffer)
transformer := transform.NewChain(absURLInXML...)
transformer.Apply(outBuffer, renderBuffer)
if err == nil {
err = s.WriteDestFile(dest, outBuffer)
}
return err
}
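// renderAndWritePage renders d with the first matching layout, optionally
// canonifies URLs and injects the LiveReload script, and writes the result
// to dest via the page target.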
func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layouts ...string) error {
renderBuffer := bp.GetBuffer()
defer bp.PutBuffer(renderBuffer)
err := s.render(name, d, renderBuffer, layouts...)
outBuffer := bp.GetBuffer()
defer bp.PutBuffer(outBuffer)
transformLinks := transform.NewEmptyTransforms()
if viper.GetBool("CanonifyUrls") {
absURL, err := transform.AbsURL(viper.GetString("BaseUrl"))
if err != nil {
return err
}
transformLinks = append(transformLinks, absURL...)
}
if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
transformLinks = append(transformLinks, transform.LiveReloadInject)
}
transformer := transform.NewChain(transformLinks...)
transformer.Apply(outBuffer, renderBuffer)
if err == nil {
if err = s.WriteDestPage(dest, outBuffer); err != nil {
return err
}
}
return err
}
func (s *Site) render(name string, d interface{}, renderBuffer *bytes.Buffer, layouts ...string) error {
layout, found := s.findFirstLayout(layouts...)
if !found {
jww.WARN.Printf("Unable to locate layout for %s: %s\n", name, layouts)
return nil
}
if err := s.renderThing(d, layout, renderBuffer); err != nil {
// Behavior here should be dependent on if running in server or watch mode.
jww.ERROR.Println(fmt.Errorf("Error while rendering %s: %v", name, err))
if !s.Running() {
os.Exit(-1)
}
}
return nil
}
func (s *Site) findFirstLayout(layouts ...string) (string, bool) {
for _, layout := range layouts {
if s.Tmpl.Lookup(layout) != nil {
return layout, true
}
}
return "", false
}
func (s *Site) renderThing(d interface{}, layout string, w io.Writer) error {
// If the template doesn't exist, then return, but leave the Writer open
if s.Tmpl.Lookup(layout) == nil {
return fmt.Errorf("Layout not found: %s", layout)
}
return s.Tmpl.ExecuteTemplate(w, layout, d)
}
func (s *Site) NewXMLBuffer() *bytes.Buffer {
header := "<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n"
return bytes.NewBufferString(header)
}
func (s *Site) PageTarget() target.Output {
s.initTargetList()
return s.Targets.Page
}
func (s *Site) FileTarget() target.Output {
s.initTargetList()
return s.Targets.File
}
func (s *Site) AliasTarget() target.AliasPublisher {
s.initTargetList()
return s.Targets.Alias
}
func (s *Site) initTargetList() {
s.targetListInit.Do(func() {
if s.Targets.Page == nil {
s.Targets.Page = &target.PagePub{
PublishDir: s.absPublishDir(),
UglyUrls: viper.GetBool("UglyUrls"),
}
}
if s.Targets.File == nil {
s.Targets.File = &target.Filesystem{
PublishDir: s.absPublishDir(),
}
}
if s.Targets.Alias == nil {
s.Targets.Alias = &target.HTMLRedirectAlias{
PublishDir: s.absPublishDir(),
}
}
})
}
func (s *Site) WriteDestFile(path string, reader io.Reader) (err error) {
jww.DEBUG.Println("creating file:", path)
return s.FileTarget().Publish(path, reader)
}
func (s *Site) WriteDestPage(path string, reader io.Reader) (err error) {
jww.DEBUG.Println("creating page:", path)
return s.PageTarget().Publish(path, reader)
}
func (s *Site) WriteDestAlias(path string, permalink template.HTML) (err error) {
jww.DEBUG.Println("alias created at:", path)
return s.AliasTarget().Publish(path, permalink)
}
func (s *Site) draftStats() string {
var msg string
switch s.draftCount {
case 0:
return "0 draft content "
case 1:
msg = "1 draft rendered "
default:
msg = fmt.Sprintf("%d drafts rendered", s.draftCount)
}
if viper.GetBool("BuildDrafts") {
return fmt.Sprintf("%d of ", s.draftCount) + msg
}
return "0 of " + msg
}
func (s *Site) futureStats() string {
var msg string
switch s.futureCount {
case 0:
return "0 future content "
case 1:
msg = "1 future rendered "
default:
msg = fmt.Sprintf("%d future rendered", s.futureCount)
}
if viper.GetBool("BuildFuture") {
return fmt.Sprintf("%d of ", s.futureCount) + msg
}
return "0 of " + msg
}
// getGoMaxProcs returns the concurrency hint from the GOMAXPROCS
// environment variable, falling back to 1.
func getGoMaxProcs() int {
if gmp := os.Getenv("GOMAXPROCS"); gmp != "" {
if p, err := strconv.Atoi(gmp); err == nil {
return p
}
}
return 1
}