// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"errors"
	"fmt"
	"html/template"
	"io"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"github.com/bep/inflect"
	"github.com/fsnotify/fsnotify"
	"github.com/spf13/afero"
	"github.com/spf13/cast"
	bp "github.com/spf13/hugo/bufferpool"
	"github.com/spf13/hugo/helpers"
	"github.com/spf13/hugo/hugofs"
	"github.com/spf13/hugo/parser"
	"github.com/spf13/hugo/source"
	"github.com/spf13/hugo/target"
	"github.com/spf13/hugo/tpl"
	"github.com/spf13/hugo/transform"
	jww "github.com/spf13/jwalterweatherman"
	"github.com/spf13/nitro"
	"github.com/spf13/viper"
)

var _ = transform.AbsURL

// used to indicate if run as a test.
var testMode bool

var defaultTimer *nitro.B

var (
	distinctErrorLogger    = helpers.NewDistinctErrorLogger()
	distinctFeedbackLogger = helpers.NewDistinctFeedbackLogger()
)

// nodeCache caches Nodes by key; used internally to discover duplicates.
type nodeCache struct {
	m map[string]*Node
	sync.RWMutex
}

func (c *nodeCache) reset() {
	c.m = make(map[string]*Node)
}

// Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows:
//
// 1. A list of Files is parsed and then converted into Pages.
//
// 2. Pages contain sections (based on the file they were generated from),
//    aliases and slugs (included in a page's frontmatter) which are the
//    various targets that will get generated. There will be a canonical
//    listing. The canonical path can be overruled based on a pattern.
//
// 3. Taxonomies are created via configuration and will present some aspect of
//    the final page and typically a permanent URL.
//
// 4. All Pages are passed through a template based on their desired layout,
//    determined from numerous different elements.
//
// 5. The entire collection of files is written to disk.
type Site struct {
	owner *HugoSites

	// Used internally to discover duplicates.
	nodeCache     *nodeCache
	nodeCacheInit sync.Once

	Pages          Pages
	AllPages       Pages
	rawAllPages    Pages
	Files          []*source.File
	Taxonomies     TaxonomyList
	Source         source.Input
	Sections       Taxonomy
	Info           SiteInfo
	Menus          Menus
	timer          *nitro.B
	targets        targetList
	targetListInit sync.Once
	draftCount     int
	futureCount    int
	expiredCount   int
	Data           map[string]interface{}
	Language       *helpers.Language
}

// reset returns a new Site prepared for rebuild.
func (s *Site) reset() *Site {
	return &Site{Language: s.Language}
}

// newSite creates a new site in the given language.
func newSite(lang *helpers.Language) *Site {
	return &Site{Language: lang, Info: SiteInfo{multilingual: newMultiLingualForLanguage(lang)}}
}

// newSiteDefaultLang creates a new site in the default language.
func newSiteDefaultLang() *Site {
	return newSite(helpers.NewDefaultLanguage())
}

// newSiteFromSources is a convenience func used in tests.
func newSiteFromSources(pathContentPairs ...string) *Site {
	if len(pathContentPairs)%2 != 0 {
		panic("pathContentPairs must come in pairs")
	}

	sources := make([]source.ByteSource, 0)

	for i := 0; i < len(pathContentPairs); i += 2 {
		path := pathContentPairs[i]
		content := pathContentPairs[i+1]
		sources = append(sources, source.ByteSource{Name: filepath.FromSlash(path), Content: []byte(content)})
	}

	return &Site{
		Source:   &source.InMemorySource{ByteSource: sources},
		Language: helpers.NewDefaultLanguage(),
	}
}
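
// Usage sketch (hypothetical content, not from the original source): each
// pair is a (path, content) tuple placed in an in-memory filesystem, e.g.
//
//	s := newSiteFromSources(
//		"sect/doc1.md", "---\ntitle: Doc 1\n---\nHello",
//		"sect/doc2.md", "---\ntitle: Doc 2\n---\nWorld",
//	)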

type targetList struct {
	page          target.Output
	pageUgly      target.Output
	file          target.Output
	alias         target.AliasPublisher
	languageAlias target.AliasPublisher
}

// SiteInfo holds the site-level data exposed to templates as .Site.
type SiteInfo struct {
	BaseURL               template.URL
	Taxonomies            TaxonomyList
	Authors               AuthorList
	Social                SiteSocial
	Sections              Taxonomy
	Pages                 *Pages // Includes only pages in this language.
	AllPages              *Pages // Includes other translated pages, excluding those in this language.
	rawAllPages           *Pages // Includes absolutely all pages, including drafts etc.
	Files                 *[]*source.File
	Menus                 *Menus
	Hugo                  *HugoInfo
	Title                 string
	RSSLink               string
	Author                map[string]interface{}
	LanguageCode          string
	DisqusShortname       string
	GoogleAnalytics       string
	Copyright             string
	LastChange            time.Time
	Permalinks            PermalinkOverrides
	Params                map[string]interface{}
	BuildDrafts           bool
	canonifyURLs          bool
	preserveTaxonomyNames bool
	paginationPageCount   uint64
	Data                  *map[string]interface{}

	owner                          *HugoSites
	multilingual                   *Multilingual
	Language                       *helpers.Language
	LanguagePrefix                 string
	Languages                      helpers.Languages
	defaultContentLanguageInSubdir bool
}

// newSiteInfoDefaultLanguage is used in tests.
func newSiteInfoDefaultLanguage(baseURL string, pages ...*Page) *SiteInfo {
	ps := Pages(pages)

	return &SiteInfo{
		BaseURL:      template.URL(baseURL),
		rawAllPages:  &ps,
		multilingual: newMultiLingualDefaultLanguage(),
	}
}

// SiteSocial is a place to put social details on a site level. These are the
// standard keys that themes will expect to have available, but can be
// expanded to any others on a per site basis. Standard keys:
//
//	github
//	facebook
//	facebook_admin
//	twitter
//	twitter_domain
//	googleplus
//	pinterest
//	instagram
//	youtube
//	linkedin
type SiteSocial map[string]string
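
// Example configuration (hypothetical values, not from the original source)
// that would populate this map, e.g. in config.toml:
//
//	[social]
//	twitter = "spf13"
//	github = "spf13"
//
// Templates can then read {{ .Site.Social.twitter }}.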

// GetParam gets a site parameter value if found, nil if not.
func (s *SiteInfo) GetParam(key string) interface{} {
	v := s.Params[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch val := v.(type) {
	case bool:
		return val
	case string:
		return val
	case int64, int32, int16, int8, int:
		return cast.ToInt(v)
	case float64, float32:
		return cast.ToFloat64(v)
	case time.Time:
		return val
	case []string:
		return v
	}
	return nil
}
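
// Usage sketch (hypothetical parameter, not from the original source): with
//
//	[params]
//	subtitle = "A Hugo Site"
//
// in the site config, both GetParam("subtitle") and GetParam("Subtitle")
// return "A Hugo Site", since keys are lower-cased before lookup. Values of
// types outside the switch above yield nil.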

// IsMultiLingual reports whether the site has more than one language.
func (s *SiteInfo) IsMultiLingual() bool {
	return len(s.Languages) > 1
}

// refLink resolves ref, a logical page path with an optional #fragment, to a
// permalink (or, if relative is true, a relative permalink) for the page it
// names. A bare fragment is returned with the page's unique ID appended
// unless plain ID anchors are configured.
func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error) {
	var refURL *url.URL
	var err error

	refURL, err = url.Parse(ref)

	if err != nil {
		return "", err
	}

	var target *Page
	var link string

	if refURL.Path != "" {
		refPath := filepath.FromSlash(refURL.Path)
		for _, page := range []*Page(*s.AllPages) {
			if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
				target = page
				break
			}
		}

		if target == nil {
			return "", fmt.Errorf("No page found with path or logical name \"%s\".\n", refURL.Path)
		}

		if relative {
			link, err = target.RelPermalink()
		} else {
			link, err = target.Permalink()
		}

		if err != nil {
			return "", err
		}
	}

	if refURL.Fragment != "" {
		link = link + "#" + refURL.Fragment

		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
			link = link + ":" + target.UniqueID()
		} else if page != nil && !page.getRenderingConfig().PlainIDAnchors {
			link = link + ":" + page.UniqueID()
		}
	}

	return link, nil
}

// Ref will give an absolute URL to ref in the given Page.
func (s *SiteInfo) Ref(ref string, page *Page) (string, error) {
	return s.refLink(ref, page, false)
}

// RelRef will give a relative URL to ref in the given Page.
func (s *SiteInfo) RelRef(ref string, page *Page) (string, error) {
	return s.refLink(ref, page, true)
}
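
// Usage sketch, taken from the commit message that introduced these funcs:
// in templates, {{ relref . "about.md" }} or {{ "about.md" | ref . }}; in
// content, the ref/relref shortcodes, e.g. [Who]({{% ref about.md#who %}}).
// A ref like "about.md" resolves to /about/, and "about.md#who" to
// /about/#who:deadbeef when unique ID anchors are enabled.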

// SourceRelativeLink attempts to convert any source page relative links (like [../another.md]) into absolute links
func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, error) {
	var refURL *url.URL
	var err error

	refURL, err = url.Parse(strings.TrimPrefix(ref, currentPage.getRenderingConfig().SourceRelativeLinksProjectFolder))
	if err != nil {
		return "", err
	}

	if refURL.Scheme != "" {
		// Not a relative source level path
		return ref, nil
	}

	var target *Page
	var link string

	if refURL.Path != "" {
		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))

		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails here; check the leading separator instead.
			refPath = refPath[1:]
		} else {
			if currentPage != nil {
				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
			}
		}

		for _, page := range []*Page(*s.AllPages) {
			if page.Source.Path() == refPath {
				target = page
				break
			}
		}

		// If no exact match was found, retry with ".md" appended, and then
		// with "/index.md".
		if target == nil {
			mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
			for _, page := range []*Page(*s.AllPages) {
				if page.Source.Path() == mdPath {
					target = page
					break
				}
			}
		}

		if target == nil {
			indexPath := filepath.Join(refPath, "index.md")
			for _, page := range []*Page(*s.AllPages) {
				if page.Source.Path() == indexPath {
					target = page
					break
				}
			}
		}

		if target == nil {
			return "", fmt.Errorf("No page found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
		}

		link, err = target.RelPermalink()

		if err != nil {
			return "", err
		}
	}

	if refURL.Fragment != "" {
		link = link + "#" + refURL.Fragment

		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
			link = link + ":" + target.UniqueID()
		} else if currentPage != nil && !currentPage.getRenderingConfig().PlainIDAnchors {
			link = link + ":" + currentPage.UniqueID()
		}
	}

	return link, nil
}

// SourceRelativeLinkFile attempts to convert any non-md source relative links (like [../another.gif]) into absolute links
func (s *SiteInfo) SourceRelativeLinkFile(ref string, currentPage *Page) (string, error) {
	var refURL *url.URL
	var err error

	refURL, err = url.Parse(strings.TrimPrefix(ref, currentPage.getRenderingConfig().SourceRelativeLinksProjectFolder))
	if err != nil {
		return "", err
	}

	if refURL.Scheme != "" {
		// Not a relative source level path
		return ref, nil
	}

	var target *source.File
	var link string

	if refURL.Path != "" {
		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))

		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails here; check the leading separator instead.
			refPath = refPath[1:]
		} else {
			if currentPage != nil {
				refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
			}
		}

		for _, file := range *s.Files {
			if file.Path() == refPath {
				target = file
				break
			}
		}

		if target == nil {
			return "", fmt.Errorf("No file found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
		}

		link = target.Path()
		return "/" + filepath.ToSlash(link), nil
	}

	return "", fmt.Errorf("failed to find a file to match \"%s\" on page \"%s\"", ref, currentPage.Source.Path())
}
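
// Example (hypothetical layout, not from the original source): on a page at
// content/post/first.md, a source relative link like [../images/logo.gif]
// is joined against the page's source dir ("post"), yielding the source path
// images/logo.gif, and is returned as the absolute path /images/logo.gif.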

func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
	atomic.AddUint64(&s.paginationPageCount, cnt)
}

type runmode struct {
	Watching bool
}

func (s *Site) running() bool {
	return s.owner.runMode.Watching
}

func init() {
	// Note: Initalize (sic) is the spelling exported by the nitro package.
	defaultTimer = nitro.Initalize()
}

func (s *Site) timerStep(step string) {
	if s.timer == nil {
		s.timer = defaultTimer
	}
	s.timer.Step(step)
}

// reBuild partially rebuilds a site given the filesystem events.
// It returns whether the content source was changed.
func (s *Site) reBuild(events []fsnotify.Event) (bool, error) {

	jww.DEBUG.Printf("Rebuild for events %q", events)

	s.timerStep("initialize rebuild")

	// First we need to determine what changed

	sourceChanged := []fsnotify.Event{}
	sourceReallyChanged := []fsnotify.Event{}
	tmplChanged := []fsnotify.Event{}
	dataChanged := []fsnotify.Event{}
	i18nChanged := []fsnotify.Event{}

	// prevent spamming the log on changes
	logger := helpers.NewDistinctFeedbackLogger()

	for _, ev := range events {
		// Need to re-read source
		if strings.HasPrefix(ev.Name, s.absContentDir()) {
			logger.Println("Source changed", ev.Name)
			sourceChanged = append(sourceChanged, ev)
		}
		if strings.HasPrefix(ev.Name, s.absLayoutDir()) || strings.HasPrefix(ev.Name, s.absThemeDir()) {
			logger.Println("Template changed", ev.Name)
			tmplChanged = append(tmplChanged, ev)
		}
		if strings.HasPrefix(ev.Name, s.absDataDir()) {
			logger.Println("Data changed", ev.Name)
			dataChanged = append(dataChanged, ev)
		}
		if strings.HasPrefix(ev.Name, s.absI18nDir()) {
			logger.Println("i18n changed", ev.Name)
			i18nChanged = append(i18nChanged, ev)
		}
	}

	if len(tmplChanged) > 0 {
		s.prepTemplates(nil)
		s.owner.tmpl.PrintErrors()
		s.timerStep("template prep")
	}

	if len(dataChanged) > 0 {
		s.readDataFromSourceFS()
	}

	if len(i18nChanged) > 0 {
		// TODO(bep ml
		s.readI18nSources()
	}

	// If a content file changes, we need to reload only it and re-render the entire site.

	// First step is to read the changed files and (re)place them in site.AllPages
	// This includes processing any meta-data for that content

	// The second step is to convert the content into HTML
	// This includes processing any shortcodes that may be present.

	// We do this in parallel... even though it's likely only one file at a time.
	// We need to process the reading prior to the conversion for each file, but
	// we can convert one file while another one is still reading.
	errs := make(chan error)
	readResults := make(chan HandledResult)
	filechan := make(chan *source.File)
	convertResults := make(chan HandledResult)
	pageChan := make(chan *Page)
	fileConvChan := make(chan *source.File)
	coordinator := make(chan bool)

	wg := &sync.WaitGroup{}
	wg.Add(2)
	for i := 0; i < 2; i++ {
		go sourceReader(s, filechan, readResults, wg)
	}

	wg2 := &sync.WaitGroup{}
	wg2.Add(4)
	for i := 0; i < 2; i++ {
		go fileConverter(s, fileConvChan, convertResults, wg2)
		go pageConverter(s, pageChan, convertResults, wg2)
	}

	for _, ev := range sourceChanged {
		// The incrementalReadCollator below will also make changes to the site's pages,
		// so we do this first to prevent races.
		if ev.Op&fsnotify.Remove == fsnotify.Remove {
			// remove the file & a create will follow
			path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir())
			s.removePageByPath(path)
			continue
		}

		// Some editors (Vim) sometimes issue only a Rename operation when writing an existing file.
		// Sometimes a rename operation means that the file has been renamed; other times it means
		// it's been updated.
		if ev.Op&fsnotify.Rename == fsnotify.Rename {
			// If the file is still on disk, it's only been updated; if it's not, it's been moved
			if ex, err := afero.Exists(hugofs.Source(), ev.Name); !ex || err != nil {
				path, _ := helpers.GetRelativePath(ev.Name, s.absContentDir())
				s.removePageByPath(path)
				continue
			}
		}

		sourceReallyChanged = append(sourceReallyChanged, ev)
	}

	go incrementalReadCollator(s, readResults, pageChan, fileConvChan, coordinator, errs)
	go converterCollator(s, convertResults, errs)

	if len(tmplChanged) > 0 || len(dataChanged) > 0 {
		// Do not need to read the files again, but they need conversion
		// for shortcode re-rendering.
		for _, p := range s.rawAllPages {
			if p.shouldBuild() {
				pageChan <- p
			}
		}
	}

	for _, ev := range sourceReallyChanged {
		file, err := s.reReadFile(ev.Name)

		if err != nil {
			jww.ERROR.Println("Error reading file", ev.Name, ";", err)
		}

		if file != nil {
			filechan <- file
		}
	}

	// we close the filechan as we have sent everything we want to send to it.
	// this will tell the sourceReaders to stop iterating on that channel
	close(filechan)

	// waiting for the sourceReaders to all finish
	wg.Wait()
	// Now closing readResults as this will tell the incrementalReadCollator to
	// stop iterating over that.
	close(readResults)

	// once readResults is finished it will close coordinator and move along
	<-coordinator
	// allow that routine to finish, then close page & fileconvchan as we've sent
	// everything to them we need to.
	close(pageChan)
	close(fileConvChan)

	wg2.Wait()
	close(convertResults)

	s.timerStep("read & convert pages from source")

	return len(sourceChanged) > 0, nil
}
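
// The rebuild pipeline above, summarized (derived from the code, for
// orientation only):
//
//	filechan -> sourceReader (x2) -> readResults -> incrementalReadCollator
//	incrementalReadCollator -> pageChan / fileConvChan
//	pageChan / fileConvChan -> pageConverter / fileConverter (x2 each)
//	pageConverter / fileConverter -> convertResults -> converterCollator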

func (s *Site) loadTemplates() {
	s.owner.tmpl = tpl.InitializeT()
	s.owner.tmpl.LoadTemplates(s.absLayoutDir())
	if s.hasTheme() {
		s.owner.tmpl.LoadTemplatesWithPrefix(s.absThemeDir()+"/layouts", "theme")
	}
}

func (s *Site) prepTemplates(withTemplate func(templ tpl.Template) error) error {
	s.loadTemplates()

	if withTemplate != nil {
		if err := withTemplate(s.owner.tmpl); err != nil {
			return err
		}
	}

	s.owner.tmpl.MarkReady()

	return nil
}
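
// Usage sketch (hypothetical template name; the AddTemplate method on
// tpl.Template is assumed here): callers can inject extra templates before
// the set is marked ready, e.g.
//
//	err := s.prepTemplates(func(t tpl.Template) error {
//		return t.AddTemplate("_default/single.html", "{{ .Content }}")
//	})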

func (s *Site) loadData(sources []source.Input) (err error) {
	jww.DEBUG.Printf("Load Data from %q", sources)
	s.Data = make(map[string]interface{})
	var current map[string]interface{}
	for _, currentSource := range sources {
		for _, r := range currentSource.Files() {
			// Crawl in data tree to insert data
			current = s.Data
			for _, key := range strings.Split(r.Dir(), helpers.FilePathSeparator) {
				if key != "" {
					if _, ok := current[key]; !ok {
						current[key] = make(map[string]interface{})
					}
					current = current[key].(map[string]interface{})
				}
			}

			data, err := readData(r)
			if err != nil {
				return fmt.Errorf("Failed to read data from %s: %s", filepath.Join(r.Path(), r.LogicalName()), err)
			}

			if data == nil {
				continue
			}

			// Copy content from current to data when needed
			if _, ok := current[r.BaseFileName()]; ok {
				data := data.(map[string]interface{})

				for key, value := range current[r.BaseFileName()].(map[string]interface{}) {
					if _, override := data[key]; override {
						// filepath.Walk walks the files in lexical order, '/' comes before '.'
						// this warning could happen if
						// 1. A theme uses the same key; the main data folder wins
						// 2. A sub folder uses the same key: the sub folder wins
						jww.WARN.Printf("Data for key '%s' in path '%s' is overridden in subfolder", key, r.Path())
					}
					data[key] = value
				}
			}

			// Insert data
			current[r.BaseFileName()] = data
		}
	}

	return
}
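
// Example mapping (hypothetical files, not from the original source): a file
// data/authors/bio.toml becomes s.Data["authors"]["bio"], reachable in
// templates as .Site.Data.authors.bio; the directory path supplies the
// nested keys and the base file name the leaf key.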

func readData(f *source.File) (interface{}, error) {
	switch f.Extension() {
	case "yaml", "yml":
		return parser.HandleYAMLMetaData(f.Bytes())
	case "json":
		return parser.HandleJSONMetaData(f.Bytes())
	case "toml":
		return parser.HandleTOMLMetaData(f.Bytes())
	default:
		jww.WARN.Printf("Data not supported for extension '%s'", f.Extension())
		return nil, nil
	}
}

func (s *Site) readI18nSources() error {
	i18nSources := []source.Input{&source.Filesystem{Base: s.absI18nDir()}}

	themeI18nDir, err := helpers.GetThemeI18nDirPath()
	if err == nil {
		i18nSources = []source.Input{&source.Filesystem{Base: themeI18nDir}, i18nSources[0]}
	}

	if err = loadI18n(i18nSources); err != nil {
		return err
	}

	return nil
}

func (s *Site) readDataFromSourceFS() error {
	dataSources := make([]source.Input, 0, 2)
	dataSources = append(dataSources, &source.Filesystem{Base: s.absDataDir()})

	// have to be last - duplicate keys in earlier entries will win
	themeDataDir, err := helpers.GetThemeDataDirPath()
	if err == nil {
		dataSources = append(dataSources, &source.Filesystem{Base: themeDataDir})
	}

	err = s.loadData(dataSources)
	s.timerStep("load data")
	return err
}

func (s *Site) preProcess(config BuildCfg) (err error) {
	s.timerStep("Go initialization")
	if err = s.initialize(); err != nil {
		return
	}
	s.prepTemplates(config.withTemplate)
	s.owner.tmpl.PrintErrors()
	s.timerStep("initialize & template prep")

	if err = s.readDataFromSourceFS(); err != nil {
		return
	}

	if err = s.readI18nSources(); err != nil {
		return
	}

	s.timerStep("load i18n")
	return s.createPages()
}

func (s *Site) postProcess() (err error) {
	s.setupPrevNext()

	if err = s.buildSiteMeta(); err != nil {
		return
	}
	s.timerStep("build taxonomies")
	return
}

func (s *Site) setupPrevNext() {
	for i, page := range s.Pages {
		if i < len(s.Pages)-1 {
			page.Next = s.Pages[i+1]
		}

		if i > 0 {
			page.Prev = s.Pages[i-1]
		}
	}
}
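
// Template usage sketch (illustrative markup, not from the original source):
// the links wired up above enable navigation such as
//
//	{{ with .Next }}<a href="{{ .Permalink }}">{{ .Title }}</a>{{ end }}
//	{{ with .Prev }}<a href="{{ .Permalink }}">{{ .Title }}</a>{{ end }}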

func (s *Site) render() (err error) {
	// There are sadly some global template funcs etc. that need the language information.
	viper.Set("Multilingual", s.multilingualEnabled())
	viper.Set("CurrentContentLanguage", s.Language)
	if err = tpl.SetTranslateLang(s.Language.Lang); err != nil {
		return
	}

	if err = s.renderAliases(); err != nil {
		return
	}
	s.timerStep("render and write aliases")
	if err = s.renderTaxonomiesLists(false); err != nil {
		return
	}
	s.timerStep("render and write taxonomies")
	if err = s.renderListsOfTaxonomyTerms(false); err != nil {
		return
	}
	s.timerStep("render & write taxonomy lists")
	if err = s.renderSectionLists(false); err != nil {
		return
	}
	s.timerStep("render and write lists")
	if err = s.renderPages(); err != nil {
		return
	}
	s.timerStep("render and write pages")
	if err = s.renderHomePage(false); err != nil {
		return
	}
	s.timerStep("render and write homepage")
	if err = s.renderSitemap(); err != nil {
		return
	}
	s.timerStep("render and write Sitemap")

	if err = s.renderRobotsTXT(); err != nil {
		return
	}
	s.timerStep("render and write robots.txt")

	return
}

func (s *Site) Initialise() (err error) {
	return s.initialize()
}

func (s *Site) initialize() (err error) {
	defer s.initializeSiteInfo()
	s.Menus = Menus{}

	// May be supplied in tests.
	if s.Source != nil && len(s.Source.Files()) > 0 {
		jww.DEBUG.Println("initialize: Source is already set")
		return
	}

	if err = s.checkDirectories(); err != nil {
		return err
	}

	staticDir := helpers.AbsPathify(viper.GetString("StaticDir") + "/")

	s.Source = &source.Filesystem{
		AvoidPaths: []string{staticDir},
		Base:       s.absContentDir(),
	}

	return
}

// HomeAbsURL is a convenience method giving the absolute URL to the home page.
func (s *SiteInfo) HomeAbsURL() string {
	base := ""
	if s.IsMultiLingual() {
		base = s.Language.Lang
	}
	return helpers.AbsURL(base, false)
}

// SitemapAbsURL is a convenience method giving the absolute URL to the sitemap.
func (s *SiteInfo) SitemapAbsURL() string {
	sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap"))
	return path.Join(s.HomeAbsURL(), sitemapDefault.Filename)
}
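
// Example (hypothetical config, not from the original source): with
// BaseURL = "https://example.org/" and a multilingual site rendering "fr",
// HomeAbsURL returns the language home https://example.org/fr/ and
// SitemapAbsURL joins the configured sitemap filename onto it. Note that
// path.Join collapses the "//" after the URL scheme, so callers should be
// careful treating the joined result as a full URL.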

func (s *Site) initializeSiteInfo() {
	var (
		lang      *helpers.Language = s.Language
		languages helpers.Languages
	)

	if s.owner != nil && s.owner.multilingual != nil {
		languages = s.owner.multilingual.Languages
	}

	params := lang.Params()

	permalinks := make(PermalinkOverrides)
	for k, v := range viper.GetStringMapString("Permalinks") {
		permalinks[k] = pathPattern(v)
	}

	defaultContentInSubDir := viper.GetBool("DefaultContentLanguageInSubdir")
	defaultContentLanguage := viper.GetString("DefaultContentLanguage")

	languagePrefix := ""
	if s.multilingualEnabled() && (defaultContentInSubDir || lang.Lang != defaultContentLanguage) {
		languagePrefix = "/" + lang.Lang
	}

	var multilingual *Multilingual
	if s.owner != nil {
		multilingual = s.owner.multilingual
	}

	s.Info = SiteInfo{
		BaseURL:                        template.URL(helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))),
		Title:                          lang.GetString("Title"),
		Author:                         lang.GetStringMap("author"),
		Social:                         lang.GetStringMapString("social"),
		LanguageCode:                   lang.GetString("languagecode"),
		Copyright:                      lang.GetString("copyright"),
		DisqusShortname:                lang.GetString("DisqusShortname"),
		multilingual:                   multilingual,
		Language:                       lang,
		LanguagePrefix:                 languagePrefix,
		Languages:                      languages,
		defaultContentLanguageInSubdir: defaultContentInSubDir,
		GoogleAnalytics:                lang.GetString("GoogleAnalytics"),
		RSSLink:                        permalinkStr(viper.GetString("RSSUri")),
		BuildDrafts:                    viper.GetBool("BuildDrafts"),
		canonifyURLs:                   viper.GetBool("CanonifyURLs"),
		preserveTaxonomyNames:          viper.GetBool("PreserveTaxonomyNames"),
		AllPages:                       &s.AllPages,
		Pages:                          &s.Pages,
		rawAllPages:                    &s.rawAllPages,
		Files:                          &s.Files,
		Menus:                          &s.Menus,
		Params:                         params,
		Permalinks:                     permalinks,
		Data:                           &s.Data,
		owner:                          s.owner,
	}
}

func (s *Site) hasTheme() bool {
	return viper.GetString("theme") != ""
}

func (s *Site) absDataDir() string {
	return helpers.AbsPathify(viper.GetString("DataDir"))
}

func (s *Site) absI18nDir() string {
	return helpers.AbsPathify(viper.GetString("I18nDir"))
}

func (s *Site) absThemeDir() string {
	return helpers.AbsPathify(viper.GetString("themesDir") + "/" + viper.GetString("theme"))
}

func (s *Site) absLayoutDir() string {
	return helpers.AbsPathify(viper.GetString("LayoutDir"))
}

func (s *Site) absContentDir() string {
	return helpers.AbsPathify(viper.GetString("ContentDir"))
}

func (s *Site) absPublishDir() string {
	return helpers.AbsPathify(viper.GetString("PublishDir"))
}

func (s *Site) checkDirectories() (err error) {
	if b, _ := helpers.DirExists(s.absContentDir(), hugofs.Source()); !b {
		return fmt.Errorf("no source directory found, expecting to find it at %s", s.absContentDir())
	}
	return
}

// reReadFile resets file to be read from disk again
func (s *Site) reReadFile(absFilePath string) (*source.File, error) {
	jww.INFO.Println("rereading", absFilePath)
	var file *source.File

	reader, err := source.NewLazyFileReader(hugofs.Source(), absFilePath)
	if err != nil {
		return nil, err
	}

	file, err = source.NewFileFromAbs(s.absContentDir(), absFilePath, reader)
	if err != nil {
		return nil, err
	}

	return file, nil
}

func (s *Site) readPagesFromSource() chan error {
	if s.Source == nil {
		panic(fmt.Sprintf("s.Source not set %s", s.absContentDir()))
	}

	jww.DEBUG.Printf("Read %d pages from source", len(s.Source.Files()))

	errs := make(chan error)
	if len(s.Source.Files()) < 1 {
		close(errs)
		return errs
	}

	files := s.Source.Files()
	results := make(chan HandledResult)
	filechan := make(chan *source.File)
	procs := getGoMaxProcs()
	wg := &sync.WaitGroup{}

	wg.Add(procs * 4)
	for i := 0; i < procs*4; i++ {
		go sourceReader(s, filechan, results, wg)
	}

	// we can only have exactly one result collator, since it makes changes that
	// must be synchronized.
	go readCollator(s, results, errs)

	for _, file := range files {
		filechan <- file
	}

	close(filechan)
	wg.Wait()
	close(results)

	return errs
}
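
// Shape of the pipeline above, as a sketch: procs*4 sourceReader goroutines
// drain filechan, exactly one readCollator serializes their HandledResults
// into site state, and the returned errs channel carries a single aggregated
// error (or nil) once everything has been read.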

func (s *Site) convertSource() chan error {
	errs := make(chan error)
	results := make(chan HandledResult)
	pageChan := make(chan *Page)
	fileConvChan := make(chan *source.File)
	procs := getGoMaxProcs()
	wg := &sync.WaitGroup{}

	wg.Add(2 * procs * 4)
	for i := 0; i < procs*4; i++ {
		go fileConverter(s, fileConvChan, results, wg)
		go pageConverter(s, pageChan, results, wg)
	}

	go converterCollator(s, results, errs)

	for _, p := range s.rawAllPages {
		if p.shouldBuild() {
			pageChan <- p
		}
	}

	for _, f := range s.Files {
		fileConvChan <- f
	}

	close(pageChan)
	close(fileConvChan)
	wg.Wait()
	close(results)

	return errs
}

func (s *Site) createPages() error {
	readErrs := <-s.readPagesFromSource()
	s.timerStep("read pages from source")

	renderErrs := <-s.convertSource()
	s.timerStep("convert source")

	if renderErrs == nil && readErrs == nil {
		return nil
	}
	if renderErrs == nil {
		return readErrs
	}
	if readErrs == nil {
		return renderErrs
	}

	return fmt.Errorf("%s\n%s", readErrs, renderErrs)
}
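
// Hypothetical call site (the real one lives in the site build driver):
//
//	if err := s.createPages(); err != nil {
//		jww.ERROR.Println("reading and converting content:", err)
//	}
//
// Both stages block on their collators, so the error returned here already
// aggregates every per-file failure.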

func sourceReader(s *Site, files <-chan *source.File, results chan<- HandledResult, wg *sync.WaitGroup) {
	defer wg.Done()
	for file := range files {
		readSourceFile(s, file, results)
	}
}

func readSourceFile(s *Site, file *source.File, results chan<- HandledResult) {
	h := NewMetaHandler(file.Extension())
	if h != nil {
		h.Read(file, s, results)
	} else {
		jww.ERROR.Println("Unsupported File Type", file.Path())
	}
}

func pageConverter(s *Site, pages <-chan *Page, results HandleResults, wg *sync.WaitGroup) {
	defer wg.Done()
	for page := range pages {
		var h *MetaHandle
		if page.Markup != "" {
			h = NewMetaHandler(page.Markup)
		} else {
			h = NewMetaHandler(page.File.Extension())
		}
		if h != nil {
			h.Convert(page, s, results)
		}
	}
}

func fileConverter(s *Site, files <-chan *source.File, results HandleResults, wg *sync.WaitGroup) {
	defer wg.Done()
	for file := range files {
		h := NewMetaHandler(file.Extension())
		if h != nil {
			h.Convert(file, s, results)
		}
	}
}

func converterCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
	errMsgs := []string{}
	for r := range results {
		if r.err != nil {
			errMsgs = append(errMsgs, r.err.Error())
			continue
		}
	}
	if len(errMsgs) == 0 {
		errs <- nil
		return
	}
	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
}

func (s *Site) addPage(page *Page) {
	s.rawAllPages = append(s.rawAllPages, page)
}

func (s *Site) removePageByPath(path string) {
	if i := s.rawAllPages.FindPagePosByFilePath(path); i >= 0 {
		s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
	}
}

func (s *Site) removePage(page *Page) {
	if i := s.rawAllPages.FindPagePos(page); i >= 0 {
		s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
	}
}

func (s *Site) replacePage(page *Page) {
	// will find existing page that matches filepath and remove it
	s.removePage(page)
	s.addPage(page)
}

func (s *Site) replaceFile(sf *source.File) {
	for i, f := range s.Files {
		if f.Path() == sf.Path() {
			s.Files[i] = sf
			return
		}
	}

	// If a match isn't found, then append it
	s.Files = append(s.Files, sf)
}

func incrementalReadCollator(s *Site, results <-chan HandledResult, pageChan chan *Page, fileConvChan chan *source.File, coordinator chan bool, errs chan<- error) {
	errMsgs := []string{}
	for r := range results {
		if r.err != nil {
			errMsgs = append(errMsgs, r.Error())
			continue
		}

		if r.page == nil {
			s.replaceFile(r.file)
			fileConvChan <- r.file
		} else {
			s.replacePage(r.page)
			pageChan <- r.page
		}
	}

	s.rawAllPages.Sort()
	close(coordinator)

	if len(errMsgs) == 0 {
		errs <- nil
		return
	}
	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
}

func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
	errMsgs := []string{}
	for r := range results {
		if r.err != nil {
			errMsgs = append(errMsgs, r.Error())
			continue
		}

		// !page == file
		if r.page == nil {
			s.Files = append(s.Files, r.file)
		} else {
			s.addPage(r.page)
		}
	}

	s.rawAllPages.Sort()
	if len(errMsgs) == 0 {
		errs <- nil
		return
	}
	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
}

func (s *Site) buildSiteMeta() (err error) {
	s.assembleMenus()

	if len(s.Pages) == 0 {
		return
	}

	s.assembleTaxonomies()
	s.assembleSections()
	s.Info.LastChange = s.Pages[0].Lastmod

	return
}

func (s *Site) getMenusFromConfig() Menus {

	ret := Menus{}

	if menus := viper.GetStringMap("menu"); menus != nil {
		for name, menu := range menus {
			m, err := cast.ToSliceE(menu)
			if err != nil {
				jww.ERROR.Printf("unable to process menus in site config\n")
				jww.ERROR.Println(err)
			} else {
				for _, entry := range m {
					jww.DEBUG.Printf("found menu: %q, in site config\n", name)

					menuEntry := MenuEntry{Menu: name}
					ime, err := cast.ToStringMapE(entry)
					if err != nil {
						jww.ERROR.Printf("unable to process menus in site config\n")
						jww.ERROR.Println(err)
					}

					menuEntry.marshallMap(ime)
					menuEntry.URL = s.Info.createNodeMenuEntryURL(menuEntry.URL)

					if ret[name] == nil {
						ret[name] = &Menu{}
					}
					*ret[name] = ret[name].add(&menuEntry)
				}
			}
		}
		return ret
	}
	return ret
}
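
// For reference, a site config fragment this function consumes looks roughly
// like this (TOML, illustrative values):
//
//	[[menu.main]]
//	  name = "About"
//	  url = "/about/"
//	  weight = 10
//
// Each such entry becomes a MenuEntry in the returned Menus map under the
// key "main".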

func (s *SiteInfo) createNodeMenuEntryURL(in string) string {

	if !strings.HasPrefix(in, "/") {
		return in
	}
	// make it match the nodes
	menuEntryURL := in
	menuEntryURL = helpers.SanitizeURLKeepTrailingSlash(helpers.URLize(menuEntryURL))
	if !s.canonifyURLs {
		menuEntryURL = helpers.AddContextRoot(string(s.BaseURL), menuEntryURL)
	}
	return menuEntryURL
}
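
// Illustrative behaviour (assuming CanonifyURLs is off): a root-relative
// input such as "/About Hugo/" is URLized to "/about-hugo/" and then given
// the BaseURL's context root, while anything not starting with "/" (for
// example an absolute external URL) is returned unchanged.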

func (s *Site) assembleMenus() {
	s.Menus = Menus{}

	type twoD struct {
		MenuName, EntryName string
	}
	flat := map[twoD]*MenuEntry{}
	children := map[twoD]Menu{}

	menuConfig := s.getMenusFromConfig()
	for name, menu := range menuConfig {
		for _, me := range *menu {
			flat[twoD{name, me.KeyName()}] = me
		}
	}

	sectionPagesMenu := viper.GetString("SectionPagesMenu")
	sectionPagesMenus := make(map[string]interface{})
	//creating flat hash
	for _, p := range s.Pages {

		if sectionPagesMenu != "" {
			if _, ok := sectionPagesMenus[p.Section()]; !ok {
				if p.Section() != "" {
					me := MenuEntry{Identifier: p.Section(),
						Name: helpers.MakeTitle(helpers.FirstUpper(p.Section())),
						URL:  s.Info.createNodeMenuEntryURL(p.addLangPathPrefix("/"+p.Section()) + "/")}
					if _, ok := flat[twoD{sectionPagesMenu, me.KeyName()}]; ok {
						// menu with same id defined in config, let that one win
						continue
					}
					flat[twoD{sectionPagesMenu, me.KeyName()}] = &me
					sectionPagesMenus[p.Section()] = true
				}
			}
		}

		for name, me := range p.Menus() {
			if _, ok := flat[twoD{name, me.KeyName()}]; ok {
				jww.ERROR.Printf("Two or more menu items have the same name/identifier in Menu %q: %q.\nRename or set a unique identifier.\n", name, me.KeyName())
				continue
			}
			flat[twoD{name, me.KeyName()}] = me
		}
	}

	// Create Children Menus First
	for _, e := range flat {
		if e.Parent != "" {
			children[twoD{e.Menu, e.Parent}] = children[twoD{e.Menu, e.Parent}].add(e)
		}
	}

	// Placing Children in Parents (in flat)
	for p, childmenu := range children {
		_, ok := flat[twoD{p.MenuName, p.EntryName}]
		if !ok {
			// if parent does not exist, create one without a URL
			flat[twoD{p.MenuName, p.EntryName}] = &MenuEntry{Name: p.EntryName, URL: ""}
		}
		flat[twoD{p.MenuName, p.EntryName}].Children = childmenu
	}

	// Assembling Top Level of Tree
	for menu, e := range flat {
		if e.Parent == "" {
			_, ok := s.Menus[menu.MenuName]
			if !ok {
				s.Menus[menu.MenuName] = &Menu{}
			}
			*s.Menus[menu.MenuName] = s.Menus[menu.MenuName].add(e)
		}
	}
}

func (s *Site) assembleTaxonomies() {
	s.Taxonomies = make(TaxonomyList)

	taxonomies := s.Language.GetStringMapString("Taxonomies")
	jww.INFO.Printf("found taxonomies: %#v\n", taxonomies)

	for _, plural := range taxonomies {
		s.Taxonomies[plural] = make(Taxonomy)
		for _, p := range s.Pages {
			vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
			weight := p.GetParam(plural + "_weight")
			if weight == nil {
				weight = 0
			}
			if vals != nil {
				if v, ok := vals.([]string); ok {
					for _, idx := range v {
						x := WeightedPage{weight.(int), p}
						s.Taxonomies[plural].add(idx, x, s.Info.preserveTaxonomyNames)
					}
				} else if v, ok := vals.(string); ok {
					x := WeightedPage{weight.(int), p}
					s.Taxonomies[plural].add(v, x, s.Info.preserveTaxonomyNames)
				} else {
					jww.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path())
				}
			}
		}
		for k := range s.Taxonomies[plural] {
			s.Taxonomies[plural][k].Sort()
		}
	}

	s.Info.Taxonomies = s.Taxonomies
}
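
// Illustrative front matter this loop consumes, for a "tags" taxonomy:
//
//	tags = ["go", "hugo"]
//	tags_weight = 22
//
// That page is added to s.Taxonomies["tags"] under both terms with weight 22;
// a missing <plural>_weight param defaults to 0 above.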

// Prepare site for a new full build.
func (s *Site) resetBuildState() {

	s.nodeCache.reset()

	s.Pages = make(Pages, 0)
	s.AllPages = make(Pages, 0)

	s.Info.paginationPageCount = 0
	s.draftCount = 0
	s.futureCount = 0
	s.expiredCount = 0

	for _, p := range s.rawAllPages {
		p.scratch = newScratch()
	}
}

func (s *Site) assembleSections() {
	s.Sections = make(Taxonomy)
	s.Info.Sections = s.Sections

	for i, p := range s.Pages {
		s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, s.Pages[i]}, s.Info.preserveTaxonomyNames)
	}

	for k := range s.Sections {
		s.Sections[k].Sort()

		for i, wp := range s.Sections[k] {
			if i > 0 {
				wp.Page.NextInSection = s.Sections[k][i-1].Page
			}
			if i < len(s.Sections[k])-1 {
				wp.Page.PrevInSection = s.Sections[k][i+1].Page
			}
		}
	}
}
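
// Note on the wiring above: the section is sorted first, so the entry at i-1
// is the one listed just before the current page and becomes its
// NextInSection, while i+1 becomes PrevInSection; with the default ordering
// that makes "next" point at the earlier-listed (typically newer) neighbour.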

// renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error {
	for _, p := range s.Pages {
		if len(p.Aliases) == 0 {
			continue
		}

		plink, err := p.Permalink()
		if err != nil {
			return err
		}

		for _, a := range p.Aliases {
			if err := s.writeDestAlias(a, plink); err != nil {
				return err
			}
		}
	}

	if s.owner.multilingual.enabled() && s.Info.defaultContentLanguageInSubdir {
		mainLang := s.owner.multilingual.DefaultLang.Lang
		mainLangURL := helpers.AbsURL(mainLang, false)
		jww.DEBUG.Printf("Write redirect to main language %s: %s", mainLang, mainLangURL)
		if err := s.publishDestAlias(s.languageAliasTarget(), "/", mainLangURL); err != nil {
			return err
		}
	}

	return nil
}

// renderPages renders pages each corresponding to a markdown file.
func (s *Site) renderPages() error {

	results := make(chan error)
	pages := make(chan *Page)
	errs := make(chan error)

	go errorCollator(results, errs)

	procs := getGoMaxProcs()

	// this cannot be fanned out to multiple Go routines
	// See issue #1601
	// TODO(bep): Check the IsRenderable logic.
	for _, p := range s.Pages {
		var layouts []string
		if !p.IsRenderable() {
			self := "__" + p.TargetPath()
			_, err := s.owner.tmpl.GetClone().New(self).Parse(string(p.Content))
			if err != nil {
				results <- err
				continue
			}
			layouts = append(layouts, self)
		} else {
			layouts = append(layouts, p.layouts()...)
			layouts = append(layouts, "_default/single.html")
		}
		p.layoutsCalculated = layouts
	}

	wg := &sync.WaitGroup{}

	for i := 0; i < procs*4; i++ {
		wg.Add(1)
		go pageRenderer(s, pages, results, wg)
	}

	for _, page := range s.Pages {
		pages <- page
	}

	close(pages)

	wg.Wait()

	close(results)

	err := <-errs
	if err != nil {
		return fmt.Errorf("Error(s) rendering pages: %s", err)
	}
	return nil
}

func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
	defer wg.Done()
	for p := range pages {
		err := s.renderAndWritePage("page "+p.FullFilePath(), p.TargetPath(), p, s.appendThemeTemplates(p.layouts())...)
		if err != nil {
			results <- err
		}
	}
}

func errorCollator(results <-chan error, errs chan<- error) {
	errMsgs := []string{}
	for err := range results {
		if err != nil {
			errMsgs = append(errMsgs, err.Error())
		}
	}
	if len(errMsgs) == 0 {
		errs <- nil
	} else {
		errs <- errors.New(strings.Join(errMsgs, "\n"))
	}
	close(errs)
}

func (s *Site) appendThemeTemplates(in []string) []string {
	if !s.hasTheme() {
		return in
	}

	out := []string{}
	// First place all non internal templates
	for _, t := range in {
		if !strings.HasPrefix(t, "_internal/") {
			out = append(out, t)
		}
	}

	// Then place theme templates with the same names
	for _, t := range in {
		if !strings.HasPrefix(t, "_internal/") {
			out = append(out, "theme/"+t)
		}
	}

	// Lastly place internal templates
	for _, t := range in {
		if strings.HasPrefix(t, "_internal/") {
			out = append(out, t)
		}
	}
	return out
}
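
// Illustrative input/output (hypothetical layouts): with a theme set,
// []string{"_default/list.html", "_internal/_default/rss.xml"} becomes
// ["_default/list.html", "theme/_default/list.html",
// "_internal/_default/rss.xml"]: project templates first, their theme
// counterparts next, and internal fallbacks always last.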

type taxRenderInfo struct {
	key      string
	pages    WeightedPages
	singular string
	plural   string
}

// renderTaxonomiesLists renders the listing pages based on the meta data;
// each unique term within a taxonomy will have a page created.
func (s *Site) renderTaxonomiesLists(prepare bool) error {
	wg := &sync.WaitGroup{}

	taxes := make(chan taxRenderInfo)
	results := make(chan error)

	procs := getGoMaxProcs()

	for i := 0; i < procs*4; i++ {
		wg.Add(1)
		go taxonomyRenderer(prepare, s, taxes, results, wg)
	}

	errs := make(chan error)

	go errorCollator(results, errs)

	taxonomies := s.Language.GetStringMapString("Taxonomies")
	for singular, plural := range taxonomies {
		for key, pages := range s.Taxonomies[plural] {
			taxes <- taxRenderInfo{key, pages, singular, plural}
		}
	}
	close(taxes)

	wg.Wait()

	close(results)

	err := <-errs
	if err != nil {
		return fmt.Errorf("Error(s) rendering taxonomies: %s", err)
	}
	return nil
}

func (s *Site) newTaxonomyNode(prepare bool, t taxRenderInfo, counter int) (*Node, string) {
	key := t.key
	n := s.nodeLookup(fmt.Sprintf("tax-%s-%s", t.plural, key), counter, prepare)

	if s.Info.preserveTaxonomyNames {
		key = helpers.MakePathSanitized(key)
	}
	base := t.plural + "/" + key

	if !prepare {
		return n, base
	}

	if s.Info.preserveTaxonomyNames {
		// keep as is in the title
		n.Title = t.key
	} else {
		n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
	}
	s.setURLs(n, base)
	if len(t.pages) > 0 {
		n.Date = t.pages[0].Page.Date
		n.Lastmod = t.pages[0].Page.Lastmod
	}
	n.Data[t.singular] = t.pages
	n.Data["Singular"] = t.singular
	n.Data["Plural"] = t.plural
	n.Data["Pages"] = t.pages.Pages()
	return n, base
}

func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
	defer wg.Done()

	var n *Node

	for t := range taxes {

		var (
			base                   string
			baseWithLanguagePrefix string
			paginatePath           string
			layouts                []string
		)

		n, base = s.newTaxonomyNode(prepare, t, 0)

		if prepare {
			continue
		}

		baseWithLanguagePrefix = n.addLangPathPrefix(base)

		layouts = s.appendThemeTemplates(
			[]string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})

		dest := base
		if viper.GetBool("UglyURLs") {
			dest = helpers.Uglify(baseWithLanguagePrefix + ".html")
		} else {
			dest = helpers.PrettifyPath(baseWithLanguagePrefix + "/index.html")
		}

		if err := s.renderAndWritePage("taxonomy "+t.singular, dest, n, layouts...); err != nil {
			results <- err
			continue
		}

		if n.paginator != nil {

			paginatePath = viper.GetString("paginatePath")

			// write alias for page 1
			s.writeDestAlias(helpers.PaginateAliasPath(baseWithLanguagePrefix, 1), n.Permalink())

			pagers := n.paginator.Pagers()

			for i, pager := range pagers {
				if i == 0 {
					// already created
					continue
				}

				taxonomyPagerNode, _ := s.newTaxonomyNode(true, t, i)

				taxonomyPagerNode.paginator = pager
				if pager.TotalPages() > 0 {
					first, _ := pager.page(0)
					taxonomyPagerNode.Date = first.Date
					taxonomyPagerNode.Lastmod = first.Lastmod
				}

				pageNumber := i + 1
				htmlBase := fmt.Sprintf("/%s/%s/%d", baseWithLanguagePrefix, paginatePath, pageNumber)
				if err := s.renderAndWritePage(fmt.Sprintf("taxonomy %s", t.singular), htmlBase, taxonomyPagerNode, layouts...); err != nil {
					results <- err
					continue
				}
			}
		}

		if prepare {
			continue
		}

		if !viper.GetBool("DisableRSS") {
			// XML Feed
			c := *n
			rssNode := &c
			rssNode.nodeID = ""
			rssuri := viper.GetString("RSSUri")
			s.setURLs(rssNode, base+"/"+rssuri)

			rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}

			if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
				results <- err
				continue
			}
		}
	}
}

// renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
	taxonomies := s.Language.GetStringMapString("Taxonomies")
	for singular, plural := range taxonomies {
		n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)

		if prepare {
			n.Title = strings.Title(plural)
			s.setURLs(n, plural)
			n.Data["Singular"] = singular
			n.Data["Plural"] = plural
			n.Data["Terms"] = s.Taxonomies[plural]
			// keep the following just for legacy reasons
			n.Data["OrderedIndex"] = n.Data["Terms"]
			n.Data["Index"] = n.Data["Terms"]

			continue
		}

		layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
		layouts = s.appendThemeTemplates(layouts)
		if s.layoutExists(layouts...) {
			if err := s.renderAndWritePage("taxonomy terms for "+singular, n.addLangPathPrefix(plural+"/index.html"), n, layouts...); err != nil {
				return err
			}
		}
	}

	return
}

func (s *Site) newSectionListNode(prepare bool, sectionName, section string, data WeightedPages, counter int) *Node {
	n := s.nodeLookup(fmt.Sprintf("sect-%s", sectionName), counter, prepare)

	if !prepare {
		return n
	}

	sectionName = helpers.FirstUpper(sectionName)
	if viper.GetBool("PluralizeListTitles") {
		n.Title = inflect.Pluralize(sectionName)
	} else {
		n.Title = sectionName
	}
	s.setURLs(n, section)
	n.Date = data[0].Page.Date
	n.Lastmod = data[0].Page.Lastmod
	n.Data["Pages"] = data.Pages()

	return n
}

// renderSectionLists renders a page for each section
func (s *Site) renderSectionLists(prepare bool) error {
	for section, data := range s.Sections {
		// section keys can be lower case (depending on site.pathifyTaxonomyKeys)
		// extract the original casing from the first page to get sensible titles.
		sectionName := section
		if !s.Info.preserveTaxonomyNames && len(data) > 0 {
			sectionName = data[0].Page.Section()
		}

		n := s.newSectionListNode(prepare, sectionName, section, data, 0)

		if prepare {
			continue
		}

		layouts := s.appendThemeTemplates(
			[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})

		if s.Info.preserveTaxonomyNames {
			section = helpers.MakePathSanitized(section)
		}

		base := n.addLangPathPrefix(section)

		if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), base, n, s.appendThemeTemplates(layouts)...); err != nil {
			return err
		}

		if n.paginator != nil {

			paginatePath := viper.GetString("paginatePath")

			// write alias for page 1
			s.writeDestAlias(helpers.PaginateAliasPath(base, 1), permalink(base))

			pagers := n.paginator.Pagers()

			for i, pager := range pagers {
				if i == 0 {
					// already created
					continue
				}

				sectionPagerNode := s.newSectionListNode(true, sectionName, section, data, i)
				sectionPagerNode.paginator = pager
				if pager.TotalPages() > 0 {
					first, _ := pager.page(0)
					sectionPagerNode.Date = first.Date
					sectionPagerNode.Lastmod = first.Lastmod
				}
				pageNumber := i + 1
				htmlBase := fmt.Sprintf("/%s/%s/%d", base, paginatePath, pageNumber)
				if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), filepath.FromSlash(htmlBase), sectionPagerNode, layouts...); err != nil {
					return err
				}
			}
		}

		if prepare {
			return nil
		}

		if !viper.GetBool("DisableRSS") && section != "" {
			// XML Feed
			rssuri := viper.GetString("RSSUri")
			s.setURLs(n, section+"/"+rssuri)
			rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
			if err := s.renderAndWriteXML("section "+section+" rss", n.addLangPathPrefix(section+"/"+rssuri), n, s.appendThemeTemplates(rssLayouts)...); err != nil {
				return err
			}
		}
	}
	return nil
}

func (s *Site) renderHomePage(prepare bool) error {

	n := s.newHomeNode(prepare, 0)
	if prepare {
		return nil
	}

	layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html"})
	base := n.addLangFilepathPrefix("")
	if err := s.renderAndWritePage("homepage", base, n, layouts...); err != nil {
		return err
	}

	if n.paginator != nil {
		paginatePath := viper.GetString("paginatePath")

		{
			// write alias for page 1
			// TODO(bep) ml all of these n.addLang ... fix.
			s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
		}

		pagers := n.paginator.Pagers()

		for i, pager := range pagers {
			if i == 0 {
				// already created
				continue
			}

			homePagerNode := s.newHomeNode(true, i)

			homePagerNode.paginator = pager
			if pager.TotalPages() > 0 {
				first, _ := pager.page(0)
				homePagerNode.Date = first.Date
				homePagerNode.Lastmod = first.Lastmod
			}

			pageNumber := i + 1
			htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
			htmlBase = n.addLangPathPrefix(htmlBase)
			if err := s.renderAndWritePage("homepage",
				filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
				return err
			}

		}
	}

	if !viper.GetBool("DisableRSS") {
		// XML Feed
		rssNode := s.newNode("rss-home")
		s.setURLs(rssNode, viper.GetString("RSSUri"))
		rssNode.Title = ""
		high := 50
		if len(s.Pages) < high {
			high = len(s.Pages)
		}
		rssNode.Data["Pages"] = s.Pages[:high]
		if len(s.Pages) > 0 {
			rssNode.Date = s.Pages[0].Date
			rssNode.Lastmod = s.Pages[0].Lastmod
		}

		rssLayouts := []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}

		if err := s.renderAndWriteXML("homepage rss", rssNode.addLangPathPrefix(viper.GetString("RSSUri")), rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
			return err
		}
	}

	if viper.GetBool("Disable404") {
		return nil
	}

	node404 := s.newNode("404")
	node404.Title = "404 Page not found"
	s.setURLs(node404, "404.html")

	nfLayouts := []string{"404.html"}
	if nfErr := s.renderAndWritePage("404 page", "404.html", node404, s.appendThemeTemplates(nfLayouts)...); nfErr != nil {
		return nfErr
	}

	return nil
}

func (s *Site) newHomeNode(prepare bool, counter int) *Node {
	n := s.nodeLookup("home", counter, prepare)
	n.Title = n.Site.Title
	n.IsHome = true
	s.setURLs(n, "/")
	n.Data["Pages"] = s.Pages
	if len(s.Pages) != 0 {
		n.Date = s.Pages[0].Date
		n.Lastmod = s.Pages[0].Lastmod
	}
	return n
}

func (s *Site) renderSitemap() error {
	if viper.GetBool("DisableSitemap") {
		return nil
	}

	sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap"))

	n := s.newNode("sitemap")

	// Prepend homepage to the list of pages
	pages := make(Pages, 0)

	page := &Page{}
	page.language = s.Language
	page.Date = s.Info.LastChange
	page.Lastmod = s.Info.LastChange
	page.Site = &s.Info
	page.URLPath.URL = ""
	page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
	page.Sitemap.Priority = sitemapDefault.Priority

	pages = append(pages, page)
	pages = append(pages, s.Pages...)

	n.Data["Pages"] = pages

	for _, page := range pages {
		if page.Sitemap.ChangeFreq == "" {
			page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
		}

		if page.Sitemap.Priority == -1 {
			page.Sitemap.Priority = sitemapDefault.Priority
		}

		if page.Sitemap.Filename == "" {
			page.Sitemap.Filename = sitemapDefault.Filename
		}
	}

	smLayouts := []string{"sitemap.xml", "_default/sitemap.xml", "_internal/_default/sitemap.xml"}
	addLanguagePrefix := n.Site.IsMultiLingual()
	if err := s.renderAndWriteXML("sitemap", n.addLangPathPrefixIfFlagSet(page.Sitemap.Filename, addLanguagePrefix), n, s.appendThemeTemplates(smLayouts)...); err != nil {
		return err
	}

	return nil
}

func (s *Site) renderRobotsTXT() error {
	if !viper.GetBool("EnableRobotsTXT") {
		return nil
	}

	n := s.newNode("robots")
	n.Data["Pages"] = s.Pages

	rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
	outBuffer := bp.GetBuffer()
	defer bp.PutBuffer(outBuffer)
	err := s.renderForLayouts("robots", n, outBuffer, s.appendThemeTemplates(rLayouts)...)

	if err == nil {
		err = s.writeDestFile("robots.txt", outBuffer)
	}

	return err
}

// Stats prints Hugo build stats to the console.
// This is what you see after a successful hugo build.
func (s *Site) Stats() {
	jww.FEEDBACK.Printf("Built site for language %s:\n", s.Language.Lang)
	jww.FEEDBACK.Println(s.draftStats())
	jww.FEEDBACK.Println(s.futureStats())
	jww.FEEDBACK.Println(s.expiredStats())
	jww.FEEDBACK.Printf("%d pages created\n", len(s.Pages))
	jww.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files))
	jww.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount)
	taxonomies := s.Language.GetStringMapString("Taxonomies")

	for _, pl := range taxonomies {
		jww.FEEDBACK.Printf("%d %s created\n", len(s.Taxonomies[pl]), pl)
	}
}

func (s *Site) setURLs(n *Node, in string) {
	n.URLPath.URL = helpers.URLizeAndPrep(in)
	n.URLPath.Permalink = permalink(n.URLPath.URL)
	n.RSSLink = template.HTML(permalink(in + ".xml"))
}

func permalink(plink string) string {
	return permalinkStr(plink)
}

func permalinkStr(plink string) string {
	return helpers.MakePermalink(viper.GetString("BaseURL"), helpers.URLizeAndPrep(plink)).String()
}
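
// Illustrative only: with BaseURL "https://example.com/",
// permalinkStr("about") yields something like "https://example.com/about/";
// the exact trailing slash depends on helpers.URLizeAndPrep and the
// UglyURLs setting.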

func (s *Site) newNode(nodeID string) *Node {
	return s.nodeLookup(nodeID, 0, true)
}

func (s *Site) getNode(nodeID string) *Node {
	return s.getOrAddNode(nodeID, false)
}

func (s *Site) getOrAddNode(nodeID string, add bool) *Node {
	s.nodeCacheInit.Do(func() {
		s.nodeCache = &nodeCache{m: make(map[string]*Node)}
	})

	s.nodeCache.RLock()
	if n, ok := s.nodeCache.m[nodeID]; ok {
		s.nodeCache.RUnlock()
		if !add {
			return n
		}
		panic(fmt.Sprintf("Node with ID %q in use", nodeID))
	}

	s.nodeCache.RUnlock()
	s.nodeCache.Lock()

	if !add {
		// this is a test type error, print the keys
		for k := range s.nodeCache.m {
			fmt.Println("Node:", k)
		}
		s.nodeCache.Unlock()
		return nil
	}

	// Double check
	if _, ok := s.nodeCache.m[nodeID]; ok {
		s.nodeCache.Unlock()
		panic(fmt.Sprintf("Node with ID %q in use", nodeID))
	}

	n := &Node{
		nodeID:   nodeID,
		Data:     make(map[string]interface{}),
		Site:     &s.Info,
		language: s.Language,
	}

	s.nodeCache.m[nodeID] = n
	s.nodeCache.Unlock()
	return n
}
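
// The lookup above is a read-mostly, double-checked pattern: a fast path
// under RLock for the common hit, then a re-check under the write Lock
// before inserting, so concurrent renders can share cached nodes without
// racing on the map.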

func (s *Site) nodeLookup(nodeIDPrefix string, counter int, add bool) *Node {

	nodeID := fmt.Sprintf("%s-%d", nodeIDPrefix, counter)

	n := s.getOrAddNode(nodeID, add)

	// Paginator nodes (counter > 0) get created during rendering and cannot take part in any
	// global translations mapping
	if add && s.owner != nil && counter == 0 {
		s.owner.addNode(nodeID, n)
	}

	return n
}

func (s *Site) layoutExists(layouts ...string) bool {
	_, found := s.findFirstLayout(layouts...)

	return found
}

func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layouts ...string) error {
	renderBuffer := bp.GetBuffer()
	defer bp.PutBuffer(renderBuffer)
	renderBuffer.WriteString("<?xml version=\"1.0\" encoding=\"utf-8\" standalone=\"yes\" ?>\n")

	err := s.renderForLayouts(name, d, renderBuffer, layouts...)

	if err != nil {
		return err
	}

	outBuffer := bp.GetBuffer()
	defer bp.PutBuffer(outBuffer)

	var path []byte
	if viper.GetBool("RelativeURLs") {
		path = []byte(helpers.GetDottedRelativePath(dest))
	} else {
		s := viper.GetString("BaseURL")
		if !strings.HasSuffix(s, "/") {
			s += "/"
		}
		path = []byte(s)
	}
	transformer := transform.NewChain(transform.AbsURLInXML)
	transformer.Apply(outBuffer, renderBuffer, path)

	return s.writeDestFile(dest, outBuffer)
}

func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layouts ...string) error {
	renderBuffer := bp.GetBuffer()
	defer bp.PutBuffer(renderBuffer)

	err := s.renderForLayouts(name, d, renderBuffer, layouts...)

	if err != nil {
		return err
	}

	outBuffer := bp.GetBuffer()
	defer bp.PutBuffer(outBuffer)

	var pageTarget target.Output

	if p, ok := d.(*Page); ok && path.Ext(p.URLPath.URL) != "" {
		// user has explicitly set a URL with extension for this page
		// make sure it sticks even if "ugly URLs" are turned off.
		pageTarget = s.pageUglyTarget()
	} else {
		pageTarget = s.pageTarget()
	}

	transformLinks := transform.NewEmptyTransforms()

	if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
		transformLinks = append(transformLinks, transform.AbsURL)
	}

	if s.running() && viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
		transformLinks = append(transformLinks, transform.LiveReloadInject)
	}

	// For performance reasons we only inject the Hugo generator tag on the home page.
	if n, ok := d.(*Node); ok && n.IsHome {
		if !viper.GetBool("DisableHugoGeneratorInject") {
			transformLinks = append(transformLinks, transform.HugoGeneratorInject)
		}
	}

	var path []byte

	if viper.GetBool("RelativeURLs") {
		translated, err := pageTarget.(target.OptionalTranslator).TranslateRelative(dest)
		if err != nil {
			return err
		}
		path = []byte(helpers.GetDottedRelativePath(translated))
	} else if viper.GetBool("CanonifyURLs") {
		s := viper.GetString("BaseURL")
		if !strings.HasSuffix(s, "/") {
			s += "/"
		}
		path = []byte(s)
	}

	transformer := transform.NewChain(transformLinks...)
	transformer.Apply(outBuffer, renderBuffer, path)

	if outBuffer.Len() == 0 {

		jww.WARN.Printf("%q is rendered empty\n", dest)
		if dest == "/" {
			debugAddend := ""
			if !viper.GetBool("Verbose") {
				debugAddend = "* For more debugging information, run \"hugo -v\""
			}
			distinctFeedbackLogger.Printf(`=============================================================
Your rendered home page is blank: /index.html is zero-length
 * Did you specify a theme on the command-line or in your
   %q file? (Current theme: %q)
 %s
=============================================================`,
				filepath.Base(viper.ConfigFileUsed()),
				viper.GetString("Theme"),
				debugAddend)
		}

	}

	if err == nil {
		if err = s.writeDestPage(dest, pageTarget, outBuffer); err != nil {
			return err
		}
	}
	return err
}
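
// renderForLayouts renders d through the first of the given layouts that is
// registered, writing the output to w. A missing layout is logged as a
// warning but is not treated as an error.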
func (s *Site) renderForLayouts(name string, d interface{}, w io.Writer, layouts ...string) error {
	layout, found := s.findFirstLayout(layouts...)
	if !found {
		jww.WARN.Printf("Unable to locate layout for %s: %s\n", name, layouts)
		return nil
	}

	if err := s.renderThing(d, layout, w); err != nil {
		// Behavior here should depend on whether Hugo is running in server or watch mode.
		distinctErrorLogger.Printf("Error while rendering %s: %v", name, err)
		if !s.running() && !testMode {
			// TODO(bep) check if this can be propagated
			os.Exit(-1)
		} else if testMode {
			return err
		}
	}

	return nil
}
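
// findFirstLayout returns the first of the given layout names that is known
// to the site's template set, and whether one was found at all.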
func (s *Site) findFirstLayout(layouts ...string) (string, bool) {
	for _, layout := range layouts {
		if s.owner.tmpl.Lookup(layout) != nil {
			return layout, true
		}
	}
	return "", false
}
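
// renderThing executes the given layout template against d, writing the
// result to w.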
func (s *Site) renderThing(d interface{}, layout string, w io.Writer) error {
	// Execute the layout template if it is registered; otherwise report the
	// missing layout without writing anything to w.
	if templ := s.owner.tmpl.Lookup(layout); templ != nil {
		return templ.Execute(w, d)
	}
	return fmt.Errorf("layout not found: %s", layout)
}
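
// The accessors below hand out the site's output targets, lazily building
// the target list on first use.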
func (s *Site) pageTarget() target.Output {
	s.initTargetList()
	return s.targets.page
}

func (s *Site) pageUglyTarget() target.Output {
	s.initTargetList()
	return s.targets.pageUgly
}

func (s *Site) fileTarget() target.Output {
	s.initTargetList()
	return s.targets.file
}

func (s *Site) aliasTarget() target.AliasPublisher {
	s.initTargetList()
	return s.targets.alias
}

func (s *Site) languageAliasTarget() target.AliasPublisher {
	s.initTargetList()
	return s.targets.languageAlias
}
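
// initTargetList builds the output targets exactly once; every target
// publishes below the site's absolute publish directory.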
func (s *Site) initTargetList() {
	s.targetListInit.Do(func() {
		if s.targets.page == nil {
			s.targets.page = &target.PagePub{
				PublishDir: s.absPublishDir(),
				UglyURLs:   viper.GetBool("UglyURLs"),
			}
		}
		if s.targets.pageUgly == nil {
			s.targets.pageUgly = &target.PagePub{
				PublishDir: s.absPublishDir(),
				UglyURLs:   true,
			}
		}
		if s.targets.file == nil {
			s.targets.file = &target.Filesystem{
				PublishDir: s.absPublishDir(),
			}
		}
		if s.targets.alias == nil {
			s.targets.alias = &target.HTMLRedirectAlias{
				PublishDir: s.absPublishDir(),
			}
		}
		if s.targets.languageAlias == nil {
			s.targets.languageAlias = &target.HTMLRedirectAlias{
				PublishDir: s.absPublishDir(),
				AllowRoot:  true,
			}
		}
	})
}
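
// writeDestFile publishes the content of reader to path via the plain file
// target.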
func (s *Site) writeDestFile(path string, reader io.Reader) error {
	jww.DEBUG.Println("creating file:", path)
	return s.fileTarget().Publish(path, reader)
}
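
// writeDestPage publishes a rendered page to path through the given
// publisher, which decides between "ugly" and "pretty" URL layouts.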
func (s *Site) writeDestPage(path string, publisher target.Publisher, reader io.Reader) error {
	jww.DEBUG.Println("creating page:", path)
	return publisher.Publish(path, reader)
}
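
// writeDestAlias and publishDestAlias write an HTML redirect (alias) file at
// path pointing to permalink. With RelativeURLs enabled, the permalink is
// first rewritten relative to the location of the alias file.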
func (s *Site) writeDestAlias(path string, permalink string) error {
	return s.publishDestAlias(s.aliasTarget(), path, permalink)
}

func (s *Site) publishDestAlias(aliasPublisher target.AliasPublisher, path string, permalink string) (err error) {
	if viper.GetBool("RelativeURLs") {
		// convert `permalink` into a URI relative to the location of `path`
		baseURL := helpers.SanitizeURLKeepTrailingSlash(viper.GetString("BaseURL"))
		if strings.HasPrefix(permalink, baseURL) {
			permalink = "/" + strings.TrimPrefix(permalink, baseURL)
		}
		permalink, err = helpers.GetRelativePath(permalink, path)
		if err != nil {
			jww.ERROR.Println("Failed to make a RelativeURL alias:", path, "redirecting to", permalink)
		}
		permalink = filepath.ToSlash(permalink)
	}
	jww.DEBUG.Println("creating alias:", path, "redirecting to", permalink)
	return aliasPublisher.Publish(path, permalink)
}
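
// draftStats reports how many drafts were found and how many were rendered;
// with BuildDrafts disabled and, say, five drafts present, this yields
// "0 of 5 drafts rendered".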
func (s *Site) draftStats() string {
	var msg string

	switch s.draftCount {
	case 0:
		return "0 draft content"
	case 1:
		msg = "1 draft rendered"
	default:
		msg = fmt.Sprintf("%d drafts rendered", s.draftCount)
	}

	if viper.GetBool("BuildDrafts") {
		return fmt.Sprintf("%d of ", s.draftCount) + msg
	}

	return "0 of " + msg
}
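
// futureStats does the same for future-dated content, governed by the
// BuildFuture setting.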
func (s *Site) futureStats() string {
	var msg string

	switch s.futureCount {
	case 0:
		return "0 future content"
	case 1:
		msg = "1 future rendered"
	default:
		msg = fmt.Sprintf("%d futures rendered", s.futureCount)
	}

	if viper.GetBool("BuildFuture") {
		return fmt.Sprintf("%d of ", s.futureCount) + msg
	}

	return "0 of " + msg
}
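
// expiredStats does the same for expired content, governed by the
// BuildExpired setting.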
func (s *Site) expiredStats() string {
	var msg string

	switch s.expiredCount {
	case 0:
		return "0 expired content"
	case 1:
		msg = "1 expired rendered"
	default:
		msg = fmt.Sprintf("%d expired rendered", s.expiredCount)
	}

	if viper.GetBool("BuildExpired") {
		return fmt.Sprintf("%d of ", s.expiredCount) + msg
	}

	return "0 of " + msg
}
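
// getGoMaxProcs returns the value of the GOMAXPROCS environment variable if
// it parses as an integer, and falls back to 1 otherwise.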
func getGoMaxProcs() int {
	if gmp := os.Getenv("GOMAXPROCS"); gmp != "" {
		if p, err := strconv.Atoi(gmp); err == nil {
			return p
		}
	}
	return 1
}