// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"bytes"
	"errors"
	"fmt"
	"reflect"

	"github.com/bep/gitmap"

	"github.com/mitchellh/mapstructure"
	"github.com/spf13/hugo/helpers"
	"github.com/spf13/hugo/output"
	"github.com/spf13/hugo/parser"

	"html/template"
	"io"
	"path"
	"path/filepath"
	"regexp"
	"strings"
	"sync"
	"time"
	"unicode/utf8"

	"github.com/spf13/cast"
	bp "github.com/spf13/hugo/bufferpool"
	"github.com/spf13/hugo/source"
)

var (
	cjk      = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
	allKinds = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm, kindRSS, kindSitemap, kindRobotsTXT, kind404}
)

const (
	KindPage = "page"

	// The rest are node types; home page, sections etc.
	KindHome         = "home"
	KindSection      = "section"
	KindTaxonomy     = "taxonomy"
	KindTaxonomyTerm = "taxonomyTerm"

	// Temporary state.
	kindUnknown = "unknown"

	// The following are (currently) temporary nodes,
	// i.e. nodes we create just to render in isolation.
	kindRSS       = "RSS"
	kindSitemap   = "sitemap"
	kindRobotsTXT = "robotsTXT"
	kind404       = "404"
)

type Page struct {
	*pageInit

	// Kind is the discriminator that identifies the different page types
	// in the different page collections. This can, as an example, be used
	// to filter regular pages, find sections etc.
	// Kind will, for the pages available to the templates, be one of:
	// page, home, section, taxonomy and taxonomyTerm.
	// It is of string type to make it easy to reason about in
	// the templates.
	Kind string

	// Since Hugo 0.18 we got rid of the Node type. So now all pages are ...
	// pages (regular pages, home page, sections etc.).
	// Sections etc. will have child pages. These were earlier placed in .Data.Pages,
	// but can now also be fetched more intuitively directly from .Pages.
	// This collection will be nil for regular pages.
	Pages Pages

	// translations will contain references to this page in other languages,
	// if available.
	translations Pages

	// Params contains configuration defined in the params section of page frontmatter.
	Params map[string]interface{}

	// Content sections
	Content         template.HTML
	Summary         template.HTML
	TableOfContents template.HTML

	Aliases []string

	Images []Image
	Videos []Video

	Truncated bool
	Draft     bool
	Status    string

	PublishDate time.Time
	ExpiryDate  time.Time

	// PageMeta contains page stats such as word count etc.
	PageMeta

	// Markup contains the markup type for the content.
	Markup string

	extension   string
	contentType string
	renderable  bool

	Layout            string
	layoutsCalculated []string

	linkTitle string

	frontmatter []byte

	// rawContent is the raw content read from the content file.
	rawContent []byte

	// workContent is a copy of rawContent that may be mutated during site build.
	workContent []byte

	// state telling if this is a "new page" or if we have rendered it previously.
	rendered bool

	// whether the content is in a CJK language.
	isCJKLanguage bool

	shortcodeState *shortcodeHandler

	// the content stripped of HTML
	plain      string // TODO should be []byte
	plainWords []string

	// rendering configuration
	renderingConfig *helpers.Blackfriday

	// menus
	pageMenus PageMenus

	Source

	Position `json:"-"`

	GitInfo *gitmap.GitInfo

	// This was added as part of getting the Nodes (taxonomies etc.) to work as
	// Pages in Hugo 0.18.
	// It is deliberately named similar to Section, but not exported (for now).
	// We currently have only one level of section in Hugo, but the page can live
	// any number of levels down the file path.
	// To support taxonomies like /categories/hugo etc. we will need to keep track
	// of that information in a general way.
	// So, sections represents the path to the content, i.e. a content file or a
	// virtual content file in the situations where a taxonomy or a section etc.
	// isn't accompanied by one.
	sections []string

	s *Site

	// Pulled over from old Node. TODO(bep) reorg and group (embed)

	Site *SiteInfo `json:"-"`

	Title       string
	Description string
	Keywords    []string
	Data        map[string]interface{}

	Date    time.Time
	Lastmod time.Time

	Sitemap Sitemap

	RSSLink template.URL

	URLPath
	permalink    string
	relPermalink string

	scratch *Scratch

	// It would be tempting to use the language set on the Site, but in the way we do
	// multi-site processing, these values may differ during the initial page processing.
	language *helpers.Language

	lang string

	// The output formats this page will be rendered to.
	outputFormats output.Formats

	// This is the PageOutput that represents the first item in outputFormats.
	// Use with care, as there is potential for infinite loops.
	mainPageOutput *PageOutput

	targetPathDescriptorPrototype *targetPathDescriptor
}

func (p *Page) createLayoutDescriptor() output.LayoutDescriptor {
	var section string

	switch p.Kind {
	case KindSection:
		section = p.sections[0]
	case KindTaxonomy, KindTaxonomyTerm:
		section = p.s.taxonomiesPluralSingular[p.sections[0]]
	default:
	}

	return output.LayoutDescriptor{
		Kind:    p.Kind,
		Type:    p.Type(),
		Layout:  p.Layout,
		Section: section,
	}
}
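
// For example (an assumption for illustration): a page with Kind == KindTaxonomy
// under /tags/... has sections[0] == "tags", and the descriptor's Section
// becomes the singular form via the site's plural->singular taxonomy lookup.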

// pageInit lazy initializes different parts of the page. It is extracted
// into its own type so we can easily create a copy of a given page.
type pageInit struct {
	languageInit        sync.Once
	pageMenusInit       sync.Once
	pageMetaInit        sync.Once
	plainInit           sync.Once
	plainWordsInit      sync.Once
	renderingConfigInit sync.Once
	pageURLInit         sync.Once
}

// IsNode returns whether this is an item of one of the list types in Hugo,
// i.e. not a regular content page.
func (p *Page) IsNode() bool {
	return p.Kind != KindPage
}

// IsHome returns whether this is the home page.
func (p *Page) IsHome() bool {
	return p.Kind == KindHome
}

// IsPage returns whether this is a regular content page.
func (p *Page) IsPage() bool {
	return p.Kind == KindPage
}

type Source struct {
	Frontmatter []byte
	Content     []byte
	source.File
}

type PageMeta struct {
	wordCount      int
	fuzzyWordCount int
	readingTime    int
	Weight         int
}

type Position struct {
	Prev          *Page
	Next          *Page
	PrevInSection *Page
	NextInSection *Page
}

type Pages []*Page

func (ps Pages) String() string {
	return fmt.Sprintf("Pages(%d)", len(ps))
}

func (ps Pages) FindPagePosByFilePath(inPath string) int {
	for i, x := range ps {
		if x.Source.Path() == inPath {
			return i
		}
	}
	return -1
}

// FindPagePos returns the position of the given page in Pages,
// or -1 if not found.
func (ps Pages) FindPagePos(page *Page) int {
	for i, x := range ps {
		if x.Source.Path() == page.Source.Path() {
			return i
		}
	}
	return -1
}

func (p *Page) createWorkContentCopy() {
	p.workContent = make([]byte, len(p.rawContent))
	copy(p.workContent, p.rawContent)
}

func (p *Page) Plain() string {
	p.initPlain()
	return p.plain
}

func (p *Page) PlainWords() []string {
	p.initPlainWords()
	return p.plainWords
}

func (p *Page) initPlain() {
	p.plainInit.Do(func() {
		p.plain = helpers.StripHTML(string(p.Content))
	})
}

func (p *Page) initPlainWords() {
	p.plainWordsInit.Do(func() {
		p.plainWords = strings.Fields(p.Plain())
	})
}

// Param is a convenience method to do lookups in Page's and Site's Params map,
// in that order.
//
// This method is also implemented on Node and SiteInfo.
func (p *Page) Param(key interface{}) (interface{}, error) {
	keyStr, err := cast.ToStringE(key)
	if err != nil {
		return nil, err
	}

	keyStr = strings.ToLower(keyStr)
	result, _ := p.traverseDirect(keyStr)
	if result != nil {
		return result, nil
	}

	keySegments := strings.Split(keyStr, ".")
	if len(keySegments) == 1 {
		return nil, nil
	}

	return p.traverseNested(keySegments)
}
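
// A minimal usage sketch (hypothetical params): with page front matter
// containing a nested "author" map and the site config defining the same
// key, the page value wins because the page Params map is consulted first:
//
//	v, err := p.Param("author.name")
//
// The dotted key is only split and traversed when no literal
// "author.name" key exists.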

func (p *Page) traverseDirect(key string) (interface{}, error) {
	keyStr := strings.ToLower(key)
	if val, ok := p.Params[keyStr]; ok {
		return val, nil
	}

	return p.Site.Params[keyStr], nil
}

func (p *Page) traverseNested(keySegments []string) (interface{}, error) {
	result := traverse(keySegments, p.Params)
	if result != nil {
		return result, nil
	}

	result = traverse(keySegments, p.Site.Params)
	if result != nil {
		return result, nil
	}

	// Didn't find anything, but also no problems.
	return nil, nil
}

func traverse(keys []string, m map[string]interface{}) interface{} {
	// Shift first element off.
	firstKey, rest := keys[0], keys[1:]
	result := m[firstKey]

	// No point in continuing here.
	if result == nil {
		return result
	}

	if len(rest) == 0 {
		// That was the last key.
		return result
	}
	// That was not the last key; descend into the nested map.
	return traverse(rest, cast.ToStringMap(result))
}
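
// An illustrative call to traverse, assuming a nested params map
// (hypothetical values):
//
//	m := map[string]interface{}{
//		"author": map[string]interface{}{"name": "Jane"},
//	}
//	traverse([]string{"author", "name"}, m)  // "Jane"
//	traverse([]string{"author", "email"}, m) // nil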

func (p *Page) Author() Author {
	authors := p.Authors()

	for _, author := range authors {
		return author
	}
	return Author{}
}
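
// Note: AuthorList is a map, so when several authors are configured the
// one picked by Author() above is effectively arbitrary, since Go map
// iteration order is randomized.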

func (p *Page) Authors() AuthorList {
	authorKeys, ok := p.Params["authors"]
	if !ok {
		return AuthorList{}
	}
	authors := authorKeys.([]string)
	if len(authors) < 1 || len(p.Site.Authors) < 1 {
		return AuthorList{}
	}

	al := make(AuthorList)
	for _, author := range authors {
		a, ok := p.Site.Authors[author]
		if ok {
			al[author] = a
		}
	}
	return al
}

func (p *Page) UniqueID() string {
	return p.Source.UniqueID()
}

// for logging
func (p *Page) lineNumRawContentStart() int {
	return bytes.Count(p.frontmatter, []byte("\n")) + 1
}

var (
	internalSummaryDivider = []byte("HUGOMORE42")
)

// We have to replace the <!--more--> with something that survives all the
// rendering engines.
// TODO(bep) inline replace
func (p *Page) replaceDivider(content []byte) []byte {
	summaryDivider := helpers.SummaryDivider
	// TODO(bep) handle better.
	if p.Ext() == "org" || p.Markup == "org" {
		summaryDivider = []byte("# more")
	}
	sections := bytes.Split(content, summaryDivider)

	// If the raw content has nothing but whitespace after the summary
	// marker then the page shouldn't be marked as truncated. This check
	// is simplest against the raw content because different markup engines
	// (rst and asciidoc in particular) add div and p elements after the
	// summary marker.
	p.Truncated = (len(sections) == 2 &&
		len(bytes.Trim(sections[1], " \n\r")) > 0)

	return bytes.Join(sections, internalSummaryDivider)
}
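
// A sketch of the transformation, assuming Markdown content
// (hypothetical input):
//
//	in := []byte("Intro.\n<!--more-->\nRest of the article.")
//	out := p.replaceDivider(in)
//	// out: "Intro.\nHUGOMORE42\nRest of the article."
//	// p.Truncated == true, since non-whitespace follows the divider.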

// Returns the page's summary and main content if a user-defined split is provided.
func (p *Page) setUserDefinedSummaryIfProvided(rawContentCopy []byte) (*summaryContent, error) {
	sc, err := splitUserDefinedSummaryAndContent(p.Markup, rawContentCopy)

	if err != nil {
		return nil, err
	}

	if sc == nil {
		// No divider found
		return nil, nil
	}

	p.Summary = helpers.BytesToHTML(sc.summary)

	return sc, nil
}

// Make this explicit so there is no doubt about what is what.
type summaryContent struct {
	summary []byte
	content []byte
}

func splitUserDefinedSummaryAndContent(markup string, c []byte) (sc *summaryContent, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("summary split failed: %s", r)
		}
	}()

	c = bytes.TrimSpace(c)
	startDivider := bytes.Index(c, internalSummaryDivider)

	if startDivider == -1 {
		return
	}

	endDivider := startDivider + len(internalSummaryDivider)
	endSummary := startDivider

	var (
		startMarkup []byte
		endMarkup   []byte
		addDiv      bool
	)

	switch markup {
	default:
		startMarkup = []byte("<p>")
		endMarkup = []byte("</p>")
	case "asciidoc":
		startMarkup = []byte("<div class=\"paragraph\">")
		endMarkup = []byte("</div>")
	case "rst":
		startMarkup = []byte("<p>")
		endMarkup = []byte("</p>")
		addDiv = true
	}

	// Find the closest end/start markup string to the divider
	fromStart := -1
	fromIdx := bytes.LastIndex(c[:startDivider], startMarkup)
	if fromIdx != -1 {
		fromStart = startDivider - fromIdx - len(startMarkup)
	}
	fromEnd := bytes.Index(c[endDivider:], endMarkup)

	if fromEnd != -1 && fromEnd <= fromStart {
		endSummary = startDivider + fromEnd + len(endMarkup)
	} else if fromStart != -1 && fromEnd != -1 {
		endSummary = startDivider - fromStart - len(startMarkup)
	}

	withoutDivider := bytes.TrimSpace(append(c[:startDivider], c[endDivider:]...))
	var summary []byte

	if len(withoutDivider) > 0 {
		summary = bytes.TrimSpace(withoutDivider[:endSummary])
	}

	if addDiv {
		// For the rst
		summary = append(append([]byte(nil), summary...), []byte("</div>")...)
	}

	if err != nil {
		return
	}

	sc = &summaryContent{
		summary: summary,
		content: withoutDivider,
	}

	return
}
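
// Rough shape of the data flow above, assuming Markdown (hypothetical input):
//
//	sc, err := splitUserDefinedSummaryAndContent("markdown",
//		[]byte("<p>Summary.</p>\n<p>HUGOMORE42</p>\n<p>Body.</p>"))
//
// On success, sc.content is the full HTML with the placeholder removed and
// sc.summary is the HTML up to (roughly) the tag boundary nearest the
// placeholder; a nil sc means no divider was present.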

func (p *Page) setAutoSummary() error {
	var summary string
	var truncated bool
	if p.isCJKLanguage {
		summary, truncated = helpers.TruncateWordsByRune(p.PlainWords(), helpers.SummaryLength)
	} else {
		summary, truncated = helpers.TruncateWordsToWholeSentence(p.Plain(), helpers.SummaryLength)
	}
	p.Summary = template.HTML(summary)
	p.Truncated = truncated

	return nil
}

func (p *Page) renderContent(content []byte) []byte {
	var fn helpers.LinkResolverFunc
	var fileFn helpers.FileResolverFunc
	if p.getRenderingConfig().SourceRelativeLinksEval {
		fn = func(ref string) (string, error) {
			return p.Site.SourceRelativeLink(ref, p)
		}
		fileFn = func(ref string) (string, error) {
			return p.Site.SourceRelativeLinkFile(ref, p)
		}
	}

	return p.s.ContentSpec.RenderBytes(&helpers.RenderingContext{
		Content: content, RenderTOC: true, PageFmt: p.determineMarkupType(),
		Cfg:        p.Language(),
		DocumentID: p.UniqueID(), DocumentName: p.Path(),
		Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
}

func (p *Page) getRenderingConfig() *helpers.Blackfriday {
	p.renderingConfigInit.Do(func() {
		pageParam := cast.ToStringMap(p.GetParam("blackfriday"))
		if p.Language() == nil {
			panic(fmt.Sprintf("nil language for %s with source lang %s", p.BaseFileName(), p.lang))
		}
		p.renderingConfig = p.s.ContentSpec.NewBlackfriday()

		if err := mapstructure.Decode(pageParam, p.renderingConfig); err != nil {
			p.s.Log.FATAL.Printf("Failed to get rendering config for %s:\n%s", p.BaseFileName(), err.Error())
		}
	})

	return p.renderingConfig
}

func (s *Site) newPage(filename string) *Page {
	sp := source.NewSourceSpec(s.Cfg, s.Fs)
	p := &Page{
		pageInit:     &pageInit{},
		Kind:         kindFromFilename(filename),
		contentType:  "",
		Source:       Source{File: *sp.NewFile(filename)},
		Keywords:     []string{}, Sitemap: Sitemap{Priority: -1},
		Params:       make(map[string]interface{}),
		translations: make(Pages, 0),
		sections:     sectionsFromFilename(filename),
		Site:         &s.Info,
		s:            s,
	}

	s.Log.DEBUG.Println("Reading from", p.File.Path())
	return p
}

func (p *Page) IsRenderable() bool {
	return p.renderable
}

func (p *Page) Type() string {
	if p.contentType != "" {
		return p.contentType
	}

	if x := p.Section(); x != "" {
		return x
	}

	return "page"
}

func (p *Page) Section() string {
	if p.Kind == KindSection {
		return p.sections[0]
	}
	return p.Source.Section()
}

func (p *Page) layouts(layouts ...string) []string {
	// TODO(bep) output
	if len(p.layoutsCalculated) > 0 {
		return p.layoutsCalculated
	}

	layoutOverride := ""
	if len(layouts) > 0 {
		layoutOverride = layouts[0]
	}

	return p.s.layoutHandler.For(
		p.createLayoutDescriptor(),
		layoutOverride,
		output.HTMLType)
}

// TODO(bep) consolidate and test these KindHome switches (see other layouts methods)
// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
// if no RSS should be rendered.
// TODO(bep) output
func (p *Page) rssLayouts() []string {
	switch p.Kind {
	case KindHome:
		return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
	case KindSection:
		section := p.sections[0]
		return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
	case KindTaxonomy:
		singular := p.s.taxonomiesPluralSingular[p.sections[0]]
		return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
	case KindTaxonomyTerm:
		singular := p.s.taxonomiesPluralSingular[p.sections[0]]
		return []string{"taxonomy/" + singular + ".terms.rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
	case KindPage:
		// No RSS for regular pages
	}

	return nil
}

func (s *Site) NewPageFrom(buf io.Reader, name string) (*Page, error) {
	p, err := s.NewPage(name)
	if err != nil {
		return p, err
	}
	_, err = p.ReadFrom(buf)

	return p, err
}

func (s *Site) NewPage(name string) (*Page, error) {
	if len(name) == 0 {
		return nil, errors.New("Zero length page name")
	}

	// Create new page
	p := s.newPage(name)
	p.s = s
	p.Site = &s.Info

	return p, nil
}

func (p *Page) ReadFrom(buf io.Reader) (int64, error) {
	// Parse for metadata & body
	if err := p.parse(buf); err != nil {
		p.s.Log.ERROR.Print(err)
		return 0, err
	}

	return int64(len(p.rawContent)), nil
}

func (p *Page) WordCount() int {
	p.analyzePage()
	return p.wordCount
}

func (p *Page) ReadingTime() int {
	p.analyzePage()
	return p.readingTime
}

func (p *Page) FuzzyWordCount() int {
	p.analyzePage()
	return p.fuzzyWordCount
}

func (p *Page) analyzePage() {
	p.pageMetaInit.Do(func() {
		if p.isCJKLanguage {
			p.wordCount = 0
			for _, word := range p.PlainWords() {
				runeCount := utf8.RuneCountInString(word)
				if len(word) == runeCount {
					p.wordCount++
				} else {
					p.wordCount += runeCount
				}
			}
		} else {
			p.wordCount = helpers.TotalWords(p.Plain())
		}

		// TODO(bep) is set in a test. Fix that.
		if p.fuzzyWordCount == 0 {
			p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
		}

		if p.isCJKLanguage {
			p.readingTime = (p.wordCount + 500) / 501
		} else {
			p.readingTime = (p.wordCount + 212) / 213
		}
	})
}
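
// The integer arithmetic in analyzePage implements round-up division:
// for non-CJK content, (p.wordCount+212)/213 is the reading time at
// ~213 words per minute, e.g. 213 words -> 1 minute, 214 words -> 2.
// Similarly, (p.wordCount+100)/100*100 bumps the fuzzy count into the
// next bucket of 100, e.g. 250 -> 300.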

func (p *Page) Extension() string {
	if p.extension != "" {
		// TODO(bep) output remove/deprecate this
		return p.extension
	}
	// TODO(bep) return MediaType.Suffix
	// TODO(bep) remove this config option =>
	return p.s.Cfg.GetString("defaultExtension")
}

// AllTranslations returns all translations, including the current Page.
func (p *Page) AllTranslations() Pages {
	return p.translations
}

// IsTranslated returns whether this content file is translated to
// other language(s).
func (p *Page) IsTranslated() bool {
	return len(p.translations) > 1
}

// Translations returns the translations excluding the current Page.
func (p *Page) Translations() Pages {
	translations := make(Pages, 0)
	for _, t := range p.translations {
		if t.Lang() != p.Lang() {
			translations = append(translations, t)
		}
	}
	return translations
}

func (p *Page) LinkTitle() string {
	if len(p.linkTitle) > 0 {
		return p.linkTitle
	}
	return p.Title
}

func (p *Page) shouldBuild() bool {
	return shouldBuild(p.s.Cfg.GetBool("buildFuture"), p.s.Cfg.GetBool("buildExpired"),
		p.s.Cfg.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
}

func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
	publishDate time.Time, expiryDate time.Time) bool {
	if !(buildDrafts || !Draft) {
		return false
	}
	if !buildFuture && !publishDate.IsZero() && publishDate.After(time.Now()) {
		return false
	}
	if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(time.Now()) {
		return false
	}
	return true
}
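
// A few illustrative calls (values are hypothetical):
//
//	now := time.Now()
//	shouldBuild(false, false, false, true, now, time.Time{}) // false: draft, and drafts are off
//	shouldBuild(false, false, true, true, now, time.Time{})  // true: drafts enabled
//	shouldBuild(false, false, true, true, now.Add(time.Hour), time.Time{}) // false: future publish date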

func (p *Page) IsDraft() bool {
	return p.Draft
}

func (p *Page) IsFuture() bool {
	if p.PublishDate.IsZero() {
		return false
	}
	return p.PublishDate.After(time.Now())
}

func (p *Page) IsExpired() bool {
	if p.ExpiryDate.IsZero() {
		return false
	}
	return p.ExpiryDate.Before(time.Now())
}

func (p *Page) URL() string {
	if p.IsPage() && p.URLPath.URL != "" {
		// This is the url set in front matter
		return p.URLPath.URL
	}
	// Fall back to the relative permalink.
	u := p.RelPermalink()
	return u
}

// Permalink returns the absolute URL to this Page.
func (p *Page) Permalink() string {
	return p.permalink
}

// RelPermalink gets a URL to the resource relative to the host.
func (p *Page) RelPermalink() string {
	return p.relPermalink
}

func (p *Page) initURLs() error {
	// TODO(bep) output
	if len(p.outputFormats) == 0 {
		p.outputFormats = p.s.defaultOutputDefinitions.ForKind(p.Kind)
	}
	rel := p.createRelativePermalink()
	p.permalink = p.s.permalink(rel)
	rel = p.s.PathSpec.PrependBasePath(rel)
	p.relPermalink = rel
	return nil
}

var ErrHasDraftAndPublished = errors.New("both draft and published parameters were found in page's frontmatter")

func (p *Page) update(f interface{}) error {
	if f == nil {
		return errors.New("no metadata found")
	}
	m := f.(map[string]interface{})
	// Needed for case insensitive fetching of params values
	helpers.ToLowerMap(m)

	var err error
	var draft, published, isCJKLanguage *bool
	for k, v := range m {
		loki := strings.ToLower(k)
		switch loki {
		case "title":
			p.Title = cast.ToString(v)
			p.Params[loki] = p.Title
		case "linktitle":
			p.linkTitle = cast.ToString(v)
			p.Params[loki] = p.linkTitle
		case "description":
			p.Description = cast.ToString(v)
			p.Params[loki] = p.Description
		case "slug":
			p.Slug = cast.ToString(v)
			p.Params[loki] = p.Slug
		case "url":
			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
				return fmt.Errorf("Only relative URLs are supported, %v provided", url)
			}
			p.URLPath.URL = cast.ToString(v)
			p.Params[loki] = p.URLPath.URL
case "type":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.contentType = cast.ToString(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.contentType
|
2014-10-16 20:20:09 -04:00
|
|
|
case "extension", "ext":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.extension = cast.ToString(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.extension
|
2014-01-29 17:50:31 -05:00
|
|
|
case "keywords":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Keywords = cast.ToStringSlice(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Keywords
|
2014-05-29 00:48:40 -04:00
|
|
|
case "date":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Date, err = cast.ToTimeE(v)
|
2015-01-05 06:44:41 -05:00
|
|
|
if err != nil {
|
2017-01-10 04:55:03 -05:00
|
|
|
p.s.Log.ERROR.Printf("Failed to parse date '%v' in page %s", v, p.File.Path())
|
2015-01-05 06:44:41 -05:00
|
|
|
}
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Date
|
2015-05-14 16:06:36 -04:00
|
|
|
case "lastmod":
|
|
|
|
p.Lastmod, err = cast.ToTimeE(v)
|
|
|
|
if err != nil {
|
2017-01-10 04:55:03 -05:00
|
|
|
p.s.Log.ERROR.Printf("Failed to parse lastmod '%v' in page %s", v, p.File.Path())
|
2015-05-14 16:06:36 -04:00
|
|
|
}
|
2017-03-08 07:45:33 -05:00
|
|
|
case "outputs":
|
|
|
|
outputs := cast.ToStringSlice(v)
|
|
|
|
if len(outputs) > 0 {
|
2017-03-16 03:32:14 -04:00
|
|
|
// Output formats are exlicitly set in front matter, use those.
|
|
|
|
outFormats, err := output.GetTypes(outputs...)
|
2017-03-08 07:45:33 -05:00
|
|
|
if err != nil {
|
2017-03-16 03:32:14 -04:00
|
|
|
p.s.Log.ERROR.Printf("Failed to resolve output formats: %s", err)
|
2017-03-08 07:45:33 -05:00
|
|
|
} else {
|
2017-03-16 03:32:14 -04:00
|
|
|
p.outputFormats = outFormats
|
|
|
|
p.Params[loki] = outFormats
|
2017-03-08 07:45:33 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
//p.Params[loki] = p.Keywords
|
2014-05-29 00:48:40 -04:00
|
|
|
case "publishdate", "pubdate":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.PublishDate, err = cast.ToTimeE(v)
|
2015-01-05 06:44:41 -05:00
|
|
|
if err != nil {
|
2017-01-10 04:55:03 -05:00
|
|
|
p.s.Log.ERROR.Printf("Failed to parse publishdate '%v' in page %s", v, p.File.Path())
|
2015-01-05 06:44:41 -05:00
|
|
|
}
|
2016-05-11 10:04:53 -04:00
|
|
|
case "expirydate", "unpublishdate":
|
|
|
|
p.ExpiryDate, err = cast.ToTimeE(v)
|
|
|
|
if err != nil {
|
2017-01-10 04:55:03 -05:00
|
|
|
p.s.Log.ERROR.Printf("Failed to parse expirydate '%v' in page %s", v, p.File.Path())
|
2016-05-11 10:04:53 -04:00
|
|
|
}
|
2014-01-29 17:50:31 -05:00
|
|
|
case "draft":
|
2015-08-02 02:02:20 -04:00
|
|
|
draft = new(bool)
|
|
|
|
*draft = cast.ToBool(v)
|
|
|
|
case "published": // Intentionally undocumented
|
|
|
|
published = new(bool)
|
2015-08-30 18:51:25 -04:00
|
|
|
*published = cast.ToBool(v)
|
2014-01-29 17:50:31 -05:00
|
|
|
case "layout":
|
2015-11-02 11:24:50 -05:00
|
|
|
p.Layout = cast.ToString(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Layout
|
2014-01-29 17:50:31 -05:00
|
|
|
case "markup":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Markup = cast.ToString(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Markup
|
2014-01-29 17:50:31 -05:00
|
|
|
case "weight":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Weight = cast.ToInt(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Weight
|
2014-01-29 17:50:31 -05:00
|
|
|
case "aliases":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Aliases = cast.ToStringSlice(v)
|
|
|
|
for _, alias := range p.Aliases {
|
2014-01-29 17:50:31 -05:00
|
|
|
if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
|
|
|
|
return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
|
|
|
|
}
|
|
|
|
}
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Aliases
|
2014-01-29 17:50:31 -05:00
|
|
|
case "status":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Status = cast.ToString(v)
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Status
|
2014-05-06 11:02:56 -04:00
|
|
|
case "sitemap":
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Sitemap = parseSitemap(cast.ToStringMap(v))
|
2017-02-12 15:30:39 -05:00
|
|
|
p.Params[loki] = p.Sitemap
|
2015-09-03 06:22:20 -04:00
|
|
|
case "iscjklanguage":
|
|
|
|
isCJKLanguage = new(bool)
|
|
|
|
*isCJKLanguage = cast.ToBool(v)
|
2014-01-29 17:50:31 -05:00
|
|
|
default:
|
|
|
|
// If not one of the explicit values, store in Params
|
|
|
|
switch vv := v.(type) {
|
|
|
|
case bool:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
case string:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
case int64, int32, int16, int8, int:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
case float64, float32:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
case time.Time:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
default: // handle array of strings as well
|
|
|
|
switch vvv := vv.(type) {
|
|
|
|
case []interface{}:
|
2015-06-25 05:46:09 -04:00
|
|
|
if len(vvv) > 0 {
|
|
|
|
switch vvv[0].(type) {
|
2015-07-26 09:28:56 -04:00
|
|
|
case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
|
|
|
|
p.Params[loki] = vvv
|
|
|
|
case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
|
2015-06-25 05:46:09 -04:00
|
|
|
p.Params[loki] = vvv
|
2016-12-27 21:08:24 -05:00
|
|
|
case []interface{}:
|
|
|
|
p.Params[loki] = vvv
|
2015-06-25 05:46:09 -04:00
|
|
|
default:
|
|
|
|
a := make([]string, len(vvv))
|
|
|
|
for i, u := range vvv {
|
|
|
|
a[i] = cast.ToString(u)
|
|
|
|
}
|
|
|
|
|
|
|
|
p.Params[loki] = a
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
p.Params[loki] = []string{}
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
2014-04-23 02:55:43 -04:00
|
|
|
default:
|
2015-03-07 06:25:38 -05:00
|
|
|
p.Params[loki] = vv
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}

	if draft != nil && published != nil {
		p.Draft = *draft
		p.s.Log.ERROR.Printf("page %s has both draft and published settings in its frontmatter. Using draft.", p.File.Path())
		return ErrHasDraftAndPublished
	} else if draft != nil {
		p.Draft = *draft
	} else if published != nil {
		p.Draft = !*published
	}
	p.Params["draft"] = p.Draft

	if p.Date.IsZero() && p.s.Cfg.GetBool("useModTimeAsFallback") {
		fi, err := p.s.Fs.Source.Stat(filepath.Join(p.s.PathSpec.AbsPathify(p.s.Cfg.GetString("contentDir")), p.File.Path()))
		if err == nil {
			p.Date = fi.ModTime()
			p.Params["date"] = p.Date
		}
	}

	if p.Lastmod.IsZero() {
		p.Lastmod = p.Date
	}
	p.Params["lastmod"] = p.Lastmod

	if isCJKLanguage != nil {
		p.isCJKLanguage = *isCJKLanguage
	} else if p.s.Cfg.GetBool("hasCJKLanguage") {
		if cjk.Match(p.rawContent) {
			p.isCJKLanguage = true
		} else {
			p.isCJKLanguage = false
		}
	}
	p.Params["iscjklanguage"] = p.isCJKLanguage

	return nil
}
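
// A sketch of what update consumes (hypothetical front matter, already
// decoded into a map by the parser):
//
//	m := map[string]interface{}{
//		"title":  "My Post",
//		"draft":  true,
//		"weight": 10,
//	}
//
// p.update(m) would set p.Title, p.Draft and p.Weight, and mirror each
// value into p.Params under its lower-cased key.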

func (p *Page) GetParam(key string) interface{} {
	return p.getParam(key, true)
}

func (p *Page) getParam(key string, stringToLower bool) interface{} {
	v := p.Params[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch val := v.(type) {
	case bool:
		return val
	case string:
		if stringToLower {
			return strings.ToLower(val)
		}
		return val
	case int64, int32, int16, int8, int:
		return cast.ToInt(v)
	case float64, float32:
		return cast.ToFloat64(v)
	case time.Time:
		return val
	case []string:
		if stringToLower {
			return helpers.SliceToLower(val)
		}
		return v
	case map[string]interface{}: // JSON and TOML
		return v
	case map[interface{}]interface{}: // YAML
		return v
	}

	p.s.Log.ERROR.Printf("GetParam(\"%s\"): Unknown type %s\n", key, reflect.TypeOf(v))
	return nil
}

func (p *Page) HasMenuCurrent(menuID string, me *MenuEntry) bool {
	sectionPagesMenu := p.Site.sectionPagesMenu

	// page is labeled as "shadow-member" of the menu with the same identifier as the section
	if sectionPagesMenu != "" {
		section := p.Section()

		if !p.s.Info.preserveTaxonomyNames {
			section = p.s.PathSpec.MakePathSanitized(section)
		}

		if section != "" && sectionPagesMenu == menuID && section == me.Identifier {
			return true
		}
	}

	if !me.HasChildren() {
		return false
	}

	menus := p.Menus()

	if m, ok := menus[menuID]; ok {
		for _, child := range me.Children {
			if child.IsEqual(m) {
				return true
			}
			if p.HasMenuCurrent(menuID, child) {
				return true
			}
		}
	}

	if p.IsPage() {
		return false
	}

	// The following logic is kept from back when Hugo had both Page and Node types.
	// TODO(bep) consolidate / clean
	nme := MenuEntry{Name: p.Title, URL: p.URL()}

	for _, child := range me.Children {
		if nme.IsSameResource(child) {
			return true
		}
		if p.HasMenuCurrent(menuID, child) {
			return true
		}
	}

	return false
}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
func (p *Page) IsMenuCurrent(menuID string, inme *MenuEntry) bool {
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
menus := p.Menus()
|
2014-04-23 02:59:19 -04:00
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
if me, ok := menus[menuID]; ok {
|
|
|
|
if me.IsEqual(inme) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if p.IsPage() {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
// The following logic is kept from back when Hugo had both Page and Node types.
|
|
|
|
// TODO(bep) consolidate / clean
|
2016-11-15 04:43:49 -05:00
|
|
|
me := MenuEntry{Name: p.Title, URL: p.URL()}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
|
|
|
if !me.IsSameResource(inme) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
// this resource may be included in several menus
|
|
|
|
// search for it to make sure that it is in the menu with the given menuId
|
|
|
|
if menu, ok := (*p.Site.Menus)[menuID]; ok {
|
|
|
|
for _, menuEntry := range *menu {
|
|
|
|
if menuEntry.IsSameResource(inme) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
descendantFound := p.isSameAsDescendantMenu(inme, menuEntry)
|
|
|
|
if descendantFound {
|
|
|
|
return descendantFound
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
2014-04-23 02:59:19 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
func (p *Page) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
	if parent.HasChildren() {
		for _, child := range parent.Children {
			if child.IsSameResource(inme) {
				return true
			}
			if p.isSameAsDescendantMenu(inme, child) {
				return true
			}
		}
	}
	return false
}
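
// Menus returns the menus this page is a member of, lazily built from the
// "menu" front matter value. That value may be a plain menu name, a slice of
// menu names, or a map of menu names to menu entry settings, e.g. (YAML front
// matter, illustrative only):
//
//	menu: "main"
//
//	menu: ["main", "footer"]
//
//	menu:
//	  main:
//	    weight: 10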
func (p *Page) Menus() PageMenus {
	p.pageMenusInit.Do(func() {
		p.pageMenus = PageMenus{}

		if ms, ok := p.Params["menu"]; ok {
			link := p.RelPermalink()

			me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link}

			// Could be the name of the menu to attach it to
			mname, err := cast.ToStringE(ms)

			if err == nil {
				me.Menu = mname
				p.pageMenus[mname] = &me
				return
			}

			// Could be a slice of strings
			mnames, err := cast.ToStringSliceE(ms)

			if err == nil {
				for _, mname := range mnames {
					me.Menu = mname
					p.pageMenus[mname] = &me
				}
				return
			}

			// Could be a structured menu entry
			menus, err := cast.ToStringMapE(ms)

			if err != nil {
				p.s.Log.ERROR.Printf("unable to process menus for %q\n", p.Title)
			}

			for name, menu := range menus {
				menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
				if menu != nil {
					p.s.Log.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)
					ime, err := cast.ToStringMapE(menu)
					if err != nil {
						p.s.Log.ERROR.Printf("unable to process menus for %q: %s", p.Title, err)
					}

					menuEntry.marshallMap(ime)
				}
				p.pageMenus[name] = &menuEntry
			}
		}
	})

	return p.pageMenus
}
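
// Render renders this page with the given layouts; when no layouts are given,
// the page's own layout resolution is used.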
func (p *Page) Render(layouts ...string) template.HTML {
	l := p.layouts(layouts...)
	return p.s.Tmpl.ExecuteTemplateToHTML(p, l...)
}
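
// determineMarkupType resolves the page's markup type, preferring the value
// set in front matter and falling back to the source file extension.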
func (p *Page) determineMarkupType() string {
	// Try markup explicitly set in the front matter
	p.Markup = helpers.GuessType(p.Markup)
	if p.Markup == "unknown" {
		// Fall back to file extension (might also return "unknown")
		p.Markup = helpers.GuessType(p.Source.Ext())
	}

	return p.Markup
}
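
// parse reads the page source, splits front matter from content and updates
// the page from the parsed metadata.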
func (p *Page) parse(reader io.Reader) error {
	psr, err := parser.ReadFrom(reader)
	if err != nil {
		return err
	}

	p.renderable = psr.IsRenderable()
	p.frontmatter = psr.FrontMatter()
	p.rawContent = psr.Content()
	p.lang = p.Source.File.Lang()

	meta, err := psr.Metadata()
	if meta != nil {
		if err != nil {
			return fmt.Errorf("failed to parse page metadata for %s: %s", p.File.Path(), err)
		}
		if err = p.update(meta); err != nil {
			return err
		}
	}

	return nil
}
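
// RawContent returns the raw page content, without the front matter.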
func (p *Page) RawContent() string {
	return string(p.rawContent)
}
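
// SetSourceContent sets the raw source content for this page.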
func (p *Page) SetSourceContent(content []byte) {
	p.Source.Content = content
}
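
// SetSourceMetaData marshals the given front matter values to the page
// source, using mark as the front matter delimiter (e.g. '-' for YAML).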
func (p *Page) SetSourceMetaData(in interface{}, mark rune) (err error) {
	// See https://github.com/spf13/hugo/issues/2458
	defer func() {
		if r := recover(); r != nil {
			var ok bool
			err, ok = r.(error)
			if !ok {
				err = fmt.Errorf("error from marshal: %v", r)
			}
		}
	}()

	buf := bp.GetBuffer()
	defer bp.PutBuffer(buf)

	err = parser.InterfaceToFrontMatter(in, mark, buf)
	if err != nil {
		return
	}

	_, err = buf.WriteRune('\n')
	if err != nil {
		return
	}

	p.Source.Frontmatter = buf.Bytes()

	return
}
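
// SafeSaveSourceAs saves the page source to the given path, but only if no
// file already exists there.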
func (p *Page) SafeSaveSourceAs(path string) error {
	return p.saveSourceAs(path, true)
}
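
// SaveSourceAs saves the page source to the given path, overwriting any
// existing file.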
func (p *Page) SaveSourceAs(path string) error {
	return p.saveSourceAs(path, false)
}

func (p *Page) saveSourceAs(path string, safe bool) error {
	b := bp.GetBuffer()
	defer bp.PutBuffer(b)

	b.Write(p.Source.Frontmatter)
	b.Write(p.Source.Content)

	bc := make([]byte, b.Len())
	copy(bc, b.Bytes())

	return p.saveSource(bc, path, safe)
}

func (p *Page) saveSource(by []byte, inpath string, safe bool) (err error) {
	if !filepath.IsAbs(inpath) {
		inpath = p.s.PathSpec.AbsPathify(inpath)
	}
	p.s.Log.INFO.Println("creating", inpath)
	if safe {
		err = helpers.SafeWriteToDisk(inpath, bytes.NewReader(by), p.s.Fs.Source)
	} else {
		err = helpers.WriteToDisk(inpath, bytes.NewReader(by), p.s.Fs.Source)
	}
	if err != nil {
		return
	}
	return nil
}
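
// SaveSource saves the page source back to its original location.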
func (p *Page) SaveSource() error {
	return p.SaveSourceAs(p.FullFilePath())
}
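
// ProcessShortcodes runs the shortcode handler on the page's work content.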
func (p *Page) ProcessShortcodes() {
	p.shortcodeState = newShortcodeHandler()
	tmpContent, _ := p.shortcodeState.extractAndRenderShortcodes(string(p.workContent), p)
	p.workContent = []byte(tmpContent)
}
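
// FullFilePath returns the full path to the page's source file.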
func (p *Page) FullFilePath() string {
	return filepath.Join(p.Dir(), p.LogicalName())
}

// Pre-render prepare steps

func (p *Page) prepareLayouts() error {
	// TODO(bep): Check the IsRenderable logic.
	if p.Kind == KindPage {
		var layouts []string
		if !p.IsRenderable() {
			// TODO(bep) output
			self := "__" + p.UniqueID()
			_, err := p.s.Tmpl.GetClone().New(self).Parse(string(p.Content))
			if err != nil {
				return err
			}
			layouts = append(layouts, self)
		} else {
			layouts = append(layouts, p.layouts()...)
		}
		p.layoutsCalculated = layouts
	}
	return nil
}
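
// prepareData populates p.Data and p.Pages for the list page kinds (home,
// section, taxonomy and taxonomy term) before rendering.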
func (p *Page) prepareData(s *Site) error {

	var pages Pages

	p.Data = make(map[string]interface{})
	switch p.Kind {
	case KindPage:
	case KindHome:
		pages = s.RegularPages
	case KindSection:
		sectionData, ok := s.Sections[p.Section()]
		if !ok {
			return fmt.Errorf("data for section %s not found", p.Section())
		}
		pages = sectionData.Pages()
	case KindTaxonomy:
		plural := p.sections[0]
		term := p.sections[1]

		if s.Info.preserveTaxonomyNames {
			if v, ok := s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, term)]; ok {
				term = v
			}
		}

		singular := s.taxonomiesPluralSingular[plural]
		taxonomy := s.Taxonomies[plural].Get(term)

		p.Data[singular] = taxonomy
		p.Data["Singular"] = singular
		p.Data["Plural"] = plural
		p.Data["Term"] = term
		pages = taxonomy.Pages()
	case KindTaxonomyTerm:
		plural := p.sections[0]
		singular := s.taxonomiesPluralSingular[plural]

		p.Data["Singular"] = singular
		p.Data["Plural"] = plural
		p.Data["Terms"] = s.Taxonomies[plural]
		// keep the following just for legacy reasons
		p.Data["OrderedIndex"] = p.Data["Terms"]
		p.Data["Index"] = p.Data["Terms"]

		// A list of all KindTaxonomy pages with matching plural
		for _, taxonomyPage := range s.findPagesByKind(KindTaxonomy) {
			if taxonomyPage.sections[0] == plural {
				pages = append(pages, taxonomyPage)
			}
		}
	}

	p.Data["Pages"] = pages
	p.Pages = pages

	// Now we know enough to set missing dates on home page etc.
	p.updatePageDates()

	return nil
}
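
// updatePageDates derives missing Date/Lastmod values for node pages from
// their child pages.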
func (p *Page) updatePageDates() {
	// TODO(bep) there is a potential issue with page sorting for home pages
	// etc. without front matter dates set, but we will wrap our heads around
	// that another time.
	if !p.IsNode() {
		return
	}

	if !p.Date.IsZero() {
		if p.Lastmod.IsZero() {
			p.Lastmod = p.Date
		}
		return
	} else if !p.Lastmod.IsZero() {
		if p.Date.IsZero() {
			p.Date = p.Lastmod
		}
		return
	}

	// Set it to the first non-zero date in children
	var foundDate, foundLastMod bool

	for _, child := range p.Pages {
		if !child.Date.IsZero() {
			p.Date = child.Date
			foundDate = true
		}
		if !child.Lastmod.IsZero() {
			p.Lastmod = child.Lastmod
			foundLastMod = true
		}

		if foundDate && foundLastMod {
			break
		}
	}
}

// copy creates a copy of this page with the lazy sync.Once vars reset
// so they will be evaluated again, for word count calculations etc.
func (p *Page) copy() *Page {
	c := *p
	c.pageInit = &pageInit{
		//pageMenusInit: p.pageMenusInit,
	}
	return &c
}
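
// Now returns the current time. Deprecated: use the "now" template func.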
func (p *Page) Now() time.Time {
	// Delete in Hugo 0.21
	helpers.Deprecated("Page", "Now", "Use now (the template func)", false)
	return time.Now()
}
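
// Hugo returns version and build information about the running Hugo.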
func (p *Page) Hugo() *HugoInfo {
	return hugoInfo
}

func (p *Page) RSSlink() template.URL {
	// TODO(bep) we cannot have two of these
	// Remove in Hugo 0.20
	helpers.Deprecated(".Page", "RSSlink", "Use RSSLink", true)
	return p.RSSLink
}
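
// Ref resolves the given reference to an absolute URL.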
func (p *Page) Ref(ref string) (string, error) {
	return p.Site.Ref(ref, nil)
}
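
// RelRef resolves the given reference to a URL relative to the site root.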
func (p *Page) RelRef(ref string) (string, error) {
	return p.Site.RelRef(ref, nil)
}

func (p *Page) String() string {
	return fmt.Sprintf("Page(%q)", p.Title)
}
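
// URLPath holds the URL-related fields of a page.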
type URLPath struct {
	URL       string
	Permalink string
	Slug      string
	Section   string
}

// Scratch returns the writable context associated with this Page.
func (p *Page) Scratch() *Scratch {
	if p.scratch == nil {
		p.scratch = newScratch()
	}
	return p.scratch
}
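
// Language returns the language this page is written in.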
func (p *Page) Language() *helpers.Language {
	p.initLanguage()
	return p.language
}
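
// Lang returns the language code for this page.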
func (p *Page) Lang() string {
	// When set, Language can be different from lang in the case where there is a
	// content file (doc.sv.md) with language indicator, but there is no language
	// config for that language. Then the language will fall back on the site default.
	if p.Language() != nil {
		return p.Language().Lang
	}
	return p.lang
}
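
// isNewTranslation reports whether candidate is a translation of this node
// page that has not already been added to its translations.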
func (p *Page) isNewTranslation(candidate *Page) bool {

	if p.Kind != candidate.Kind {
		return false
	}

	if p.Kind == KindPage || p.Kind == kindUnknown {
		panic("Node type not currently supported for this op")
	}

	// At this point, we know that this is a traditional Node (home page, section, taxonomy).
	// It represents the same node, but a different language, if the sections are the same.
	if len(p.sections) != len(candidate.sections) {
		return false
	}

	for i := 0; i < len(p.sections); i++ {
		if p.sections[i] != candidate.sections[i] {
			return false
		}
	}

	// Finally check that it is not already added.
	for _, translation := range p.translations {
		if candidate == translation {
			return false
		}
	}

	return true
}
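
// shouldAddLanguagePrefix reports whether this page's URL should be prefixed
// with its language code.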
func (p *Page) shouldAddLanguagePrefix() bool {
	if !p.Site.IsMultiLingual() {
		return false
	}

	if p.Lang() == "" {
		return false
	}

	if !p.Site.defaultContentLanguageInSubdir && p.Lang() == p.Site.multilingual.DefaultLang.Lang {
		return false
	}

	return true
}

func (p *Page) initLanguage() {
	p.languageInit.Do(func() {
		if p.language != nil {
			return
		}

		ml := p.Site.multilingual
		if ml == nil {
			panic("Multilanguage not set")
		}
		if p.lang == "" {
			p.lang = ml.DefaultLang.Lang
			p.language = ml.DefaultLang
			return
		}

		language := ml.Language(p.lang)

		if language == nil {
			// It can be a file named stefano.chiodino.md.
			p.s.Log.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", p.lang)
			language = ml.DefaultLang
		}

		p.language = language
	})
}
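
// LanguagePrefix returns the site's language prefix for URLs, if any.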
func (p *Page) LanguagePrefix() string {
	return p.Site.LanguagePrefix
}
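
// addLangPathPrefix prefixes outfile with the page's language code when the
// site configuration calls for it, e.g. "/about/" becomes "/en/about/" for an
// English page (illustrative example).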
func (p *Page) addLangPathPrefix(outfile string) string {
	return p.addLangPathPrefixIfFlagSet(outfile, p.shouldAddLanguagePrefix())
}

func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
	if helpers.IsAbsURL(outfile) {
		return outfile
	}

	if !should {
		return outfile
	}

	hadSlashSuffix := strings.HasSuffix(outfile, "/")

	outfile = "/" + path.Join(p.Lang(), outfile)
	if hadSlashSuffix {
		outfile += "/"
	}
	return outfile
}

func (p *Page) addLangFilepathPrefix(outfile string) string {
	if outfile == "" {
		outfile = helpers.FilePathSeparator
	}

	if !p.shouldAddLanguagePrefix() {
		return outfile
	}
	return helpers.FilePathSeparator + filepath.Join(p.Lang(), outfile)
}
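
// sectionsFromFilename derives the section hierarchy from a relative content
// filename, e.g. "blog/sub/post.md" yields ["blog", "sub"] (illustrative
// example).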
func sectionsFromFilename(filename string) []string {
	var sections []string
	dir, _ := filepath.Split(filename)
	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
	if dir == "" {
		return sections
	}
	sections = strings.Split(dir, helpers.FilePathSeparator)
	return sections
}

const (
	regularPageFileNameDoesNotStartWith = "_index"

	// There can be a "my_regular_index_page.md", but not a "/_index_file.md".
	regularPageFileNameDoesNotContain = helpers.FilePathSeparator + regularPageFileNameDoesNotStartWith
)
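
// kindFromFilename guesses the page kind from the source filename: an
// "_index" file at the content root maps to the home page, any other
// "_index" file needs more context and is resolved later, and everything
// else is a regular page.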
func kindFromFilename(filename string) string {
	if !strings.HasPrefix(filename, regularPageFileNameDoesNotStartWith) && !strings.Contains(filename, regularPageFileNameDoesNotContain) {
		return KindPage
	}

	if strings.HasPrefix(filename, regularPageFileNameDoesNotStartWith) {
		return KindHome
	}

	// We don't know enough yet to determine the type.
	return kindUnknown
}
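
// setValuesForKind resolves the final kind for pages of unknown kind
// (section, taxonomy or taxonomy term) and sets the URL path for each node
// kind.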
func (p *Page) setValuesForKind(s *Site) {
	if p.Kind == kindUnknown {
		// This is either a taxonomy list, taxonomy term or a section
		nodeType := s.kindFromSections(p.sections)

		if nodeType == kindUnknown {
			panic(fmt.Sprintf("Unable to determine page kind from %q", p.sections))
		}

		p.Kind = nodeType
	}

	switch p.Kind {
	case KindHome:
		p.URLPath.URL = "/"
	case KindSection:
		p.URLPath.URL = "/" + p.sections[0] + "/"
	case KindTaxonomy, KindTaxonomyTerm:
		p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
	}
}