2016-03-21 19:28:42 -04:00
|
|
|
// Copyright 2016 The Hugo Authors. All rights reserved.
|
2013-07-04 11:32:55 -04:00
|
|
|
//
|
2015-11-23 22:16:36 -05:00
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
2013-07-04 11:32:55 -04:00
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
2015-11-23 22:16:36 -05:00
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
2013-07-04 11:32:55 -04:00
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
package hugolib
|
|
|
|
|
|
|
|
import (
|
2014-01-29 17:50:31 -05:00
|
|
|
"bytes"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
2015-01-25 06:08:02 -05:00
|
|
|
"reflect"
|
|
|
|
|
2016-11-01 18:04:12 -04:00
|
|
|
"github.com/bep/gitmap"
|
|
|
|
|
2015-01-25 06:08:02 -05:00
|
|
|
"github.com/mitchellh/mapstructure"
|
2014-11-20 12:32:21 -05:00
|
|
|
"github.com/spf13/hugo/helpers"
|
|
|
|
"github.com/spf13/hugo/parser"
|
2014-11-28 15:16:57 -05:00
|
|
|
|
2014-12-07 13:48:00 -05:00
|
|
|
"html/template"
|
|
|
|
"io"
|
|
|
|
"net/url"
|
|
|
|
"path"
|
|
|
|
"path/filepath"
|
2015-09-03 06:22:20 -04:00
|
|
|
"regexp"
|
2014-12-07 13:48:00 -05:00
|
|
|
"strings"
|
2015-01-21 09:28:05 -05:00
|
|
|
"sync"
|
2014-12-07 13:48:00 -05:00
|
|
|
"time"
|
2015-09-03 06:22:20 -04:00
|
|
|
"unicode/utf8"
|
2015-01-25 06:08:02 -05:00
|
|
|
|
|
|
|
"github.com/spf13/cast"
|
2015-01-30 14:42:02 -05:00
|
|
|
bp "github.com/spf13/hugo/bufferpool"
|
2015-01-25 06:08:02 -05:00
|
|
|
"github.com/spf13/hugo/hugofs"
|
|
|
|
"github.com/spf13/hugo/source"
|
|
|
|
"github.com/spf13/hugo/tpl"
|
|
|
|
jww "github.com/spf13/jwalterweatherman"
|
|
|
|
"github.com/spf13/viper"
|
2013-07-04 11:32:55 -04:00
|
|
|
)
|
|
|
|
|
2015-09-03 06:22:20 -04:00
|
|
|
var (
	// cjk matches a single rune from any of the Han, Hangul, Hiragana or
	// Katakana scripts; used to detect CJK content (see isCJKLanguage and
	// the summary/word-count logic).
	cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
)
|
|
|
|
|
2016-11-11 03:19:16 -05:00
|
|
|
const (
	// KindPage is the Kind of a regular content page.
	KindPage = "page"

	// The rest are node types; home page, sections etc.

	KindHome         = "home"
	KindSection      = "section"
	KindTaxonomy     = "taxonomy"
	KindTaxonomyTerm = "taxonomyTerm"

	// Temporary state.
	kindUnknown = "unknown"

	// The following are (currently) temporary nodes,
	// i.e. nodes we create just to render in isolation.
	kindRSS       = "RSS"
	kindSitemap   = "sitemap"
	kindRobotsTXT = "robotsTXT"
	kind404       = "404"
)
|
|
|
|
|
2013-07-04 11:32:55 -04:00
|
|
|
// Page represents a single unit of renderable content: a regular content
// page, or (since Hugo 0.18) a node such as the home page, a section or a
// taxonomy, all discriminated by Kind.
type Page struct {
	*pageInit

	// Kind is the discriminator that identifies the different page types
	// in the different page collections. This can, as an example, be used
	// to filter regular pages, find sections etc.
	// Kind will, for the pages available to the templates, be one of:
	// page, home, section, taxonomy and taxonomyTerm.
	// It is of string type to make it easy to reason about in
	// the templates.
	Kind string

	// Since Hugo 0.18 we got rid of the Node type. So now all pages are ...
	// pages (regular pages, home page, sections etc.).
	// Sections etc. will have child pages. These were earlier placed in .Data.Pages,
	// but can now be more intuitively also be fetched directly from .Pages.
	// This collection will be nil for regular pages.
	Pages Pages

	// translations will contain references to this page in other language
	// if available.
	translations Pages

	// Params contains configuration defined in the params section of page frontmatter.
	Params map[string]interface{}

	// Content sections
	Content         template.HTML
	Summary         template.HTML
	TableOfContents template.HTML

	// Aliases holds alternative paths for this page (from front matter —
	// presumably rendered as redirects elsewhere; confirm against site render).
	Aliases []string

	Images []Image
	Videos []Video

	// Truncated is true when Summary does not hold the whole content
	// (see setUserDefinedSummaryIfProvided / setAutoSummary).
	Truncated bool
	Draft     bool
	Status    string

	PublishDate time.Time
	ExpiryDate  time.Time

	// PageMeta contains page stats such as word count etc.
	PageMeta

	// Markup contains the markup type for the content.
	Markup string

	extension   string
	contentType string // explicit content type; Type() falls back to Section, then "page"
	renderable  bool   // see IsRenderable

	Layout            string   // user-selected layout
	layoutsCalculated []string // pre-computed layout candidates; short-circuits layouts()

	linkTitle string

	// frontmatter holds the raw front matter bytes (used e.g. for line
	// number reporting; see lineNumRawContentStart).
	frontmatter []byte

	// rawContent is the raw content read from the content file.
	rawContent []byte

	// workContent is a copy of rawContent that may be mutated during site build.
	workContent []byte

	// state telling if this is a "new page" or if we have rendered it previously.
	rendered bool

	// whether the content is in a CJK language.
	isCJKLanguage bool

	// shortcode state
	contentShortCodes map[string]func() (string, error)
	shortcodes        map[string]shortcode

	// the content stripped for HTML
	plain      string // TODO should be []byte
	plainWords []string

	// rendering configuration
	renderingConfig *helpers.Blackfriday

	// menus
	pageMenus PageMenus

	Source

	Position `json:"-"`

	GitInfo *gitmap.GitInfo

	// This was added as part of getting the Nodes (taxonomies etc.) to work as
	// Pages in Hugo 0.18.
	// It is deliberately named similar to Section, but not exported (for now).
	// We currently have only one level of section in Hugo, but the page can live
	// any number of levels down the file path.
	// To support taxonomies like /categories/hugo etc. we will need to keep track
	// of that information in a general way.
	// So, sections represents the path to the content, i.e. a content file or a
	// virtual content file in the situations where a taxonomy or a section etc.
	// isn't accompanied by one.
	sections []string

	site *Site

	// Pulled over from old Node. TODO(bep) reorg and group (embed)

	Site *SiteInfo `json:"-"`

	Title       string
	Description string
	Keywords    []string
	Data        map[string]interface{}

	Date    time.Time
	Lastmod time.Time

	Sitemap Sitemap

	RSSLink template.HTML

	URLPath
	permalink *url.URL

	paginator *Pager

	scratch *Scratch

	language *helpers.Language
	lang     string
}
|
|
|
|
|
|
|
|
// pageInit lazy initializes different parts of the page. It is extracted
// into its own type so we can easily create a copy of a given page.
// Note: Page embeds this as a pointer; sync.Once values must not be copied.
type pageInit struct {
	languageInit        sync.Once
	pageMenusInit       sync.Once
	pageMetaInit        sync.Once
	paginatorInit       sync.Once
	plainInit           sync.Once
	plainWordsInit      sync.Once
	renderingConfigInit sync.Once
	pageURLInit         sync.Once
}
|
|
|
|
|
2016-11-13 06:33:11 -05:00
|
|
|
// IsNode returns whether this is an item of one of the list types in Hugo,
|
|
|
|
// i.e. not a regular content page.
|
|
|
|
func (p *Page) IsNode() bool {
|
|
|
|
return p.Kind != KindPage
|
|
|
|
}
|
|
|
|
|
|
|
|
// IsHome returns whether this is the home page (Kind == KindHome).
func (p *Page) IsHome() bool {
	return p.Kind == KindHome
}
|
|
|
|
|
|
|
|
// IsPage returns whether this is a regular content page
// (Kind == KindPage), as opposed to a node type.
func (p *Page) IsPage() bool {
	return p.Kind == KindPage
}
|
|
|
|
|
2014-10-16 20:20:09 -04:00
|
|
|
// Source describes a page's content source: the raw front matter and
// content bytes, plus the file metadata embedded from the source package.
type Source struct {
	Frontmatter []byte // raw front matter bytes
	Content     []byte // raw content bytes (after front matter)

	source.File
}
|
|
|
|
// PageMeta holds derived page statistics. The unexported counters are
// computed during the site build (not in this file — confirm at call sites).
type PageMeta struct {
	wordCount      int
	fuzzyWordCount int
	readingTime    int
	// Weight is the user-defined ordering weight.
	Weight int
}
|
|
|
|
|
2016-09-28 05:05:21 -04:00
|
|
|
// WordCount always returns 0 and only exists to emit a deprecation warning.
// Deprecated: use .WordCount (on Page) instead.
func (*PageMeta) WordCount() int {
	// Remove in Hugo 0.19
	helpers.Deprecated("PageMeta", "WordCount", ".WordCount (on Page)", true)
	return 0
}
|
|
|
|
|
|
|
|
// FuzzyWordCount always returns 0 and only exists to emit a deprecation warning.
// Deprecated: use .FuzzyWordCount (on Page) instead.
func (*PageMeta) FuzzyWordCount() int {
	// Remove in Hugo 0.19
	helpers.Deprecated("PageMeta", "FuzzyWordCount", ".FuzzyWordCount (on Page)", true)
	return 0
}
|
|
|
|
|
|
|
|
// ReadingTime always returns 0 and only exists to emit a deprecation warning.
// Deprecated: use .ReadingTime (on Page) instead.
func (*PageMeta) ReadingTime() int {
	// Remove in Hugo 0.19
	helpers.Deprecated("PageMeta", "ReadingTime", ".ReadingTime (on Page)", true)
	return 0
}
|
|
|
|
|
2013-07-04 11:32:55 -04:00
|
|
|
// Position holds navigational references to the previous/next page, both
// globally and within the page's section.
type Position struct {
	Prev          *Page
	Next          *Page
	PrevInSection *Page
	NextInSection *Page
}
|
|
|
|
|
|
|
|
// Pages is a slice of Page pointers, the basic page collection type.
type Pages []*Page
|
2016-01-07 21:48:13 -05:00
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
func (p Pages) String() string {
|
|
|
|
return fmt.Sprintf("Pages(%d)", len(p))
|
|
|
|
}
|
|
|
|
|
2016-01-11 12:06:52 -05:00
|
|
|
func (ps Pages) FindPagePosByFilePath(inPath string) int {
|
|
|
|
for i, x := range ps {
|
|
|
|
if x.Source.Path() == inPath {
|
|
|
|
return i
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return -1
|
|
|
|
}
|
2016-01-07 21:48:13 -05:00
|
|
|
|
|
|
|
// FindPagePos Given a page, it will find the position in Pages
|
|
|
|
// will return -1 if not found
|
|
|
|
func (ps Pages) FindPagePos(page *Page) int {
|
|
|
|
for i, x := range ps {
|
2016-01-11 12:06:52 -05:00
|
|
|
if x.Source.Path() == page.Source.Path() {
|
2016-01-07 21:48:13 -05:00
|
|
|
return i
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return -1
|
|
|
|
}
|
|
|
|
|
2016-12-01 04:21:49 -05:00
|
|
|
func (p *Page) createWorkContentCopy() {
|
|
|
|
p.workContent = make([]byte, len(p.rawContent))
|
|
|
|
copy(p.workContent, p.rawContent)
|
|
|
|
}
|
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// Plain returns the rendered Content with all HTML stripped,
// computed lazily on first use.
func (p *Page) Plain() string {
	p.initPlain()
	return p.plain
}
|
|
|
|
|
2015-02-04 18:38:50 -05:00
|
|
|
// PlainWords returns the whitespace-separated fields of Plain(),
// computed lazily on first use.
func (p *Page) PlainWords() []string {
	p.initPlainWords()
	return p.plainWords
}
|
|
|
|
|
|
|
|
func (p *Page) initPlain() {
|
|
|
|
p.plainInit.Do(func() {
|
|
|
|
p.plain = helpers.StripHTML(string(p.Content))
|
2016-08-16 16:50:15 -04:00
|
|
|
return
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) initPlainWords() {
|
|
|
|
p.plainWordsInit.Do(func() {
|
|
|
|
p.plainWords = strings.Fields(p.Plain())
|
2015-07-12 05:28:19 -04:00
|
|
|
return
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2015-10-11 23:51:04 -04:00
|
|
|
// Param is a convenience method to do lookups in Page's and Site's Params map,
|
|
|
|
// in that order.
|
|
|
|
//
|
2016-10-17 13:30:21 -04:00
|
|
|
// This method is also implemented on Node and SiteInfo.
|
2015-10-11 23:51:04 -04:00
|
|
|
func (p *Page) Param(key interface{}) (interface{}, error) {
|
|
|
|
keyStr, err := cast.ToStringE(key)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2016-10-17 13:30:21 -04:00
|
|
|
keyStr = strings.ToLower(keyStr)
|
2015-10-11 23:51:04 -04:00
|
|
|
if val, ok := p.Params[keyStr]; ok {
|
|
|
|
return val, nil
|
|
|
|
}
|
|
|
|
return p.Site.Params[keyStr], nil
|
|
|
|
}
|
|
|
|
|
2014-12-09 13:33:55 -05:00
|
|
|
// Author returns a single author for the page, or the zero Author when
// none are configured.
// NOTE(review): Authors() returns a map, so this range yields an arbitrary
// element — with more than one author the result is nondeterministic
// across runs; confirm whether callers rely on a stable "first" author.
func (p *Page) Author() Author {
	authors := p.Authors()

	for _, author := range authors {
		return author
	}
	return Author{}
}
|
2016-09-15 22:28:13 -04:00
|
|
|
|
2016-09-18 13:10:11 -04:00
|
|
|
func (p *Page) Authors() AuthorList {
|
|
|
|
authorKeys, ok := p.Params["authors"]
|
|
|
|
if !ok {
|
|
|
|
return AuthorList{}
|
|
|
|
}
|
|
|
|
authors := authorKeys.([]string)
|
|
|
|
if len(authors) < 1 || len(p.Site.Authors) < 1 {
|
|
|
|
return AuthorList{}
|
2014-12-09 13:33:55 -05:00
|
|
|
}
|
|
|
|
|
2016-09-18 13:10:11 -04:00
|
|
|
al := make(AuthorList)
|
|
|
|
for _, author := range authors {
|
|
|
|
a, ok := p.Site.Authors[author]
|
|
|
|
if ok {
|
|
|
|
al[author] = a
|
2014-12-09 13:33:55 -05:00
|
|
|
}
|
|
|
|
}
|
2016-09-18 13:10:11 -04:00
|
|
|
return al
|
2014-12-09 13:33:55 -05:00
|
|
|
}
|
|
|
|
|
2015-03-11 13:34:57 -04:00
|
|
|
// UniqueID returns a unique identifier for this page, delegating to its
// source file.
func (p *Page) UniqueID() string {
	return p.Source.UniqueID()
}
|
|
|
|
|
Shortcode rewrite, take 2
This commit contains a restructuring and partial rewrite of the shortcode handling.
Prior to this commit rendering of the page content was mingled with handling of the shortcodes. This led to several oddities.
The new flow is:
1. Shortcodes are extracted from page and replaced with placeholders.
2. Shortcodes are processed and rendered
3. Page is processed
4. The placeholders are replaced with the rendered shortcodes
The handling of summaries is also made simpler by this.
This commit also introduces some other changes:
1. distinction between shortcodes that need further processing and those who do not:
* `{{< >}}`: Typically raw HTML. Will not be processed.
* `{{% %}}`: Will be processed by the page's markup engine (Markdown or (in future) Asciidoctor)
The above also involves a new shortcode-parser, with lexical scanning inspired by Rob Pike's talk called "Lexical Scanning in Go",
which should be easier to understand, give better error messages and perform better.
2. If you want to exclude a shortcode from being processed (for documentation etc.), the inner part of the shortcode must be commented out, i.e. `{{%/* movie 47238zzb */%}}`. See the updated shortcode section in the documentation for further examples.
The new parser supports nested shortcodes. This isn't new, but has two related design choices worth mentioning:
* The shortcodes will be rendered individually, so If both `{{< >}}` and `{{% %}}` are used in the nested hierarchy, one will be passed through the page's markdown processor, the other not.
* To avoid potential costly overhead of always looking far ahead for a possible closing tag, this implementation looks at the template itself, and is branded as a container with inner content if it contains a reference to `.Inner`
Fixes #565
Fixes #480
Fixes #461
And probably some others.
2014-10-27 16:48:30 -04:00
|
|
|
// lineNumRawContentStart returns the 1-based line number in the source file
// where the raw content begins, i.e. just past the front matter.
// Used for logging/error reporting.
func (p *Page) lineNumRawContentStart() int {
	return bytes.Count(p.frontmatter, []byte("\n")) + 1
}
|
|
|
|
|
2016-08-01 17:04:44 -04:00
|
|
|
var (
	// internalSummaryDivider marks the user-defined summary split point in
	// the content (presumably substituted for the user-facing divider
	// during parsing — see splitUserDefinedSummaryAndContent).
	internalSummaryDivider = []byte("HUGOMORE42")
)
|
Shortcode rewrite, take 2
This commit contains a restructuring and partial rewrite of the shortcode handling.
Prior to this commit rendering of the page content was mingled with handling of the shortcodes. This led to several oddities.
The new flow is:
1. Shortcodes are extracted from page and replaced with placeholders.
2. Shortcodes are processed and rendered
3. Page is processed
4. The placeholders are replaced with the rendered shortcodes
The handling of summaries is also made simpler by this.
This commit also introduces some other chenges:
1. distinction between shortcodes that need further processing and those who do not:
* `{{< >}}`: Typically raw HTML. Will not be processed.
* `{{% %}}`: Will be processed by the page's markup engine (Markdown or (infuture) Asciidoctor)
The above also involves a new shortcode-parser, with lexical scanning inspired by Rob Pike's talk called "Lexical Scanning in Go",
which should be easier to understand, give better error messages and perform better.
2. If you want to exclude a shortcode from being processed (for documentation etc.), the inner part of the shorcode must be commented out, i.e. `{{%/* movie 47238zzb */%}}`. See the updated shortcode section in the documentation for further examples.
The new parser supports nested shortcodes. This isn't new, but has two related design choices worth mentioning:
* The shortcodes will be rendered individually, so If both `{{< >}}` and `{{% %}}` are used in the nested hierarchy, one will be passed through the page's markdown processor, the other not.
* To avoid potential costly overhead of always looking far ahead for a possible closing tag, this implementation looks at the template itself, and is branded as a container with inner content if it contains a reference to `.Inner`
Fixes #565
Fixes #480
Fixes #461
And probably some others.
2014-10-27 16:48:30 -04:00
|
|
|
|
2016-08-01 17:04:44 -04:00
|
|
|
// Returns the page as summary and main if a user defined split is provided.
|
2016-10-25 14:40:32 -04:00
|
|
|
func (p *Page) setUserDefinedSummaryIfProvided(rawContentCopy []byte) (*summaryContent, error) {
|
Shortcode rewrite, take 2
This commit contains a restructuring and partial rewrite of the shortcode handling.
Prior to this commit rendering of the page content was mingled with handling of the shortcodes. This led to several oddities.
The new flow is:
1. Shortcodes are extracted from page and replaced with placeholders.
2. Shortcodes are processed and rendered
3. Page is processed
4. The placeholders are replaced with the rendered shortcodes
The handling of summaries is also made simpler by this.
This commit also introduces some other chenges:
1. distinction between shortcodes that need further processing and those who do not:
* `{{< >}}`: Typically raw HTML. Will not be processed.
* `{{% %}}`: Will be processed by the page's markup engine (Markdown or (infuture) Asciidoctor)
The above also involves a new shortcode-parser, with lexical scanning inspired by Rob Pike's talk called "Lexical Scanning in Go",
which should be easier to understand, give better error messages and perform better.
2. If you want to exclude a shortcode from being processed (for documentation etc.), the inner part of the shorcode must be commented out, i.e. `{{%/* movie 47238zzb */%}}`. See the updated shortcode section in the documentation for further examples.
The new parser supports nested shortcodes. This isn't new, but has two related design choices worth mentioning:
* The shortcodes will be rendered individually, so If both `{{< >}}` and `{{% %}}` are used in the nested hierarchy, one will be passed through the page's markdown processor, the other not.
* To avoid potential costly overhead of always looking far ahead for a possible closing tag, this implementation looks at the template itself, and is branded as a container with inner content if it contains a reference to `.Inner`
Fixes #565
Fixes #480
Fixes #461
And probably some others.
2014-10-27 16:48:30 -04:00
|
|
|
|
2016-10-25 14:40:32 -04:00
|
|
|
sc, err := splitUserDefinedSummaryAndContent(p.Markup, rawContentCopy)
|
2016-10-18 02:43:44 -04:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2015-05-10 07:33:50 -04:00
|
|
|
|
2016-08-01 17:04:44 -04:00
|
|
|
if sc == nil {
|
|
|
|
// No divider found
|
|
|
|
return nil, nil
|
|
|
|
}
|
2015-02-04 18:38:50 -05:00
|
|
|
|
2016-08-01 17:04:44 -04:00
|
|
|
p.Truncated = true
|
|
|
|
if len(sc.content) < 20 {
|
|
|
|
// only whitespace?
|
|
|
|
p.Truncated = len(bytes.Trim(sc.content, " \n\r")) > 0
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
2016-08-01 17:04:44 -04:00
|
|
|
|
|
|
|
p.Summary = helpers.BytesToHTML(sc.summary)
|
|
|
|
|
|
|
|
return sc, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// summaryContent holds the result of a user-defined summary split.
// Make this explicit so there is no doubt about what is what.
type summaryContent struct {
	summary               []byte // the part before the divider
	content               []byte // the full content with the divider removed
	contentWithoutSummary []byte // the part after the summary
}
|
|
|
|
|
2016-10-18 02:43:44 -04:00
|
|
|
func splitUserDefinedSummaryAndContent(markup string, c []byte) (sc *summaryContent, err error) {
|
|
|
|
defer func() {
|
|
|
|
if r := recover(); r != nil {
|
|
|
|
err = fmt.Errorf("summary split failed: %s", r)
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
|
2016-08-07 17:34:04 -04:00
|
|
|
c = bytes.TrimSpace(c)
|
2016-08-01 17:04:44 -04:00
|
|
|
startDivider := bytes.Index(c, internalSummaryDivider)
|
|
|
|
|
|
|
|
if startDivider == -1 {
|
2016-10-18 02:43:44 -04:00
|
|
|
return
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
endDivider := startDivider + len(internalSummaryDivider)
|
|
|
|
endSummary := startDivider
|
|
|
|
|
|
|
|
var (
|
|
|
|
startMarkup []byte
|
|
|
|
endMarkup []byte
|
|
|
|
addDiv bool
|
|
|
|
divStart = []byte("<div class=\"document\">")
|
|
|
|
)
|
|
|
|
|
|
|
|
switch markup {
|
|
|
|
default:
|
|
|
|
startMarkup = []byte("<p>")
|
|
|
|
endMarkup = []byte("</p>")
|
|
|
|
case "asciidoc":
|
|
|
|
startMarkup = []byte("<div class=\"paragraph\">")
|
|
|
|
endMarkup = []byte("</div>")
|
|
|
|
case "rst":
|
|
|
|
startMarkup = []byte("<p>")
|
|
|
|
endMarkup = []byte("</p>")
|
|
|
|
addDiv = true
|
|
|
|
}
|
|
|
|
|
|
|
|
// Find the closest end/start markup string to the divider
|
2016-10-18 02:43:44 -04:00
|
|
|
fromStart := -1
|
2016-08-01 17:04:44 -04:00
|
|
|
fromIdx := bytes.LastIndex(c[:startDivider], startMarkup)
|
2016-10-18 02:43:44 -04:00
|
|
|
if fromIdx != -1 {
|
|
|
|
fromStart = startDivider - fromIdx - len(startMarkup)
|
|
|
|
}
|
2016-08-01 17:04:44 -04:00
|
|
|
fromEnd := bytes.Index(c[endDivider:], endMarkup)
|
|
|
|
|
|
|
|
if fromEnd != -1 && fromEnd <= fromStart {
|
|
|
|
endSummary = startDivider + fromEnd + len(endMarkup)
|
2016-08-07 17:34:04 -04:00
|
|
|
} else if fromStart != -1 && fromEnd != -1 {
|
2016-08-01 17:04:44 -04:00
|
|
|
endSummary = startDivider - fromStart - len(startMarkup)
|
|
|
|
}
|
|
|
|
|
|
|
|
withoutDivider := bytes.TrimSpace(append(c[:startDivider], c[endDivider:]...))
|
2016-08-07 17:34:04 -04:00
|
|
|
var (
|
|
|
|
contentWithoutSummary []byte
|
|
|
|
summary []byte
|
|
|
|
)
|
|
|
|
|
|
|
|
if len(withoutDivider) > 0 {
|
|
|
|
contentWithoutSummary = bytes.TrimSpace(withoutDivider[endSummary:])
|
|
|
|
summary = bytes.TrimSpace(withoutDivider[:endSummary])
|
|
|
|
}
|
2016-08-01 17:04:44 -04:00
|
|
|
|
|
|
|
if addDiv {
|
|
|
|
// For the rst
|
|
|
|
summary = append(append([]byte(nil), summary...), []byte("</div>")...)
|
|
|
|
// TODO(bep) include the document class, maybe
|
|
|
|
contentWithoutSummary = append(divStart, contentWithoutSummary...)
|
|
|
|
}
|
|
|
|
|
2016-10-18 02:43:44 -04:00
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
sc = &summaryContent{
|
2016-08-01 17:04:44 -04:00
|
|
|
summary: summary,
|
|
|
|
content: withoutDivider,
|
|
|
|
contentWithoutSummary: contentWithoutSummary,
|
|
|
|
}
|
2016-10-18 02:43:44 -04:00
|
|
|
|
|
|
|
return
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) setAutoSummary() error {
|
|
|
|
var summary string
|
|
|
|
var truncated bool
|
|
|
|
if p.isCJKLanguage {
|
|
|
|
summary, truncated = helpers.TruncateWordsByRune(p.PlainWords(), helpers.SummaryLength)
|
|
|
|
} else {
|
2016-08-16 16:50:15 -04:00
|
|
|
summary, truncated = helpers.TruncateWordsToWholeSentence(p.Plain(), helpers.SummaryLength)
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
p.Summary = template.HTML(summary)
|
|
|
|
p.Truncated = truncated
|
|
|
|
|
|
|
|
return nil
|
2014-01-27 17:16:28 -05:00
|
|
|
}
|
|
|
|
|
2014-01-28 23:11:05 -05:00
|
|
|
func (p *Page) renderContent(content []byte) []byte {
|
2015-09-08 20:03:38 -04:00
|
|
|
var fn helpers.LinkResolverFunc
|
|
|
|
var fileFn helpers.FileResolverFunc
|
|
|
|
if p.getRenderingConfig().SourceRelativeLinksEval {
|
|
|
|
fn = func(ref string) (string, error) {
|
2016-11-13 08:27:10 -05:00
|
|
|
return p.Site.SourceRelativeLink(ref, p)
|
2015-09-08 20:03:38 -04:00
|
|
|
}
|
|
|
|
fileFn = func(ref string) (string, error) {
|
2016-11-13 08:27:10 -05:00
|
|
|
return p.Site.SourceRelativeLinkFile(ref, p)
|
2015-09-08 20:03:38 -04:00
|
|
|
}
|
|
|
|
}
|
2016-08-07 08:03:03 -04:00
|
|
|
return helpers.RenderBytes(&helpers.RenderingContext{
|
|
|
|
Content: content, RenderTOC: true, PageFmt: p.determineMarkupType(),
|
|
|
|
ConfigProvider: p.Language(),
|
2016-10-13 04:30:43 -04:00
|
|
|
DocumentID: p.UniqueID(), DocumentName: p.Path(),
|
|
|
|
Config: p.getRenderingConfig(), LinkResolver: fn, FileResolver: fileFn})
|
2014-11-28 15:16:57 -05:00
|
|
|
}
|
|
|
|
|
2015-01-25 06:08:02 -05:00
|
|
|
// getRenderingConfig lazily builds the Blackfriday rendering configuration
// for this page: the language defaults, overridden by any "blackfriday"
// params from the page. Computed at most once; safe for concurrent use.
func (p *Page) getRenderingConfig() *helpers.Blackfriday {
	p.renderingConfigInit.Do(func() {
		pageParam := cast.ToStringMap(p.GetParam("blackfriday"))
		// A nil language is a programmer error at this point; fail loudly.
		if p.Language() == nil {
			panic(fmt.Sprintf("nil language for %s with source lang %s", p.BaseFileName(), p.lang))
		}
		p.renderingConfig = helpers.NewBlackfriday(p.Language())

		if err := mapstructure.Decode(pageParam, p.renderingConfig); err != nil {
			jww.FATAL.Printf("Failed to get rendering config for %s:\n%s", p.BaseFileName(), err.Error())
		}
	})

	return p.renderingConfig
}
|
|
|
|
|
2013-09-04 19:57:17 -04:00
|
|
|
func newPage(filename string) *Page {
|
2016-11-11 03:19:16 -05:00
|
|
|
page := Page{
|
2016-11-16 14:32:38 -05:00
|
|
|
pageInit: &pageInit{},
|
2016-11-13 08:27:10 -05:00
|
|
|
Kind: kindFromFilename(filename),
|
|
|
|
contentType: "",
|
|
|
|
Source: Source{File: *source.NewFile(filename)},
|
|
|
|
Keywords: []string{}, Sitemap: Sitemap{Priority: -1},
|
2016-05-14 00:35:16 -04:00
|
|
|
Params: make(map[string]interface{}),
|
2016-07-25 16:22:09 -04:00
|
|
|
translations: make(Pages, 0),
|
2016-10-31 14:53:33 -04:00
|
|
|
sections: sectionsFromFilename(filename),
|
2016-05-14 00:35:16 -04:00
|
|
|
}
|
2014-03-31 13:23:34 -04:00
|
|
|
|
2014-10-16 20:20:09 -04:00
|
|
|
jww.DEBUG.Println("Reading from", page.File.Path())
|
2014-01-29 17:50:31 -05:00
|
|
|
return &page
|
2013-08-13 19:39:24 -04:00
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2013-09-18 13:17:43 -04:00
|
|
|
// IsRenderable returns the renderable flag set when the page was read.
func (p *Page) IsRenderable() bool {
	return p.renderable
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func (p *Page) Type() string {
|
|
|
|
if p.contentType != "" {
|
|
|
|
return p.contentType
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
2014-10-16 20:20:09 -04:00
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
if x := p.Section(); x != "" {
|
2014-01-29 17:50:31 -05:00
|
|
|
return x
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return "page"
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// Section returns the section this page belongs to, as derived from its
// source file.
func (p *Page) Section() string {
	return p.Source.Section()
}
|
|
|
|
|
2015-11-02 11:24:50 -05:00
|
|
|
// layouts returns the candidate template names for rendering this page,
// most specific first. Node kinds (home, section, taxonomy, taxonomy term)
// each have their own fixed candidate lists; regular pages fall through to
// the package-level layouts() helper using the page's Type and either its
// user-set Layout, the provided layout name l, or "single".
func (p *Page) layouts(l ...string) []string {
	// A pre-computed list short-circuits everything else.
	if len(p.layoutsCalculated) > 0 {
		return p.layoutsCalculated
	}

	switch p.Kind {
	case KindHome:
		return p.site.appendThemeTemplates([]string{"index.html", "_default/list.html"})
	case KindSection:
		section := p.sections[0]
		return p.site.appendThemeTemplates([]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
	case KindTaxonomy:
		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
		return p.site.appendThemeTemplates([]string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"})
	case KindTaxonomyTerm:
		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
		return p.site.appendThemeTemplates([]string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"})
	}

	// Regular Page handled below

	if p.Layout != "" {
		return layouts(p.Type(), p.Layout)
	}

	layout := ""
	if len(l) == 0 {
		layout = "single"
	} else {
		layout = l[0]
	}

	return layouts(p.Type(), layout)
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2016-11-16 11:52:03 -05:00
|
|
|
// TODO(bep) consolidate and test these KindHome switches (see other layouts methods)s
|
2016-11-02 16:34:19 -04:00
|
|
|
// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
|
|
|
|
// if no RSS should be rendered.
|
|
|
|
func (p *Page) rssLayouts() []string {
|
2016-11-13 05:43:23 -05:00
|
|
|
switch p.Kind {
|
|
|
|
case KindHome:
|
2016-11-02 16:34:19 -04:00
|
|
|
return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
|
2016-11-13 05:43:23 -05:00
|
|
|
case KindSection:
|
2016-11-02 16:34:19 -04:00
|
|
|
section := p.sections[0]
|
|
|
|
return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
|
2016-11-13 05:43:23 -05:00
|
|
|
case KindTaxonomy:
|
2016-11-02 16:34:19 -04:00
|
|
|
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
|
|
|
|
return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
|
2016-11-13 05:43:23 -05:00
|
|
|
case KindTaxonomyTerm:
|
2016-11-02 16:34:19 -04:00
|
|
|
// No RSS for taxonomy terms
|
2016-11-13 05:43:23 -05:00
|
|
|
case KindPage:
|
2016-11-02 16:34:19 -04:00
|
|
|
// No RSS for regular pages
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2013-10-07 00:57:45 -04:00
|
|
|
// layouts returns the candidate layout templates for the given content type
// path and layout name, most specific first. For type "a/b" and layout
// "single" it yields "a/b/single.html", "a/single.html",
// "_default/single.html", then the same candidates prefixed with "theme/".
func layouts(types string, layout string) []string {
	segments := strings.Split(types, "/")

	// Add type/layout.html: one candidate per prefix of the type path.
	result := make([]string, 0, 2*(len(segments)+1))
	for end := len(segments); end > 0; end-- {
		dir := strings.ToLower(path.Join(segments[:end]...))
		result = append(result, fmt.Sprintf("%s/%s.html", dir, layout))
	}

	// Add _default/layout.html.
	result = append(result, fmt.Sprintf("_default/%s.html", layout))

	// Add theme/type/layout.html & theme/_default/layout.html by mirroring
	// every candidate gathered so far.
	for _, candidate := range result[:len(result):len(result)] {
		result = append(result, "theme/"+candidate)
	}

	return result
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func NewPageFrom(buf io.Reader, name string) (*Page, error) {
|
2014-05-01 14:11:56 -04:00
|
|
|
p, err := NewPage(name)
|
|
|
|
if err != nil {
|
|
|
|
return p, err
|
|
|
|
}
|
2015-04-03 15:41:12 -04:00
|
|
|
_, err = p.ReadFrom(buf)
|
2014-05-01 14:11:56 -04:00
|
|
|
|
|
|
|
return p, err
|
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func NewPage(name string) (*Page, error) {
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(name) == 0 {
|
|
|
|
return nil, errors.New("Zero length page name")
|
|
|
|
}
|
2013-08-05 10:53:58 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
// Create new page
|
|
|
|
p := newPage(name)
|
2013-08-05 10:53:58 -04:00
|
|
|
|
2014-05-01 13:20:58 -04:00
|
|
|
return p, nil
|
|
|
|
}
|
|
|
|
|
2015-04-03 15:41:12 -04:00
|
|
|
func (p *Page) ReadFrom(buf io.Reader) (int64, error) {
|
2014-01-29 17:50:31 -05:00
|
|
|
// Parse for metadata & body
|
2015-04-03 15:41:12 -04:00
|
|
|
if err := p.parse(buf); err != nil {
|
2014-03-31 13:23:34 -04:00
|
|
|
jww.ERROR.Print(err)
|
2015-04-03 15:41:12 -04:00
|
|
|
return 0, err
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
2013-08-05 10:53:58 -04:00
|
|
|
|
2015-04-03 15:41:12 -04:00
|
|
|
return int64(len(p.rawContent)), nil
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2016-08-17 07:41:48 -04:00
|
|
|
// WordCount returns the number of words in the page content.
// Page statistics are computed lazily on first use (see analyzePage).
func (p *Page) WordCount() int {
	p.analyzePage()
	return p.wordCount
}
|
|
|
|
|
|
|
|
// ReadingTime returns the estimated reading time for the page, in minutes.
// Page statistics are computed lazily on first use (see analyzePage).
func (p *Page) ReadingTime() int {
	p.analyzePage()
	return p.readingTime
}
|
|
|
|
|
|
|
|
// FuzzyWordCount returns the word count rounded into hundreds.
// Page statistics are computed lazily on first use (see analyzePage).
func (p *Page) FuzzyWordCount() int {
	p.analyzePage()
	return p.fuzzyWordCount
}
|
|
|
|
|
2013-07-04 11:32:55 -04:00
|
|
|
// analyzePage computes the derived content statistics (wordCount,
// fuzzyWordCount and readingTime) exactly once per page, guarded by
// pageMetaInit.
func (p *Page) analyzePage() {
	p.pageMetaInit.Do(func() {
		if p.isCJKLanguage {
			// For CJK content, count runes rather than whitespace-separated
			// tokens; a token that is pure single-byte text (len == rune
			// count) still counts as one word.
			p.wordCount = 0
			for _, word := range p.PlainWords() {
				runeCount := utf8.RuneCountInString(word)
				if len(word) == runeCount {
					p.wordCount++
				} else {
					p.wordCount += runeCount
				}
			}
		} else {
			p.wordCount = helpers.TotalWords(p.Plain())
		}

		// TODO(bep) is set in a test. Fix that.
		if p.fuzzyWordCount == 0 {
			// Bucket the word count into hundreds (0 becomes 100).
			p.fuzzyWordCount = (p.wordCount + 100) / 100 * 100
		}

		// Reading time: assumes ~501 characters/minute for CJK text and
		// ~213 words/minute otherwise, rounding up to at least 1 when
		// there is any content.
		if p.isCJKLanguage {
			p.readingTime = (p.wordCount + 500) / 501
		} else {
			p.readingTime = (p.wordCount + 212) / 213
		}
	})
}
|
|
|
|
|
2016-11-27 13:25:28 -05:00
|
|
|
// getPermalink returns the page's permalink URL, computing and caching it
// on the first call. On failure an empty URL is cached (and an error
// logged) so later calls never dereference nil.
func (p *Page) getPermalink() *url.URL {
	p.pageURLInit.Do(func() {
		u, err := p.createPermalink()
		if err != nil {
			jww.ERROR.Printf("Failed to create permalink for page %q: %s", p.FullFilePath(), err)
			p.permalink = new(url.URL)
			return
		}

		p.permalink = u
	})

	// The link may be modified by the receiver, so create a copy.
	l := *p.permalink

	return &l
}
|
|
|
|
|
|
|
|
// createPermalink builds the absolute permalink URL for the page.
// Resolution order: for node kinds (home/section/taxonomy/...), the
// URL path; for regular pages, a front-matter URL wins, then a
// permalink pattern configured for the section, then slug (or the
// translation base name) joined with the source directory.
func (p *Page) createPermalink() (*url.URL, error) {
	// TODO(bep) this should probably be set once during build. Maybe.
	// And simplified.
	baseURL := string(p.Site.BaseURL)

	if p.IsNode() {
		// No permalink config for nodes (currently)
		pURL := strings.TrimSpace(p.Site.pathSpec.URLize(p.URLPath.URL))
		pURL = p.addLangPathPrefix(pURL)
		pURL = p.Site.pathSpec.URLPrep(pURL)
		url := helpers.MakePermalink(baseURL, pURL)
		return url, nil
	}

	dir := strings.TrimSpace(p.Site.pathSpec.MakePath(filepath.ToSlash(strings.ToLower(p.Source.Dir()))))
	pSlug := strings.TrimSpace(p.Site.pathSpec.URLize(p.Slug))
	pURL := strings.TrimSpace(p.Site.pathSpec.URLize(p.URLPath.URL))
	var permalink string
	var err error

	// A URL set in front matter takes precedence over everything else.
	if len(pURL) > 0 {
		return helpers.MakePermalink(baseURL, pURL), nil
	}

	// A :permalinks pattern configured for this section, if any.
	if override, ok := p.Site.Permalinks[p.Section()]; ok {
		permalink, err = override.Expand(p)

		if err != nil {
			return nil, err
		}
	} else {
		if len(pSlug) > 0 {
			// Note: the raw Slug (not the URLized pSlug) is joined here.
			permalink = p.Site.pathSpec.URLPrep(path.Join(dir, p.Slug+"."+p.Extension()))
		} else {
			// Fall back to the source file's translation base name.
			t := p.Source.TranslationBaseName()
			permalink = p.Site.pathSpec.URLPrep(path.Join(dir, (strings.TrimSpace(t) + "." + p.Extension())))
		}
	}

	// Multilingual sites get a language prefix (except on the URL-in-front-matter path above).
	permalink = p.addLangPathPrefix(permalink)

	return helpers.MakePermalink(baseURL, permalink), nil
}
|
|
|
|
|
2014-10-16 20:20:09 -04:00
|
|
|
// Extension returns the page's output file extension: the front-matter
// "extension"/"ext" value if set, otherwise the site-wide
// "defaultExtension" setting.
func (p *Page) Extension() string {
	if p.extension != "" {
		return p.extension
	}
	return viper.GetString("defaultExtension")
}
|
|
|
|
|
2016-07-25 16:22:09 -04:00
|
|
|
// AllTranslations returns all translations, including the current Page.
func (p *Page) AllTranslations() Pages {
	return p.translations
}
|
|
|
|
|
2016-08-09 08:26:55 -04:00
|
|
|
// IsTranslated returns whether this content file is translated to
// other language(s). The translations slice includes the page itself,
// hence the > 1 test.
func (p *Page) IsTranslated() bool {
	return len(p.translations) > 1
}
|
|
|
|
|
2016-07-25 16:22:09 -04:00
|
|
|
// Translations returns the translations excluding the current Page.
|
|
|
|
func (p *Page) Translations() Pages {
|
|
|
|
translations := make(Pages, 0)
|
|
|
|
for _, t := range p.translations {
|
|
|
|
if t != p {
|
|
|
|
translations = append(translations, t)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return translations
|
|
|
|
}
|
|
|
|
|
2013-10-25 18:37:53 -04:00
|
|
|
func (p *Page) LinkTitle() string {
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(p.linkTitle) > 0 {
|
|
|
|
return p.linkTitle
|
|
|
|
}
|
2015-03-07 06:25:38 -05:00
|
|
|
return p.Title
|
2013-10-25 18:37:53 -04:00
|
|
|
}
|
|
|
|
|
2016-06-14 09:53:49 -04:00
|
|
|
// shouldBuild reports whether this page should be rendered, honoring the
// site-wide buildFuture, buildExpired and buildDrafts settings.
func (p *Page) shouldBuild() bool {
	return shouldBuild(viper.GetBool("buildFuture"), viper.GetBool("buildExpired"),
		viper.GetBool("buildDrafts"), p.Draft, p.PublishDate, p.ExpiryDate)
}
|
|
|
|
|
2016-06-14 09:53:49 -04:00
|
|
|
func shouldBuild(buildFuture bool, buildExpired bool, buildDrafts bool, Draft bool,
|
2016-05-11 11:45:09 -04:00
|
|
|
publishDate time.Time, expiryDate time.Time) bool {
|
|
|
|
if !(buildDrafts || !Draft) {
|
|
|
|
return false
|
2014-05-29 00:48:40 -04:00
|
|
|
}
|
2016-05-11 11:45:09 -04:00
|
|
|
if !buildFuture && !publishDate.IsZero() && publishDate.After(time.Now()) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
if !buildExpired && !expiryDate.IsZero() && expiryDate.Before(time.Now()) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
return true
|
2014-05-29 00:48:40 -04:00
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// IsDraft returns whether the page is marked as a draft in front matter.
func (p *Page) IsDraft() bool {
	return p.Draft
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func (p *Page) IsFuture() bool {
|
2016-06-13 11:37:10 -04:00
|
|
|
if p.PublishDate.IsZero() {
|
|
|
|
return false
|
|
|
|
}
|
2016-05-11 10:04:53 -04:00
|
|
|
return p.PublishDate.After(time.Now())
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) IsExpired() bool {
|
2016-06-13 11:37:10 -04:00
|
|
|
if p.ExpiryDate.IsZero() {
|
|
|
|
return false
|
|
|
|
}
|
2016-05-11 10:04:53 -04:00
|
|
|
return p.ExpiryDate.Before(time.Now())
|
2014-08-20 11:09:35 -04:00
|
|
|
}
|
|
|
|
|
2016-11-15 04:43:49 -05:00
|
|
|
// Permalink returns the page's absolute permalink as a string.
func (p *Page) Permalink() string {
	return p.getPermalink().String()
}
|
|
|
|
|
2016-09-14 12:51:34 -04:00
|
|
|
func (p *Page) URL() string {
|
2016-11-15 04:43:49 -05:00
|
|
|
|
|
|
|
if p.IsPage() && p.URLPath.URL != "" {
|
2016-09-14 12:51:34 -04:00
|
|
|
// This is the url set in front matter
|
|
|
|
return p.URLPath.URL
|
|
|
|
}
|
|
|
|
// Fall back to the relative permalink.
|
2016-11-15 04:43:49 -05:00
|
|
|
u := p.RelPermalink()
|
2016-09-14 12:51:34 -04:00
|
|
|
return u
|
|
|
|
}
|
|
|
|
|
2016-11-15 04:43:49 -05:00
|
|
|
// RelPermalink returns the page's permalink relative to the site's BaseURL.
// With canonifyURLs enabled the path is computed relative to BaseURL
// (which may include a subpath); otherwise the scheme/host parts are
// simply stripped from the absolute permalink.
func (p *Page) RelPermalink() string {
	link := p.getPermalink()

	if viper.GetBool("canonifyURLs") {
		// replacements for relpermalink with baseURL on the form http://myhost.com/sub/ will fail later on
		// have to return the URL relative from baseURL
		relpath, err := helpers.GetRelativePath(link.String(), string(p.Site.BaseURL))
		if err != nil {
			return ""
		}

		relpath = filepath.ToSlash(relpath)

		// NOTE(review): relpath[0] assumes GetRelativePath never returns
		// an empty string on success — TODO confirm.
		if relpath[0] == '.' {
			relpath = relpath[1:]
		}

		if !strings.HasPrefix(relpath, "/") {
			relpath = "/" + relpath
		}

		return relpath
	}

	// Keep only the path/query/fragment parts of the permalink.
	link.Scheme = ""
	link.Host = ""
	link.User = nil
	link.Opaque = ""
	return link.String()
}
|
|
|
|
|
2015-08-02 02:02:20 -04:00
|
|
|
// ErrHasDraftAndPublished is returned by Page.update when a page's front
// matter sets both "draft" and "published".
var ErrHasDraftAndPublished = errors.New("both draft and published parameters were found in page's frontmatter")
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// update applies parsed front matter f (a map) to the page's fields.
// Known keys are mapped onto typed Page fields; everything else is stored
// in p.Params (lower-cased keys). It returns an error for nil metadata,
// absolute url/alias values, failed permalink expansion, or conflicting
// draft/published settings.
//
// NOTE(review): the type assertion below panics if f is not a
// map[string]interface{} — presumably the parser guarantees this; confirm.
func (p *Page) update(f interface{}) error {
	if f == nil {
		return errors.New("no metadata found")
	}
	m := f.(map[string]interface{})
	// Needed for case insensitive fetching of params values
	helpers.ToLowerMap(m)

	var err error
	// Tri-state flags: nil means "not set in front matter".
	var draft, published, isCJKLanguage *bool
	for k, v := range m {
		loki := strings.ToLower(k)
		switch loki {
		case "title":
			p.Title = cast.ToString(v)
		case "linktitle":
			p.linkTitle = cast.ToString(v)
		case "description":
			p.Description = cast.ToString(v)
			// Mirror into Params so templates can read .Params.description.
			p.Params["description"] = p.Description
		case "slug":
			p.Slug = cast.ToString(v)
		case "url":
			// Absolute URLs are rejected; only site-relative values allowed.
			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
				return fmt.Errorf("Only relative URLs are supported, %v provided", url)
			}
			p.URLPath.URL = cast.ToString(v)
		case "type":
			p.contentType = cast.ToString(v)
		case "extension", "ext":
			p.extension = cast.ToString(v)
		case "keywords":
			p.Keywords = cast.ToStringSlice(v)
		case "date":
			p.Date, err = cast.ToTimeE(v)
			if err != nil {
				// Parse failures are logged, not fatal.
				jww.ERROR.Printf("Failed to parse date '%v' in page %s", v, p.File.Path())
			}
		case "lastmod":
			p.Lastmod, err = cast.ToTimeE(v)
			if err != nil {
				jww.ERROR.Printf("Failed to parse lastmod '%v' in page %s", v, p.File.Path())
			}
		case "publishdate", "pubdate":
			p.PublishDate, err = cast.ToTimeE(v)
			if err != nil {
				jww.ERROR.Printf("Failed to parse publishdate '%v' in page %s", v, p.File.Path())
			}
		case "expirydate", "unpublishdate":
			p.ExpiryDate, err = cast.ToTimeE(v)
			if err != nil {
				jww.ERROR.Printf("Failed to parse expirydate '%v' in page %s", v, p.File.Path())
			}
		case "draft":
			draft = new(bool)
			*draft = cast.ToBool(v)
		case "published": // Intentionally undocumented
			published = new(bool)
			*published = cast.ToBool(v)
		case "layout":
			p.Layout = cast.ToString(v)
		case "markup":
			p.Markup = cast.ToString(v)
		case "weight":
			p.Weight = cast.ToInt(v)
		case "aliases":
			p.Aliases = cast.ToStringSlice(v)
			for _, alias := range p.Aliases {
				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
					return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
				}
			}
		case "status":
			p.Status = cast.ToString(v)
		case "sitemap":
			p.Sitemap = parseSitemap(cast.ToStringMap(v))
		case "iscjklanguage":
			isCJKLanguage = new(bool)
			*isCJKLanguage = cast.ToBool(v)
		default:
			// If not one of the explicit values, store in Params
			switch vv := v.(type) {
			case bool:
				p.Params[loki] = vv
			case string:
				p.Params[loki] = vv
			case int64, int32, int16, int8, int:
				p.Params[loki] = vv
			case float64, float32:
				p.Params[loki] = vv
			case time.Time:
				p.Params[loki] = vv
			default: // handle array of strings as well
				switch vvv := vv.(type) {
				case []interface{}:
					if len(vvv) > 0 {
						// The first element decides how the slice is stored.
						switch vvv[0].(type) {
						case map[interface{}]interface{}: // Proper parsing structured array from YAML based FrontMatter
							p.Params[loki] = vvv
						case map[string]interface{}: // Proper parsing structured array from JSON based FrontMatter
							p.Params[loki] = vvv
						default:
							// Coerce every element to string.
							a := make([]string, len(vvv))
							for i, u := range vvv {
								a[i] = cast.ToString(u)
							}

							p.Params[loki] = a
						}
					} else {
						p.Params[loki] = []string{}
					}
				default:
					p.Params[loki] = vv
				}
			}
		}
	}

	// Resolve draft vs. the (undocumented) inverse "published" flag.
	if draft != nil && published != nil {
		p.Draft = *draft
		jww.ERROR.Printf("page %s has both draft and published settings in its frontmatter. Using draft.", p.File.Path())
		return ErrHasDraftAndPublished
	} else if draft != nil {
		p.Draft = *draft
	} else if published != nil {
		p.Draft = !*published
	}

	// Optionally fall back to the source file's mod time as the date.
	if p.Date.IsZero() && viper.GetBool("useModTimeAsFallback") {
		fi, err := hugofs.Source().Stat(filepath.Join(helpers.AbsPathify(viper.GetString("contentDir")), p.File.Path()))
		if err == nil {
			p.Date = fi.ModTime()
		}
	}

	if p.Lastmod.IsZero() {
		p.Lastmod = p.Date
	}

	// CJK detection: explicit front-matter flag wins, otherwise sniff the
	// raw content when the site enables hasCJKLanguage.
	if isCJKLanguage != nil {
		p.isCJKLanguage = *isCJKLanguage
	} else if viper.GetBool("hasCJKLanguage") {
		if cjk.Match(p.rawContent) {
			p.isCJKLanguage = true
		} else {
			p.isCJKLanguage = false
		}
	}

	return nil
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// GetParam returns the page parameter for key (case-insensitive),
// lower-casing string and []string values. Use getParam directly to
// preserve case.
func (p *Page) GetParam(key string) interface{} {
	return p.getParam(key, true)
}
|
|
|
|
|
|
|
|
// getParam looks up key (case-insensitive) in p.Params and normalizes the
// value: integers become int, floats float64, and — when stringToLower is
// set — string and []string values are lower-cased. Unknown types are
// logged and return nil.
func (p *Page) getParam(key string, stringToLower bool) interface{} {
	v := p.Params[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	switch val := v.(type) {
	case bool:
		return val
	case string:
		if stringToLower {
			return strings.ToLower(val)
		}
		return val
	case int64, int32, int16, int8, int:
		return cast.ToInt(v)
	case float64, float32:
		return cast.ToFloat64(v)
	case time.Time:
		return val
	case []string:
		if stringToLower {
			return helpers.SliceToLower(val)
		}
		return v
	case map[string]interface{}: // JSON and TOML
		return v
	case map[interface{}]interface{}: // YAML
		return v
	}

	jww.ERROR.Printf("GetParam(\"%s\"): Unknown type %s\n", key, reflect.TypeOf(v))
	return nil
}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
// HasMenuCurrent reports whether this page is, or is an ancestor of, the
// current entry me in the menu identified by menuID. Section pages may be
// implicit ("shadow") members via the sectionPagesMenu setting.
func (p *Page) HasMenuCurrent(menuID string, me *MenuEntry) bool {

	sectionPagesMenu := p.Site.sectionPagesMenu

	// page is labeled as "shadow-member" of the menu with the same identifier as the section
	if sectionPagesMenu != "" && p.Section() != "" && sectionPagesMenu == menuID && p.Section() == me.Identifier {
		return true
	}

	// Only an entry with children can be "current" via descendants.
	if !me.HasChildren() {
		return false
	}

	menus := p.Menus()

	// First check the page's own menu membership against me's subtree.
	if m, ok := menus[menuID]; ok {

		for _, child := range me.Children {
			if child.IsEqual(m) {
				return true
			}
			if p.HasMenuCurrent(menuID, child) {
				return true
			}
		}

	}

	if p.IsPage() {
		return false
	}

	// The following logic is kept from back when Hugo had both Page and Node types.
	// TODO(bep) consolidate / clean
	nme := MenuEntry{Name: p.Title, URL: p.URL()}

	// Compare by URL/resource identity against me's subtree.
	for _, child := range me.Children {
		if nme.IsSameResource(child) {
			return true
		}
		if p.HasMenuCurrent(menuID, child) {
			return true
		}
	}

	return false

}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
// IsMenuCurrent reports whether inme is the menu entry that represents this
// page in the menu identified by menuID.
func (p *Page) IsMenuCurrent(menuID string, inme *MenuEntry) bool {

	menus := p.Menus()

	// Direct membership check via the page's own menu entries.
	if me, ok := menus[menuID]; ok {
		if me.IsEqual(inme) {
			return true
		}
	}

	if p.IsPage() {
		return false
	}

	// The following logic is kept from back when Hugo had both Page and Node types.
	// TODO(bep) consolidate / clean
	me := MenuEntry{Name: p.Title, URL: p.URL()}

	if !me.IsSameResource(inme) {
		return false
	}

	// this resource may be included in several menus
	// search for it to make sure that it is in the menu with the given menuId
	if menu, ok := (*p.Site.Menus)[menuID]; ok {
		for _, menuEntry := range *menu {
			if menuEntry.IsSameResource(inme) {
				return true
			}

			descendantFound := p.isSameAsDescendantMenu(inme, menuEntry)
			if descendantFound {
				return descendantFound
			}

		}
	}

	return false
}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
func (p *Page) isSameAsDescendantMenu(inme *MenuEntry, parent *MenuEntry) bool {
|
|
|
|
if parent.HasChildren() {
|
|
|
|
for _, child := range parent.Children {
|
|
|
|
if child.IsSameResource(inme) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
descendantFound := p.isSameAsDescendantMenu(inme, child)
|
|
|
|
if descendantFound {
|
|
|
|
return descendantFound
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// Menus returns the menu entries this page declares via its "menu" front
// matter key, computed once and cached. The key may be a single menu name,
// a slice of names, or a map of name -> structured entry.
func (p *Page) Menus() PageMenus {
	p.pageMenusInit.Do(func() {
		p.pageMenus = PageMenus{}

		if ms, ok := p.Params["menu"]; ok {
			link := p.RelPermalink()

			me := MenuEntry{Name: p.LinkTitle(), Weight: p.Weight, URL: link}

			// Could be the name of the menu to attach it to
			mname, err := cast.ToStringE(ms)

			if err == nil {
				me.Menu = mname
				p.pageMenus[mname] = &me
				return
			}

			// Could be a slice of strings
			mnames, err := cast.ToStringSliceE(ms)

			if err == nil {
				// NOTE(review): every name stores the same &me pointer, so
				// me.Menu ends up holding only the last name — confirm this
				// is intentional.
				for _, mname := range mnames {
					me.Menu = mname
					p.pageMenus[mname] = &me
				}
				return
			}

			// Could be a structured menu entry
			menus, err := cast.ToStringMapE(ms)

			if err != nil {
				jww.ERROR.Printf("unable to process menus for %q\n", p.Title)
			}

			for name, menu := range menus {
				menuEntry := MenuEntry{Name: p.LinkTitle(), URL: link, Weight: p.Weight, Menu: name}
				if menu != nil {
					jww.DEBUG.Printf("found menu: %q, in %q\n", name, p.Title)
					ime, err := cast.ToStringMapE(menu)
					if err != nil {
						// Logged only; marshallMap still runs with ime's zero value.
						jww.ERROR.Printf("unable to process menus for %q: %s", p.Title, err)
					}

					menuEntry.marshallMap(ime)
				}
				p.pageMenus[name] = &menuEntry

			}
		}
	})

	return p.pageMenus
}
|
|
|
|
|
2013-09-03 15:41:13 -04:00
|
|
|
func (p *Page) Render(layout ...string) template.HTML {
|
2015-07-28 19:19:29 -04:00
|
|
|
var l []string
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(layout) > 0 {
|
2015-07-28 19:19:29 -04:00
|
|
|
l = layouts(p.Type(), layout[0])
|
|
|
|
} else {
|
2015-11-02 11:24:50 -05:00
|
|
|
l = p.layouts()
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2015-07-28 19:19:29 -04:00
|
|
|
return tpl.ExecuteTemplateToHTML(p, l...)
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2016-03-20 16:40:03 -04:00
|
|
|
// determineMarkupType resolves and stores the page's markup format:
// the front-matter "markup" value when recognized, otherwise a guess
// from the source file extension.
func (p *Page) determineMarkupType() string {
	// Try markup explicitly set in the frontmatter
	p.Markup = helpers.GuessType(p.Markup)
	if p.Markup == "unknown" {
		// Fall back to file extension (might also return "unknown")
		p.Markup = helpers.GuessType(p.Source.Ext())
	}

	return p.Markup
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// parse reads the page source from reader, splitting it into front matter
// and content, and applies any front-matter metadata via update.
func (p *Page) parse(reader io.Reader) error {
	psr, err := parser.ReadFrom(reader)
	if err != nil {
		return err
	}

	p.renderable = psr.IsRenderable()
	p.frontmatter = psr.FrontMatter()
	p.rawContent = psr.Content()
	p.lang = p.Source.File.Lang()

	meta, err := psr.Metadata()
	// A metadata error only matters when metadata was actually present.
	if meta != nil {
		if err != nil {
			jww.ERROR.Printf("Error parsing page meta data for %s", p.File.Path())
			jww.ERROR.Println(err)
			return err
		}
		if err = p.update(meta); err != nil {
			return err
		}
	}

	return nil
}
|
2013-09-18 13:17:43 -04:00
|
|
|
|
2015-07-02 09:32:57 -04:00
|
|
|
// RawContent returns the page's raw source content (front matter excluded)
// as a string.
func (p *Page) RawContent() string {
	return string(p.rawContent)
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// SetSourceContent replaces the page's source content (the part after the
// front matter).
func (p *Page) SetSourceContent(content []byte) {
	p.Source.Content = content
}
|
2013-12-06 23:32:00 -05:00
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// SetSourceMetaData serializes in as front matter using the given format
// marker rune and stores it on the page source. Panics from the marshaller
// are recovered and returned as errors.
func (p *Page) SetSourceMetaData(in interface{}, mark rune) (err error) {
	// See https://github.com/spf13/hugo/issues/2458
	defer func() {
		if r := recover(); r != nil {
			var ok bool
			err, ok = r.(error)
			if !ok {
				err = fmt.Errorf("error from marshal: %v", r)
			}
		}
	}()

	var by []byte

	by, err = parser.InterfaceToFrontMatter(in, mark)
	if err != nil {
		return
	}
	// Separate front matter from content with a trailing newline.
	by = append(by, '\n')

	p.Source.Frontmatter = by

	return
}
|
2013-08-25 00:27:41 -04:00
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// SafeSaveSourceAs writes the page source to path, refusing to overwrite
// an existing file.
func (p *Page) SafeSaveSourceAs(path string) error {
	return p.saveSourceAs(path, true)
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// SaveSourceAs writes the page source to path, overwriting any existing
// file.
func (p *Page) SaveSourceAs(path string) error {
	return p.saveSourceAs(path, false)
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func (p *Page) saveSourceAs(path string, safe bool) error {
|
2015-01-30 14:42:02 -05:00
|
|
|
b := bp.GetBuffer()
|
|
|
|
defer bp.PutBuffer(b)
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
b.Write(p.Source.Frontmatter)
|
|
|
|
b.Write(p.Source.Content)
|
2014-05-01 13:21:37 -04:00
|
|
|
|
2015-01-30 14:42:02 -05:00
|
|
|
bc := make([]byte, b.Len(), b.Len())
|
|
|
|
copy(bc, b.Bytes())
|
|
|
|
|
2016-11-23 12:28:14 -05:00
|
|
|
return p.saveSource(bc, path, safe)
|
2014-05-01 13:21:37 -04:00
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
func (p *Page) saveSource(by []byte, inpath string, safe bool) (err error) {
|
2014-11-06 11:52:01 -05:00
|
|
|
if !filepath.IsAbs(inpath) {
|
2014-05-01 13:21:37 -04:00
|
|
|
inpath = helpers.AbsPathify(inpath)
|
|
|
|
}
|
|
|
|
jww.INFO.Println("creating", inpath)
|
2014-05-02 01:04:48 -04:00
|
|
|
|
|
|
|
if safe {
|
2016-03-21 19:28:42 -04:00
|
|
|
err = helpers.SafeWriteToDisk(inpath, bytes.NewReader(by), hugofs.Source())
|
2014-05-02 01:04:48 -04:00
|
|
|
} else {
|
2016-03-21 19:28:42 -04:00
|
|
|
err = helpers.WriteToDisk(inpath, bytes.NewReader(by), hugofs.Source())
|
2014-05-02 01:04:48 -04:00
|
|
|
}
|
|
|
|
if err != nil {
|
|
|
|
return
|
|
|
|
}
|
2014-05-01 13:21:37 -04:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2015-03-07 06:25:38 -05:00
|
|
|
// SaveSource writes the page source back to its own file path.
func (p *Page) SaveSource() error {
	return p.SaveSourceAs(p.FullFilePath())
}
|
|
|
|
|
2014-11-20 12:32:21 -05:00
|
|
|
// ProcessShortcodes extracts the shortcodes from the page's working
// content, renders them with t, and stores the placeholder-substituted
// content plus the rendered shortcode map for later replacement.
// NOTE(review): the error from extractAndRenderShortcodes is discarded
// here — confirm that is intentional.
func (p *Page) ProcessShortcodes(t tpl.Template) {
	tmpContent, tmpContentShortCodes, _ := extractAndRenderShortcodes(string(p.workContent), p, t)
	p.workContent = []byte(tmpContent)
	p.contentShortCodes = tmpContentShortCodes
}
|
|
|
|
|
2014-05-01 13:21:37 -04:00
|
|
|
func (p *Page) FullFilePath() string {
|
2015-05-31 12:54:50 -04:00
|
|
|
return filepath.Join(p.Dir(), p.LogicalName())
|
2014-05-01 13:21:37 -04:00
|
|
|
}
|
|
|
|
|
2013-09-20 20:24:25 -04:00
|
|
|
// TargetPath returns the relative filesystem path this page will be
// rendered to, resolved in priority order:
//
//  1. node kinds (home/section/taxonomy/taxonomy term) map directly to
//     their section path;
//  2. an explicit URL from front matter;
//  3. a matching Permalink pattern configured for the page's section;
//  4. the page's slug, or failing that its source file name.
//
// A language prefix is added where the site configuration requires one.
func (p *Page) TargetPath() (outfile string) {
	switch p.Kind {
	case KindHome:
		return p.addLangFilepathPrefix(helpers.FilePathSeparator)
	case KindSection:
		return p.addLangFilepathPrefix(p.sections[0])
	case KindTaxonomy:
		return p.addLangFilepathPrefix(filepath.Join(p.sections...))
	case KindTaxonomyTerm:
		return p.addLangFilepathPrefix(filepath.Join(p.sections...))
	}

	// Always use URL if it's specified
	// NOTE(review): the "> 2" length check presumably rejects values like
	// "/" or single-character URLs — confirm the intent.
	if len(strings.TrimSpace(p.URLPath.URL)) > 2 {
		outfile = strings.TrimSpace(p.URLPath.URL)

		if strings.HasSuffix(outfile, "/") {
			outfile = outfile + "index.html"
		}
		outfile = filepath.FromSlash(outfile)
		return
	}

	// If there's a Permalink specification, we use that
	if override, ok := p.Site.Permalinks[p.Section()]; ok {
		var err error
		outfile, err = override.Expand(p)
		// On expansion failure we silently fall through to slug/filename.
		if err == nil {
			outfile, _ = url.QueryUnescape(outfile)
			if strings.HasSuffix(outfile, "/") {
				outfile += "index.html"
			}
			outfile = filepath.FromSlash(outfile)
			outfile = p.addLangFilepathPrefix(outfile)
			return
		}
	}

	if len(strings.TrimSpace(p.Slug)) > 0 {
		outfile = strings.TrimSpace(p.Slug) + "." + p.Extension()
	} else {
		// Fall back to filename
		outfile = (p.Source.TranslationBaseName() + "." + p.Extension())
	}

	return p.addLangFilepathPrefix(filepath.Join(strings.ToLower(
		p.Site.pathSpec.MakePath(p.Source.Dir())), strings.TrimSpace(outfile)))
}
|
2016-10-31 05:23:01 -04:00
|
|
|
|
|
|
|
// Pre render prepare steps
|
|
|
|
|
|
|
|
// prepareLayouts computes the layout candidates for regular pages ahead of
// rendering.
//
// For pages that are not renderable, the raw page content is parsed as a
// template on a clone of the site's template set, registered under the
// synthetic name "__<targetpath>", and that single template becomes the
// page's only layout. Renderable pages get their normal layout list plus
// the "_default/single.html" fallback. Node kinds are left untouched here.
func (p *Page) prepareLayouts() error {
	// TODO(bep): Check the IsRenderable logic.
	if p.Kind == KindPage {
		var layouts []string
		if !p.IsRenderable() {
			self := "__" + p.TargetPath()
			_, err := p.Site.owner.tmpl.GetClone().New(self).Parse(string(p.Content))
			if err != nil {
				return err
			}
			layouts = append(layouts, self)
		} else {
			layouts = append(layouts, p.layouts()...)
			layouts = append(layouts, "_default/single.html")
		}
		p.layoutsCalculated = layouts
	}
	return nil
}
|
|
|
|
|
2016-10-31 13:03:02 -04:00
|
|
|
// prepareData populates p.Data and p.Pages with the member pages and
// metadata appropriate for the page's kind, ahead of rendering:
//
//   - KindPage: nothing to add;
//   - KindHome: all of the site's regular pages;
//   - KindSection: the pages of the page's section (error if the section
//     has no data);
//   - KindTaxonomy: the taxonomy's pages plus Singular/Plural metadata,
//     mapping the normalized key back to the original term when
//     preserveTaxonomyNames is enabled;
//   - KindTaxonomyTerm: the taxonomy's terms plus Singular/Plural
//     (OrderedIndex/Index kept for legacy templates).
//
// Finally, missing dates on node pages are backfilled from their children.
func (p *Page) prepareData(s *Site) error {
	var pages Pages

	p.Data = make(map[string]interface{})
	switch p.Kind {
	case KindPage:
	case KindHome:
		pages = s.RegularPages
	case KindSection:
		sectionData, ok := s.Sections[p.sections[0]]
		if !ok {
			return fmt.Errorf("Data for section %s not found", p.Section())
		}
		pages = sectionData.Pages()
	case KindTaxonomy:
		plural := p.sections[0]
		term := p.sections[1]

		if s.Info.preserveTaxonomyNames {
			// Map the normalized lookup key back to the original term.
			if v, ok := s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, term)]; ok {
				term = v
			}
		}

		singular := s.taxonomiesPluralSingular[plural]
		taxonomy := s.Taxonomies[plural].Get(term)

		p.Data[singular] = taxonomy
		p.Data["Singular"] = singular
		p.Data["Plural"] = plural
		pages = taxonomy.Pages()
	case KindTaxonomyTerm:
		plural := p.sections[0]
		singular := s.taxonomiesPluralSingular[plural]

		p.Data["Singular"] = singular
		p.Data["Plural"] = plural
		p.Data["Terms"] = s.Taxonomies[plural]
		// keep the following just for legacy reasons
		p.Data["OrderedIndex"] = p.Data["Terms"]
		p.Data["Index"] = p.Data["Terms"]
	}

	p.Data["Pages"] = pages
	p.Pages = pages

	// Now we know enough to set missing dates on home page etc.
	p.updatePageDates()

	return nil
}
|
|
|
|
|
2016-11-11 05:35:55 -05:00
|
|
|
func (p *Page) updatePageDates() {
|
2016-11-16 15:06:10 -05:00
|
|
|
// TODO(bep) there is a potential issue with page sorting for home pages
|
2016-11-11 05:35:55 -05:00
|
|
|
// etc. without front matter dates set, but let us wrap the head around
|
|
|
|
// that in another time.
|
2016-11-13 06:33:11 -05:00
|
|
|
if !p.IsNode() {
|
2016-11-11 05:35:55 -05:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
if !p.Date.IsZero() {
|
|
|
|
if p.Lastmod.IsZero() {
|
|
|
|
p.Lastmod = p.Date
|
|
|
|
}
|
|
|
|
return
|
|
|
|
} else if !p.Lastmod.IsZero() {
|
|
|
|
if p.Date.IsZero() {
|
|
|
|
p.Date = p.Lastmod
|
|
|
|
}
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
// Set it to the first non Zero date in children
|
|
|
|
var foundDate, foundLastMod bool
|
|
|
|
|
|
|
|
for _, child := range p.Pages {
|
|
|
|
if !child.Date.IsZero() {
|
|
|
|
p.Date = child.Date
|
|
|
|
foundDate = true
|
|
|
|
}
|
|
|
|
if !child.Lastmod.IsZero() {
|
|
|
|
p.Lastmod = child.Lastmod
|
|
|
|
foundLastMod = true
|
|
|
|
}
|
|
|
|
|
|
|
|
if foundDate && foundLastMod {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-16 14:32:38 -05:00
|
|
|
// copy creates a copy of this page with the lazy sync.Once vars reset
|
|
|
|
// so they will be evaluated again, for word count calculations etc.
|
2016-10-31 05:23:01 -04:00
|
|
|
func (p *Page) copy() *Page {
|
2016-11-16 14:32:38 -05:00
|
|
|
c := *p
|
|
|
|
c.pageInit = &pageInit{}
|
|
|
|
return &c
|
2016-10-31 05:23:01 -04:00
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
|
|
|
func (p *Page) Now() time.Time {
|
2017-01-01 07:10:36 -05:00
|
|
|
// Delete in Hugo 0.21
|
|
|
|
helpers.Deprecated("Page", "Now", "now (the template func)", false)
|
2016-11-13 08:27:10 -05:00
|
|
|
return time.Now()
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) Hugo() *HugoInfo {
|
|
|
|
return hugoInfo
|
|
|
|
}
|
|
|
|
|
|
|
|
// RSSlink returns the page's RSS link.
//
// Deprecated: use RSSLink instead.
func (p *Page) RSSlink() template.HTML {
	// TODO(bep) we cannot have two of these
	// Remove in Hugo 0.20
	helpers.Deprecated(".Page", "RSSlink", "RSSLink", true)
	return p.RSSLink
}
|
|
|
|
|
|
|
|
func (p *Page) Ref(ref string) (string, error) {
|
|
|
|
return p.Site.Ref(ref, nil)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) RelRef(ref string) (string, error) {
|
|
|
|
return p.Site.RelRef(ref, nil)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) String() string {
|
|
|
|
return fmt.Sprintf("Page(%q)", p.Title)
|
|
|
|
}
|
|
|
|
|
|
|
|
// URLPath holds the URL-related values for a page, set from front matter
// or derived from the page's source location and kind.
type URLPath struct {
	URL       string // explicit URL, takes precedence over other path sources
	Permalink string // fully expanded permalink
	Slug      string // user-provided slug used in place of the file name
	Section   string // section the page belongs to
}
|
|
|
|
|
|
|
|
// Scratch returns the writable context associated with this Page,
// creating it lazily on first access.
// NOTE(review): the lazy init is not synchronized — presumably a page is
// only mutated from a single goroutine at this stage; verify.
func (p *Page) Scratch() *Scratch {
	if p.scratch == nil {
		p.scratch = newScratch()
	}
	return p.scratch
}
|
|
|
|
|
|
|
|
func (p *Page) Language() *helpers.Language {
|
|
|
|
p.initLanguage()
|
|
|
|
return p.language
|
|
|
|
}
|
|
|
|
|
|
|
|
// Lang returns the page's language code.
// When set, Language can be different from lang in the case where there is a
// content file (doc.sv.md) with language indicator, but there is no language
// config for that language. Then the language will fall back on the site default.
func (p *Page) Lang() string {
	if p.Language() != nil {
		return p.Language().Lang
	}
	return p.lang
}
|
|
|
|
|
2016-11-21 04:11:34 -05:00
|
|
|
func (p *Page) isNewTranslation(candidate *Page) bool {
|
2016-12-23 03:52:05 -05:00
|
|
|
|
|
|
|
if p.Kind != candidate.Kind {
|
2016-11-13 08:27:10 -05:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
if p.Kind == KindPage || p.Kind == kindUnknown {
|
|
|
|
panic("Node type not currently supported for this op")
|
|
|
|
}
|
|
|
|
|
|
|
|
// At this point, we know that this is a traditional Node (home page, section, taxonomy)
|
|
|
|
// It represents the same node, but different language, if the sections is the same.
|
|
|
|
if len(p.sections) != len(candidate.sections) {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
for i := 0; i < len(p.sections); i++ {
|
|
|
|
if p.sections[i] != candidate.sections[i] {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-21 04:11:34 -05:00
|
|
|
// Finally check that it is not already added.
|
2016-12-23 03:52:05 -05:00
|
|
|
for _, translation := range p.translations {
|
|
|
|
if candidate == translation {
|
2016-11-21 04:11:34 -05:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
return true
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) shouldAddLanguagePrefix() bool {
|
|
|
|
if !p.Site.IsMultiLingual() {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
if p.Lang() == "" {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
if !p.Site.defaultContentLanguageInSubdir && p.Lang() == p.Site.multilingual.DefaultLang.Lang {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) initLanguage() {
|
|
|
|
p.languageInit.Do(func() {
|
|
|
|
if p.language != nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
pageLang := p.lang
|
|
|
|
ml := p.Site.multilingual
|
|
|
|
if ml == nil {
|
|
|
|
panic("Multilanguage not set")
|
|
|
|
}
|
|
|
|
if pageLang == "" {
|
|
|
|
p.language = ml.DefaultLang
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
language := ml.Language(pageLang)
|
|
|
|
|
|
|
|
if language == nil {
|
|
|
|
// It can be a file named stefano.chiodino.md.
|
|
|
|
jww.WARN.Printf("Page language (if it is that) not found in multilang setup: %s.", pageLang)
|
|
|
|
language = ml.DefaultLang
|
|
|
|
}
|
|
|
|
|
|
|
|
p.language = language
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) LanguagePrefix() string {
|
|
|
|
return p.Site.LanguagePrefix
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) addLangPathPrefix(outfile string) string {
|
|
|
|
return p.addLangPathPrefixIfFlagSet(outfile, p.shouldAddLanguagePrefix())
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) addLangPathPrefixIfFlagSet(outfile string, should bool) string {
|
|
|
|
if helpers.IsAbsURL(outfile) {
|
|
|
|
return outfile
|
|
|
|
}
|
|
|
|
|
|
|
|
if !should {
|
|
|
|
return outfile
|
|
|
|
}
|
|
|
|
|
|
|
|
hadSlashSuffix := strings.HasSuffix(outfile, "/")
|
|
|
|
|
|
|
|
outfile = "/" + path.Join(p.Lang(), outfile)
|
|
|
|
if hadSlashSuffix {
|
|
|
|
outfile += "/"
|
|
|
|
}
|
|
|
|
return outfile
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *Page) addLangFilepathPrefix(outfile string) string {
|
|
|
|
if outfile == "" {
|
|
|
|
outfile = helpers.FilePathSeparator
|
|
|
|
}
|
|
|
|
if !p.shouldAddLanguagePrefix() {
|
|
|
|
return outfile
|
|
|
|
}
|
|
|
|
return helpers.FilePathSeparator + filepath.Join(p.Lang(), outfile)
|
|
|
|
}
|
|
|
|
|
|
|
|
func sectionsFromFilename(filename string) []string {
|
2016-11-21 04:11:34 -05:00
|
|
|
var sections []string
|
2016-11-13 08:27:10 -05:00
|
|
|
dir, _ := filepath.Split(filename)
|
|
|
|
dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
|
2016-11-21 04:11:34 -05:00
|
|
|
if dir == "" {
|
|
|
|
return sections
|
|
|
|
}
|
|
|
|
sections = strings.Split(dir, helpers.FilePathSeparator)
|
2016-11-13 08:27:10 -05:00
|
|
|
return sections
|
|
|
|
}
|
|
|
|
|
|
|
|
func kindFromFilename(filename string) string {
|
|
|
|
if !strings.Contains(filename, "_index") {
|
|
|
|
return KindPage
|
|
|
|
}
|
|
|
|
|
|
|
|
if strings.HasPrefix(filename, "_index") {
|
|
|
|
return KindHome
|
|
|
|
}
|
|
|
|
|
|
|
|
// We don't know enough yet to determine the type.
|
|
|
|
return kindUnknown
|
|
|
|
}
|
|
|
|
|
2016-11-16 11:52:03 -05:00
|
|
|
func (p *Page) setValuesForKind(s *Site) {
|
2016-11-13 08:27:10 -05:00
|
|
|
if p.Kind == kindUnknown {
|
|
|
|
// This is either a taxonomy list, taxonomy term or a section
|
2016-11-16 11:52:03 -05:00
|
|
|
nodeType := s.kindFromSections(p.sections)
|
2016-11-13 08:27:10 -05:00
|
|
|
|
|
|
|
if nodeType == kindUnknown {
|
2016-11-16 11:52:03 -05:00
|
|
|
panic(fmt.Sprintf("Unable to determine page kind from %q", p.sections))
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
p.Kind = nodeType
|
|
|
|
}
|
2016-11-16 11:52:03 -05:00
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
switch p.Kind {
|
|
|
|
case KindHome:
|
2016-11-15 04:43:49 -05:00
|
|
|
p.URLPath.URL = "/"
|
2016-11-13 08:27:10 -05:00
|
|
|
case KindSection:
|
2016-11-15 04:43:49 -05:00
|
|
|
p.URLPath.URL = "/" + p.sections[0] + "/"
|
2016-11-13 08:27:10 -05:00
|
|
|
case KindTaxonomy:
|
2016-11-15 04:43:49 -05:00
|
|
|
p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
|
2016-11-13 08:27:10 -05:00
|
|
|
case KindTaxonomyTerm:
|
2016-11-15 04:43:49 -05:00
|
|
|
p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
p.site = s
|
|
|
|
|
|
|
|
}
|