2013-07-04 11:32:55 -04:00
|
|
|
// Copyright © 2013 Steve Francia <spf@spf13.com>.
|
|
|
|
//
|
|
|
|
// Licensed under the Simple Public License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
// http://opensource.org/licenses/Simple-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
package hugolib
|
|
|
|
|
|
|
|
import (
|
2014-01-29 17:50:31 -05:00
|
|
|
"bytes"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
2014-03-31 13:23:34 -04:00
|
|
|
"html/template"
|
|
|
|
"io"
|
|
|
|
"net/url"
|
|
|
|
"path"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
"github.com/BurntSushi/toml"
|
2014-04-05 01:26:43 -04:00
|
|
|
"github.com/spf13/cast"
|
2014-01-29 17:50:31 -05:00
|
|
|
"github.com/spf13/hugo/helpers"
|
|
|
|
"github.com/spf13/hugo/parser"
|
|
|
|
"github.com/spf13/hugo/template/bundle"
|
2014-03-31 13:23:34 -04:00
|
|
|
jww "github.com/spf13/jwalterweatherman"
|
2014-04-05 01:26:43 -04:00
|
|
|
"github.com/spf13/viper"
|
2014-01-29 17:50:31 -05:00
|
|
|
"github.com/theplant/blackfriday"
|
|
|
|
"launchpad.net/goyaml"
|
|
|
|
json "launchpad.net/rjson"
|
2013-07-04 11:32:55 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
// Page represents a single source content file and everything rendered
// from it. It embeds PageMeta (word-count stats), File (source file
// location), Position (prev/next ordering) and Node (shared page/site data).
type Page struct {
	Status string
	Images []string
	// rawContent is the source content with front matter stripped,
	// before markup rendering.
	rawContent []byte
	// Content is the fully rendered body.
	Content template.HTML
	// Summary is the rendered summary, either explicit (divider) or derived.
	Summary template.HTML
	TableOfContents template.HTML
	// Truncated is true when Summary omits part of the content.
	Truncated bool
	plain string // TODO should be []byte
	// Params holds front matter keys that have no dedicated field.
	Params map[string]interface{}
	contentType string
	Draft bool
	Aliases []string
	Tmpl bundle.Template
	Markup string
	// renderable reports whether content goes through the template renderer.
	renderable bool
	// layout is the explicit layout name from front matter, if any.
	layout string
	// linkTitle is the front matter "linktitle" override, if any.
	linkTitle string
	PageMeta
	File
	Position
	Node
}
|
|
|
|
|
|
|
|
// File records where a page came from on disk: the source FileName,
// the output Extension (defaults to "html") and the containing Dir.
type File struct {
	FileName, Extension, Dir string
}
|
|
|
|
|
|
|
|
// PageMeta holds word-count statistics derived from the page content,
// plus the front matter Weight used for ordering.
type PageMeta struct {
	WordCount int
	// FuzzyWordCount is WordCount rounded to a multiple of 100.
	FuzzyWordCount int
	// ReadingTime is the estimated reading time in minutes.
	ReadingTime int
	// Weight is the ordering weight from front matter.
	Weight int
}
|
|
|
|
|
|
|
|
// Position links a page to its neighbors in the site's page ordering.
type Position struct {
	Prev *Page
	Next *Page
}
|
|
|
|
|
|
|
|
// Pages is an ordered collection of pages.
type Pages []*Page
|
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// Plain returns the page content rendered and then stripped of HTML
// tags and shortcodes. The result is cached after the first call.
func (p *Page) Plain() string {
	if len(p.plain) == 0 {
		p.plain = helpers.StripHTML(StripShortcodes(string(p.renderBytes(p.rawContent))))
	}
	return p.plain
}
|
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// setSummary populates p.Summary and p.Truncated. If the raw content
// contains an explicit summary divider, the text above it is rendered
// as the summary; otherwise the summary is derived from the first
// summaryLength words of the rendered plain text.
func (p *Page) setSummary() {
	if bytes.Contains(p.rawContent, summaryDivider) {
		// If user defines split:
		// Split then render
		p.Truncated = true // by definition
		header := bytes.Split(p.rawContent, summaryDivider)[0]
		p.Summary = bytesToHTML(p.renderBytes(header))
	} else {
		// If hugo defines split:
		// render, strip html, then split
		plain := strings.TrimSpace(p.Plain())
		p.Summary = bytesToHTML([]byte(TruncateWordsToWholeSentence(plain, summaryLength)))
		// NOTE(review): truncation is inferred by comparing lengths of the
		// truncated summary and the full plain text — confirm this holds
		// when TruncateWordsToWholeSentence changes whitespace.
		p.Truncated = len(p.Summary) != len(plain)
	}
}
|
|
|
|
|
2014-01-28 23:11:05 -05:00
|
|
|
// stripEmptyNav removes the empty <nav> placeholder that blackfriday's
// TOC rendering leaves behind when a document has no headings.
func stripEmptyNav(in []byte) []byte {
	emptyNav := []byte("<nav>\n</nav>\n\n")
	return bytes.Replace(in, emptyNav, []byte(``), -1)
}
|
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// bytesToHTML marks raw bytes as safe HTML (no escaping is applied;
// callers are responsible for the content being safe).
func bytesToHTML(b []byte) template.HTML {
	return template.HTML(b)
}
|
|
|
|
|
|
|
|
func (p *Page) renderBytes(content []byte) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
return renderBytes(content, p.guessMarkupType())
|
2014-01-27 17:16:28 -05:00
|
|
|
}
|
|
|
|
|
2014-01-28 23:11:05 -05:00
|
|
|
func (p *Page) renderContent(content []byte) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
return renderBytesWithTOC(content, p.guessMarkupType())
|
2014-01-28 23:11:05 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
func renderBytesWithTOC(content []byte, pagefmt string) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
switch pagefmt {
|
|
|
|
default:
|
|
|
|
return markdownRenderWithTOC(content)
|
|
|
|
case "markdown":
|
|
|
|
return markdownRenderWithTOC(content)
|
|
|
|
case "rst":
|
|
|
|
return []byte(getRstContent(content))
|
|
|
|
}
|
2013-10-04 12:28:28 -04:00
|
|
|
}
|
|
|
|
|
2013-11-10 15:04:51 -05:00
|
|
|
func renderBytes(content []byte, pagefmt string) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
switch pagefmt {
|
|
|
|
default:
|
|
|
|
return markdownRender(content)
|
|
|
|
case "markdown":
|
|
|
|
return markdownRender(content)
|
|
|
|
case "rst":
|
|
|
|
return []byte(getRstContent(content))
|
|
|
|
}
|
2013-09-03 18:38:20 -04:00
|
|
|
}
|
|
|
|
|
2013-08-13 19:39:24 -04:00
|
|
|
// TODO abstract further to support loading from more
// than just files on disk. Should load reader (file, []byte)

// newPage constructs a Page for the given source filename with sane
// defaults: "html" output extension, empty keyword list, empty params
// map, and a placeholder date. The section is guessed from the path.
func newPage(filename string) *Page {
	page := Page{contentType: "",
		File: File{FileName: filename, Extension: "html"},
		Node: Node{Keywords: []string{}},
		Params: make(map[string]interface{})}

	jww.DEBUG.Println("Reading from", page.File.FileName)
	// Placeholder date (2008-01-01) used until front matter supplies one.
	page.Date, _ = time.Parse("20060102", "20080101")
	page.guessSection()
	return &page
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2013-09-18 13:17:43 -04:00
|
|
|
// IsRenderable reports whether the page's content should be run through
// the template renderer (determined while parsing the source file).
func (p *Page) IsRenderable() bool {
	return p.renderable
}
|
|
|
|
|
2013-08-14 08:57:14 -04:00
|
|
|
func (p *Page) guessSection() {
|
2014-01-29 17:50:31 -05:00
|
|
|
if p.Section == "" {
|
|
|
|
x := strings.Split(p.FileName, "/")
|
|
|
|
x = x[:len(x)-1]
|
|
|
|
if len(x) == 0 {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
if x[0] == "content" {
|
|
|
|
x = x[1:]
|
|
|
|
}
|
|
|
|
p.Section = path.Join(x...)
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
func (page *Page) Type() string {
|
2014-01-29 17:50:31 -05:00
|
|
|
if page.contentType != "" {
|
|
|
|
return page.contentType
|
|
|
|
}
|
|
|
|
page.guessSection()
|
|
|
|
if x := page.Section; x != "" {
|
|
|
|
return x
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return "page"
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2013-10-07 00:57:45 -04:00
|
|
|
func (page *Page) Layout(l ...string) []string {
|
2014-01-29 17:50:31 -05:00
|
|
|
if page.layout != "" {
|
|
|
|
return layouts(page.Type(), page.layout)
|
|
|
|
}
|
2013-10-07 00:57:45 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
layout := ""
|
|
|
|
if len(l) == 0 {
|
|
|
|
layout = "single"
|
|
|
|
} else {
|
|
|
|
layout = l[0]
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return layouts(page.Type(), layout)
|
2013-10-07 00:57:45 -04:00
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2013-10-07 00:57:45 -04:00
|
|
|
// layouts expands a (possibly nested) content type like "a/b" into
// layout candidates from most to least specific:
// "a/b/<layout>.html", "a/<layout>.html", then the bare "<layout>.html".
func layouts(types string, layout string) []string {
	parts := strings.Split(types, "/")
	candidates := make([]string, 0, len(parts)+1)
	for end := len(parts); end > 0; end-- {
		prefix := strings.ToLower(path.Join(parts[:end]...))
		candidates = append(candidates, fmt.Sprintf("%s/%s.html", prefix, layout))
	}
	return append(candidates, fmt.Sprintf("%s.html", layout))
}
|
|
|
|
|
2013-08-05 10:53:58 -04:00
|
|
|
// ReadFrom reads a complete page (front matter plus content) from buf.
// name is the logical source file name and must be non-empty; it is used
// for section/type guessing and in error messages. Parse errors are
// logged and returned.
func ReadFrom(buf io.Reader, name string) (page *Page, err error) {
	if len(name) == 0 {
		return nil, errors.New("Zero length page name")
	}

	// Create new page
	p := newPage(name)

	// Parse for metadata & body
	if err = p.parse(buf); err != nil {
		jww.ERROR.Print(err)
		return
	}

	//analyze for raw stats
	p.analyzePage()

	return p, nil
}
|
|
|
|
|
|
|
|
func (p *Page) analyzePage() {
|
2014-01-29 17:50:31 -05:00
|
|
|
p.WordCount = TotalWords(p.Plain())
|
|
|
|
p.FuzzyWordCount = int((p.WordCount+100)/100) * 100
|
|
|
|
p.ReadingTime = int((p.WordCount + 212) / 213)
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2013-10-02 19:33:51 -04:00
|
|
|
// permalink computes the page's permalink as a URL, honoring (in order):
// an explicit front matter Url, a site-level permalink pattern for the
// page's section, an explicit Slug, and finally the source file name
// with its extension replaced. The "UglyUrls" site setting controls
// whether paths end in ".html" or "/".
func (p *Page) permalink() (*url.URL, error) {
	baseUrl := string(p.Site.BaseUrl)
	dir := strings.TrimSpace(p.Dir)
	pSlug := strings.TrimSpace(p.Slug)
	pUrl := strings.TrimSpace(p.Url)
	var permalink string
	var err error

	// An explicit url in front matter wins outright.
	if len(pUrl) > 0 {
		return helpers.MakePermalink(baseUrl, pUrl), nil
	}

	if override, ok := p.Site.Permalinks[p.Section]; ok {
		permalink, err = override.Expand(p)
		if err != nil {
			return nil, err
		}
		// fmt.Printf("have a section override for %q in section %s → %s\n", p.Title, p.Section, permalink)
	} else {
		if len(pSlug) > 0 {
			permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, p.Slug+"."+p.Extension))
		} else {
			// Fall back to the source file name, keeping its directory.
			_, t := path.Split(p.FileName)
			permalink = helpers.UrlPrep(viper.GetBool("UglyUrls"), path.Join(dir, helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)))
		}
	}

	return helpers.MakePermalink(baseUrl, permalink), nil
}
|
|
|
|
|
2013-10-25 18:37:53 -04:00
|
|
|
func (p *Page) LinkTitle() string {
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(p.linkTitle) > 0 {
|
|
|
|
return p.linkTitle
|
|
|
|
} else {
|
|
|
|
return p.Title
|
|
|
|
}
|
2013-10-25 18:37:53 -04:00
|
|
|
}
|
|
|
|
|
2013-10-02 19:33:51 -04:00
|
|
|
func (p *Page) Permalink() (string, error) {
|
2014-01-29 17:50:31 -05:00
|
|
|
link, err := p.permalink()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
|
|
|
return link.String(), nil
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2013-10-02 20:00:21 -04:00
|
|
|
func (p *Page) RelPermalink() (string, error) {
|
2014-01-29 17:50:31 -05:00
|
|
|
link, err := p.permalink()
|
|
|
|
if err != nil {
|
|
|
|
return "", err
|
|
|
|
}
|
2013-10-02 20:00:21 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
link.Scheme = ""
|
|
|
|
link.Host = ""
|
|
|
|
link.User = nil
|
|
|
|
link.Opaque = ""
|
|
|
|
return link.String(), nil
|
2013-10-02 20:00:21 -04:00
|
|
|
}
|
|
|
|
|
2013-07-09 18:53:08 -04:00
|
|
|
// handleTomlMetaData decodes TOML front matter (after stripping its
// "+++" delimiters) into a generic map.
func (page *Page) handleTomlMetaData(datum []byte) (interface{}, error) {
	m := map[string]interface{}{}
	datum = removeTomlIdentifier(datum)
	if _, err := toml.Decode(string(datum), &m); err != nil {
		return m, fmt.Errorf("Invalid TOML in %s \nError parsing page meta data: %s", page.FileName, err)
	}
	return m, nil
}
|
|
|
|
|
2013-08-25 00:27:41 -04:00
|
|
|
// removeTomlIdentifier strips the "+++" TOML front matter delimiters so
// the remainder can be fed straight to the TOML decoder.
func removeTomlIdentifier(datum []byte) []byte {
	delimiter := []byte("+++")
	return bytes.Replace(datum, delimiter, []byte{}, -1)
}
|
|
|
|
|
2013-07-09 18:53:08 -04:00
|
|
|
// handleYamlMetaData decodes YAML front matter into a generic map.
func (page *Page) handleYamlMetaData(datum []byte) (interface{}, error) {
	m := map[string]interface{}{}
	if err := goyaml.Unmarshal(datum, &m); err != nil {
		return m, fmt.Errorf("Invalid YAML in %s \nError parsing page meta data: %s", page.FileName, err)
	}
	return m, nil
}
|
|
|
|
|
2013-07-19 03:10:42 -04:00
|
|
|
// handleJsonMetaData decodes JSON front matter into a generic value
// (typically a map for a JSON object).
func (page *Page) handleJsonMetaData(datum []byte) (interface{}, error) {
	var f interface{}
	if err := json.Unmarshal(datum, &f); err != nil {
		return f, fmt.Errorf("Invalid JSON in %v \nError parsing page meta data: %s", page.FileName, err)
	}
	return f, nil
}
|
|
|
|
|
2013-08-05 10:53:58 -04:00
|
|
|
// update applies parsed front matter (a map of key -> value) to the page.
// Keys are matched case-insensitively; known keys populate typed Page
// fields, everything else is stored in Params. Absolute urls and aliases
// are rejected with an error.
func (page *Page) update(f interface{}) error {
	// NOTE(review): this asserts f is a map and will panic otherwise —
	// all three front matter handlers currently produce maps for objects.
	m := f.(map[string]interface{})

	for k, v := range m {
		loki := strings.ToLower(k)
		switch loki {
		case "title":
			page.Title = cast.ToString(v)
		case "linktitle":
			page.linkTitle = cast.ToString(v)
		case "description":
			page.Description = cast.ToString(v)
		case "slug":
			// Slugs are sanitized into url-safe form.
			page.Slug = helpers.Urlize(cast.ToString(v))
		case "url":
			if url := cast.ToString(v); strings.HasPrefix(url, "http://") || strings.HasPrefix(url, "https://") {
				return fmt.Errorf("Only relative urls are supported, %v provided", url)
			}
			page.Url = helpers.Urlize(cast.ToString(v))
		case "type":
			page.contentType = cast.ToString(v)
		case "keywords":
			page.Keywords = cast.ToStringSlice(v)
		case "date", "pubdate":
			page.Date = cast.ToTime(v)
		case "draft":
			page.Draft = cast.ToBool(v)
		case "layout":
			page.layout = cast.ToString(v)
		case "markup":
			page.Markup = cast.ToString(v)
		case "weight":
			page.Weight = cast.ToInt(v)
		case "aliases":
			page.Aliases = cast.ToStringSlice(v)
			for _, alias := range page.Aliases {
				if strings.HasPrefix(alias, "http://") || strings.HasPrefix(alias, "https://") {
					return fmt.Errorf("Only relative aliases are supported, %v provided", alias)
				}
			}
		case "status":
			page.Status = cast.ToString(v)
		default:
			// If not one of the explicit values, store in Params
			switch vv := v.(type) {
			case bool:
				page.Params[loki] = vv
			case string:
				page.Params[loki] = vv
			case int64, int32, int16, int8, int:
				page.Params[loki] = vv
			case float64, float32:
				page.Params[loki] = vv
			case time.Time:
				page.Params[loki] = vv
			default: // handle array of strings as well
				switch vvv := vv.(type) {
				case []interface{}:
					var a = make([]string, len(vvv))
					for i, u := range vvv {
						a[i] = cast.ToString(u)
					}
					page.Params[loki] = a
				}
				// NOTE(review): values of any other type are silently
				// dropped here — confirm this is intentional.
			}
		}
	}
	return nil
}
|
|
|
|
|
|
|
|
// GetParam looks up a front matter parameter by key (case-insensitive)
// and returns it normalized to a basic Go type. Returns nil when the
// key is absent or the stored value is of an unsupported type.
func (page *Page) GetParam(key string) interface{} {
	v := page.Params[strings.ToLower(key)]

	if v == nil {
		return nil
	}

	// Normalize through cast so callers get predictable concrete types.
	switch v.(type) {
	case bool:
		return cast.ToBool(v)
	case string:
		return cast.ToString(v)
	case int64, int32, int16, int8, int:
		return cast.ToInt(v)
	case float64, float32:
		return cast.ToFloat64(v)
	case time.Time:
		return cast.ToTime(v)
	case []string:
		return v
	}
	return nil
}
|
|
|
|
|
2013-08-05 10:53:58 -04:00
|
|
|
// frontmatterType describes one supported front matter format: its
// start/end delimiters, the decoder for its payload, and whether the
// delimiters are themselves part of the payload (true for JSON braces).
type frontmatterType struct {
	// markstart and markend delimit the front matter block.
	markstart, markend []byte
	// parse decodes the raw front matter bytes into a generic structure.
	parse func([]byte) (interface{}, error)
	// includeMark keeps the delimiters in the bytes handed to parse.
	includeMark bool
}
|
|
|
|
|
2013-09-18 13:17:43 -04:00
|
|
|
// Front matter delimiters: "---" introduces YAML, "+++" introduces TOML.
const YAML_DELIM = "---"
const TOML_DELIM = "+++"
|
|
|
|
|
2013-08-05 10:53:58 -04:00
|
|
|
func (page *Page) detectFrontMatter(mark rune) (f *frontmatterType) {
|
2014-01-29 17:50:31 -05:00
|
|
|
switch mark {
|
|
|
|
case '-':
|
|
|
|
return &frontmatterType{[]byte(YAML_DELIM), []byte(YAML_DELIM), page.handleYamlMetaData, false}
|
|
|
|
case '+':
|
|
|
|
return &frontmatterType{[]byte(TOML_DELIM), []byte(TOML_DELIM), page.handleTomlMetaData, false}
|
|
|
|
case '{':
|
|
|
|
return &frontmatterType{[]byte{'{'}, []byte{'}'}, page.handleJsonMetaData, true}
|
|
|
|
default:
|
|
|
|
return nil
|
|
|
|
}
|
2013-08-05 10:53:58 -04:00
|
|
|
}
|
|
|
|
|
2013-09-03 15:41:13 -04:00
|
|
|
func (p *Page) Render(layout ...string) template.HTML {
|
2014-01-29 17:50:31 -05:00
|
|
|
curLayout := ""
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(layout) > 0 {
|
|
|
|
curLayout = layout[0]
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return bytesToHTML(p.ExecuteTemplate(curLayout).Bytes())
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
// ExecuteTemplate renders the page through the first candidate layout
// that exists in the template set, returning the rendered bytes. An
// empty buffer is returned when no candidate layout is found.
func (p *Page) ExecuteTemplate(layout string) *bytes.Buffer {
	l := p.Layout(layout)
	buffer := new(bytes.Buffer)
	for _, layout := range l {
		if p.Tmpl.Lookup(layout) != nil {
			// NOTE(review): the error from ExecuteTemplate is discarded,
			// so render failures are silent — confirm this is intended.
			p.Tmpl.ExecuteTemplate(buffer, layout, p)
			break
		}
	}
	return buffer
}
|
|
|
|
|
2013-09-18 17:21:27 -04:00
|
|
|
func (page *Page) guessMarkupType() string {
|
2014-01-29 17:50:31 -05:00
|
|
|
// First try the explicitly set markup from the frontmatter
|
|
|
|
if page.Markup != "" {
|
|
|
|
format := guessType(page.Markup)
|
|
|
|
if format != "unknown" {
|
|
|
|
return format
|
|
|
|
}
|
|
|
|
}
|
2013-09-18 17:21:27 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
// Then try to guess from the extension
|
|
|
|
ext := strings.ToLower(path.Ext(page.FileName))
|
|
|
|
if strings.HasPrefix(ext, ".") {
|
|
|
|
return guessType(ext[1:])
|
|
|
|
}
|
2013-09-18 17:21:27 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return "unknown"
|
2013-09-18 17:21:27 -04:00
|
|
|
}
|
|
|
|
|
2013-12-06 23:56:51 -05:00
|
|
|
// guessType normalizes a markup identifier (case-insensitive) to one of
// "markdown", "rst", "html" or "unknown".
func guessType(in string) string {
	switch strings.ToLower(in) {
	case "md", "markdown", "mdown":
		return "markdown"
	case "rst":
		return "rst"
	case "html", "htm":
		return "html"
	default:
		return "unknown"
	}
}
|
|
|
|
|
2013-08-05 10:53:58 -04:00
|
|
|
func (page *Page) parse(reader io.Reader) error {
|
2014-01-29 17:50:31 -05:00
|
|
|
p, err := parser.ReadFrom(reader)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
page.renderable = p.IsRenderable()
|
2013-09-18 13:17:43 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
front := p.FrontMatter()
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
if len(front) != 0 {
|
|
|
|
fm := page.detectFrontMatter(rune(front[0]))
|
|
|
|
meta, err := fm.parse(front)
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2013-09-18 13:17:43 -04:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
if err = page.update(meta); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2013-12-06 23:32:00 -05:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
|
|
|
page.rawContent = p.Content()
|
|
|
|
page.setSummary()
|
2013-12-06 23:32:00 -05:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return nil
|
2013-12-06 23:32:00 -05:00
|
|
|
}
|
2013-08-25 00:27:41 -04:00
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// ProcessShortcodes expands shortcodes in both the raw content and the
// already-computed summary, using the template set t.
func (p *Page) ProcessShortcodes(t bundle.Template) {
	p.rawContent = []byte(ShortcodesHandle(string(p.rawContent), p, t))
	p.Summary = template.HTML(ShortcodesHandle(string(p.Summary), p, t))
}
|
|
|
|
|
2013-12-06 23:32:00 -05:00
|
|
|
// Convert renders rawContent into Content according to the guessed
// markup type. Markdown and reStructuredText also produce a
// TableOfContents; HTML sources are passed through untouched.
// An error is returned for unsupported markup types.
func (page *Page) Convert() error {
	markupType := page.guessMarkupType()
	switch markupType {
	case "markdown", "rst":
		// Strip the summary divider before rendering, then split out the TOC.
		tmpContent, tmpTableOfContents := extractTOC(page.renderContent(RemoveSummaryDivider(page.rawContent)))
		page.Content = bytesToHTML(tmpContent)
		page.TableOfContents = bytesToHTML(tmpTableOfContents)
	case "html":
		page.Content = bytesToHTML(page.rawContent)
	default:
		return fmt.Errorf("Error converting unsupported file type '%s' for page '%s'", markupType, page.FileName)
	}
	return nil
}
|
|
|
|
|
2014-01-28 23:11:05 -05:00
|
|
|
func markdownRender(content []byte) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
htmlFlags := 0
|
|
|
|
htmlFlags |= blackfriday.HTML_SKIP_SCRIPT
|
2014-01-30 17:50:47 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_USE_XHTML
|
2014-01-29 17:50:31 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_USE_SMARTYPANTS
|
2014-01-30 17:50:47 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_SMARTYPANTS_FRACTIONS
|
|
|
|
htmlFlags |= blackfriday.HTML_SMARTYPANTS_LATEX_DASHES
|
2014-01-29 17:50:31 -05:00
|
|
|
renderer := blackfriday.HtmlRenderer(htmlFlags, "", "")
|
2014-01-28 23:11:05 -05:00
|
|
|
|
2014-01-30 17:50:47 -05:00
|
|
|
extensions := 0
|
|
|
|
extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
|
|
|
|
extensions |= blackfriday.EXTENSION_TABLES
|
|
|
|
extensions |= blackfriday.EXTENSION_FENCED_CODE
|
|
|
|
extensions |= blackfriday.EXTENSION_AUTOLINK
|
|
|
|
extensions |= blackfriday.EXTENSION_STRIKETHROUGH
|
|
|
|
extensions |= blackfriday.EXTENSION_SPACE_HEADERS
|
|
|
|
|
|
|
|
return blackfriday.Markdown(content, renderer, extensions)
|
2014-01-27 17:16:28 -05:00
|
|
|
}
|
|
|
|
|
2014-01-28 23:11:05 -05:00
|
|
|
func markdownRenderWithTOC(content []byte) []byte {
|
2014-01-29 17:50:31 -05:00
|
|
|
htmlFlags := 0
|
|
|
|
htmlFlags |= blackfriday.HTML_SKIP_SCRIPT
|
|
|
|
htmlFlags |= blackfriday.HTML_TOC
|
2014-01-30 17:50:47 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_USE_XHTML
|
2014-01-29 17:50:31 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_USE_SMARTYPANTS
|
2014-01-30 17:50:47 -05:00
|
|
|
htmlFlags |= blackfriday.HTML_SMARTYPANTS_FRACTIONS
|
|
|
|
htmlFlags |= blackfriday.HTML_SMARTYPANTS_LATEX_DASHES
|
2014-01-29 17:50:31 -05:00
|
|
|
renderer := blackfriday.HtmlRenderer(htmlFlags, "", "")
|
2014-01-18 11:42:01 -05:00
|
|
|
|
2014-01-30 17:50:47 -05:00
|
|
|
extensions := 0
|
|
|
|
extensions |= blackfriday.EXTENSION_NO_INTRA_EMPHASIS
|
|
|
|
extensions |= blackfriday.EXTENSION_TABLES
|
|
|
|
extensions |= blackfriday.EXTENSION_FENCED_CODE
|
|
|
|
extensions |= blackfriday.EXTENSION_AUTOLINK
|
|
|
|
extensions |= blackfriday.EXTENSION_STRIKETHROUGH
|
|
|
|
extensions |= blackfriday.EXTENSION_SPACE_HEADERS
|
|
|
|
|
|
|
|
return blackfriday.Markdown(content, renderer, extensions)
|
2014-01-28 23:11:05 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
func extractTOC(content []byte) (newcontent []byte, toc []byte) {
|
2014-01-29 17:50:31 -05:00
|
|
|
origContent := make([]byte, len(content))
|
|
|
|
copy(origContent, content)
|
|
|
|
first := []byte(`<nav>
|
2014-01-28 23:11:05 -05:00
|
|
|
<ul>`)
|
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
last := []byte(`</ul>
|
2014-01-28 23:11:05 -05:00
|
|
|
</nav>`)
|
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
replacement := []byte(`<nav id="TableOfContents">
|
2014-01-28 23:11:05 -05:00
|
|
|
<ul>`)
|
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
startOfTOC := bytes.Index(content, first)
|
2014-01-28 23:11:05 -05:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
peekEnd := len(content)
|
|
|
|
if peekEnd > 70+startOfTOC {
|
|
|
|
peekEnd = 70 + startOfTOC
|
|
|
|
}
|
2014-01-28 23:11:05 -05:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
if startOfTOC < 0 {
|
|
|
|
return stripEmptyNav(content), toc
|
|
|
|
}
|
|
|
|
// Need to peek ahead to see if this nav element is actually the right one.
|
|
|
|
correctNav := bytes.Index(content[startOfTOC:peekEnd], []byte(`#toc_0`))
|
|
|
|
if correctNav < 0 { // no match found
|
|
|
|
return content, toc
|
|
|
|
}
|
|
|
|
lengthOfTOC := bytes.Index(content[startOfTOC:], last) + len(last)
|
|
|
|
endOfTOC := startOfTOC + lengthOfTOC
|
2014-01-28 23:11:05 -05:00
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
newcontent = append(content[:startOfTOC], content[endOfTOC:]...)
|
|
|
|
toc = append(replacement, origContent[startOfTOC+len(first):endOfTOC]...)
|
|
|
|
return
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
2013-07-06 22:31:43 -04:00
|
|
|
|
2014-01-27 17:16:28 -05:00
|
|
|
// ReaderToBytes drains lines fully into memory and returns the bytes.
func ReaderToBytes(lines io.Reader) []byte {
	var buf bytes.Buffer
	buf.ReadFrom(lines)
	return buf.Bytes()
}
|
2013-09-20 20:24:25 -04:00
|
|
|
|
|
|
|
// TargetPath returns the output file path for the page relative to the
// publish dir, chosen in priority order: explicit front matter Url,
// section permalink pattern, Slug, and finally the source file name
// with its extension replaced. Trailing-slash paths get "index.html".
func (p *Page) TargetPath() (outfile string) {

	// Always use Url if it's specified
	// NOTE(review): the "> 2" length check presumably means "non-trivial
	// url" — confirm why 1-2 character urls (e.g. "/") are skipped here.
	if len(strings.TrimSpace(p.Url)) > 2 {
		outfile = strings.TrimSpace(p.Url)

		if strings.HasSuffix(outfile, "/") {
			outfile = outfile + "index.html"
		}
		return
	}

	// If there's a Permalink specification, we use that
	if override, ok := p.Site.Permalinks[p.Section]; ok {
		var err error
		outfile, err = override.Expand(p)
		// On expansion failure we fall through to the Slug/filename logic.
		if err == nil {
			if strings.HasSuffix(outfile, "/") {
				outfile += "index.html"
			}
			return
		}
	}

	if len(strings.TrimSpace(p.Slug)) > 0 {
		outfile = strings.TrimSpace(p.Slug) + "." + p.Extension
	} else {
		// Fall back to filename
		_, t := path.Split(p.FileName)
		outfile = helpers.ReplaceExtension(strings.TrimSpace(t), p.Extension)
	}

	return path.Join(p.Dir, strings.TrimSpace(outfile))
}
|