2019-01-02 06:33:26 -05:00
|
|
|
// Copyright 2019 The Hugo Authors. All rights reserved.
|
2013-07-04 11:32:55 -04:00
|
|
|
//
|
2015-11-23 22:16:36 -05:00
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
2013-07-04 11:32:55 -04:00
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
2015-11-23 22:16:36 -05:00
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
2013-07-04 11:32:55 -04:00
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
package hugolib
|
|
|
|
|
|
|
|
import (
|
2014-01-29 17:50:31 -05:00
|
|
|
"bytes"
|
|
|
|
"fmt"
|
2014-12-07 13:48:00 -05:00
|
|
|
"html/template"
|
2019-01-02 06:33:26 -05:00
|
|
|
"os"
|
2014-12-07 13:48:00 -05:00
|
|
|
"path"
|
|
|
|
"path/filepath"
|
2019-01-02 06:33:26 -05:00
|
|
|
"sort"
|
2014-12-07 13:48:00 -05:00
|
|
|
"strings"
|
2015-01-25 06:08:02 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/bep/gitmap"
|
2013-07-04 11:32:55 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/helpers"
|
2017-05-26 03:51:17 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/common/herrors"
|
|
|
|
"github.com/gohugoio/hugo/parser/metadecoders"
|
2017-05-26 03:51:17 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/parser/pageparser"
|
|
|
|
"github.com/pkg/errors"
|
2017-08-17 04:24:17 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/output"
|
2017-08-19 07:16:00 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/media"
|
|
|
|
"github.com/gohugoio/hugo/source"
|
2018-11-01 06:28:30 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
"github.com/gohugoio/hugo/common/collections"
|
|
|
|
"github.com/gohugoio/hugo/common/text"
|
|
|
|
"github.com/gohugoio/hugo/resources"
|
|
|
|
"github.com/gohugoio/hugo/resources/page"
|
|
|
|
"github.com/gohugoio/hugo/resources/resource"
|
2015-09-03 06:22:20 -04:00
|
|
|
)
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Compile-time assertions that *pageState satisfies the interfaces it is
// used through elsewhere in the code base.
var (
	_ page.Page           = (*pageState)(nil)
	_ collections.Grouper = (*pageState)(nil)
	_ collections.Slicer  = (*pageState)(nil)
)
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
var (
	// pageTypesProvider supplies the media/resource type reported by pages.
	pageTypesProvider = resource.NewResourceTypesProvider(media.OctetType, pageResourceType)
	// nopPageOutput is a no-op placeholder output; its per-output providers
	// all do nothing.
	nopPageOutput = &pageOutput{pagePerOutputProviders: nopPagePerOutput}
)
|
2017-08-19 07:16:00 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// pageContext provides contextual information about this page, for error
// logging and similar.
type pageContext interface {
	// posOffset maps an offset in the page source to a text.Position for
	// error reporting.
	posOffset(offset int) text.Position
	// wrapError decorates err with information about this page.
	wrapError(err error) error
	// getRenderingConfig returns the Blackfriday (Markdown) configuration
	// in effect for this page.
	getRenderingConfig() *helpers.BlackFriday
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// wrapErr adds some context to the given error if possible.
|
|
|
|
func wrapErr(err error, ctx interface{}) error {
|
|
|
|
if pc, ok := ctx.(pageContext); ok {
|
|
|
|
return pc.wrapError(err)
|
2017-08-19 07:16:00 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return err
|
:sparkles: Implement Page bundling and image handling
This commit is not the smallest in Hugo's history.
Some hightlights include:
* Page bundles (for complete articles, keeping images and content together etc.).
* Bundled images can be processed in as many versions/sizes as you need with the three methods `Resize`, `Fill` and `Fit`.
* Processed images are cached inside `resources/_gen/images` (default) in your project.
* Symbolic links (both files and dirs) are now allowed anywhere inside /content
* A new table based build summary
* The "Total in nn ms" now reports the total including the handling of the files inside /static. So if it now reports more than you're used to, it is just **more real** and probably faster than before (see below).
A site building benchmark run compared to `v0.31.1` shows that this should be slightly faster and use less memory:
```bash
▶ ./benchSite.sh "TOML,num_langs=.*,num_root_sections=5,num_pages=(500|1000),tags_per_page=5,shortcodes,render"
benchmark old ns/op new ns/op delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 101785785 78067944 -23.30%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 185481057 149159919 -19.58%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 103149918 85679409 -16.94%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 203515478 169208775 -16.86%
benchmark old allocs new allocs delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 532464 391539 -26.47%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 1056549 772702 -26.87%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 555974 406630 -26.86%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 1086545 789922 -27.30%
benchmark old bytes new bytes delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 53243246 43598155 -18.12%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 105811617 86087116 -18.64%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 54558852 44545097 -18.35%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 106903858 86978413 -18.64%
```
Fixes #3651
Closes #3158
Fixes #1014
Closes #2021
Fixes #1240
Updates #3757
2017-07-24 03:00:23 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// pageSiteAdapter pairs a page with its owning Site so that page lookups
// can be resolved relative to that page.
type pageSiteAdapter struct {
	p page.Page // the page this adapter is bound to
	s *Site     // the site the page belongs to
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (pa pageSiteAdapter) GetPage(ref string) (page.Page, error) {
|
|
|
|
p, err := pa.s.getPageNew(pa.p, ref)
|
|
|
|
if p == nil {
|
|
|
|
// The nil struct has meaning in some situations, mostly to avoid breaking
|
|
|
|
// existing sites doing $nilpage.IsDescendant($p), which will always return
|
|
|
|
// false.
|
|
|
|
p = page.NilPage
|
2017-03-16 03:58:50 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return p, err
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// pageState carries the state of a page across all of its output formats.
type pageState struct {
	// This slice will be of same length as the number of global slice of output
	// formats (for all sites).
	pageOutputs []*pageOutput

	// This will be shifted out when we start to render a new output format.
	*pageOutput

	// Common for all output formats.
	*pageCommon
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Eq returns whether the current page equals the given page.
|
|
|
|
// This is what's invoked when doing `{{ if eq $page $otherPage }}`
|
|
|
|
func (p *pageState) Eq(other interface{}) bool {
|
|
|
|
pp, err := unwrapPage(other)
|
|
|
|
if err != nil {
|
|
|
|
return false
|
2018-04-23 02:41:19 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return p == pp
|
2018-04-23 02:41:19 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// GitInfo returns the Git revision information recorded for this page's
// source file, or nil when not available/enabled.
func (p *pageState) GitInfo() *gitmap.GitInfo {
	return p.gitInfo
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// MarshalJSON implements json.Marshaler by delegating to the shared page
// JSON marshaler.
func (p *pageState) MarshalJSON() ([]byte, error) {
	return page.MarshalPageToJSON(p)
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Pages returns the pages "belonging" to this page, which depends on its
// Kind (e.g. all regular pages for the home page, the term's pages for a
// taxonomy page). Computed lazily, exactly once.
func (p *pageState) Pages() page.Pages {
	p.pagesInit.Do(func() {
		if p.pages != nil {
			// Already set explicitly (e.g. via setPages); keep as-is.
			return
		}

		var pages page.Pages

		switch p.Kind() {
		case page.KindPage:
			// Regular pages have no child pages.
		case page.KindHome:
			pages = p.s.RegularPages()
		case page.KindTaxonomy:
			// Pages tagged with this specific term.
			termInfo := p.getTaxonomyNodeInfo()
			taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey)
			pages = taxonomy.Pages()
		case page.KindTaxonomyTerm:
			plural := p.getTaxonomyNodeInfo().plural
			// A list of all page.KindTaxonomy pages with matching plural
			for _, p := range p.s.findPagesByKind(page.KindTaxonomy) {
				if p.SectionsEntries()[0] == plural {
					pages = append(pages, p)
				}
			}
		case kind404, kindSitemap, kindRobotsTXT:
			pages = p.s.Pages()
		}

		p.pages = pages
	})

	return p.pages
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// RawContent returns the un-rendered source content without
|
|
|
|
// any leading front matter.
|
|
|
|
func (p *pageState) RawContent() string {
|
|
|
|
if p.source.parsed == nil {
|
|
|
|
return ""
|
2018-10-30 15:24:34 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
start := p.source.posMainContent
|
2018-10-30 15:24:34 -04:00
|
|
|
if start == -1 {
|
2019-01-02 06:33:26 -05:00
|
|
|
start = 0
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return string(p.source.parsed.Input()[start:])
|
2016-08-01 17:04:44 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Resources returns this page's resources (bundled files), sorted and with
// any resource metadata from front matter applied. Initialized lazily, once.
func (p *pageState) Resources() resource.Resources {
	p.resourcesInit.Do(func() {

		// NOTE: inside the function literal below, `sort` still refers to the
		// standard library package (the variable's scope starts after the
		// declaration); the later `sort()` calls invoke this closure.
		sort := func() {
			sort.SliceStable(p.resources, func(i, j int) bool {
				ri, rj := p.resources[i], p.resources[j]
				// Primary order: resource type.
				if ri.ResourceType() < rj.ResourceType() {
					return true
				}

				p1, ok1 := ri.(page.Page)
				p2, ok2 := rj.(page.Page)

				if ok1 != ok2 {
					// Non-page resources sort before page resources.
					return ok2
				}

				if ok1 {
					// Both are pages: use the default page sort.
					return page.DefaultPageSort(p1, p2)
				}

				return ri.RelPermalink() < rj.RelPermalink()
			})
		}

		sort()

		if len(p.m.resourcesMetadata) > 0 {
			resources.AssignMetadata(p.m.resourcesMetadata, p.resources...)
			// Metadata assignment can change sort-relevant fields; re-sort.
			sort()
		}

	})
	return p.resources
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) HasShortcode(name string) bool {
|
2017-07-17 17:20:13 -04:00
|
|
|
if p.shortcodeState == nil {
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
return p.shortcodeState.nameSet[name]
|
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Site returns the Site this page belongs to.
func (p *pageState) Site() page.Site {
	return &p.s.Info
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) String() string {
|
|
|
|
if sourceRef := p.sourceRef(); sourceRef != "" {
|
|
|
|
return fmt.Sprintf("Page(%s)", sourceRef)
|
|
|
|
}
|
|
|
|
return fmt.Sprintf("Page(%q)", p.Title())
|
2016-08-09 08:26:55 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// IsTranslated returns whether this content file is translated to
// other language(s).
func (p *pageState) IsTranslated() bool {
	// Make sure the cross-site translation mapping is initialized first.
	p.s.h.init.translations.Do()
	return len(p.translations) > 0
}
|
|
|
|
|
2017-11-17 10:28:35 -05:00
|
|
|
// TranslationKey returns the key used to map language translations of this page.
// It will use the translationKey set in front matter if set, or the content path and
// filename (excluding any language code and extension), e.g. "about/index".
// The Page Kind is always prepended.
func (p *pageState) TranslationKey() string {
	p.translationKeyInit.Do(func() {
		if p.m.translationKey != "" {
			// Explicit front matter override.
			p.translationKey = p.Kind() + "/" + p.m.translationKey
		} else if p.IsPage() && !p.File().IsZero() {
			// Regular page backed by a content file: derive from its path.
			p.translationKey = path.Join(p.Kind(), filepath.ToSlash(p.File().Dir()), p.File().TranslationBaseName())
		} else if p.IsNode() {
			// Node pages (home, sections etc.): derive from the section path.
			p.translationKey = path.Join(p.Kind(), p.SectionsPath())
		}

	})

	return p.translationKey
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// AllTranslations returns all translations, including the current Page.
func (p *pageState) AllTranslations() page.Pages {
	// Translation maps are built lazily for all sites.
	p.s.h.init.translations.Do()
	return p.allTranslations
}
|
2018-05-08 04:10:13 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Translations returns the translations excluding the current Page.
func (p *pageState) Translations() page.Pages {
	// Translation maps are built lazily for all sites.
	p.s.h.init.translations.Do()
	return p.translations
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) getRenderingConfig() *helpers.BlackFriday {
|
|
|
|
if p.m.renderingConfig == nil {
|
|
|
|
return p.s.ContentSpec.BlackFriday
|
:sparkles: Implement Page bundling and image handling
This commit is not the smallest in Hugo's history.
Some hightlights include:
* Page bundles (for complete articles, keeping images and content together etc.).
* Bundled images can be processed in as many versions/sizes as you need with the three methods `Resize`, `Fill` and `Fit`.
* Processed images are cached inside `resources/_gen/images` (default) in your project.
* Symbolic links (both files and dirs) are now allowed anywhere inside /content
* A new table based build summary
* The "Total in nn ms" now reports the total including the handling of the files inside /static. So if it now reports more than you're used to, it is just **more real** and probably faster than before (see below).
A site building benchmark run compared to `v0.31.1` shows that this should be slightly faster and use less memory:
```bash
▶ ./benchSite.sh "TOML,num_langs=.*,num_root_sections=5,num_pages=(500|1000),tags_per_page=5,shortcodes,render"
benchmark old ns/op new ns/op delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 101785785 78067944 -23.30%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 185481057 149159919 -19.58%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 103149918 85679409 -16.94%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 203515478 169208775 -16.86%
benchmark old allocs new allocs delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 532464 391539 -26.47%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 1056549 772702 -26.87%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 555974 406630 -26.86%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 1086545 789922 -27.30%
benchmark old bytes new bytes delta
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 53243246 43598155 -18.12%
BenchmarkSiteBuilding/TOML,num_langs=1,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 105811617 86087116 -18.64%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=500,tags_per_page=5,shortcodes,render-4 54558852 44545097 -18.35%
BenchmarkSiteBuilding/TOML,num_langs=3,num_root_sections=5,num_pages=1000,tags_per_page=5,shortcodes,render-4 106903858 86978413 -18.64%
```
Fixes #3651
Closes #3158
Fixes #1014
Closes #2021
Fixes #1240
Updates #3757
2017-07-24 03:00:23 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return p.m.renderingConfig
|
2018-04-19 12:06:40 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// initCommonProviders wires up the providers that are shared across all of
// this page's output formats.
func (ps *pageState) initCommonProviders(pp pagePaths) error {
	if ps.IsPage() {
		// Only regular pages participate in prev/next navigation.
		ps.posNextPrev = &nextPrev{init: ps.s.init.prevNext}
		ps.posNextPrevSection = &nextPrev{init: ps.s.init.prevNextInSection}
		ps.InSectionPositioner = newPagePositionInSection(ps.posNextPrevSection)
		ps.Positioner = newPagePosition(ps.posNextPrev)
	}

	ps.OutputFormatsProvider = pp
	ps.targetPathDescriptor = pp.targetPathDescriptor
	ps.RefProvider = newPageRef(ps)
	ps.SitesProvider = &ps.s.Info

	return nil
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// getLayoutDescriptor builds (once) and returns the descriptor used to
// resolve this page's layouts (templates).
func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
	p.layoutDescriptorInit.Do(func() {
		var section string
		sections := p.SectionsEntries()

		// The "section" part of the descriptor depends on the page kind.
		switch p.Kind() {
		case page.KindSection:
			section = sections[0]
		case page.KindTaxonomyTerm:
			section = p.getTaxonomyNodeInfo().singular
		case page.KindTaxonomy:
			section = p.getTaxonomyNodeInfo().parent.singular
		default:
			// Other kinds (home, regular pages) have no section component.
		}

		p.layoutDescriptor = output.LayoutDescriptor{
			Kind:    p.Kind(),
			Type:    p.Type(),
			Lang:    p.Language().Lang,
			Layout:  p.Layout(),
			Section: section,
		}
	})

	return p.layoutDescriptor
}
|
2018-01-15 14:40:39 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) getLayouts(layouts ...string) ([]string, error) {
|
|
|
|
f := p.outputFormat()
|
2018-01-15 14:40:39 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
if len(layouts) == 0 {
|
|
|
|
selfLayout := p.selfLayoutForOutput(f)
|
|
|
|
if selfLayout != "" {
|
|
|
|
return []string{selfLayout}, nil
|
2014-01-29 17:50:31 -05:00
|
|
|
}
|
|
|
|
}
|
2015-05-14 16:06:36 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
layoutDescriptor := p.getLayoutDescriptor()
|
|
|
|
|
|
|
|
if len(layouts) > 0 {
|
|
|
|
layoutDescriptor.Layout = layouts[0]
|
|
|
|
layoutDescriptor.LayoutOverride = true
|
2018-05-04 20:17:16 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return p.s.layoutHandler.For(layoutDescriptor, f)
|
|
|
|
}
|
|
|
|
|
|
|
|
// This is serialized
|
2019-04-10 04:11:51 -04:00
|
|
|
func (p *pageState) initOutputFormat(isRenderingSite bool, idx int) error {
|
|
|
|
if err := p.shiftToOutputFormat(isRenderingSite, idx); err != nil {
|
2019-01-02 06:33:26 -05:00
|
|
|
return err
|
2015-08-02 02:02:20 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
if !p.renderable {
|
|
|
|
if _, err := p.Content(); err != nil {
|
|
|
|
return err
|
2015-09-03 06:22:20 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-01-29 17:50:31 -05:00
|
|
|
return nil
|
2019-01-02 06:33:26 -05:00
|
|
|
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Must be run after the site section tree etc. is built and ready.
|
|
|
|
func (p *pageState) initPage() error {
|
|
|
|
if _, err := p.init.Do(); err != nil {
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
return nil
|
2017-12-29 02:58:38 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) setPages(pages page.Pages) {
|
|
|
|
page.SortByDefault(pages)
|
|
|
|
p.pages = pages
|
2015-05-31 14:30:53 -04:00
|
|
|
}
|
|
|
|
|
2019-04-15 06:06:12 -04:00
|
|
|
func (p *pageState) renderResources() (err error) {
|
|
|
|
p.resourcesPublishInit.Do(func() {
|
|
|
|
var toBeDeleted []int
|
|
|
|
|
|
|
|
for i, r := range p.Resources() {
|
|
|
|
if _, ok := r.(page.Page); ok {
|
|
|
|
// Pages gets rendered with the owning page but we count them here.
|
|
|
|
p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Pages)
|
|
|
|
continue
|
|
|
|
}
|
2014-01-29 17:50:31 -05:00
|
|
|
|
2019-04-15 06:06:12 -04:00
|
|
|
src, ok := r.(resource.Source)
|
|
|
|
if !ok {
|
|
|
|
err = errors.Errorf("Resource %T does not support resource.Source", src)
|
|
|
|
return
|
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
|
2019-04-15 06:06:12 -04:00
|
|
|
if err := src.Publish(); err != nil {
|
|
|
|
if os.IsNotExist(err) {
|
|
|
|
// The resource has been deleted from the file system.
|
|
|
|
// This should be extremely rare, but can happen on live reload in server
|
|
|
|
// mode when the same resource is member of different page bundles.
|
|
|
|
toBeDeleted = append(toBeDeleted, i)
|
|
|
|
} else {
|
|
|
|
p.s.Log.ERROR.Printf("Failed to publish Resource for page %q: %s", p.pathOrTitle(), err)
|
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
} else {
|
2019-04-15 06:06:12 -04:00
|
|
|
p.s.PathSpec.ProcessingStats.Incr(&p.s.PathSpec.ProcessingStats.Files)
|
2019-01-02 06:33:26 -05:00
|
|
|
}
|
2015-05-31 14:30:53 -04:00
|
|
|
}
|
2015-01-24 06:44:35 -05:00
|
|
|
|
2019-04-15 06:06:12 -04:00
|
|
|
for _, i := range toBeDeleted {
|
|
|
|
p.deleteResource(i)
|
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
|
2019-04-15 06:06:12 -04:00
|
|
|
})
|
|
|
|
|
|
|
|
return
|
2013-07-04 11:32:55 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) deleteResource(i int) {
|
|
|
|
p.resources = append(p.resources[:i], p.resources[i+1:]...)
|
|
|
|
}
|
2015-01-06 12:11:06 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// getTargetPaths returns the computed output target paths for this page.
func (p *pageState) getTargetPaths() page.TargetPaths {
	return p.targetPaths()
}
|
2017-02-20 03:33:35 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) setTranslations(pages page.Pages) {
|
|
|
|
p.allTranslations = pages
|
|
|
|
page.SortByLanguage(p.allTranslations)
|
|
|
|
translations := make(page.Pages, 0)
|
|
|
|
for _, t := range p.allTranslations {
|
|
|
|
if !t.Eq(p) {
|
|
|
|
translations = append(translations, t)
|
2017-02-20 03:33:35 -05:00
|
|
|
}
|
2015-01-06 12:11:06 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
p.translations = translations
|
|
|
|
}
|
2014-04-23 02:59:19 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) AlternativeOutputFormats() page.OutputFormats {
|
|
|
|
f := p.outputFormat()
|
|
|
|
var o page.OutputFormats
|
|
|
|
for _, of := range p.OutputFormats() {
|
|
|
|
if of.Format.NotAlternative || of.Format.Name == f.Name {
|
|
|
|
continue
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
o = append(o, of)
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return o
|
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) Render(layout ...string) template.HTML {
|
|
|
|
l, err := p.getLayouts(layout...)
|
|
|
|
if err != nil {
|
|
|
|
p.s.SendError(p.wrapError(errors.Errorf(".Render: failed to resolve layout %v", layout)))
|
|
|
|
return ""
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
for _, layout := range l {
|
|
|
|
templ, found := p.s.Tmpl.Lookup(layout)
|
|
|
|
if !found {
|
|
|
|
// This is legacy from when we had only one output format and
|
|
|
|
// HTML templates only. Some have references to layouts without suffix.
|
|
|
|
// We default to good old HTML.
|
|
|
|
templ, _ = p.s.Tmpl.Lookup(layout + ".html")
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
if templ != nil {
|
|
|
|
res, err := executeToString(templ, p)
|
|
|
|
if err != nil {
|
|
|
|
p.s.SendError(p.wrapError(errors.Wrapf(err, ".Render: failed to execute template %q v", layout)))
|
|
|
|
return ""
|
|
|
|
}
|
|
|
|
return template.HTML(res)
|
2014-04-23 02:59:19 -04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return ""
|
2014-04-23 02:59:19 -04:00
|
|
|
|
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// wrapError adds some more context to the given error if possible
|
|
|
|
func (p *pageState) wrapError(err error) error {
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
var filename string
|
2019-03-25 13:18:34 -04:00
|
|
|
if !p.File().IsZero() {
|
2019-01-02 06:33:26 -05:00
|
|
|
filename = p.File().Filename()
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
err, _ = herrors.WithFileContextForFile(
|
|
|
|
err,
|
|
|
|
filename,
|
|
|
|
filename,
|
|
|
|
p.s.SourceSpec.Fs.Source,
|
|
|
|
herrors.SimpleLineMatcher)
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return err
|
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// addResources appends the given resources to the page's resource list.
func (p *pageState) addResources(r ...resource.Resource) {
	p.resources = append(p.resources, r...)
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) addSectionToParent() {
|
|
|
|
if p.parent == nil {
|
|
|
|
return
|
2014-04-23 02:59:19 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
p.parent.subSections = append(p.parent.subSections, p)
|
2014-04-23 02:59:19 -04:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) contentMarkupType() string {
|
|
|
|
if p.m.markup != "" {
|
|
|
|
return p.m.markup
|
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return p.File().Ext()
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// mapContent walks the parsed page source and builds the page content map:
// front matter is decoded into meta, shortcodes are extracted and assigned
// placeholders, the summary divider and emoji replacements are tracked, and
// everything else is recorded as raw bytes. The resulting map is stored in
// p.cmap. The statement order below is significant; do not reorder.
func (p *pageState) mapContent(meta *pageMeta) error {

	s := p.shortcodeState

	p.renderable = true

	rn := &pageContentMap{
		items: make([]interface{}, 0, 20),
	}

	iter := p.source.parsed.Iterator()

	// fail wraps err with the file position of the offending item.
	fail := func(err error, i pageparser.Item) error {
		return p.parseError(err, iter.Input(), i.Pos)
	}

	// the parser is guaranteed to return items in proper order or fail, so …
	// … it's safe to keep some "global" state
	var currShortcode shortcode
	var ordinal int

Loop:
	for {
		it := iter.Next()

		switch {
		case it.Type == pageparser.TypeIgnore:
		case it.Type == pageparser.TypeHTMLStart:
			// This is HTML without front matter. It can still have shortcodes.
			p.selfLayout = "__" + p.File().Filename()
			p.renderable = false
			rn.AddBytes(it)
		case it.IsFrontMatter():
			f := metadecoders.FormatFromFrontMatterType(it.Type)
			m, err := metadecoders.Default.UnmarshalToMap(it.Val, f)
			if err != nil {
				if fe, ok := err.(herrors.FileError); ok {
					// Shift the reported line so it points into the source
					// file rather than the front matter block.
					return herrors.ToFileErrorWithOffset(fe, iter.LineNumber()-1)
				} else {
					return err
				}
			}

			if err := meta.setMetadata(p, m); err != nil {
				return err
			}

			next := iter.Peek()
			if !next.IsDone() {
				p.source.posMainContent = next.Pos
			}

			if !p.s.shouldBuild(p) {
				// Nothing more to do.
				return nil
			}

		case it.Type == pageparser.TypeLeadSummaryDivider:
			// Locate the start of the body after the divider and detect
			// whether any non-whitespace content follows (truncation).
			posBody := -1
			f := func(item pageparser.Item) bool {
				if posBody == -1 && !item.IsDone() {
					posBody = item.Pos
				}

				if item.IsNonWhitespace() {
					p.truncated = true

					// Done
					return false
				}
				return true
			}
			iter.PeekWalk(f)

			p.source.posSummaryEnd = it.Pos
			p.source.posBodyStart = posBody
			p.source.hasSummaryDivider = true

			if meta.markup != "html" {
				// The content will be rendered by Blackfriday or similar,
				// and we need to track the summary.
				rn.AddReplacement(internalSummaryDividerPre, it)
			}

		// Handle shortcode
		case it.IsLeftShortcodeDelim():
			// let extractShortcode handle left delim (will do so recursively)
			iter.Backup()

			// NOTE(review): := here shadows the outer currShortcode, so the
			// outer variable (used in the IsError case below) is never set by
			// this branch — confirm that this is intentional.
			currShortcode, err := s.extractShortcode(ordinal, 0, iter)
			if err != nil {
				return fail(errors.Wrap(err, "failed to extract shortcode"), it)
			}

			currShortcode.pos = it.Pos
			currShortcode.length = iter.Current().Pos - it.Pos
			if currShortcode.placeholder == "" {
				currShortcode.placeholder = createShortcodePlaceholder("s", currShortcode.ordinal)
			}

			if currShortcode.name != "" {
				s.nameSet[currShortcode.name] = true
			}

			if currShortcode.params == nil {
				// Normalize nil params to a typed nil slice.
				// (This local s shadows the shortcode state s above.)
				var s []string
				currShortcode.params = s
			}

			currShortcode.placeholder = createShortcodePlaceholder("s", ordinal)
			ordinal++
			s.shortcodes = append(s.shortcodes, currShortcode)

			rn.AddShortcode(currShortcode)

		case it.Type == pageparser.TypeEmoji:
			if emoji := helpers.Emoji(it.ValStr()); emoji != nil {
				rn.AddReplacement(emoji, it)
			} else {
				rn.AddBytes(it)
			}
		case it.IsEOF():
			break Loop
		case it.IsError():
			err := fail(errors.WithStack(errors.New(it.ValStr())), it)
			currShortcode.err = err
			return err

		default:
			rn.AddBytes(it)
		}
	}

	p.cmap = rn

	return nil
}
|
2016-11-11 05:35:55 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) errorf(err error, format string, a ...interface{}) error {
|
|
|
|
if herrors.UnwrapErrorWithFileContext(err) != nil {
|
|
|
|
// More isn't always better.
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
args := append([]interface{}{p.Language().Lang, p.pathOrTitle()}, a...)
|
|
|
|
format = "[%s] page %q: " + format
|
|
|
|
if err == nil {
|
|
|
|
errors.Errorf(format, args...)
|
|
|
|
return fmt.Errorf(format, args...)
|
2016-11-11 05:35:55 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return errors.Wrapf(err, format, args...)
|
2016-11-11 05:35:55 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) outputFormat() (f output.Format) {
|
|
|
|
if p.pageOutput == nil {
|
|
|
|
panic("no pageOutput")
|
2018-05-08 04:10:13 -04:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return p.pageOutput.f
|
2016-10-31 05:23:01 -04:00
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// parseError turns err into a file error positioned at the given byte offset
// within input, unless err already carries a more specific file location.
func (p *pageState) parseError(err error, input []byte, offset int) error {
	if herrors.UnwrapFileError(err) != nil {
		// Use the most specific location.
		return err
	}
	pos := p.posFromInput(input, offset)
	// NOTE(review): the file type is hardcoded to "md" and the offset
	// argument is -1 — confirm whether non-Markdown sources need their real
	// extension here.
	return herrors.NewFileError("md", -1, pos.LineNumber, pos.ColumnNumber, err)
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) pathOrTitle() string {
|
2019-03-25 13:18:34 -04:00
|
|
|
if !p.File().IsZero() {
|
2019-01-02 06:33:26 -05:00
|
|
|
return p.File().Filename()
|
2018-05-25 19:59:58 -04:00
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
if p.Path() != "" {
|
|
|
|
return p.Path()
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
|
|
|
|
return p.Title()
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) posFromPage(offset int) text.Position {
|
|
|
|
return p.posFromInput(p.source.parsed.Input(), offset)
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) posFromInput(input []byte, offset int) text.Position {
|
|
|
|
lf := []byte("\n")
|
|
|
|
input = input[:offset]
|
|
|
|
lineNumber := bytes.Count(input, lf) + 1
|
|
|
|
endOfLastLine := bytes.LastIndex(input, lf)
|
|
|
|
|
|
|
|
return text.Position{
|
|
|
|
Filename: p.pathOrTitle(),
|
|
|
|
LineNumber: lineNumber,
|
|
|
|
ColumnNumber: offset - endOfLastLine,
|
|
|
|
Offset: offset,
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// posOffset translates a byte offset in this page's parsed source into a
// text.Position.
func (p *pageState) posOffset(offset int) text.Position {
	return p.posFromInput(p.source.parsed.Input(), offset)
}
|
2016-12-23 03:52:05 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// shiftToOutputFormat is serialized. The output format idx refers to the
// full set of output formats for all sites.
// It activates the page output at idx, resets stale paginators when
// re-rendering in server mode, reuses content from earlier formats when
// marked reusable, and shifts any nested Page resources to the same format.
func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
	if err := p.initPage(); err != nil {
		return err
	}

	if idx >= len(p.pageOutputs) {
		panic(fmt.Sprintf("invalid page state for %q: got output format index %d, have %d", p.pathOrTitle(), idx, len(p.pageOutputs)))
	}

	p.pageOutput = p.pageOutputs[idx]

	if p.pageOutput == nil {
		panic(fmt.Sprintf("pageOutput is nil for output idx %d", idx))
	}

	// Reset any built paginator. This will trigger when re-rendering pages in
	// server mode.
	if isRenderingSite && p.pageOutput.paginator != nil && p.pageOutput.paginator.current != nil {
		p.pageOutput.paginator.reset()
	}

	if idx > 0 {
		// Check if we can reuse content from one of the previous formats.
		for i := idx - 1; i >= 0; i-- {
			po := p.pageOutputs[i]
			if po.cp != nil && po.cp.reuse {
				p.pageOutput.cp = po.cp
				break
			}
		}
	}

	// Keep nested Page resources in the same output format as their owner.
	for _, r := range p.Resources().ByType(pageResourceType) {
		rp := r.(*pageState)
		if err := rp.shiftToOutputFormat(isRenderingSite, idx); err != nil {
			return errors.Wrap(err, "failed to shift outputformat in Page resource")
		}
	}

	return nil
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo {
|
|
|
|
info := p.s.taxonomyNodes.Get(p.SectionsEntries()...)
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
if info == nil {
|
2019-04-15 03:38:14 -04:00
|
|
|
// There can be unused content pages for taxonomies (e.g. author that
|
|
|
|
// has not written anything, yet), and these will not have a taxonomy
|
|
|
|
// node created in the assemble taxonomies step.
|
|
|
|
return nil
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return info
|
2017-02-04 22:20:06 -05:00
|
|
|
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (p *pageState) sortParentSections() {
|
|
|
|
if p.parent == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
page.SortByDefault(p.parent.subSections)
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
|
|
|
|
// this page. It is prefixed with a "/".
|
|
|
|
//
|
|
|
|
// For pages that have a source file, it is returns the path to this file as an
|
|
|
|
// absolute path rooted in this site's content dir.
|
|
|
|
// For pages that do not (sections witout content page etc.), it returns the
|
|
|
|
// virtual path, consistent with where you would add a source file.
|
|
|
|
func (p *pageState) sourceRef() string {
|
2019-03-25 13:18:34 -04:00
|
|
|
if !p.File().IsZero() {
|
2019-01-02 06:33:26 -05:00
|
|
|
sourcePath := p.File().Path()
|
|
|
|
if sourcePath != "" {
|
|
|
|
return "/" + filepath.ToSlash(sourcePath)
|
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
if len(p.SectionsEntries()) > 0 {
|
|
|
|
// no backing file, return the virtual source path
|
|
|
|
return "/" + p.SectionsPath()
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return ""
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// pageStatePages is a slice of *pageState with sorting and filename-lookup
// helpers.
type pageStatePages []*pageState

// Implement sorting (sort.Interface), delegating ordering to the default
// page sort.
func (ps pageStatePages) Len() int { return len(ps) }

func (ps pageStatePages) Less(i, j int) bool { return page.DefaultPageSort(ps[i], ps[j]) }

func (ps pageStatePages) Swap(i, j int) { ps[i], ps[j] = ps[j], ps[i] }
|
2016-11-13 08:27:10 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// findPagePos Given a page, it will find the position in Pages
|
|
|
|
// will return -1 if not found
|
|
|
|
func (ps pageStatePages) findPagePos(page *pageState) int {
|
|
|
|
for i, x := range ps {
|
|
|
|
if x.File().Filename() == page.File().Filename() {
|
|
|
|
return i
|
2018-01-21 14:40:58 -05:00
|
|
|
}
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return -1
|
2016-11-13 08:27:10 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (ps pageStatePages) findPagePosByFilename(filename string) int {
|
|
|
|
for i, x := range ps {
|
|
|
|
if x.File().Filename() == filename {
|
|
|
|
return i
|
|
|
|
}
|
2018-12-29 04:35:46 -05:00
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return -1
|
2018-12-29 04:35:46 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (ps pageStatePages) findPagePosByFilnamePrefix(prefix string) int {
|
|
|
|
if prefix == "" {
|
|
|
|
return -1
|
2018-12-29 04:35:46 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
lenDiff := -1
|
|
|
|
currPos := -1
|
|
|
|
prefixLen := len(prefix)
|
2018-12-29 04:35:46 -05:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
// Find the closest match
|
|
|
|
for i, x := range ps {
|
|
|
|
if strings.HasPrefix(x.File().Filename(), prefix) {
|
|
|
|
diff := len(x.File().Filename()) - prefixLen
|
|
|
|
if lenDiff == -1 || diff < lenDiff {
|
|
|
|
lenDiff = diff
|
|
|
|
currPos = i
|
|
|
|
}
|
2018-12-29 04:35:46 -05:00
|
|
|
}
|
|
|
|
}
|
2019-01-02 06:33:26 -05:00
|
|
|
return currPos
|
2018-12-29 04:35:46 -05:00
|
|
|
}
|
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
func (s *Site) sectionsFromFile(fi source.File) []string {
|
|
|
|
dirname := fi.Dir()
|
|
|
|
dirname = strings.Trim(dirname, helpers.FilePathSeparator)
|
|
|
|
if dirname == "" {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
parts := strings.Split(dirname, helpers.FilePathSeparator)
|
|
|
|
|
|
|
|
if fii, ok := fi.(*fileInfo); ok {
|
|
|
|
if fii.bundleTp == bundleLeaf && len(parts) > 0 {
|
|
|
|
// my-section/mybundle/index.md => my-section
|
|
|
|
return parts[:len(parts)-1]
|
|
|
|
}
|
2017-03-26 13:34:30 -04:00
|
|
|
}
|
2018-09-24 18:06:29 -04:00
|
|
|
|
2019-01-02 06:33:26 -05:00
|
|
|
return parts
|
2018-09-24 18:06:29 -04:00
|
|
|
}
|