// Copyright 2017 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hugolib

import (
	"fmt"
	"path"
	"strconv"
	"strings"

	"github.com/gohugoio/hugo/helpers"

	radix "github.com/hashicorp/go-immutable-radix"
)

// Sections returns the top level sections.
func (s *SiteInfo) Sections() Pages {
	home, err := s.Home()
	if err == nil {
		return home.Sections()
	}
	return nil
}

// Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
func (s *SiteInfo) Home() (*Page, error) {
	return s.GetPage(KindHome)
}

// Parent returns a section's parent section or a page's section.
// To get a section's subsections, see Page's Sections method.
func (p *Page) Parent() *Page {
	return p.parent
}

// CurrentSection returns the page's current section or the page itself if home or a section.
// Note that this will return nil for pages that are not regular, home, or section pages.
func (p *Page) CurrentSection() *Page {
	v := p
	if v.origOnCopy != nil {
		v = v.origOnCopy
	}
	if v.IsHome() || v.IsSection() {
		return v
	}

	return v.parent
}

// FirstSection returns the section on level 1 below home, e.g. "/docs".
// For the home page, this will return itself.
func (p *Page) FirstSection() *Page {
	v := p
	if v.origOnCopy != nil {
		v = v.origOnCopy
	}

	if v.parent == nil || v.parent.IsHome() {
		return v
	}

	parent := v.parent
	for {
		current := parent
		parent = parent.parent
		if parent == nil || parent.IsHome() {
			return current
		}
	}
}

// InSection returns whether the given page is in the current section.
// Note that this will always return false for pages that are
// not regular, home, or section pages.
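// Example template usage: {{ if .InSection $anotherPage }} ... {{ end }}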
func (p *Page) InSection(other interface{}) (bool, error) {
	if p == nil || other == nil {
		return false, nil
	}

	pp, err := unwrapPage(other)
	if err != nil {
		return false, err
	}

	if pp == nil {
		return false, nil
	}

	return pp.CurrentSection() == p.CurrentSection(), nil
}

// IsDescendant returns whether the current page is a descendant of the given page.
// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
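// For example, a page under content/blog/cats/ is a descendant of the /blog section.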
func (p *Page) IsDescendant(other interface{}) (bool, error) {
	if p == nil {
		return false, nil
	}
	pp, err := unwrapPage(other)
	if err != nil || pp == nil {
		return false, err
	}

	if pp.Kind == KindPage && len(p.sections) == len(pp.sections) {
		// A regular page is never its section's descendant.
		return false, nil
	}
	return helpers.HasStringsPrefix(p.sections, pp.sections), nil
}

// IsAncestor returns whether the current page is an ancestor of the given page.
// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
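// For example, the /blog section is an ancestor of every page below content/blog/.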
func (p *Page) IsAncestor(other interface{}) (bool, error) {
	if p == nil {
		return false, nil
	}

	pp, err := unwrapPage(other)
	if err != nil || pp == nil {
		return false, err
	}

	if p.Kind == KindPage && len(p.sections) == len(pp.sections) {
		// A regular page is never its section's ancestor.
		return false, nil
	}

	return helpers.HasStringsPrefix(pp.sections, p.sections), nil
}

// Eq returns whether the current page equals the given page.
// Note that this is more accurate than doing `{{ if eq $page $otherPage }}`
// since a Page can be embedded in another type.
func (p *Page) Eq(other interface{}) bool {
	pp, err := unwrapPage(other)
	if err != nil {
		return false
	}

	return p == pp
}

func unwrapPage(in interface{}) (*Page, error) {
	switch v := in.(type) {
	case *Page:
		return v, nil
	case *PageOutput:
		return v.Page, nil
	case *PageWithoutContent:
		return v.Page, nil
	case nil:
		return nil, nil
	default:
		return nil, fmt.Errorf("%T not supported", in)
	}
}

// Sections returns this section's subsections, if any.
// Note that for non-sections, this method will always return an empty list.
func (p *Page) Sections() Pages {
	return p.subSections
}

func (s *Site) assembleSections() Pages {
	var newPages Pages

	if !s.isEnabled(KindSection) {
		return newPages
	}

	// Maps section kind pages to their path, i.e. "my/section"
	sectionPages := make(map[string]*Page)

	// The sections with content files will already have been created.
	for _, sect := range s.findPagesByKind(KindSection) {
		sectionPages[path.Join(sect.sections...)] = sect
	}

	const (
		sectKey     = "__hs"
		sectSectKey = "_a" + sectKey
		sectPageKey = "_b" + sectKey
	)
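	// Sections are inserted with the "_a" prefix and regular pages with "_b",
	// so a sorted walk of the radix tree always visits a section before the
	// pages that belong to it.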

	var (
		inPages    = radix.New().Txn()
		inSections = radix.New().Txn()
		undecided  Pages
	)
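	// inPages collects sections and regular pages keyed by synthetic paths,
	// inSections holds only the section pages keyed by their section path
	// (used for longest-prefix lookups below), and undecided collects pages
	// whose section page has not been created yet.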

	home := s.findFirstPageByKindIn(KindHome, s.Pages)

	for i, p := range s.Pages {
		if p.Kind != KindPage {
			continue
		}

		if len(p.sections) == 0 {
			// Root level pages. These will have the home page as their Parent.
			p.parent = home
			continue
		}

		sectionKey := path.Join(p.sections...)
		sect, found := sectionPages[sectionKey]

		if !found && len(p.sections) == 1 {
			// We only create content-file-less sections for the root sections.
			sect = s.newSectionPage(p.sections[0])
			sectionPages[sectionKey] = sect
			newPages = append(newPages, sect)
			found = true
		}

		if len(p.sections) > 1 {
			// Create the root section if not found.
			_, rootFound := sectionPages[p.sections[0]]
			if !rootFound {
				sect = s.newSectionPage(p.sections[0])
				sectionPages[p.sections[0]] = sect
				newPages = append(newPages, sect)
			}
		}

		if found {
			pagePath := path.Join(sectionKey, sectPageKey, strconv.Itoa(i))
			inPages.Insert([]byte(pagePath), p)
		} else {
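			// The page is nested more than one level deep and its section does
			// not exist yet; it will be placed once the missing sections below
			// have been created.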
			undecided = append(undecided, p)
		}
	}

	// Create any missing sections in the tree.
	// A sub-section needs a content file, but to build a complete navigational
	// tree from a content file like /content/a/b/c/_index.md, we cannot create
	// just the c section; the intermediate sections must exist as well.
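	// The loop below therefore walks every known section path from the leaf up
	// to the root and creates a section page for any intermediate path that is
	// still missing.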
	for _, sect := range sectionPages {
		for i := len(sect.sections); i > 0; i-- {
			sectionPath := sect.sections[:i]
			sectionKey := path.Join(sectionPath...)
			sect, found := sectionPages[sectionKey]
			if !found {
				sect = s.newSectionPage(sectionPath[len(sectionPath)-1])
				sect.sections = sectionPath
				sectionPages[sectionKey] = sect
				newPages = append(newPages, sect)
			}
		}
	}

	for k, sect := range sectionPages {
		inPages.Insert([]byte(path.Join(k, sectSectKey)), sect)
		inSections.Insert([]byte(k), sect)
	}

	var (
		currentSection *Page
		children       Pages
		rootSections   = inSections.Commit().Root()
	)

	for i, p := range undecided {
		// Now we can decide where to put this page into the tree.
		sectionKey := path.Join(p.sections...)
		_, v, _ := rootSections.LongestPrefix([]byte(sectionKey))
		sect := v.(*Page)
		pagePath := path.Join(path.Join(sect.sections...), sectSectKey, "u", strconv.Itoa(i))
		inPages.Insert([]byte(pagePath), p)
	}

	var rootPages = inPages.Commit().Root()
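
	// The radix tree is walked in key order, so each section (inserted with the
	// "_a" prefix) is visited before its pages (inserted with "_b"). The callback
	// only needs to remember the current section and collect its children until
	// the next section key appears. Returning false keeps the walk going.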
	rootPages.Walk(func(path []byte, v interface{}) bool {
		p := v.(*Page)

		if p.Kind == KindSection {
			if currentSection != nil {
				// A new section
				currentSection.setPagePages(children)
			}

			currentSection = p
			children = make(Pages, 0)

			return false
		}

		// Regular page
		p.parent = currentSection
		children = append(children, p)
		return false
	})

	if currentSection != nil {
		currentSection.setPagePages(children)
	}

	// Build the sections hierarchy
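	// Root-level sections get the home page as their parent; deeper sections are
	// attached to their parent section found via a longest-prefix lookup.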
	for _, sect := range sectionPages {
		if len(sect.sections) == 1 {
			sect.parent = home
		} else {
			parentSearchKey := path.Join(sect.sections[:len(sect.sections)-1]...)
			_, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
			p := v.(*Page)
			sect.parent = p
		}

		if sect.parent != nil {
			sect.parent.subSections = append(sect.parent.subSections, sect)
		}
	}

	var (
		sectionsParamId      = "mainSections"
		sectionsParamIdLower = strings.ToLower(sectionsParamId)
		mainSections         interface{}
		mainSectionsFound    bool
		maxSectionWeight     int
	)

	mainSections, mainSectionsFound = s.Info.Params[sectionsParamIdLower]

	for _, sect := range sectionPages {
		if sect.parent != nil {
			sect.parent.subSections.sort()
		}
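
		// Wire up in-section navigation: NextInSection points at the entry
		// before this one in the section's page order, PrevInSection at the
		// entry after it.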
		for i, p := range sect.Pages {
			if i > 0 {
				p.NextInSection = sect.Pages[i-1]
			}
			if i < len(sect.Pages)-1 {
				p.PrevInSection = sect.Pages[i+1]
			}
		}
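
		// If the site config does not set mainSections, fall back to guessing:
		// pick the section with the most content, where each subsection counts
		// as five regular pages.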
		if !mainSectionsFound {
			weight := len(sect.Pages) + (len(sect.Sections()) * 5)
			if weight >= maxSectionWeight {
				mainSections = []string{sect.Section()}
				maxSectionWeight = weight
			}
		}
	}

	// Try to make this as backwards compatible as possible.
	s.Info.Params[sectionsParamId] = mainSections
	s.Info.Params[sectionsParamIdLower] = mainSections

	return newPages
}

func (p *Page) setPagePages(pages Pages) {
	pages.sort()
	p.Pages = pages
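	// Also expose the sorted pages as .Data.Pages in the section's templates.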
	p.data = make(map[string]interface{})
	p.data["Pages"] = pages
}