Mirror of https://github.com/gohugoio/hugo.git (synced 2024-11-07 20:30:36 -05:00)
Add page fragments support to Related
The main change in this commit is that you can now index fragments (content heading identifiers) when calling `.Related`. You can do this by:

* Configuring one or more indices with type `fragments`.
* The name of such an index configuration maps to an (optional) front matter slice with fragment references. This allows you to link page<->fragment and page<->page.
* All fragments (heading identifiers) of the pages are also indexed.

It's also possible to use `fragments`-type indices in a shortcode, e.g.:

```
{{ $related := site.RegularPages.Related .Page }}
```

But, and this is important, you need to include the shortcode using the `{{<` delimiter. Not doing so will create infinite loops and timeouts.

This commit also:

* Adds two new methods to Page: `Fragments` (which can also be used to build a ToC) and `HeadingsFiltered` (only used in Related Content with index type `fragments` and `applyFilter` set to true).
* Consolidates all `.Related*` methods into one, which takes either a `Page` or an options map as its only argument.
* Adds `context.Context` to all of the content-related Page API. It turned out not to be strictly needed for this particular feature, but it will soon become useful, e.g. in #9339.

Closes #10711
Updates #9339
Updates #10725
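As a rough sketch of how this might look in practice (the shortcode name, index name and fragment identifiers below are illustrative, not part of this commit):

```go-html-template
{{/* layouts/shortcodes/related-reading.html, a hypothetical shortcode.
     Include it with the angle-bracket shortcode delimiter to avoid the
     infinite-loop issue described above. */}}
{{ $opts := dict
  "document" .Page
  "indices" (slice "fragmentrefs")
  "fragments" (slice "heading-1" "heading-2")
}}
{{ $related := site.RegularPages.Related $opts | first 3 }}
{{ with $related }}
<aside>
  <h2>Related reading</h2>
  <ul>
    {{ range . }}
    <li><a href="{{ .RelPermalink }}">{{ .LinkTitle }}</a></li>
    {{ end }}
  </ul>
</aside>
{{ end }}
```

Including the shortcode with the `{{%` delimiter instead would re-render the page content while it is being built, which is exactly the recursion the warning above is about.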
This commit is contained in:
parent
0afec0a9f4
commit
90da7664bf
66 changed files with 1363 additions and 829 deletions
.gitignore (vendored, new file, 2 changes)

@@ -0,0 +1,2 @@
+*.test
@@ -15,6 +15,7 @@ package collections
 
 import (
 	"reflect"
+	"sort"
 )
 
 // Slicer defines a very generic way to create a typed slice. This is used

@@ -74,3 +75,22 @@ func StringSliceToInterfaceSlice(ss []string) []any {
 	return result
 
 }
+
+type SortedStringSlice []string
+
+// Contains returns true if s is in ss.
+func (ss SortedStringSlice) Contains(s string) bool {
+	i := sort.SearchStrings(ss, s)
+	return i < len(ss) && ss[i] == s
+}
+
+// Count returns the number of times s is in ss.
+func (ss SortedStringSlice) Count(s string) int {
+	var count int
+	i := sort.SearchStrings(ss, s)
+	for i < len(ss) && ss[i] == s {
+		count++
+		i++
+	}
+	return count
+}

@@ -122,3 +122,18 @@ func TestSlice(t *testing.T) {
 		c.Assert(test.expected, qt.DeepEquals, result, errMsg)
 	}
 }
+
+func TestSortedStringSlice(t *testing.T) {
+	t.Parallel()
+	c := qt.New(t)
+
+	var s SortedStringSlice = []string{"a", "b", "b", "b", "c", "d"}
+
+	c.Assert(s.Contains("a"), qt.IsTrue)
+	c.Assert(s.Contains("b"), qt.IsTrue)
+	c.Assert(s.Contains("z"), qt.IsFalse)
+	c.Assert(s.Count("b"), qt.Equals, 3)
+	c.Assert(s.Count("z"), qt.Equals, 0)
+	c.Assert(s.Count("a"), qt.Equals, 1)
+
+}
@@ -36,3 +36,19 @@ type ProbablyEqer interface {
 type Comparer interface {
 	Compare(other any) int
 }
+
+// Eq returns whether v1 is equal to v2.
+// It will use the Eqer interface if implemented, which
+// defines equals when two values are interchangeable
+// in the Hugo templates.
+func Eq(v1, v2 any) bool {
+	if v1 == nil || v2 == nil {
+		return v1 == v2
+	}
+
+	if eqer, ok := v1.(Eqer); ok {
+		return eqer.Eq(v2)
+	}
+
+	return v1 == v2
+}
@@ -31,40 +31,82 @@ To list up to 5 related pages (which share the same _date_ or _keyword_ parameters)
 {{ end }}
 {{< /code >}}
 
-### Methods
-
-Here is the list of "Related" methods available on a page collection such `.RegularPages`.
-
-#### .Related PAGE
-
-Returns a collection of pages related the given one.
-
-```go-html-template
-{{ $related := site.RegularPages.Related . }}
-```
-
-#### .RelatedIndices PAGE INDICE1 [INDICE2 ...]
-
-Returns a collection of pages related to a given one restricted to a list of indices.
-
-```go-html-template
-{{ $related := site.RegularPages.RelatedIndices . "tags" "date" }}
-```
-
-#### .RelatedTo KEYVALS [KEYVALS2 ...]
-
-Returns a collection of pages related together by a set of indices and their match.
-
-In order to build those set and pass them as argument, one must use the `keyVals` function where the first argument would be the `indice` and the consecutive ones its potential `matches`.
-
-```go-html-template
-{{ $related := site.RegularPages.RelatedTo ( keyVals "tags" "hugo" "rocks") ( keyVals "date" .Date ) }}
-```
+The `Related` method takes one argument, which may be a `Page` or an options map. The options map has these options:
+
+indices
+: The indices to search in.
+
+document
+: The document to search for related content for.
+
+namedSlices
+: The keywords to search for.
+
+fragments
+: Fragments holds a list of special keywords that is used for indices configured as type "fragments". This will match the fragment identifiers of the documents.
+
+A fictional example using all of the above options:
+
+```go-html-template
+{{ $page := . }}
+{{ $opts := dict
+  "indices" (slice "tags" "keywords")
+  "document" $page
+  "namedSlices" (slice (keyVals "tags" "hugo" "rocks") (keyVals "date" $page.Date))
+  "fragments" (slice "heading-1" "heading-2")
+}}
+```
 
 {{% note %}}
-Read [this blog article](https://regisphilibert.com/blog/2018/04/hugo-optmized-relashionships-with-related-content/) for a great explanation of more advanced usage of this feature.
+We improved and simplified this feature in Hugo 0.111.0. Before that we had three different methods: `Related`, `RelatedTo` and `RelatedIndices`. Now there is only one method: `Related`. The old methods are still available but deprecated. Also see [this blog article](https://regisphilibert.com/blog/2018/04/hugo-optmized-relashionships-with-related-content/) for a great explanation of more advanced usage of this feature.
 {{% /note %}}
 
+## Index Content Headings in Related Content
+
+{{< new-in "0.111.0" >}}
+
+Hugo can index the headings in your content and use this to find related content. You can enable this by adding an index of type `fragments` to your `related` configuration:
+
+```toml
+[related]
+threshold = 20
+includeNewer = true
+toLower = false
+[[related.indices]]
+name = "fragmentrefs"
+type = "fragments"
+applyFilter = false
+weight = 80
+```
+
+* The `name` maps to an optional front matter slice attribute that can be used to link from the page level down to the fragment/heading level.
+* If `applyFilter` is enabled, the `.HeadingsFiltered` on each page in the result will reflect the filtered headings. This is useful if you want to show the headings in the related content listing:
+
+```go-html-template
+{{ $related := .Site.RegularPages.Related . | first 5 }}
+{{ with $related }}
+  <h2>See Also</h2>
+  <ul>
+    {{ range . }}
+      <li>
+        <a href="{{ .RelPermalink }}">{{ .Title }}</a>
+        {{ with .HeadingsFiltered }}
+          <ul>
+            {{ range . }}
+              {{ $link := printf "%s#%s" $.RelPermalink .ID }}
+              <li>
+                <a href="{{ $link }}">{{ .Title }}</a>
+              </li>
+            {{ end }}
+          </ul>
+        {{ end }}
+      </li>
+    {{ end }}
+  </ul>
+{{ end }}
+```
+
 ## Configure Related Content
 
 Hugo provides a sensible default configuration of Related Content, but you can fine-tune this in your configuration, on the global or language level if needed.

@@ -109,6 +151,12 @@ toLower
 name
 : The index name. This value maps directly to a page param. Hugo supports string values (`author` in the example) and lists (`tags`, `keywords` etc.) and time and date objects.
 
+type
+: {{< new-in "0.111.0" >}}. One of `basic` (default) or `fragments`.
+
+applyFilter
+: {{< new-in "0.111.0" >}}. Apply a `type`-specific filter to the result of a search. This is currently only used for the `fragments` type.
+
 weight
 : An integer weight that indicates _how important_ this parameter is relative to the other parameters. It can be 0, which has the effect of turning this index off, or even negative. Test with different values to see what fits your content best.
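As the note in the docs above says, `RelatedTo` and `RelatedIndices` are deprecated in 0.111.0 in favour of the single `Related` method. A sketch of what a migration might look like; the equivalence of the two calls is an assumption based on the options described above, not something stated in this diff:

```go-html-template
{{/* Deprecated form (pre-0.111.0): */}}
{{ $related := site.RegularPages.RelatedTo (keyVals "tags" "hugo" "rocks") (keyVals "date" .Date) }}

{{/* Possible replacement using the consolidated Related method with an options map: */}}
{{ $opts := dict
  "document" .
  "namedSlices" (slice (keyVals "tags" "hugo" "rocks") (keyVals "date" .Date))
}}
{{ $related := site.RegularPages.Related $opts }}
```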
go.mod (1 change)

@@ -61,6 +61,7 @@ require (
 	github.com/yuin/goldmark v1.5.4
 	go.uber.org/atomic v1.10.0
 	gocloud.dev v0.28.0
+	golang.org/x/exp v0.0.0-20221031165847-c99f073a8326
 	golang.org/x/image v0.0.0-20211028202545-6944b10bf410
 	golang.org/x/net v0.4.0
 	golang.org/x/sync v0.1.0

go.sum (1 change)

@@ -2002,6 +2002,7 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
+golang.org/x/exp v0.0.0-20221031165847-c99f073a8326 h1:QfTh0HpN6hlw6D3vu8DAwC8pBIwikq0AI1evdm+FksE=
 golang.org/x/exp v0.0.0-20221031165847-c99f073a8326/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
 golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"context"
 	"fmt"
 	"io"
 	"path/filepath"

@@ -83,7 +84,7 @@ func (f ContentFactory) ApplyArchetypeTemplate(w io.Writer, p page.Page, archety
 		return fmt.Errorf("failed to parse archetype template: %s: %w", err, err)
 	}
 
-	result, err := executeToString(ps.s.Tmpl(), templ, d)
+	result, err := executeToString(context.TODO(), ps.s.Tmpl(), templ, d)
 	if err != nil {
 		return fmt.Errorf("failed to execute archetype template: %s: %w", err, err)
 	}

@@ -171,7 +171,7 @@ func (m *pageMap) newPageFromContentNode(n *contentNode, parentBucket *pagesMapB
 		return nil, err
 	}
 
-	ps.init.Add(func() (any, error) {
+	ps.init.Add(func(context.Context) (any, error) {
 		pp, err := newPagePaths(s, ps, metaProvider)
 		if err != nil {
 			return nil, err

@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"context"
 	"encoding/json"
 	"fmt"
 	"html/template"

@@ -70,7 +71,7 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
 
 	c.Assert(len(s.RegularPages()), qt.Equals, 1)
 
-	content, err := s.RegularPages()[0].Content()
+	content, err := s.RegularPages()[0].Content(context.Background())
 	c.Assert(err, qt.IsNil)
 	output := cast.ToString(content)
@ -194,7 +194,7 @@ func (h *hugoSitesInit) Reset() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *HugoSites) Data() map[string]any {
|
func (h *HugoSites) Data() map[string]any {
|
||||||
if _, err := h.init.data.Do(); err != nil {
|
if _, err := h.init.data.Do(context.Background()); err != nil {
|
||||||
h.SendError(fmt.Errorf("failed to load data: %w", err))
|
h.SendError(fmt.Errorf("failed to load data: %w", err))
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -202,7 +202,7 @@ func (h *HugoSites) Data() map[string]any {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *HugoSites) gitInfoForPage(p page.Page) (source.GitInfo, error) {
|
func (h *HugoSites) gitInfoForPage(p page.Page) (source.GitInfo, error) {
|
||||||
if _, err := h.init.gitInfo.Do(); err != nil {
|
if _, err := h.init.gitInfo.Do(context.Background()); err != nil {
|
||||||
return source.GitInfo{}, err
|
return source.GitInfo{}, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -214,7 +214,7 @@ func (h *HugoSites) gitInfoForPage(p page.Page) (source.GitInfo, error) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *HugoSites) codeownersForPage(p page.Page) ([]string, error) {
|
func (h *HugoSites) codeownersForPage(p page.Page) ([]string, error) {
|
||||||
if _, err := h.init.gitInfo.Do(); err != nil {
|
if _, err := h.init.gitInfo.Do(context.Background()); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -363,7 +363,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
|
||||||
donec: make(chan bool),
|
donec: make(chan bool),
|
||||||
}
|
}
|
||||||
|
|
||||||
h.init.data.Add(func() (any, error) {
|
h.init.data.Add(func(context.Context) (any, error) {
|
||||||
err := h.loadData(h.PathSpec.BaseFs.Data.Dirs)
|
err := h.loadData(h.PathSpec.BaseFs.Data.Dirs)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("failed to load data: %w", err)
|
return nil, fmt.Errorf("failed to load data: %w", err)
|
||||||
|
@ -371,7 +371,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
})
|
})
|
||||||
|
|
||||||
h.init.layouts.Add(func() (any, error) {
|
h.init.layouts.Add(func(context.Context) (any, error) {
|
||||||
for _, s := range h.Sites {
|
for _, s := range h.Sites {
|
||||||
if err := s.Tmpl().(tpl.TemplateManager).MarkReady(); err != nil {
|
if err := s.Tmpl().(tpl.TemplateManager).MarkReady(); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
@ -380,7 +380,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
})
|
})
|
||||||
|
|
||||||
h.init.translations.Add(func() (any, error) {
|
h.init.translations.Add(func(context.Context) (any, error) {
|
||||||
if len(h.Sites) > 1 {
|
if len(h.Sites) > 1 {
|
||||||
allTranslations := pagesToTranslationsMap(h.Sites)
|
allTranslations := pagesToTranslationsMap(h.Sites)
|
||||||
assignTranslationsToPages(allTranslations, h.Sites)
|
assignTranslationsToPages(allTranslations, h.Sites)
|
||||||
|
@ -389,7 +389,7 @@ func newHugoSites(cfg deps.DepsCfg, sites ...*Site) (*HugoSites, error) {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
})
|
})
|
||||||
|
|
||||||
h.init.gitInfo.Add(func() (any, error) {
|
h.init.gitInfo.Add(func(context.Context) (any, error) {
|
||||||
err := h.loadGitInfo()
|
err := h.loadGitInfo()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("failed to load Git info: %w", err)
|
return nil, fmt.Errorf("failed to load Git info: %w", err)
|
||||||
|
|
|
@ -268,7 +268,7 @@ func (h *HugoSites) assemble(bcfg *BuildCfg) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (h *HugoSites) render(config *BuildCfg) error {
|
func (h *HugoSites) render(config *BuildCfg) error {
|
||||||
if _, err := h.init.layouts.Do(); err != nil {
|
if _, err := h.init.layouts.Do(context.Background()); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -396,7 +396,7 @@ line 4
 
 }
 
-func TestErrorNestedShortocde(t *testing.T) {
+func TestErrorNestedShortcode(t *testing.T) {
 	t.Parallel()
 
 	files := `
@ -14,162 +14,9 @@
|
||||||
package hugolib
|
package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"runtime"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/config"
|
|
||||||
"github.com/gohugoio/hugo/htesting"
|
|
||||||
|
|
||||||
qt "github.com/frankban/quicktest"
|
|
||||||
"github.com/gohugoio/hugo/hugofs"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// We have many tests for the different resize operations etc. in the resource package,
|
|
||||||
// this is an integration test.
|
|
||||||
func TestImageOps(t *testing.T) {
|
|
||||||
c := qt.New(t)
|
|
||||||
// Make this a real as possible.
|
|
||||||
workDir, clean, err := htesting.CreateTempDir(hugofs.Os, "image-resize")
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
defer clean()
|
|
||||||
|
|
||||||
newBuilder := func(timeout any) *sitesBuilder {
|
|
||||||
v := config.NewWithTestDefaults()
|
|
||||||
v.Set("workingDir", workDir)
|
|
||||||
v.Set("baseURL", "https://example.org")
|
|
||||||
v.Set("timeout", timeout)
|
|
||||||
|
|
||||||
b := newTestSitesBuilder(t).WithWorkingDir(workDir)
|
|
||||||
b.Fs = hugofs.NewDefault(v)
|
|
||||||
b.WithViper(v)
|
|
||||||
b.WithContent("mybundle/index.md", `
|
|
||||||
---
|
|
||||||
title: "My bundle"
|
|
||||||
---
|
|
||||||
|
|
||||||
{{< imgproc >}}
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
b.WithTemplatesAdded(
|
|
||||||
"shortcodes/imgproc.html", `
|
|
||||||
{{ $img := resources.Get "images/sunset.jpg" }}
|
|
||||||
{{ $r := $img.Resize "129x239" }}
|
|
||||||
IMG SHORTCODE: {{ $r.RelPermalink }}/{{ $r.Width }}
|
|
||||||
`,
|
|
||||||
"index.html", `
|
|
||||||
{{ $p := .Site.GetPage "mybundle" }}
|
|
||||||
{{ $img1 := resources.Get "images/sunset.jpg" }}
|
|
||||||
{{ $img2 := $p.Resources.GetMatch "sunset.jpg" }}
|
|
||||||
{{ $img3 := resources.GetMatch "images/*.jpg" }}
|
|
||||||
{{ $r := $img1.Resize "123x234" }}
|
|
||||||
{{ $r2 := $r.Resize "12x23" }}
|
|
||||||
{{ $b := $img2.Resize "345x678" }}
|
|
||||||
{{ $b2 := $b.Resize "34x67" }}
|
|
||||||
{{ $c := $img3.Resize "456x789" }}
|
|
||||||
{{ $fingerprinted := $img1.Resize "350x" | fingerprint }}
|
|
||||||
|
|
||||||
{{ $images := slice $r $r2 $b $b2 $c $fingerprinted }}
|
|
||||||
|
|
||||||
{{ range $i, $r := $images }}
|
|
||||||
{{ printf "Resized%d:" (add $i 1) }} {{ $r.Name }}|{{ $r.Width }}|{{ $r.Height }}|{{ $r.MediaType }}|{{ $r.RelPermalink }}|
|
|
||||||
{{ end }}
|
|
||||||
|
|
||||||
{{ $blurryGrayscale1 := $r | images.Filter images.Grayscale (images.GaussianBlur 8) }}
|
|
||||||
BG1: {{ $blurryGrayscale1.RelPermalink }}/{{ $blurryGrayscale1.Width }}
|
|
||||||
{{ $blurryGrayscale2 := $r.Filter images.Grayscale (images.GaussianBlur 8) }}
|
|
||||||
BG2: {{ $blurryGrayscale2.RelPermalink }}/{{ $blurryGrayscale2.Width }}
|
|
||||||
{{ $blurryGrayscale2_2 := $r.Filter images.Grayscale (images.GaussianBlur 8) }}
|
|
||||||
BG2_2: {{ $blurryGrayscale2_2.RelPermalink }}/{{ $blurryGrayscale2_2.Width }}
|
|
||||||
|
|
||||||
{{ $filters := slice images.Grayscale (images.GaussianBlur 9) }}
|
|
||||||
{{ $blurryGrayscale3 := $r | images.Filter $filters }}
|
|
||||||
BG3: {{ $blurryGrayscale3.RelPermalink }}/{{ $blurryGrayscale3.Width }}
|
|
||||||
|
|
||||||
{{ $blurryGrayscale4 := $r.Filter $filters }}
|
|
||||||
BG4: {{ $blurryGrayscale4.RelPermalink }}/{{ $blurryGrayscale4.Width }}
|
|
||||||
|
|
||||||
{{ $p.Content }}
|
|
||||||
|
|
||||||
`)
|
|
||||||
|
|
||||||
return b
|
|
||||||
}
|
|
||||||
|
|
||||||
imageDir := filepath.Join(workDir, "assets", "images")
|
|
||||||
bundleDir := filepath.Join(workDir, "content", "mybundle")
|
|
||||||
|
|
||||||
c.Assert(os.MkdirAll(imageDir, 0777), qt.IsNil)
|
|
||||||
c.Assert(os.MkdirAll(bundleDir, 0777), qt.IsNil)
|
|
||||||
src, err := os.Open("testdata/sunset.jpg")
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
out, err := os.Create(filepath.Join(imageDir, "sunset.jpg"))
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
_, err = io.Copy(out, src)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
out.Close()
|
|
||||||
|
|
||||||
src.Seek(0, 0)
|
|
||||||
|
|
||||||
out, err = os.Create(filepath.Join(bundleDir, "sunset.jpg"))
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
_, err = io.Copy(out, src)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
out.Close()
|
|
||||||
src.Close()
|
|
||||||
|
|
||||||
// First build it with a very short timeout to trigger errors.
|
|
||||||
b := newBuilder("10ns")
|
|
||||||
|
|
||||||
imgExpect := `
|
|
||||||
Resized1: images/sunset.jpg|123|234|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_123x234_resize_q75_box.jpg|
|
|
||||||
Resized2: images/sunset.jpg|12|23|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ada4bb1a57f77a63306e3bd67286248e.jpg|
|
|
||||||
Resized3: sunset.jpg|345|678|image/jpeg|/mybundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_345x678_resize_q75_box.jpg|
|
|
||||||
Resized4: sunset.jpg|34|67|image/jpeg|/mybundle/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_44d8c928664d7c5a67377c6ec58425ce.jpg|
|
|
||||||
Resized5: images/sunset.jpg|456|789|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_456x789_resize_q75_box.jpg|
|
|
||||||
Resized6: images/sunset.jpg|350|219|image/jpeg|/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_350x0_resize_q75_box.a86fe88d894e5db613f6aa8a80538fefc25b20fa24ba0d782c057adcef616f56.jpg|
|
|
||||||
BG1: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_2ae8bb993431ec1aec40fe59927b46b4.jpg/123
|
|
||||||
BG2: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_2ae8bb993431ec1aec40fe59927b46b4.jpg/123
|
|
||||||
BG3: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg/123
|
|
||||||
BG4: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg/123
|
|
||||||
IMG SHORTCODE: /images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_129x239_resize_q75_box.jpg/129
|
|
||||||
`
|
|
||||||
|
|
||||||
assertImages := func() {
|
|
||||||
b.Helper()
|
|
||||||
b.AssertFileContent("public/index.html", imgExpect)
|
|
||||||
b.AssertImage(350, 219, "public/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_350x0_resize_q75_box.a86fe88d894e5db613f6aa8a80538fefc25b20fa24ba0d782c057adcef616f56.jpg")
|
|
||||||
b.AssertImage(129, 239, "public/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_129x239_resize_q75_box.jpg")
|
|
||||||
}
|
|
||||||
|
|
||||||
err = b.BuildE(BuildCfg{})
|
|
||||||
if runtime.GOOS != "windows" && !strings.Contains(runtime.GOARCH, "arm") && !htesting.IsGitHubAction() {
|
|
||||||
// TODO(bep)
|
|
||||||
c.Assert(err, qt.Not(qt.IsNil))
|
|
||||||
}
|
|
||||||
|
|
||||||
b = newBuilder(29000)
|
|
||||||
b.Build(BuildCfg{})
|
|
||||||
|
|
||||||
assertImages()
|
|
||||||
|
|
||||||
// Truncate one image.
|
|
||||||
imgInCache := filepath.Join(workDir, "resources/_gen/images/sunset_hu59e56ffff1bc1d8d122b1403d34e039f_90587_ed7740a90b82802261c2fbdb98bc8082.jpg")
|
|
||||||
f, err := os.Create(imgInCache)
|
|
||||||
c.Assert(err, qt.IsNil)
|
|
||||||
f.Close()
|
|
||||||
|
|
||||||
// Build it again to make sure we read images from file cache.
|
|
||||||
b = newBuilder("30s")
|
|
||||||
b.Build(BuildCfg{})
|
|
||||||
|
|
||||||
assertImages()
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestImageResizeMultilingual(t *testing.T) {
|
func TestImageResizeMultilingual(t *testing.T) {
|
||||||
b := newTestSitesBuilder(t).WithConfigFile("toml", `
|
b := newTestSitesBuilder(t).WithConfigFile("toml", `
|
||||||
baseURL="https://example.org"
|
baseURL="https://example.org"
|
||||||
|
|
|
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"context"
 	"fmt"
 	"os"
 	"path/filepath"

@@ -245,7 +246,7 @@ Content.
 	c.Assert(svP2.Language().Lang, qt.Equals, "sv")
 	c.Assert(nnP2.Language().Lang, qt.Equals, "nn")
 
-	content, _ := nnP2.Content()
+	content, _ := nnP2.Content(context.Background())
 	contentStr := cast.ToString(content)
 	c.Assert(contentStr, qt.Contains, "SVP3-REF: https://example.org/sv/sect/p-sv-3/")
 	c.Assert(contentStr, qt.Contains, "SVP3-RELREF: /sv/sect/p-sv-3/")
@@ -15,6 +15,7 @@ package hugolib
 
 import (
 	"bytes"
+	"context"
 	"fmt"
 	"path"
 	"path/filepath"

@@ -24,8 +25,10 @@ import (
 	"go.uber.org/atomic"
 
 	"github.com/gohugoio/hugo/identity"
+	"github.com/gohugoio/hugo/related"
 
 	"github.com/gohugoio/hugo/markup/converter"
+	"github.com/gohugoio/hugo/markup/tableofcontents"
 
 	"github.com/gohugoio/hugo/tpl"
 

@@ -148,6 +151,43 @@ func (p *pageState) GetIdentity() identity.Identity {
 	return identity.NewPathIdentity(files.ComponentFolderContent, filepath.FromSlash(p.Pathc()))
 }
 
+func (p *pageState) Fragments(ctx context.Context) *tableofcontents.Fragments {
+	p.s.initInit(ctx, p.cp.initToC, p)
+	if p.pageOutput.cp.tableOfContents == nil {
+		return tableofcontents.Empty
+	}
+	return p.pageOutput.cp.tableOfContents
+}
+
+func (p *pageState) HeadingsFiltered(context.Context) tableofcontents.Headings {
+	return nil
+}
+
+type pageHeadingsFiltered struct {
+	*pageState
+	headings tableofcontents.Headings
+}
+
+func (p *pageHeadingsFiltered) HeadingsFiltered(context.Context) tableofcontents.Headings {
+	return p.headings
+}
+
+func (p *pageHeadingsFiltered) page() page.Page {
+	return p.pageState
+}
+
+// For internal use by the related content feature.
+func (p *pageState) ApplyFilterToHeadings(ctx context.Context, fn func(*tableofcontents.Heading) bool) related.Document {
+	if p.pageOutput.cp.tableOfContents == nil {
+		return p
+	}
+	headings := p.pageOutput.cp.tableOfContents.Headings.FilterBy(fn)
+	return &pageHeadingsFiltered{
+		pageState: p,
+		headings:  headings,
+	}
+}
+
 func (p *pageState) GitInfo() source.GitInfo {
 	return p.gitInfo
 }

@@ -351,7 +391,7 @@ func (p *pageState) String() string {
 // IsTranslated returns whether this content file is translated to
 // other language(s).
 func (p *pageState) IsTranslated() bool {
-	p.s.h.init.translations.Do()
+	p.s.h.init.translations.Do(context.Background())
 	return len(p.translations) > 0
 }
 

@@ -375,13 +415,13 @@ func (p *pageState) TranslationKey() string {
 
 // AllTranslations returns all translations, including the current Page.
 func (p *pageState) AllTranslations() page.Pages {
-	p.s.h.init.translations.Do()
+	p.s.h.init.translations.Do(context.Background())
 	return p.allTranslations
 }
 
 // Translations returns the translations excluding the current Page.
 func (p *pageState) Translations() page.Pages {
-	p.s.h.init.translations.Do()
+	p.s.h.init.translations.Do(context.Background())
 	return p.translations
 }
 

@@ -461,7 +501,7 @@ func (p *pageState) initOutputFormat(isRenderingSite bool, idx int) error {
 
 // Must be run after the site section tree etc. is built and ready.
 func (p *pageState) initPage() error {
-	if _, err := p.init.Do(); err != nil {
+	if _, err := p.init.Do(context.Background()); err != nil {
 		return err
 	}
 	return nil
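The new `Fragments` method above returns the page's heading structure, which the commit message says can also be used to build a ToC. A hedged template sketch of that idea: the `.Headings`, `.ID` and `.Title` field names are inferred from other parts of this diff (the `Headings.FilterBy` call and the `.HeadingsFiltered` docs example), nested headings are ignored, and the context argument is assumed to be injected by Hugo's template executor:

```go-html-template
{{/* Hypothetical partial: a flat mini-ToC built from .Fragments.
     Field names are assumptions based on other hunks in this commit. */}}
{{ with .Fragments }}
  <nav class="mini-toc">
    <ul>
      {{ range .Headings }}
        <li><a href="#{{ .ID }}">{{ .Title }}</a></li>
      {{ end }}
    </ul>
  </nav>
{{ end }}
```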
@ -14,6 +14,7 @@
|
||||||
package hugolib
|
package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/output"
|
"github.com/gohugoio/hugo/output"
|
||||||
|
@ -37,9 +38,9 @@ type pageContent struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
// returns the content to be processed by Goldmark or similar.
|
// returns the content to be processed by Goldmark or similar.
|
||||||
func (p pageContent) contentToRender(parsed pageparser.Result, pm *pageContentMap, renderedShortcodes map[string]string) []byte {
|
func (p pageContent) contentToRender(ctx context.Context, parsed pageparser.Result, pm *pageContentMap, renderedShortcodes map[string]shortcodeRenderer) ([]byte, bool, error) {
|
||||||
source := parsed.Input()
|
source := parsed.Input()
|
||||||
|
var hasVariants bool
|
||||||
c := make([]byte, 0, len(source)+(len(source)/10))
|
c := make([]byte, 0, len(source)+(len(source)/10))
|
||||||
|
|
||||||
for _, it := range pm.items {
|
for _, it := range pm.items {
|
||||||
|
@ -57,7 +58,12 @@ func (p pageContent) contentToRender(parsed pageparser.Result, pm *pageContentMa
|
||||||
panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
|
panic(fmt.Sprintf("rendered shortcode %q not found", v.placeholder))
|
||||||
}
|
}
|
||||||
|
|
||||||
c = append(c, []byte(renderedShortcode)...)
|
b, more, err := renderedShortcode.renderShortcode(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, false, fmt.Errorf("failed to render shortcode: %w", err)
|
||||||
|
}
|
||||||
|
hasVariants = hasVariants || more
|
||||||
|
c = append(c, []byte(b)...)
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
// Insert the placeholder so we can insert the content after
|
// Insert the placeholder so we can insert the content after
|
||||||
|
@ -69,7 +75,7 @@ func (p pageContent) contentToRender(parsed pageparser.Result, pm *pageContentMa
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return c
|
return c, hasVariants, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p pageContent) selfLayoutForOutput(f output.Format) string {
|
func (p pageContent) selfLayoutForOutput(f output.Format) string {
|
||||||
|
|
|
@ -14,6 +14,7 @@
|
||||||
package hugolib
|
package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/navigation"
|
"github.com/gohugoio/hugo/navigation"
|
||||||
|
@ -29,13 +30,13 @@ type pageMenus struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
|
func (p *pageMenus) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
|
||||||
p.p.s.init.menus.Do()
|
p.p.s.init.menus.Do(context.Background())
|
||||||
p.init()
|
p.init()
|
||||||
return p.q.HasMenuCurrent(menuID, me)
|
return p.q.HasMenuCurrent(menuID, me)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
|
func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
|
||||||
p.p.s.init.menus.Do()
|
p.p.s.init.menus.Do(context.Background())
|
||||||
p.init()
|
p.init()
|
||||||
return p.q.IsMenuCurrent(menuID, inme)
|
return p.q.IsMenuCurrent(menuID, inme)
|
||||||
}
|
}
|
||||||
|
@ -43,7 +44,7 @@ func (p *pageMenus) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) boo
|
||||||
func (p *pageMenus) Menus() navigation.PageMenus {
|
func (p *pageMenus) Menus() navigation.PageMenus {
|
||||||
// There is a reverse dependency here. initMenus will, once, build the
|
// There is a reverse dependency here. initMenus will, once, build the
|
||||||
// site menus and update any relevant page.
|
// site menus and update any relevant page.
|
||||||
p.p.s.init.menus.Do()
|
p.p.s.init.menus.Do(context.Background())
|
||||||
|
|
||||||
return p.menus()
|
return p.menus()
|
||||||
}
|
}
|
||||||
|
|
|
@ -14,6 +14,7 @@
|
||||||
package hugolib
|
package hugolib
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"context"
|
||||||
"html/template"
|
"html/template"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
@ -121,7 +122,7 @@ func newPageFromMeta(
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
ps.init.Add(func() (any, error) {
|
ps.init.Add(func(context.Context) (any, error) {
|
||||||
pp, err := newPagePaths(metaProvider.s, ps, metaProvider)
|
pp, err := newPagePaths(metaProvider.s, ps, metaProvider)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
|
|
@ -18,7 +18,6 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"html/template"
|
"html/template"
|
||||||
"runtime/debug"
|
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
"unicode/utf8"
|
"unicode/utf8"
|
||||||
|
@ -34,6 +33,7 @@ import (
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/markup/converter/hooks"
|
"github.com/gohugoio/hugo/markup/converter/hooks"
|
||||||
"github.com/gohugoio/hugo/markup/highlight/chromalexers"
|
"github.com/gohugoio/hugo/markup/highlight/chromalexers"
|
||||||
|
"github.com/gohugoio/hugo/markup/tableofcontents"
|
||||||
|
|
||||||
"github.com/gohugoio/hugo/markup/converter"
|
"github.com/gohugoio/hugo/markup/converter"
|
||||||
|
|
||||||
|
@ -87,43 +87,35 @@ func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, err
|
||||||
renderHooks: &renderHooks{},
|
renderHooks: &renderHooks{},
|
||||||
}
|
}
|
||||||
|
|
||||||
initContent := func() (err error) {
|
initToC := func(ctx context.Context) (err error) {
|
||||||
p.s.h.IncrContentRender()
|
|
||||||
|
|
||||||
if p.cmap == nil {
|
if p.cmap == nil {
|
||||||
// Nothing to do.
|
// Nothing to do.
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
defer func() {
|
|
||||||
// See https://github.com/gohugoio/hugo/issues/6210
|
|
||||||
if r := recover(); r != nil {
|
|
||||||
err = fmt.Errorf("%s", r)
|
|
||||||
p.s.Log.Errorf("[BUG] Got panic:\n%s\n%s", r, string(debug.Stack()))
|
|
||||||
}
|
|
||||||
}()
|
|
||||||
|
|
||||||
if err := po.cp.initRenderHooks(); err != nil {
|
if err := po.cp.initRenderHooks(); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
var hasShortcodeVariants bool
|
|
||||||
|
|
||||||
f := po.f
|
f := po.f
|
||||||
cp.contentPlaceholders, hasShortcodeVariants, err = p.shortcodeState.renderShortcodesForPage(p, f)
|
cp.contentPlaceholders, err = p.shortcodeState.prepareShortcodesForPage(ctx, p, f)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if hasShortcodeVariants {
|
var hasVariants bool
|
||||||
|
cp.workContent, hasVariants, err = p.contentToRender(ctx, p.source.parsed, p.cmap, cp.contentPlaceholders)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if hasVariants {
|
||||||
p.pageOutputTemplateVariationsState.Store(2)
|
p.pageOutputTemplateVariationsState.Store(2)
|
||||||
}
|
}
|
||||||
|
|
||||||
cp.workContent = p.contentToRender(p.source.parsed, p.cmap, cp.contentPlaceholders)
|
|
||||||
|
|
||||||
isHTML := cp.p.m.markup == "html"
|
isHTML := cp.p.m.markup == "html"
|
||||||
|
|
||||||
if !isHTML {
|
if !isHTML {
|
||||||
r, err := po.contentRenderer.RenderContent(cp.workContent, true)
|
r, err := po.contentRenderer.RenderContent(ctx, cp.workContent, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -132,8 +124,9 @@ func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, err
|
||||||
|
|
||||||
if tocProvider, ok := r.(converter.TableOfContentsProvider); ok {
|
if tocProvider, ok := r.(converter.TableOfContentsProvider); ok {
|
||||||
cfg := p.s.ContentSpec.Converters.GetMarkupConfig()
|
cfg := p.s.ContentSpec.Converters.GetMarkupConfig()
|
||||||
cp.tableOfContents = template.HTML(
|
cp.tableOfContents = tocProvider.TableOfContents()
|
||||||
tocProvider.TableOfContents().ToHTML(
|
cp.tableOfContentsHTML = template.HTML(
|
||||||
|
cp.tableOfContents.ToHTML(
|
||||||
cfg.TableOfContents.StartLevel,
|
cfg.TableOfContents.StartLevel,
|
||||||
cfg.TableOfContents.EndLevel,
|
cfg.TableOfContents.EndLevel,
|
||||||
cfg.TableOfContents.Ordered,
|
cfg.TableOfContents.Ordered,
|
||||||
|
@ -141,26 +134,60 @@ func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, err
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent)
|
tmpContent, tmpTableOfContents := helpers.ExtractTOC(cp.workContent)
|
||||||
cp.tableOfContents = helpers.BytesToHTML(tmpTableOfContents)
|
cp.tableOfContentsHTML = helpers.BytesToHTML(tmpTableOfContents)
|
||||||
|
cp.tableOfContents = tableofcontents.Empty
|
||||||
cp.workContent = tmpContent
|
cp.workContent = tmpContent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if cp.placeholdersEnabled {
|
return nil
|
||||||
// ToC was accessed via .Page.TableOfContents in the shortcode,
|
|
||||||
// at a time when the ToC wasn't ready.
|
}
|
||||||
cp.contentPlaceholders[tocShortcodePlaceholder] = string(cp.tableOfContents)
|
|
||||||
|
initContent := func(ctx context.Context) (err error) {
|
||||||
|
|
||||||
|
p.s.h.IncrContentRender()
|
||||||
|
|
||||||
|
if p.cmap == nil {
|
||||||
|
// Nothing to do.
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled {
|
if p.cmap.hasNonMarkdownShortcode || cp.placeholdersEnabled {
|
||||||
// There are one or more replacement tokens to be replaced.
|
// There are one or more replacement tokens to be replaced.
|
||||||
cp.workContent, err = replaceShortcodeTokens(cp.workContent, cp.contentPlaceholders)
|
var hasShortcodeVariants bool
|
||||||
|
tokenHandler := func(ctx context.Context, token string) ([]byte, error) {
|
||||||
|
if token == tocShortcodePlaceholder {
|
||||||
|
// The Page's TableOfContents was accessed in a shortcode.
|
||||||
|
if cp.tableOfContentsHTML == "" {
|
||||||
|
cp.p.s.initInit(ctx, cp.initToC, cp.p)
|
||||||
|
}
|
||||||
|
return []byte(cp.tableOfContentsHTML), nil
|
||||||
|
}
|
||||||
|
renderer, found := cp.contentPlaceholders[token]
|
||||||
|
if found {
|
||||||
|
repl, more, err := renderer.renderShortcode(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
hasShortcodeVariants = hasShortcodeVariants || more
|
||||||
|
return repl, nil
|
||||||
|
}
|
||||||
|
// This should never happen.
|
||||||
|
return nil, fmt.Errorf("unknown shortcode token %q", token)
|
||||||
|
}
|
||||||
|
|
||||||
|
cp.workContent, err = expandShortcodeTokens(ctx, cp.workContent, tokenHandler)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
if hasShortcodeVariants {
|
||||||
|
p.pageOutputTemplateVariationsState.Store(2)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if cp.p.source.hasSummaryDivider {
|
if cp.p.source.hasSummaryDivider {
|
||||||
|
isHTML := cp.p.m.markup == "html"
|
||||||
if isHTML {
|
if isHTML {
|
||||||
src := p.source.parsed.Input()
|
src := p.source.parsed.Input()
|
||||||
|
|
||||||
|
@ -183,7 +210,7 @@ func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else if cp.p.m.summary != "" {
|
} else if cp.p.m.summary != "" {
|
||||||
b, err := po.contentRenderer.RenderContent([]byte(cp.p.m.summary), false)
|
b, err := po.contentRenderer.RenderContent(ctx, []byte(cp.p.m.summary), false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -196,12 +223,16 @@ func newPageContentOutput(p *pageState, po *pageOutput) (*pageContentOutput, err
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// There may be recursive loops in shortcodes and render hooks.
|
cp.initToC = parent.Branch(func(ctx context.Context) (any, error) {
|
||||||
cp.initMain = parent.BranchWithTimeout(p.s.siteCfg.timeout, func(ctx context.Context) (any, error) {
|
return nil, initToC(ctx)
|
||||||
return nil, initContent()
|
|
||||||
})
|
})
|
||||||
|
|
||||||
cp.initPlain = cp.initMain.Branch(func() (any, error) {
|
// There may be recursive loops in shortcodes and render hooks.
|
||||||
|
cp.initMain = cp.initToC.BranchWithTimeout(p.s.siteCfg.timeout, func(ctx context.Context) (any, error) {
|
||||||
|
return nil, initContent(ctx)
|
||||||
|
})
|
||||||
|
|
||||||
|
cp.initPlain = cp.initMain.Branch(func(context.Context) (any, error) {
|
||||||
cp.plain = tpl.StripHTML(string(cp.content))
|
cp.plain = tpl.StripHTML(string(cp.content))
|
||||||
cp.plainWords = strings.Fields(cp.plain)
|
cp.plainWords = strings.Fields(cp.plain)
|
||||||
cp.setWordCounts(p.m.isCJKLanguage)
|
cp.setWordCounts(p.m.isCJKLanguage)
|
||||||
|
@ -228,6 +259,7 @@ type pageContentOutput struct {
|
||||||
p *pageState
|
p *pageState
|
||||||
|
|
||||||
// Lazy load dependencies
|
// Lazy load dependencies
|
||||||
|
initToC *lazy.Init
|
||||||
initMain *lazy.Init
|
initMain *lazy.Init
|
||||||
initPlain *lazy.Init
|
initPlain *lazy.Init
|
||||||
|
|
||||||
|
@ -243,12 +275,13 @@ type pageContentOutput struct {
|
||||||
// Temporary storage of placeholders mapped to their content.
|
// Temporary storage of placeholders mapped to their content.
|
||||||
// These are shortcodes etc. Some of these will need to be replaced
|
// These are shortcodes etc. Some of these will need to be replaced
|
||||||
// after any markup is rendered, so they share a common prefix.
|
// after any markup is rendered, so they share a common prefix.
|
||||||
contentPlaceholders map[string]string
|
contentPlaceholders map[string]shortcodeRenderer
|
||||||
|
|
||||||
// Content sections
|
// Content sections
|
||||||
content template.HTML
|
content template.HTML
|
||||||
summary template.HTML
|
summary template.HTML
|
||||||
tableOfContents template.HTML
|
tableOfContents *tableofcontents.Fragments
|
||||||
|
tableOfContentsHTML template.HTML
|
||||||
|
|
||||||
truncated bool
|
truncated bool
|
||||||
|
|
||||||
|
@ -263,76 +296,76 @@ func (p *pageContentOutput) trackDependency(id identity.Provider) {
|
||||||
if p.dependencyTracker != nil {
|
if p.dependencyTracker != nil {
|
||||||
p.dependencyTracker.Add(id)
|
p.dependencyTracker.Add(id)
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Reset() {
|
func (p *pageContentOutput) Reset() {
|
||||||
if p.dependencyTracker != nil {
|
if p.dependencyTracker != nil {
|
||||||
p.dependencyTracker.Reset()
|
p.dependencyTracker.Reset()
|
||||||
}
|
}
|
||||||
|
p.initToC.Reset()
|
||||||
p.initMain.Reset()
|
p.initMain.Reset()
|
||||||
p.initPlain.Reset()
|
p.initPlain.Reset()
|
||||||
p.renderHooks = &renderHooks{}
|
p.renderHooks = &renderHooks{}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Content() (any, error) {
|
func (p *pageContentOutput) Content(ctx context.Context) (any, error) {
|
||||||
if p.p.s.initInit(p.initMain, p.p) {
|
p.p.s.initInit(ctx, p.initMain, p.p)
|
||||||
return p.content, nil
|
return p.content, nil
|
||||||
}
|
|
||||||
return nil, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) FuzzyWordCount() int {
|
func (p *pageContentOutput) FuzzyWordCount(ctx context.Context) int {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.fuzzyWordCount
|
return p.fuzzyWordCount
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Len() int {
|
func (p *pageContentOutput) Len(ctx context.Context) int {
|
||||||
p.p.s.initInit(p.initMain, p.p)
|
p.p.s.initInit(ctx, p.initMain, p.p)
|
||||||
return len(p.content)
|
return len(p.content)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Plain() string {
|
func (p *pageContentOutput) Plain(ctx context.Context) string {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.plain
|
return p.plain
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) PlainWords() []string {
|
func (p *pageContentOutput) PlainWords(ctx context.Context) []string {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.plainWords
|
return p.plainWords
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) ReadingTime() int {
|
func (p *pageContentOutput) ReadingTime(ctx context.Context) int {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.readingTime
|
return p.readingTime
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Summary() template.HTML {
|
func (p *pageContentOutput) Summary(ctx context.Context) template.HTML {
|
||||||
p.p.s.initInit(p.initMain, p.p)
|
p.p.s.initInit(ctx, p.initMain, p.p)
|
||||||
if !p.p.source.hasSummaryDivider {
|
if !p.p.source.hasSummaryDivider {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
}
|
}
|
||||||
return p.summary
|
return p.summary
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) TableOfContents() template.HTML {
|
func (p *pageContentOutput) TableOfContents(ctx context.Context) template.HTML {
|
||||||
p.p.s.initInit(p.initMain, p.p)
|
p.p.s.initInit(ctx, p.initMain, p.p)
|
||||||
return p.tableOfContents
|
return p.tableOfContentsHTML
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) Truncated() bool {
|
func (p *pageContentOutput) Truncated(ctx context.Context) bool {
|
||||||
if p.p.truncated {
|
if p.p.truncated {
|
||||||
return true
|
return true
|
||||||
}
|
}
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.truncated
|
return p.truncated
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) WordCount() int {
|
func (p *pageContentOutput) WordCount(ctx context.Context) int {
|
||||||
p.p.s.initInit(p.initPlain, p.p)
|
p.p.s.initInit(ctx, p.initPlain, p.p)
|
||||||
return p.wordCount
|
return p.wordCount
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *pageContentOutput) RenderString(args ...any) (template.HTML, error) {
|
func (p *pageContentOutput) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
|
||||||
if len(args) < 1 || len(args) > 2 {
|
if len(args) < 1 || len(args) > 2 {
|
||||||
return "", errors.New("want 1 or 2 arguments")
|
return "", errors.New("want 1 or 2 arguments")
|
||||||
}
|
}
|
||||||
|
@ -405,42 +438,62 @@ func (p *pageContentOutput) RenderString(args ...any) (template.HTML, error) {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
placeholders, hasShortcodeVariants, err := s.renderShortcodesForPage(p.p, p.f)
|
placeholders, err := s.prepareShortcodesForPage(ctx, p.p, p.f)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
if hasShortcodeVariants {
|
contentToRender, hasVariants, err := p.p.contentToRender(ctx, parsed, pm, placeholders)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if hasVariants {
|
||||||
p.p.pageOutputTemplateVariationsState.Store(2)
|
p.p.pageOutputTemplateVariationsState.Store(2)
|
||||||
}
|
}
|
||||||
|
b, err := p.renderContentWithConverter(ctx, conv, contentToRender, false)
|
||||||
b, err := p.renderContentWithConverter(conv, p.p.contentToRender(parsed, pm, placeholders), false)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", p.p.wrapError(err)
|
return "", p.p.wrapError(err)
|
||||||
}
|
}
|
||||||
rendered = b.Bytes()
|
rendered = b.Bytes()
|
||||||
|
|
||||||
if p.placeholdersEnabled {
|
|
||||||
// ToC was accessed via .Page.TableOfContents in the shortcode,
|
|
||||||
// at a time when the ToC wasn't ready.
|
|
||||||
if _, err := p.p.Content(); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
placeholders[tocShortcodePlaceholder] = string(p.tableOfContents)
|
|
||||||
}
|
|
||||||
|
|
||||||
if pm.hasNonMarkdownShortcode || p.placeholdersEnabled {
|
if pm.hasNonMarkdownShortcode || p.placeholdersEnabled {
|
||||||
rendered, err = replaceShortcodeTokens(rendered, placeholders)
|
var hasShortcodeVariants bool
|
||||||
|
|
||||||
|
tokenHandler := func(ctx context.Context, token string) ([]byte, error) {
|
||||||
|
if token == tocShortcodePlaceholder {
|
||||||
|
// The Page's TableOfContents was accessed in a shortcode.
|
||||||
|
if p.tableOfContentsHTML == "" {
|
||||||
|
p.p.s.initInit(ctx, p.initToC, p.p)
|
||||||
|
}
|
||||||
|
return []byte(p.tableOfContentsHTML), nil
|
||||||
|
}
|
||||||
|
renderer, found := placeholders[token]
|
||||||
|
if found {
|
||||||
|
repl, more, err := renderer.renderShortcode(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
hasShortcodeVariants = hasShortcodeVariants || more
|
||||||
|
return repl, nil
|
||||||
|
}
|
||||||
|
// This should not happen.
|
||||||
|
return nil, fmt.Errorf("unknown shortcode token %q", token)
|
||||||
|
}
|
||||||
|
|
||||||
|
rendered, err = expandShortcodeTokens(ctx, rendered, tokenHandler)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
|
+			if hasShortcodeVariants {
+				p.p.pageOutputTemplateVariationsState.Store(2)
+			}
 		// We need a consolidated view in $page.HasShortcode
 		p.p.shortcodeState.transferNames(s)
 	} else {
-		c, err := p.renderContentWithConverter(conv, []byte(contentToRender), false)
+		c, err := p.renderContentWithConverter(ctx, conv, []byte(contentToRender), false)
 		if err != nil {
 			return "", p.p.wrapError(err)
 		}
@@ -457,12 +510,12 @@ func (p *pageContentOutput) RenderString(args ...any) (template.HTML, error) {
-func (p *pageContentOutput) RenderWithTemplateInfo(info tpl.Info, layout ...string) (template.HTML, error) {
+func (p *pageContentOutput) RenderWithTemplateInfo(ctx context.Context, info tpl.Info, layout ...string) (template.HTML, error) {
 	p.p.addDependency(info)
-	return p.Render(layout...)
+	return p.Render(ctx, layout...)
-func (p *pageContentOutput) Render(layout ...string) (template.HTML, error) {
+func (p *pageContentOutput) Render(ctx context.Context, layout ...string) (template.HTML, error) {
@@ -475,7 +528,7 @@ func (p *pageContentOutput) Render(layout ...string) (template.HTML, error) {
 	// Make sure to send the *pageState and not the *pageContentOutput to the template.
-	res, err := executeToString(p.p.s.Tmpl(), templ, p.p)
+	res, err := executeToString(ctx, p.p.s.Tmpl(), templ, p.p)
@@ -629,15 +682,15 @@ func (p *pageContentOutput) setAutoSummary() error {
-func (cp *pageContentOutput) RenderContent(content []byte, renderTOC bool) (converter.Result, error) {
+func (cp *pageContentOutput) RenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.Result, error) {
 	c := cp.p.getContentConverter()
-	return cp.renderContentWithConverter(c, content, renderTOC)
+	return cp.renderContentWithConverter(ctx, c, content, renderTOC)
-func (cp *pageContentOutput) renderContentWithConverter(c converter.Converter, content []byte, renderTOC bool) (converter.Result, error) {
+func (cp *pageContentOutput) renderContentWithConverter(ctx context.Context, c converter.Converter, content []byte, renderTOC bool) (converter.Result, error) {
@@ -711,10 +764,10 @@ func (t targetPathsHolder) targetPaths() page.TargetPaths {
-func executeToString(h tpl.TemplateHandler, templ tpl.Template, data any) (string, error) {
+func executeToString(ctx context.Context, h tpl.TemplateHandler, templ tpl.Template, data any) (string, error) {
-	if err := h.Execute(templ, b, data); err != nil {
+	if err := h.ExecuteWithContext(ctx, templ, b, data); err != nil {
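The hunks above thread a `context.Context` through the whole content-rendering path, ending in `ExecuteWithContext`. A minimal standalone sketch of the new `executeToString` shape, assuming `ExecuteWithContext` accepts any `io.Writer` (the real helper uses a pooled buffer rather than `strings.Builder`):

```go
package render

import (
	"context"
	"strings"

	"github.com/gohugoio/hugo/tpl"
)

// executeToString mirrors the updated helper above: template execution now
// carries the caller's context all the way down to the template handler.
func executeToString(ctx context.Context, h tpl.TemplateHandler, templ tpl.Template, data any) (string, error) {
	var b strings.Builder
	if err := h.ExecuteWithContext(ctx, templ, &b, data); err != nil {
		return "", err
	}
	return b.String(), nil
}
```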
@@ -14,6 +14,8 @@
 import (
+	"context"
+
 	"github.com/gohugoio/hugo/lazy"
 	"github.com/gohugoio/hugo/resources/page"
 )
@@ -33,12 +35,12 @@ type nextPrev struct {
 func (n *nextPrev) next() page.Page {
-	n.init.Do()
+	n.init.Do(context.Background())
 func (n *nextPrev) prev() page.Page {
-	n.init.Do()
+	n.init.Do(context.Background())
@@ -14,6 +14,7 @@
 import (
+	"context"
 	"fmt"
@@ -311,13 +312,13 @@ func normalizeContent(c string) string {
 func checkPageTOC(t *testing.T, page page.Page, toc string) {
-	if page.TableOfContents() != template.HTML(toc) {
-		t.Fatalf("Page TableOfContents is:\n%q.\nExpected %q", page.TableOfContents(), toc)
+	if page.TableOfContents(context.Background()) != template.HTML(toc) {
+		t.Fatalf("Page TableOfContents is:\n%q.\nExpected %q", page.TableOfContents(context.Background()), toc)
 func checkPageSummary(t *testing.T, page page.Page, summary string, msg ...any) {
-	a := normalizeContent(string(page.Summary()))
+	a := normalizeContent(string(page.Summary(context.Background())))
@@ -443,9 +444,9 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
-	if p.Summary() != template.HTML(
+	if p.Summary(context.Background()) != template.HTML(
 		"<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup id=\"fnref:1\"><a href=\"#fn:1\" class=\"footnote-ref\" role=\"doc-noteref\">1</a></sup></p>") {
-		t.Fatalf("Got summary:\n%q", p.Summary())
+		t.Fatalf("Got summary:\n%q", p.Summary(context.Background()))
@@ -719,7 +720,7 @@ func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) {
-		s := string(p.Summary())
+		s := string(p.Summary(context.Background()))
@@ -1122,8 +1123,8 @@ func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) {
-		if p.WordCount() != 8 {
-			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 8, p.WordCount())
+		if p.WordCount(context.Background()) != 8 {
+			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 8, p.WordCount(context.Background()))
@@ -1136,8 +1137,8 @@ func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) {
-		if p.WordCount() != 15 {
-			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 15, p.WordCount())
+		if p.WordCount(context.Background()) != 15 {
+			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 15, p.WordCount(context.Background()))
@@ -1149,13 +1150,13 @@ func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) {
-		if p.WordCount() != 74 {
-			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 74, p.WordCount())
+		if p.WordCount(context.Background()) != 74 {
+			t.Fatalf("[%s] incorrect word count, expected %v, got %v", ext, 74, p.WordCount(context.Background()))
-		if p.Summary() != simplePageWithMainEnglishWithCJKRunesSummary {
-			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
-				simplePageWithMainEnglishWithCJKRunesSummary, p.Summary())
+		if p.Summary(context.Background()) != simplePageWithMainEnglishWithCJKRunesSummary {
+			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(context.Background()),
+				simplePageWithMainEnglishWithCJKRunesSummary, p.Summary(context.Background()))
@@ -1170,13 +1171,13 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) {
-		if p.WordCount() != 75 {
-			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.Plain(), 74, p.WordCount())
+		if p.WordCount(context.Background()) != 75 {
+			t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.Plain(context.Background()), 74, p.WordCount(context.Background()))
-		if p.Summary() != simplePageWithIsCJKLanguageFalseSummary {
-			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(),
-				simplePageWithIsCJKLanguageFalseSummary, p.Summary())
+		if p.Summary(context.Background()) != simplePageWithIsCJKLanguageFalseSummary {
+			t.Fatalf("[%s] incorrect Summary for content '%s'. expected %v, got %v", ext, p.Plain(context.Background()),
+				simplePageWithIsCJKLanguageFalseSummary, p.Summary(context.Background()))
@@ -1187,16 +1188,16 @@ func TestWordCount(t *testing.T) {
-		if p.WordCount() != 483 {
-			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount())
+		if p.WordCount(context.Background()) != 483 {
+			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount(context.Background()))
-		if p.FuzzyWordCount() != 500 {
-			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 500, p.FuzzyWordCount())
+		if p.FuzzyWordCount(context.Background()) != 500 {
+			t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 500, p.FuzzyWordCount(context.Background()))
-		if p.ReadingTime() != 3 {
-			t.Fatalf("[%s] incorrect min read. expected %v, got %v", ext, 3, p.ReadingTime())
+		if p.ReadingTime(context.Background()) != 3 {
+			t.Fatalf("[%s] incorrect min read. expected %v, got %v", ext, 3, p.ReadingTime(context.Background()))
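After this change, every content-derived `Page` method touched by these tests takes a `context.Context` as its first argument. A small sketch of calling the updated API; the page value is assumed to come from a built site, since there is no standalone constructor here:

```go
package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/resources/page"
)

// printContentStats exercises the context-aware content API shown in the
// updated tests above.
func printContentStats(ctx context.Context, p page.Page) {
	fmt.Println("words:", p.WordCount(ctx))
	fmt.Println("fuzzy words:", p.FuzzyWordCount(ctx))
	fmt.Println("reading time:", p.ReadingTime(ctx))
	fmt.Println("summary:", p.Summary(ctx))
}

func main() {
	// p would normally come from a built site, e.g. s.RegularPages()[0].
	var p page.Page
	if p != nil {
		printContentStats(context.Background(), p)
	}
}
```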
@@ -15,6 +15,7 @@ package hugolib
 import (
+	"context"
 	"fmt"
@@ -302,13 +303,44 @@ const (
-func renderShortcode(
+func prepareShortcode(
+	ctx context.Context,
 	level int,
 	s *Site,
 	tplVariants tpl.TemplateVariants,
 	sc *shortcode,
 	parent *ShortcodeWithPage,
-	p *pageState) (string, bool, error) {
+	p *pageState) (shortcodeRenderer, error) {
+
+	toParseErr := func(err error) error {
+		return p.parseError(fmt.Errorf("failed to render shortcode %q: %w", sc.name, err), p.source.parsed.Input(), sc.pos)
+	}
+
+	// Allow the caller to delay the rendering of the shortcode if needed.
+	var fn shortcodeRenderFunc = func(ctx context.Context) ([]byte, bool, error) {
+		r, err := doRenderShortcode(ctx, level, s, tplVariants, sc, parent, p)
+		if err != nil {
+			return nil, false, toParseErr(err)
+		}
+		b, hasVariants, err := r.renderShortcode(ctx)
+		if err != nil {
+			return nil, false, toParseErr(err)
+		}
+		return b, hasVariants, nil
+	}
+
+	return fn, nil
+}
+
+func doRenderShortcode(
+	ctx context.Context,
+	level int,
+	s *Site,
+	tplVariants tpl.TemplateVariants,
+	sc *shortcode,
+	parent *ShortcodeWithPage,
+	p *pageState) (shortcodeRenderer, error) {
 	var tmpl tpl.Template
@@ -319,7 +351,7 @@ func renderShortcode(
 	if sc.isInline {
 		if !p.s.ExecHelper.Sec().EnableInlineShortcodes {
-			return "", false, nil
+			return zeroShortcode, nil
@@ -332,7 +364,7 @@ func renderShortcode(
 			fe = fe.UpdatePosition(pos)
-			return "", false, p.wrapError(fe)
+			return zeroShortcode, p.wrapError(fe)
@@ -340,7 +372,7 @@ func renderShortcode(
 			if !found {
-				return "", false, fmt.Errorf("no earlier definition of shortcode %q found", sc.name)
+				return zeroShortcode, fmt.Errorf("no earlier definition of shortcode %q found", sc.name)
@@ -348,7 +380,7 @@ func renderShortcode(
 		if !found {
 			s.Log.Errorf("Unable to locate template for shortcode %q in page %q", sc.name, p.File().Path())
-			return "", false, nil
+			return zeroShortcode, nil
@@ -365,16 +397,20 @@ func renderShortcode(
 		case *shortcode:
-			s, more, err := renderShortcode(level+1, s, tplVariants, innerData, data, p)
+			s, err := prepareShortcode(ctx, level+1, s, tplVariants, innerData, data, p)
 			if err != nil {
-				return "", false, err
+				return zeroShortcode, err
 			}
+			ss, more, err := s.renderShortcodeString(ctx)
 			hasVariants = hasVariants || more
-			inner += s
+			if err != nil {
+				return zeroShortcode, err
+			}
+			inner += ss
 		default:
 			s.Log.Errorf("Illegal state on shortcode rendering of %q in page %q. Illegal type in inner data: %s ",
 				sc.name, p.File().Path(), reflect.TypeOf(innerData))
-			return "", false, nil
+			return zeroShortcode, nil
@@ -382,9 +418,9 @@ func renderShortcode(
 	if sc.doMarkup && (level > 0 || sc.configVersion() == 1) {
-		b, err := p.pageOutput.contentRenderer.RenderContent([]byte(inner), false)
+		b, err := p.pageOutput.contentRenderer.RenderContent(ctx, []byte(inner), false)
 		if err != nil {
-			return "", false, err
+			return zeroShortcode, err
@@ -418,14 +454,14 @@ func renderShortcode(
-	result, err := renderShortcodeWithPage(s.Tmpl(), tmpl, data)
+	result, err := renderShortcodeWithPage(ctx, s.Tmpl(), tmpl, data)
 	if err != nil && sc.isInline {
 		fe = fe.UpdatePosition(pos)
-		return "", false, fe
+		return zeroShortcode, fe
@@ -444,7 +480,7 @@ func renderShortcode(
-	return result, hasVariants, err
+	return prerenderedShortcode{s: result, hasVariants: hasVariants}, err
@@ -473,28 +509,24 @@ func (s *shortcodeHandler) hasName(name string) bool {
-func (s *shortcodeHandler) renderShortcodesForPage(p *pageState, f output.Format) (map[string]string, bool, error) {
-	rendered := make(map[string]string)
+func (s *shortcodeHandler) prepareShortcodesForPage(ctx context.Context, p *pageState, f output.Format) (map[string]shortcodeRenderer, error) {
+	rendered := make(map[string]shortcodeRenderer)
-	var hasVariants bool
 	for _, v := range s.shortcodes {
-		s, more, err := renderShortcode(0, s.s, tplVariants, v, nil, p)
+		s, err := prepareShortcode(ctx, 0, s.s, tplVariants, v, nil, p)
 		if err != nil {
-			err = p.parseError(fmt.Errorf("failed to render shortcode %q: %w", v.name, err), p.source.parsed.Input(), v.pos)
-			return nil, false, err
+			return nil, err
 		}
-		hasVariants = hasVariants || more
 		rendered[v.placeholder] = s
 	}
-	return rendered, hasVariants, nil
+	return rendered, nil
@@ -668,11 +700,11 @@ Loop:
 // Replace prefixed shortcode tokens with the real content.
 // Note: This function will rewrite the input slice.
-func replaceShortcodeTokens(source []byte, replacements map[string]string) ([]byte, error) {
-	if len(replacements) == 0 {
-		return source, nil
-	}
+func expandShortcodeTokens(
+	ctx context.Context,
+	source []byte,
+	tokenHandler func(ctx context.Context, token string) ([]byte, error),
+) ([]byte, error) {
 	start := 0
@@ -691,8 +723,11 @@ func replaceShortcodeTokens(source []byte, replacements map[string]string) ([]by
 		end := j + postIdx + 4
+		key := string(source[j:end])
-		newVal := []byte(replacements[string(source[j:end])])
+		newVal, err := tokenHandler(ctx, key)
+		if err != nil {
+			return nil, err
+		}
 		// Issue #1148: Check for wrapping p-tags <p>
@@ -712,11 +747,11 @@ func replaceShortcodeTokens(source []byte, replacements map[string]string) ([]by
-func renderShortcodeWithPage(h tpl.TemplateHandler, tmpl tpl.Template, data *ShortcodeWithPage) (string, error) {
+func renderShortcodeWithPage(ctx context.Context, h tpl.TemplateHandler, tmpl tpl.Template, data *ShortcodeWithPage) (string, error) {
-	err := h.Execute(tmpl, buffer, data)
+	err := h.ExecuteWithContext(ctx, tmpl, buffer, data)
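The map of prerendered strings is gone: `expandShortcodeTokens` now asks a handler for each placeholder it finds, so rendering can be deferred until the token is actually seen. The function is unexported, so the following is a simplified standalone sketch of the same idea; the `HAHAHUGOSHORTCODE…HBHB` token shape is taken from the test data in this commit and the scanning loop is deliberately naive:

```go
package main

import (
	"bytes"
	"context"
	"fmt"
)

// expandTokens is a simplified standalone take on expandShortcodeTokens:
// every placeholder token is resolved through a handler instead of a map.
func expandTokens(
	ctx context.Context,
	source []byte,
	tokenHandler func(ctx context.Context, token string) ([]byte, error),
) ([]byte, error) {
	const prefix, suffix = "HAHAHUGOSHORTCODE", "HBHB"
	var out bytes.Buffer
	for {
		start := bytes.Index(source, []byte(prefix))
		if start == -1 {
			out.Write(source)
			return out.Bytes(), nil
		}
		end := bytes.Index(source[start:], []byte(suffix))
		if end == -1 {
			out.Write(source)
			return out.Bytes(), nil
		}
		token := string(source[start : start+end+len(suffix)])
		repl, err := tokenHandler(ctx, token)
		if err != nil {
			return nil, err
		}
		out.Write(source[:start])
		out.Write(repl)
		source = source[start+end+len(suffix):]
	}
}

func main() {
	handler := func(ctx context.Context, token string) ([]byte, error) {
		if token == "HAHAHUGOSHORTCODE-1HBHB" {
			return []byte("Hello World"), nil
		}
		return nil, fmt.Errorf("unknown token %q", token)
	}
	b, err := expandTokens(context.Background(), []byte("A HAHAHUGOSHORTCODE-1HBHB."), handler)
	fmt.Println(string(b), err) // A Hello World. <nil>
}
```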
@@ -14,13 +14,48 @@
 import (
+	"context"
 	"html/template"
+// A placeholder for the TableOfContents markup. This is what we pass to the Goldmark etc. renderers.
 var tocShortcodePlaceholder = createShortcodePlaceholder("TOC", 0)
+
+// shortcodeRenderer is typically used to delay rendering of inner shortcodes
+// marked with placeholders in the content.
+type shortcodeRenderer interface {
+	renderShortcode(context.Context) ([]byte, bool, error)
+	renderShortcodeString(context.Context) (string, bool, error)
+}
+
+type shortcodeRenderFunc func(context.Context) ([]byte, bool, error)
+
+func (f shortcodeRenderFunc) renderShortcode(ctx context.Context) ([]byte, bool, error) {
+	return f(ctx)
+}
+
+func (f shortcodeRenderFunc) renderShortcodeString(ctx context.Context) (string, bool, error) {
+	b, has, err := f(ctx)
+	return string(b), has, err
+}
+
+type prerenderedShortcode struct {
+	s           string
+	hasVariants bool
+}
+
+func (p prerenderedShortcode) renderShortcode(context.Context) ([]byte, bool, error) {
+	return []byte(p.s), p.hasVariants, nil
+}
+
+func (p prerenderedShortcode) renderShortcodeString(context.Context) (string, bool, error) {
+	return p.s, p.hasVariants, nil
+}
+
+var zeroShortcode = prerenderedShortcode{}
 // This is sent to the shortcodes. They cannot access the content
 // they're a part of. It would cause an infinite regress.
@@ -50,7 +85,11 @@ func (p *pageForShortcode) page() page.Page {
-func (p *pageForShortcode) TableOfContents() template.HTML {
+func (p *pageForShortcode) String() string {
+	return p.p.String()
+}
+
+func (p *pageForShortcode) TableOfContents(context.Context) template.HTML {
 	p.p.enablePlaceholders()
 	return p.toc
 }
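The `shortcodeRenderer` interface added above is what lets the content pipeline mix eager and deferred shortcode output. The types are unexported in package hugolib, so this sketch re-creates them with the same shape purely for illustration:

```go
package main

import (
	"context"
	"fmt"
)

// renderer mirrors the unexported shortcodeRenderer interface from the diff.
type renderer interface {
	renderShortcode(context.Context) ([]byte, bool, error)
}

// renderFunc mirrors shortcodeRenderFunc: the work is deferred until a
// context is available.
type renderFunc func(context.Context) ([]byte, bool, error)

func (f renderFunc) renderShortcode(ctx context.Context) ([]byte, bool, error) { return f(ctx) }

// prerendered mirrors prerenderedShortcode: the result is already known.
type prerendered struct {
	s           string
	hasVariants bool
}

func (p prerendered) renderShortcode(context.Context) ([]byte, bool, error) {
	return []byte(p.s), p.hasVariants, nil
}

func main() {
	byPlaceholder := map[string]renderer{
		"HAHAHUGOSHORTCODE-1HBHB": prerendered{s: "static output"},
		"HAHAHUGOSHORTCODE-2HBHB": renderFunc(func(ctx context.Context) ([]byte, bool, error) {
			// Rendered lazily, e.g. only when its placeholder is expanded.
			return []byte("lazy output"), false, nil
		}),
	}

	ctx := context.Background()
	for token, r := range byPlaceholder {
		b, hasVariants, err := r.renderShortcode(ctx)
		fmt.Println(token, string(b), hasVariants, err)
	}
}
```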
@@ -14,6 +14,7 @@
 import (
+	"context"
 	"fmt"
@@ -247,7 +248,7 @@ CSV: {{< myShort >}}
 	type input struct {
 		in           []byte
-		replacements map[string]string
+		tokenHandler func(ctx context.Context, token string) ([]byte, error)
 		expect       []byte
 	}
@@ -263,22 +264,30 @@ func BenchmarkReplaceShortcodeTokens(b *testing.B) {
-	in := make([]input, b.N*len(data))
 	cnt := 0
+	in := make([]input, b.N*len(data))
 	for i := 0; i < b.N; i++ {
 		for _, this := range data {
-			in[cnt] = input{[]byte(this.input), this.replacements, this.expect}
+			replacements := make(map[string]shortcodeRenderer)
+			for k, v := range this.replacements {
+				replacements[k] = prerenderedShortcode{s: v}
+			}
+			tokenHandler := func(ctx context.Context, token string) ([]byte, error) {
+				return []byte(this.replacements[token]), nil
+			}
+			in[cnt] = input{[]byte(this.input), tokenHandler, this.expect}
 			cnt++
 	b.ResetTimer()
 	cnt = 0
+	ctx := context.Background()
 	for i := 0; i < b.N; i++ {
 		for j := range data {
 			currIn := in[cnt]
 			cnt++
-			results, err := replaceShortcodeTokens(currIn.in, currIn.replacements)
+			results, err := expandShortcodeTokens(ctx, currIn.in, currIn.tokenHandler)
@@ -383,7 +392,16 @@ func TestReplaceShortcodeTokens(t *testing.T) {
-		results, err := replaceShortcodeTokens([]byte(this.input), this.replacements)
+		replacements := make(map[string]shortcodeRenderer)
+		for k, v := range this.replacements {
+			replacements[k] = prerenderedShortcode{s: v}
+		}
+		tokenHandler := func(ctx context.Context, token string) ([]byte, error) {
+			return []byte(this.replacements[token]), nil
+		}
+
+		ctx := context.Background()
+		results, err := expandShortcodeTokens(ctx, []byte(this.input), tokenHandler)
@@ -14,6 +14,7 @@
 import (
+	"context"
 	"fmt"
@@ -173,7 +174,7 @@ type Site struct {
 func (s *Site) Taxonomies() page.TaxonomyList {
-	s.init.taxonomies.Do()
+	s.init.taxonomies.Do(context.Background())
@@ -214,8 +215,9 @@ func (init *siteInit) Reset() {
-func (s *Site) initInit(init *lazy.Init, pctx pageContext) bool {
-	_, err := init.Do()
+func (s *Site) initInit(ctx context.Context, init *lazy.Init, pctx pageContext) bool {
+	_, err := init.Do(ctx)
@@ -227,7 +229,7 @@ func (s *Site) prepareInits() {
-	s.init.prevNext = init.Branch(func() (any, error) {
+	s.init.prevNext = init.Branch(func(context.Context) (any, error) {
@@ -254,7 +256,7 @@ func (s *Site) prepareInits() {
-	s.init.prevNextInSection = init.Branch(func() (any, error) {
+	s.init.prevNextInSection = init.Branch(func(context.Context) (any, error) {
@@ -311,12 +313,12 @@ func (s *Site) prepareInits() {
-	s.init.menus = init.Branch(func() (any, error) {
+	s.init.menus = init.Branch(func(context.Context) (any, error) {
-	s.init.taxonomies = init.Branch(func() (any, error) {
+	s.init.taxonomies = init.Branch(func(context.Context) (any, error) {
@@ -327,7 +329,7 @@ type siteRenderingContext struct {
 func (s *Site) Menus() navigation.Menus {
-	s.init.menus.Do()
+	s.init.menus.Do(context.Background())
@@ -1821,7 +1823,9 @@ func (s *Site) renderForTemplate(name, outputFormat string, d any, w io.Writer,
-	if err = s.Tmpl().Execute(templ, w, d); err != nil {
+	ctx := context.Background()
+
+	if err = s.Tmpl().ExecuteWithContext(ctx, templ, w, d); err != nil {
 		return fmt.Errorf("render of %q failed: %w", name, err)
@@ -19,9 +19,8 @@ import (
 	"sync"
-	"github.com/gohugoio/hugo/tpl"
-
 	"github.com/gohugoio/hugo/config"
+	"github.com/gohugoio/hugo/tpl"
@@ -14,6 +14,7 @@
 import (
+	"context"
 	"encoding/json"
@@ -630,7 +631,7 @@ func TestOrderedPages(t *testing.T) {
-	bylength := s.RegularPages().ByLength()
+	bylength := s.RegularPages().ByLength(context.Background())
@@ -662,7 +663,7 @@ func TestGroupedPages(t *testing.T) {
-	rbysection, err := s.RegularPages().GroupBy("Section", "desc")
+	rbysection, err := s.RegularPages().GroupBy(context.Background(), "Section", "desc")
@@ -683,7 +684,7 @@ func TestGroupedPages(t *testing.T) {
-	bytype, err := s.RegularPages().GroupBy("Type", "asc")
+	bytype, err := s.RegularPages().GroupBy(context.Background(), "Type", "asc")
@@ -2,6 +2,7 @@ package hugolib
 import (
 	"bytes"
+	"context"
 	"fmt"
@@ -1005,7 +1006,7 @@ func getPage(in page.Page, ref string) page.Page {
 func content(c resource.ContentProvider) string {
-	cc, err := c.Content()
+	cc, err := c.Content(context.Background())
lazy/init.go
@@ -29,7 +29,7 @@ func New() *Init {
 type Init struct {
-	// Used in tests
+	// Used mainly for testing.
 	initCount uint64
@@ -40,11 +40,11 @@ type Init struct {
 	out any
 	err error
-	f   func() (any, error)
+	f   func(context.Context) (any, error)
 // Add adds a func as a new child dependency.
-func (ini *Init) Add(initFn func() (any, error)) *Init {
+func (ini *Init) Add(initFn func(context.Context) (any, error)) *Init {
@@ -59,14 +59,14 @@ func (ini *Init) InitCount() int {
 // AddWithTimeout is same as Add, but with a timeout that aborts initialization.
 func (ini *Init) AddWithTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) *Init {
-	return ini.Add(func() (any, error) {
-		return ini.withTimeout(timeout, f)
+	return ini.Add(func(ctx context.Context) (any, error) {
+		return ini.withTimeout(ctx, timeout, f)
 	})
 // Branch creates a new dependency branch based on an existing and adds
 // the given dependency as a child.
-func (ini *Init) Branch(initFn func() (any, error)) *Init {
+func (ini *Init) Branch(initFn func(context.Context) (any, error)) *Init {
@@ -75,13 +75,13 @@ func (ini *Init) Branch(initFn func() (any, error)) *Init {
 // BranchdWithTimeout is same as Branch, but with a timeout.
 func (ini *Init) BranchWithTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) *Init {
-	return ini.Branch(func() (any, error) {
-		return ini.withTimeout(timeout, f)
+	return ini.Branch(func(ctx context.Context) (any, error) {
+		return ini.withTimeout(ctx, timeout, f)
 	})
 // Do initializes the entire dependency graph.
-func (ini *Init) Do() (any, error) {
+func (ini *Init) Do(ctx context.Context) (any, error) {
@@ -92,7 +92,7 @@ func (ini *Init) Do() (any, error) {
 		if prev.shouldInitialize() {
-			_, err := prev.Do()
+			_, err := prev.Do(ctx)
@@ -105,12 +105,12 @@ func (ini *Init) Do() (any, error) {
 	if ini.f != nil {
-		ini.out, ini.err = ini.f()
+		ini.out, ini.err = ini.f(ctx)
 	for _, child := range ini.children {
 		if child.shouldInitialize() {
-			_, err := child.Do()
+			_, err := child.Do(ctx)
@@ -154,7 +154,7 @@ func (ini *Init) Reset() {
-func (ini *Init) add(branch bool, initFn func() (any, error)) *Init {
+func (ini *Init) add(branch bool, initFn func(context.Context) (any, error)) *Init {
@@ -179,8 +179,8 @@ func (ini *Init) checkDone() {
-func (ini *Init) withTimeout(timeout time.Duration, f func(ctx context.Context) (any, error)) (any, error) {
-	ctx, cancel := context.WithTimeout(context.Background(), timeout)
+func (ini *Init) withTimeout(ctx context.Context, timeout time.Duration, f func(ctx context.Context) (any, error)) (any, error) {
+	ctx, cancel := context.WithTimeout(ctx, timeout)
 	defer cancel()
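lazy.Init is now context-aware end to end: the functions registered with Add and Branch receive the context that is passed to Do. A minimal usage sketch against the updated package, mirroring the pattern used in the tests that follow:

```go
package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/lazy"
)

func main() {
	// The init function now receives the context passed to Do.
	i := lazy.New().Add(func(ctx context.Context) (any, error) {
		return "expensive value", nil
	})

	v, err := i.Do(context.Background())
	fmt.Println(v, err)
}
```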
@@ -48,16 +48,16 @@ func TestInit(t *testing.T) {
-	f1 := func(name string) func() (any, error) {
-		return func() (any, error) {
+	f1 := func(name string) func(context.Context) (any, error) {
+		return func(context.Context) (any, error) {
 			result += name + "|"
-	f2 := func() func() (any, error) {
-		return func() (any, error) {
+	f2 := func() func(context.Context) (any, error) {
+		return func(context.Context) (any, error) {
 			doWork()
@@ -75,6 +75,8 @@ func TestInit(t *testing.T) {
 	var wg sync.WaitGroup
+	ctx := context.Background()
+
@@ -83,20 +85,20 @@ func TestInit(t *testing.T) {
 			if rnd.Intn(10) < 5 {
-				_, err = root.Do()
+				_, err = root.Do(ctx)
 			// Add a new branch on the fly.
 			if rnd.Intn(10) > 5 {
 				branch := branch1_2.Branch(f2())
-				_, err = branch.Do()
+				_, err = branch.Do(ctx)
 			} else {
-				_, err = branch1_2_1.Do()
+				_, err = branch1_2_1.Do(ctx)
 			}
-			_, err = branch1_2.Do()
+			_, err = branch1_2.Do(ctx)
@@ -114,7 +116,7 @@ func TestInitAddWithTimeout(t *testing.T) {
-	_, err := init.Do()
+	_, err := init.Do(context.Background())
@@ -133,7 +135,7 @@ func TestInitAddWithTimeoutTimeout(t *testing.T) {
-	_, err := init.Do()
+	_, err := init.Do(context.Background())
@@ -149,7 +151,7 @@ func TestInitAddWithTimeoutError(t *testing.T) {
-	_, err := init.Do()
+	_, err := init.Do(context.Background())
@@ -178,8 +180,8 @@ func TestInitBranchOrder(t *testing.T) {
-	work := func(size int, f func()) func() (any, error) {
-		return func() (any, error) {
+	work := func(size int, f func()) func(context.Context) (any, error) {
+		return func(context.Context) (any, error) {
 			doWorkOfSize(size)
@@ -205,13 +207,14 @@ func TestInitBranchOrder(t *testing.T) {
 	var wg sync.WaitGroup
+	ctx := context.Background()
 	for _, v := range inits {
-			_, err := v.Do()
+			_, err := v.Do(ctx)
@@ -225,17 +228,17 @@ func TestInitBranchOrder(t *testing.T) {
 func TestResetError(t *testing.T) {
-	i := New().Add(func() (any, error) {
+	i := New().Add(func(context.Context) (any, error) {
-	_, err := i.Do()
+	_, err := i.Do(context.Background())
 	c.Assert(err, qt.IsNotNil)
 	i.Reset()
 	r = true
-	_, err = i.Do()
+	_, err = i.Do(context.Background())
 	c.Assert(err, qt.IsNil)
@@ -53,10 +53,10 @@ func (p provider) New(cfg converter.ProviderConfig) (converter.Provider, error)
 type asciidocResult struct {
 	converter.Result
-	toc tableofcontents.Root
+	toc *tableofcontents.Fragments
 }
-func (r asciidocResult) TableOfContents() tableofcontents.Root {
+func (r asciidocResult) TableOfContents() *tableofcontents.Fragments {
 	return r.toc
@@ -205,16 +205,16 @@ func hasAsciiDoc() bool {
 // extractTOC extracts the toc from the given src html.
 // It returns the html without the TOC, and the TOC data
-func (a *asciidocConverter) extractTOC(src []byte) ([]byte, tableofcontents.Root, error) {
+func (a *asciidocConverter) extractTOC(src []byte) ([]byte, *tableofcontents.Fragments, error) {
 	node, err := html.Parse(&buf)
 	if err != nil {
-		return nil, tableofcontents.Root{}, err
+		return nil, nil, err
 	}
 	var (
 		f       func(*html.Node) bool
-		toc     tableofcontents.Root
+		toc     *tableofcontents.Fragments
 		toVisit []*html.Node
 	)
@@ -242,12 +242,12 @@ func (a *asciidocConverter) extractTOC(src []byte) ([]byte, tableofcontents.Root
 	f(node)
 	if err != nil {
-		return nil, tableofcontents.Root{}, err
+		return nil, nil, err
 	}
 	err = html.Render(&buf, node)
 	if err != nil {
-		return nil, tableofcontents.Root{}, err
+		return nil, nil, err
 	}
@@ -256,9 +256,9 @@ func (a *asciidocConverter) extractTOC(src []byte) ([]byte, tableofcontents.Root
 // parseTOC returns a TOC root from the given toc Node
-func parseTOC(doc *html.Node) tableofcontents.Root {
+func parseTOC(doc *html.Node) *tableofcontents.Fragments {
 	var (
-		toc tableofcontents.Root
+		toc tableofcontents.Builder
 		f   func(*html.Node, int, int)
 	)
@@ -276,9 +276,9 @@ func parseTOC(doc *html.Node) tableofcontents.Root {
 			href := attr(c, "href")[1:]
-			toc.AddAt(tableofcontents.Heading{
-				Text: nodeContent(c),
+			toc.AddAt(&tableofcontents.Heading{
+				Title: nodeContent(c),
 				ID:    href,
 			}, row, level)
@@ -289,7 +289,7 @@ func parseTOC(doc *html.Node) tableofcontents.Root {
 	f(doc.FirstChild, -1, 0)
-	return toc
+	return toc.Build()
@@ -21,13 +21,13 @@ import (
 	"testing"
+	"github.com/gohugoio/hugo/common/collections"
 	"github.com/gohugoio/hugo/common/hexec"
 	"github.com/gohugoio/hugo/markup/markup_config"
-	"github.com/gohugoio/hugo/markup/tableofcontents"
 	qt "github.com/frankban/quicktest"
@@ -343,49 +343,8 @@ testContent
 	toc, ok := r.(converter.TableOfContentsProvider)
 	c.Assert(ok, qt.Equals, true)
-	expected := tableofcontents.Root{
-		Headings: tableofcontents.Headings{
-			{
-				ID:   "",
-				Text: "",
-				Headings: tableofcontents.Headings{
-					{
-						ID:       "_introduction",
-						Text:     "Introduction",
-						Headings: nil,
-					},
-					{
-						ID:   "_section_1",
-						Text: "Section 1",
-						Headings: tableofcontents.Headings{
-							{
-								ID:   "_section_1_1",
-								Text: "Section 1.1",
-								Headings: tableofcontents.Headings{
-									{
-										ID:       "_section_1_1_1",
-										Text:     "Section 1.1.1",
-										Headings: nil,
-									},
-								},
-							},
-							{
-								ID:       "_section_1_2",
-								Text:     "Section 1.2",
-								Headings: nil,
-							},
-						},
-					},
-					{
-						ID:       "_section_2",
-						Text:     "Section 2",
-						Headings: nil,
-					},
-				},
-			},
-		},
-	}
-	c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+	c.Assert(toc.TableOfContents().Identifiers, qt.DeepEquals, collections.SortedStringSlice{"_introduction", "_section_1", "_section_1_1", "_section_1_1_1", "_section_1_2", "_section_2"})
 	c.Assert(string(r.Bytes()), qt.Not(qt.Contains), "<div id=\"toc\" class=\"toc\">")
@@ -404,22 +363,7 @@ func TestTableOfContentsWithCode(t *testing.T) {
 	toc, ok := r.(converter.TableOfContentsProvider)
 	c.Assert(ok, qt.Equals, true)
-	expected := tableofcontents.Root{
-		Headings: tableofcontents.Headings{
-			{
-				ID:   "",
-				Text: "",
-				Headings: tableofcontents.Headings{
-					{
-						ID:       "_some_code_in_the_title",
-						Text:     "Some <code>code</code> in the title",
-						Headings: nil,
-					},
-				},
-			},
-		},
-	}
-	c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+	c.Assert(toc.TableOfContents().HeadingsMap["_some_code_in_the_title"].Title, qt.Equals, "Some <code>code</code> in the title")
 	c.Assert(string(r.Bytes()), qt.Not(qt.Contains), "<div id=\"toc\" class=\"toc\">")
@@ -443,21 +387,7 @@ func TestTableOfContentsPreserveTOC(t *testing.T) {
 	toc, ok := r.(converter.TableOfContentsProvider)
 	c.Assert(ok, qt.Equals, true)
-	expected := tableofcontents.Root{
-		Headings: tableofcontents.Headings{
-			{
-				ID:   "",
-				Text: "",
-				Headings: tableofcontents.Headings{
-					{
-						ID:       "some-title",
-						Text:     "Some title",
-						Headings: nil,
-					},
-				},
-			},
-		},
-	}
-	c.Assert(toc.TableOfContents(), qt.DeepEquals, expected)
+	c.Assert(toc.TableOfContents().Identifiers, qt.DeepEquals, collections.SortedStringSlice{"some-title"})
 	c.Assert(string(r.Bytes()), qt.Contains, "<div id=\"toc\" class=\"toc\">")
@@ -101,7 +101,7 @@ type DocumentInfo interface {
 // TableOfContentsProvider provides the content as a ToC structure.
 type TableOfContentsProvider interface {
-	TableOfContents() tableofcontents.Root
+	TableOfContents() *tableofcontents.Fragments
 }
@@ -160,11 +160,11 @@ var _ identity.IdentitiesProvider = (*converterResult)(nil)
 type converterResult struct {
 	converter.Result
-	toc tableofcontents.Root
+	toc *tableofcontents.Fragments
 	ids identity.Identities
 }
-func (c converterResult) TableOfContents() tableofcontents.Root {
+func (c converterResult) TableOfContents() *tableofcontents.Fragments {
 	return c.toc
@@ -228,9 +228,9 @@ type parserContext struct {
-func (p *parserContext) TableOfContents() tableofcontents.Root {
+func (p *parserContext) TableOfContents() *tableofcontents.Fragments {
 	if v := p.Get(tocResultKey); v != nil {
-		return v.(tableofcontents.Root)
+		return v.(*tableofcontents.Fragments)
 	}
-	return tableofcontents.Root{}
+	return nil
@@ -41,8 +41,8 @@ func (t *tocTransformer) Transform(n *ast.Document, reader text.Reader, pc parse
 	var (
-		toc        tableofcontents.Root
-		tocHeading tableofcontents.Heading
+		toc        tableofcontents.Builder
+		tocHeading = &tableofcontents.Heading{}
 		level      int
 		row        = -1
 		inHeading  bool
@@ -53,10 +53,10 @@ func (t *tocTransformer) Transform(n *ast.Document, reader text.Reader, pc parse
 		if n.Kind() == ast.KindHeading {
 			if inHeading && !entering {
-				tocHeading.Text = headingText.String()
+				tocHeading.Title = headingText.String()
 				headingText.Reset()
 				toc.AddAt(tocHeading, row, level-1)
-				tocHeading = tableofcontents.Heading{}
+				tocHeading = &tableofcontents.Heading{}
 				inHeading = false
@@ -106,7 +106,7 @@ func (t *tocTransformer) Transform(n *ast.Document, reader text.Reader, pc parse
-	pc.Set(tocResultKey, toc)
+	pc.Set(tocResultKey, toc.Build())
@@ -14,35 +14,104 @@
 package tableofcontents
 import (
+	"sort"
 	"strings"
+
+	"github.com/gohugoio/hugo/common/collections"
 )
+// Empty is an empty ToC.
+var Empty = &Fragments{
+	Headings:    Headings{},
+	HeadingsMap: map[string]*Heading{},
+}
+
+// Builder is used to build the ToC data structure.
+type Builder struct {
+	toc *Fragments
+}
+
+// Add adds the heading to the ToC.
+func (b *Builder) AddAt(h *Heading, row, level int) {
+	if b.toc == nil {
+		b.toc = &Fragments{}
+	}
+	b.toc.addAt(h, row, level)
+}
+
+// Build returns the ToC.
+func (b Builder) Build() *Fragments {
+	if b.toc == nil {
+		return Empty
+	}
+	b.toc.HeadingsMap = make(map[string]*Heading)
+	b.toc.walk(func(h *Heading) {
+		if h.ID != "" {
+			b.toc.HeadingsMap[h.ID] = h
+			b.toc.Identifiers = append(b.toc.Identifiers, h.ID)
+		}
+	})
+	sort.Strings(b.toc.Identifiers)
+	return b.toc
+}
+
 // Headings holds the top level headings.
-type Headings []Heading
+type Headings []*Heading
+
+// FilterBy returns a new Headings slice with all headings that matches the given predicate.
+// For internal use only.
+func (h Headings) FilterBy(fn func(*Heading) bool) Headings {
+	var out Headings
+
+	for _, h := range h {
+		h.walk(func(h *Heading) {
+			if fn(h) {
+				out = append(out, h)
+			}
+		})
+	}
+	return out
+}
 // Heading holds the data about a heading and its children.
 type Heading struct {
 	ID string
-	Text string
+	Title string
 	Headings Headings
 }
 // IsZero is true when no ID or Text is set.
 func (h Heading) IsZero() bool {
-	return h.ID == "" && h.Text == ""
+	return h.ID == "" && h.Title == ""
 }
-// Root implements AddAt, which can be used to build the
-// data structure for the ToC.
-type Root struct {
+func (h *Heading) walk(fn func(*Heading)) {
+	fn(h)
+	for _, h := range h.Headings {
+		h.walk(fn)
+	}
+}
+
+// Fragments holds the table of contents for a page.
+type Fragments struct {
+	// Headings holds the top level headings.
 	Headings Headings
+
+	// Identifiers holds all the identifiers in the ToC as a sorted slice.
+	// Note that collections.SortedStringSlice has both a Contains and Count method
+	// that can be used to identify missing and duplicate IDs.
+	Identifiers collections.SortedStringSlice
+
+	// HeadingsMap holds all the headings in the ToC as a map.
+	// Note that with duplicate IDs, the last one will win.
+	HeadingsMap map[string]*Heading
 }
-// AddAt adds the heading into the given location.
-func (toc *Root) AddAt(h Heading, row, level int) {
+// addAt adds the heading into the given location.
+func (toc *Fragments) addAt(h *Heading, row, level int) {
 	for i := len(toc.Headings); i <= row; i++ {
-		toc.Headings = append(toc.Headings, Heading{})
+		toc.Headings = append(toc.Headings, &Heading{})
 	}
@ -50,19 +119,22 @@ func (toc *Root) AddAt(h Heading, row, level int) {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
heading := &toc.Headings[row]
|
heading := toc.Headings[row]
|
||||||
|
|
||||||
for i := 1; i < level; i++ {
|
for i := 1; i < level; i++ {
|
||||||
if len(heading.Headings) == 0 {
|
if len(heading.Headings) == 0 {
|
||||||
heading.Headings = append(heading.Headings, Heading{})
|
heading.Headings = append(heading.Headings, &Heading{})
|
||||||
}
|
}
|
||||||
heading = &heading.Headings[len(heading.Headings)-1]
|
heading = heading.Headings[len(heading.Headings)-1]
|
||||||
}
|
}
|
||||||
heading.Headings = append(heading.Headings, h)
|
heading.Headings = append(heading.Headings, h)
|
||||||
}
|
}
|
||||||
|
|
||||||
// ToHTML renders the ToC as HTML.
|
// ToHTML renders the ToC as HTML.
|
||||||
func (toc Root) ToHTML(startLevel, stopLevel int, ordered bool) string {
|
func (toc *Fragments) ToHTML(startLevel, stopLevel int, ordered bool) string {
|
||||||
|
if toc == nil {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
b := &tocBuilder{
|
b := &tocBuilder{
|
||||||
s: strings.Builder{},
|
s: strings.Builder{},
|
||||||
h: toc.Headings,
|
h: toc.Headings,
|
||||||
|
@ -74,6 +146,12 @@ func (toc Root) ToHTML(startLevel, stopLevel int, ordered bool) string {
|
||||||
return b.s.String()
|
return b.s.String()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (toc Fragments) walk(fn func(*Heading)) {
|
||||||
|
for _, h := range toc.Headings {
|
||||||
|
h.walk(fn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
type tocBuilder struct {
|
type tocBuilder struct {
|
||||||
s strings.Builder
|
s strings.Builder
|
||||||
h Headings
|
h Headings
|
||||||
|
@ -133,11 +211,11 @@ func (b *tocBuilder) writeHeadings(level, indent int, h Headings) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *tocBuilder) writeHeading(level, indent int, h Heading) {
|
func (b *tocBuilder) writeHeading(level, indent int, h *Heading) {
|
||||||
b.indent(indent)
|
b.indent(indent)
|
||||||
b.s.WriteString("<li>")
|
b.s.WriteString("<li>")
|
||||||
if !h.IsZero() {
|
if !h.IsZero() {
|
||||||
b.s.WriteString("<a href=\"#" + h.ID + "\">" + h.Text + "</a>")
|
b.s.WriteString("<a href=\"#" + h.ID + "\">" + h.Title + "</a>")
|
||||||
}
|
}
|
||||||
b.writeHeadings(level, indent, h.Headings)
|
b.writeHeadings(level, indent, h.Headings)
|
||||||
b.s.WriteString("</li>\n")
|
b.s.WriteString("</li>\n")
|
||||||
|
|
|
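A quick sketch of how the Builder/Fragments API above fits together when used directly. This is illustrative only, using just the exported API added in this commit; the heading titles and IDs are invented:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/markup/tableofcontents"
)

func main() {
	var b tableofcontents.Builder

	// AddAt takes the heading, its row (index of the top level heading) and its level.
	b.AddAt(&tableofcontents.Heading{Title: "Intro", ID: "intro"}, 0, 0)
	b.AddAt(&tableofcontents.Heading{Title: "Details", ID: "details"}, 0, 1)

	toc := b.Build()

	// Identifiers is a collections.SortedStringSlice, so Contains and Count work on it.
	fmt.Println(toc.Identifiers.Contains("details")) // true
	fmt.Println(toc.HeadingsMap["intro"].Title)      // Intro

	// Same rendering entry point as before, now nil-safe.
	fmt.Println(toc.ToHTML(1, -1, false))
}
```
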
@@ -17,18 +17,33 @@ import (
    "testing"

    qt "github.com/frankban/quicktest"
+   "github.com/gohugoio/hugo/common/collections"
)

+var newTestTocBuilder = func() Builder {
+   var b Builder
+   b.AddAt(&Heading{Title: "Heading 1", ID: "h1-1"}, 0, 0)
+   b.AddAt(&Heading{Title: "1-H2-1", ID: "1-h2-1"}, 0, 1)
+   b.AddAt(&Heading{Title: "1-H2-2", ID: "1-h2-2"}, 0, 1)
+   b.AddAt(&Heading{Title: "1-H3-1", ID: "1-h2-2"}, 0, 2)
+   b.AddAt(&Heading{Title: "Heading 2", ID: "h1-2"}, 1, 0)
+   return b
+}
+
+var newTestToc = func() *Fragments {
+   return newTestTocBuilder().Build()
+}
+
func TestToc(t *testing.T) {
    c := qt.New(t)

-   toc := &Root{}
+   toc := &Fragments{}

-   toc.AddAt(Heading{Text: "Heading 1", ID: "h1-1"}, 0, 0)
+   toc.addAt(&Heading{Title: "Heading 1", ID: "h1-1"}, 0, 0)
-   toc.AddAt(Heading{Text: "1-H2-1", ID: "1-h2-1"}, 0, 1)
+   toc.addAt(&Heading{Title: "1-H2-1", ID: "1-h2-1"}, 0, 1)
-   toc.AddAt(Heading{Text: "1-H2-2", ID: "1-h2-2"}, 0, 1)
+   toc.addAt(&Heading{Title: "1-H2-2", ID: "1-h2-2"}, 0, 1)
-   toc.AddAt(Heading{Text: "1-H3-1", ID: "1-h2-2"}, 0, 2)
+   toc.addAt(&Heading{Title: "1-H3-1", ID: "1-h2-2"}, 0, 2)
-   toc.AddAt(Heading{Text: "Heading 2", ID: "h1-2"}, 1, 0)
+   toc.addAt(&Heading{Title: "Heading 2", ID: "h1-2"}, 1, 0)

    got := toc.ToHTML(1, -1, false)
    c.Assert(got, qt.Equals, `<nav id="TableOfContents">
@@ -97,11 +112,11 @@ func TestToc(t *testing.T) {
func TestTocMissingParent(t *testing.T) {
    c := qt.New(t)

-   toc := &Root{}
+   toc := &Fragments{}

-   toc.AddAt(Heading{Text: "H2", ID: "h2"}, 0, 1)
+   toc.addAt(&Heading{Title: "H2", ID: "h2"}, 0, 1)
-   toc.AddAt(Heading{Text: "H3", ID: "h3"}, 1, 2)
+   toc.addAt(&Heading{Title: "H3", ID: "h3"}, 1, 2)
-   toc.AddAt(Heading{Text: "H3", ID: "h3"}, 1, 2)
+   toc.addAt(&Heading{Title: "H3", ID: "h3"}, 1, 2)

    got := toc.ToHTML(1, -1, false)
    c.Assert(got, qt.Equals, `<nav id="TableOfContents">
@@ -153,3 +168,53 @@ func TestTocMissingParent(t *testing.T) {
</ol>
</nav>`, qt.Commentf(got))
}

+func TestTocMisc(t *testing.T) {
+   c := qt.New(t)
+
+   c.Run("Identifiers", func(c *qt.C) {
+       toc := newTestToc()
+       c.Assert(toc.Identifiers, qt.DeepEquals, collections.SortedStringSlice{"1-h2-1", "1-h2-2", "1-h2-2", "h1-1", "h1-2"})
+   })
+
+   c.Run("HeadingsMap", func(c *qt.C) {
+       toc := newTestToc()
+       m := toc.HeadingsMap
+       c.Assert(m["h1-1"].Title, qt.Equals, "Heading 1")
+       c.Assert(m["doesnot exist"], qt.IsNil)
+   })
+}
+
+func BenchmarkToc(b *testing.B) {
+
+   newTocs := func(n int) []*Fragments {
+       var tocs []*Fragments
+       for i := 0; i < n; i++ {
+           tocs = append(tocs, newTestToc())
+       }
+       return tocs
+   }
+
+   b.Run("Build", func(b *testing.B) {
+       var builders []Builder
+       for i := 0; i < b.N; i++ {
+           builders = append(builders, newTestTocBuilder())
+       }
+       b.ResetTimer()
+
+       for i := 0; i < b.N; i++ {
+           b := builders[i]
+           b.Build()
+       }
+   })
+
+   b.Run("ToHTML", func(b *testing.B) {
+       tocs := newTocs(b.N)
+       b.ResetTimer()
+       for i := 0; i < b.N; i++ {
+           toc := tocs[i]
+           toc.ToHTML(1, -1, false)
+       }
+   })
+
+}

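The new FilterBy helper is what powers HeadingsFiltered later in this commit. A minimal sketch, assuming only the API added above; the filterByIDs helper and its inputs are illustrative, not part of Hugo:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/markup/tableofcontents"
)

// filterByIDs keeps only the headings whose ID is in ids, mirroring what the
// fragments filter in the related package does via ApplyFilterToHeadings.
func filterByIDs(toc *tableofcontents.Fragments, ids map[string]bool) tableofcontents.Headings {
	return toc.Headings.FilterBy(func(h *tableofcontents.Heading) bool {
		return ids[h.ID]
	})
}

func main() {
	var b tableofcontents.Builder
	b.AddAt(&tableofcontents.Heading{Title: "First title", ID: "ref1"}, 0, 0)
	b.AddAt(&tableofcontents.Heading{Title: "Second title", ID: "ref2"}, 1, 0)

	filtered := filterByIDs(b.Build(), map[string]bool{"ref1": true})
	fmt.Println(len(filtered)) // 1
}
```
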
@@ -21,6 +21,123 @@ import (
    "github.com/gohugoio/hugo/hugolib"
)

+func TestRelatedFragments(t *testing.T) {
+   t.Parallel()
+
+   files := `
+-- hugo.toml --
+baseURL = "http://example.com/"
+disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT"]
+[related]
+includeNewer = false
+threshold = 80
+toLower = false
+[[related.indices]]
+name = 'pagerefs'
+type = 'fragments'
+applyFilter = true
+weight = 90
+[[related.indices]]
+name = 'keywords'
+weight = 80
+-- content/p1.md --
+---
+title: p1
+pagerefs: ['ref1']
+---
+{{< see-also >}}
+
+## P1 title
+
+-- content/p2.md --
+---
+title: p2
+---
+
+## P2 title 1
+
+## P2 title 2
+
+## First title {#ref1}
+{{< see-also "ref1" >}}
+-- content/p3.md --
+---
+title: p3
+keywords: ['foo']
+---
+
+## P3 title 1
+
+## P3 title 2
+
+## Common p3, p4, p5
+-- content/p4.md --
+---
+title: p4
+---
+
+## Common p3, p4, p5
+
+## P4 title 1
+
+-- content/p5.md --
+---
+title: p5
+keywords: ['foo']
+---
+
+## P5 title 1
+
+## Common p3, p4, p5
+
+-- layouts/shortcodes/see-also.html --
+{{ $p1 := site.GetPage "p1" }}
+{{ $p2 := site.GetPage "p2" }}
+{{ $p3 := site.GetPage "p3" }}
+P1 Fragments: {{ $p1.Fragments.Identifiers }}
+P2 Fragments: {{ $p2.Fragments.Identifiers }}
+Contains ref1: {{ $p2.Fragments.Identifiers.Contains "ref1" }}
+Count ref1: {{ $p2.Fragments.Identifiers.Count "ref1" }}
+{{ $opts := dict "document" .Page "fragments" $.Params }}
+{{ $related1 := site.RegularPages.Related $opts }}
+{{ $related2 := site.RegularPages.Related $p3 }}
+Len Related 1: {{ len $related1 }}
+Len Related 2: {{ len $related2 }}
+Related 1: {{ template "list-related" $related1 }}
+Related 2: {{ template "list-related" $related2 }}
+
+{{ define "list-related" }}{{ range $i, $e := . }} {{ $i }}: {{ .Title }}: {{ with .HeadingsFiltered}}{{ range $i, $e := .}}h{{ $i }}: {{ .Title }}|{{ .ID }}|{{ end }}{{ end }}::END{{ end }}{{ end }}
+
+-- layouts/_default/single.html --
+Content: {{ .Content }}
+
+`
+
+   b := hugolib.NewIntegrationTestBuilder(
+       hugolib.IntegrationTestConfig{
+           T:           t,
+           TxtarString: files,
+       }).Build()
+
+   expect := `
+P1 Fragments: [p1-title]
+P2 Fragments: [p2-title-1 p2-title-2 ref1]
+Len Related 1: 1
+Related 2: 2
+`
+
+   for _, p := range []string{"p1", "p2"} {
+       b.AssertFileContent("public/"+p+"/index.html", expect)
+   }
+
+   b.AssertFileContent("public/p1/index.html",
+       "Related 1: 0: p2: h0: First title|ref1|::END",
+       "Related 2: 0: p5: h0: Common p3, p4, p5|common-p3-p4-p5|::END 1: p4: h0: Common p3, p4, p5|common-p3-p4-p5|::END",
+   )
+
+}
+
func BenchmarkRelatedSite(b *testing.B) {
    files := `
-- config.toml --
@@ -33,6 +150,10 @@ disableKinds = ["taxonomy", "term", "RSS", "sitemap", "robotsTXT"]
[[related.indices]]
name = 'keywords'
weight = 70
+[[related.indices]]
+name = 'pagerefs'
+type = 'fragments'
+weight = 30
-- layouts/_default/single.html --
{{ range site.RegularPages }}
{{ $tmp := .WordCount }}

@@ -15,20 +15,37 @@
package related

import (
+   "context"
    "errors"
    "fmt"
    "math"
    "sort"
    "strings"
+   "sync"
    "time"

+   xmaps "golang.org/x/exp/maps"
+
+   "github.com/gohugoio/hugo/common/collections"
    "github.com/gohugoio/hugo/common/maps"
+   "github.com/gohugoio/hugo/compare"
+   "github.com/gohugoio/hugo/markup/tableofcontents"
    "github.com/spf13/cast"

    "github.com/gohugoio/hugo/common/types"
    "github.com/mitchellh/mapstructure"
)

+const (
+   TypeBasic     = "basic"
+   TypeFragments = "fragments"
+)
+
+var validTypes = map[string]bool{
+   TypeBasic:     true,
+   TypeFragments: true,
+}
+
var (
    _ Keyword = (*StringKeyword)(nil)
    zeroDate = time.Time{}
@@ -37,8 +54,8 @@ var (
    DefaultConfig = Config{
        Threshold: 80,
        Indices: IndexConfigs{
-           IndexConfig{Name: "keywords", Weight: 100},
+           IndexConfig{Name: "keywords", Weight: 100, Type: TypeBasic},
-           IndexConfig{Name: "date", Weight: 10},
+           IndexConfig{Name: "date", Weight: 10, Type: TypeBasic},
        },
    }
)
@@ -84,6 +101,15 @@ func (c *Config) Add(index IndexConfig) {
    c.Indices = append(c.Indices, index)
}

+func (c *Config) HasType(s string) bool {
+   for _, i := range c.Indices {
+       if i.Type == s {
+           return true
+       }
+   }
+   return false
+}
+
// IndexConfigs holds a set of index configurations.
type IndexConfigs []IndexConfig

@@ -92,6 +118,13 @@ type IndexConfig struct {
    // The index name. This directly maps to a field or Param name.
    Name string

+   // The index type.
+   Type string
+
+   // Enable to apply a type specific filter to the results.
+   // This is currently only used for the "fragments" type.
+   ApplyFilter bool
+
    // Contextual pattern used to convert the Param value into a string.
    // Currently only used for dates. Can be used to, say, bump posts in the same
    // time frame when searching for related documents.
@@ -120,6 +153,14 @@ type Document interface {
    Name() string
}

+// FragmentProvider is an optional interface that can be implemented by a Document.
+type FragmentProvider interface {
+   Fragments(context.Context) *tableofcontents.Fragments
+
+   // For internal use.
+   ApplyFilterToHeadings(context.Context, func(*tableofcontents.Heading) bool) Document
+}
+
// InvertedIndex holds an inverted index, also sometimes named posting list, which
// lists, for every possible search term, the documents that contain that term.
type InvertedIndex struct {
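For a `fragments` index to index anything, the document must also implement FragmentProvider. Below is a rough sketch of a hypothetical type satisfying both Document and FragmentProvider; the Document methods (RelatedKeywords, PublishDate, Name) are inferred from how they are called elsewhere in this diff, and in Hugo itself the real implementation is the Page type:

```go
package docs

import (
	"context"
	"time"

	"github.com/gohugoio/hugo/markup/tableofcontents"
	"github.com/gohugoio/hugo/related"
)

// fragmentDoc is a hypothetical document that can be indexed both by basic
// and by "fragments" indices.
type fragmentDoc struct {
	name     string
	date     time.Time
	keywords map[string][]related.Keyword
	toc      *tableofcontents.Fragments
}

func (d *fragmentDoc) Name() string           { return d.name }
func (d *fragmentDoc) PublishDate() time.Time { return d.date }

func (d *fragmentDoc) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
	return d.keywords[cfg.Name], nil
}

// Fragments returns the heading identifiers to index for this document.
func (d *fragmentDoc) Fragments(context.Context) *tableofcontents.Fragments {
	return d.toc
}

// ApplyFilterToHeadings is expected to return a view of the document with its
// headings filtered; filtering in place here keeps the sketch short.
func (d *fragmentDoc) ApplyFilterToHeadings(_ context.Context, fn func(*tableofcontents.Heading) bool) related.Document {
	d.toc.Headings = d.toc.Headings.FilterBy(fn)
	return d
}
```
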
@@ -160,7 +201,7 @@ func NewInvertedIndex(cfg Config) *InvertedIndex {

// Add documents to the inverted index.
// The value must support == and !=.
-func (idx *InvertedIndex) Add(docs ...Document) error {
+func (idx *InvertedIndex) Add(ctx context.Context, docs ...Document) error {
    var err error
    for _, config := range idx.cfg.Indices {
        if config.Weight == 0 {
@@ -179,6 +220,14 @@ func (idx *InvertedIndex) Add(docs ...Document) error {
            for _, keyword := range words {
                setm[keyword] = append(setm[keyword], doc)
            }
+
+           if config.Type == TypeFragments {
+               if fp, ok := doc.(FragmentProvider); ok {
+                   for _, fragment := range fp.Fragments(ctx).Identifiers {
+                       setm[FragmentKeyword(fragment)] = append(setm[FragmentKeyword(fragment)], doc)
+                   }
+               }
+           }
        }
    }

@@ -209,8 +258,22 @@ func (r *rank) addWeight(w int) {
    r.Matches++
}

-func newRank(doc Document, weight int) *rank {
-   return &rank{Doc: doc, Weight: weight, Matches: 1}
+var rankPool = sync.Pool{
+   New: func() interface{} {
+       return &rank{}
+   },
+}
+
+func getRank(doc Document, weight int) *rank {
+   r := rankPool.Get().(*rank)
+   r.Doc = doc
+   r.Weight = weight
+   r.Matches = 1
+   return r
+}
+
+func putRank(r *rank) {
+   rankPool.Put(r)
}

func (r ranks) Len() int { return len(r) }
@@ -225,22 +288,41 @@ func (r ranks) Less(i, j int) bool {
    return r[i].Weight > r[j].Weight
}

-// SearchDoc finds the documents matching any of the keywords in the given indices
-// against the given document.
+// SearchOpts holds the options for a related search.
+type SearchOpts struct {
+   // The Document to search for related content for.
+   Document Document
+
+   // The keywords to search for.
+   NamedSlices []types.KeyValues
+
+   // The indices to search in.
+   Indices []string
+
+   // Fragments holds a a list of special keywords that is used
+   // for indices configured as type "fragments".
+   // This will match the fragment identifiers of the documents.
+   Fragments []string
+}
+
+// Search finds the documents matching any of the keywords in the given indices
+// against query options in opts.
// The resulting document set will be sorted according to number of matches
// and the index weights, and any matches with a rank below the configured
// threshold (normalize to 0..100) will be removed.
// If an index name is provided, only that index will be queried.
-func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document, error) {
-   var q []queryElement
-   var configs IndexConfigs
+func (idx *InvertedIndex) Search(ctx context.Context, opts SearchOpts) ([]Document, error) {
+   var (
+       queryElements []queryElement
+       configs       IndexConfigs
+   )

-   if len(indices) == 0 {
+   if len(opts.Indices) == 0 {
        configs = idx.cfg.Indices
    } else {
-       configs = make(IndexConfigs, len(indices))
+       configs = make(IndexConfigs, len(opts.Indices))
-       for i, indexName := range indices {
+       for i, indexName := range opts.Indices {
            cfg, found := idx.getIndexCfg(indexName)
            if !found {
                return nil, fmt.Errorf("index %q not found", indexName)
@@ -250,40 +332,78 @@ func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document
        }
    }

    for _, cfg := range configs {
-       keywords, err := doc.RelatedKeywords(cfg)
-       if err != nil {
-           return nil, err
+       var keywords []Keyword
+       if opts.Document != nil {
+           k, err := opts.Document.RelatedKeywords(cfg)
+           if err != nil {
+               return nil, err
+           }
+           keywords = append(keywords, k...)
+       }
+       if cfg.Type == TypeFragments {
+           for _, fragment := range opts.Fragments {
+               keywords = append(keywords, FragmentKeyword(fragment))
+           }
+           if opts.Document != nil {
+               if fp, ok := opts.Document.(FragmentProvider); ok {
+                   for _, fragment := range fp.Fragments(ctx).Identifiers {
+                       keywords = append(keywords, FragmentKeyword(fragment))
+                   }
+               }
+           }
+       }
+       queryElements = append(queryElements, newQueryElement(cfg.Name, keywords...))
+   }
+   for _, slice := range opts.NamedSlices {
+       var keywords []Keyword
+       key := slice.KeyString()
+       if key == "" {
+           return nil, fmt.Errorf("index %q not valid", slice.Key)
+       }
+       conf, found := idx.getIndexCfg(key)
+       if !found {
+           return nil, fmt.Errorf("index %q not found", key)
        }

-       q = append(q, newQueryElement(cfg.Name, keywords...))
+       for _, val := range slice.Values {
+           k, err := conf.ToKeywords(val)
+           if err != nil {
+               return nil, err
+           }
+           keywords = append(keywords, k...)
+       }
+       queryElements = append(queryElements, newQueryElement(conf.Name, keywords...))
    }

-   return idx.searchDate(doc.PublishDate(), q...)
+   if opts.Document != nil {
+       return idx.searchDate(ctx, opts.Document, opts.Document.PublishDate(), queryElements...)
+   }
+   return idx.search(ctx, queryElements...)
+}
+
+func (cfg IndexConfig) stringToKeyword(s string) Keyword {
+   if cfg.ToLower {
+       s = strings.ToLower(s)
+   }
+   if cfg.Type == TypeFragments {
+       return FragmentKeyword(s)
+   }
+   return StringKeyword(s)
}

// ToKeywords returns a Keyword slice of the given input.
func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
-   var (
-       keywords []Keyword
-       toLower  = cfg.ToLower
-   )
+   var keywords []Keyword
    switch vv := v.(type) {
    case string:
-       if toLower {
-           vv = strings.ToLower(vv)
-       }
-       keywords = append(keywords, StringKeyword(vv))
+       keywords = append(keywords, cfg.stringToKeyword(vv))
    case []string:
-       if toLower {
-           vc := make([]string, len(vv))
-           copy(vc, vv)
-           for i := 0; i < len(vc); i++ {
-               vc[i] = strings.ToLower(vc[i])
-           }
-           vv = vc
+       vvv := make([]Keyword, len(vv))
+       for i := 0; i < len(vvv); i++ {
+           vvv[i] = cfg.stringToKeyword(vv[i])
        }
-       keywords = append(keywords, StringsToKeywords(vv...)...)
+       keywords = append(keywords, vvv...)
    case []any:
        return cfg.ToKeywords(cast.ToStringSlice(vv))
    case time.Time:
@@ -301,46 +421,20 @@ func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
    return keywords, nil
}

-// SearchKeyValues finds the documents matching any of the keywords in the given indices.
-// The resulting document set will be sorted according to number of matches
-// and the index weights, and any matches with a rank below the configured
-// threshold (normalize to 0..100) will be removed.
-func (idx *InvertedIndex) SearchKeyValues(args ...types.KeyValues) ([]Document, error) {
-   q := make([]queryElement, len(args))
-
-   for i, arg := range args {
-       var keywords []Keyword
-       key := arg.KeyString()
-       if key == "" {
-           return nil, fmt.Errorf("index %q not valid", arg.Key)
-       }
-       conf, found := idx.getIndexCfg(key)
-       if !found {
-           return nil, fmt.Errorf("index %q not found", key)
-       }
-
-       for _, val := range arg.Values {
-           k, err := conf.ToKeywords(val)
-           if err != nil {
-               return nil, err
-           }
-           keywords = append(keywords, k...)
-       }
-
-       q[i] = newQueryElement(conf.Name, keywords...)
-   }
-
-   return idx.search(q...)
+func (idx *InvertedIndex) search(ctx context.Context, query ...queryElement) ([]Document, error) {
+   return idx.searchDate(ctx, nil, zeroDate, query...)
}

-func (idx *InvertedIndex) search(query ...queryElement) ([]Document, error) {
-   return idx.searchDate(zeroDate, query...)
-}
-
-func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement) ([]Document, error) {
+func (idx *InvertedIndex) searchDate(ctx context.Context, self Document, upperDate time.Time, query ...queryElement) ([]Document, error) {
    matchm := make(map[Document]*rank, 200)
+   defer func() {
+       for _, r := range matchm {
+           putRank(r)
+       }
+   }()
+
    applyDateFilter := !idx.cfg.IncludeNewer && !upperDate.IsZero()
+   var fragmentsFilter collections.SortedStringSlice

    for _, el := range query {
        setm, found := idx.index[el.Index]
@@ -356,15 +450,27 @@ func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement)
        for _, kw := range el.Keywords {
            if docs, found := setm[kw]; found {
                for _, doc := range docs {
+                   if compare.Eq(doc, self) {
+                       continue
+                   }
+
                    if applyDateFilter {
                        // Exclude newer than the limit given
                        if doc.PublishDate().After(upperDate) {
                            continue
                        }
                    }

+                   if config.Type == TypeFragments && config.ApplyFilter {
+                       if fkw, ok := kw.(FragmentKeyword); ok {
+                           fragmentsFilter = append(fragmentsFilter, string(fkw))
+                       }
+                   }
+
                    r, found := matchm[doc]
                    if !found {
-                       matchm[doc] = newRank(doc, config.Weight)
+                       r = getRank(doc, config.Weight)
+                       matchm[doc] = r
                    } else {
                        r.addWeight(config.Weight)
                    }
@@ -390,11 +496,19 @@ func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement)
    }

    sort.Stable(matches)
+   sort.Strings(fragmentsFilter)

    result := make([]Document, len(matches))

    for i, m := range matches {
        result[i] = m.Doc
+       if len(fragmentsFilter) > 0 {
+           if dp, ok := result[i].(FragmentProvider); ok {
+               result[i] = dp.ApplyFilterToHeadings(ctx, func(h *tableofcontents.Heading) bool {
+                   return fragmentsFilter.Contains(h.ID)
+               })
+           }
+       }
    }

    return result, nil
@@ -433,6 +547,14 @@ func DecodeConfig(m maps.Params) (Config, error) {
            c.Indices[i].ToLower = true
        }
    }
+   for i := range c.Indices {
+       if c.Indices[i].Type == "" {
+           c.Indices[i].Type = TypeBasic
+       }
+       if !validTypes[c.Indices[i].Type] {
+           return c, fmt.Errorf("invalid index type %q. Must be one of %v", c.Indices[i].Type, xmaps.Keys(validTypes))
+       }
+   }

    return c, nil
}
@@ -444,17 +566,24 @@ func (s StringKeyword) String() string {
    return string(s)
}

+// FragmentKeyword represents a document fragment.
+type FragmentKeyword string
+
+func (f FragmentKeyword) String() string {
+   return string(f)
+}
+
// Keyword is the interface a keyword in the search index must implement.
type Keyword interface {
    String() string
}

// StringsToKeywords converts the given slice of strings to a slice of Keyword.
-func StringsToKeywords(s ...string) []Keyword {
+func (cfg IndexConfig) StringsToKeywords(s ...string) []Keyword {
    kw := make([]Keyword, len(s))

    for i := 0; i < len(s); i++ {
-       kw[i] = StringKeyword(s[i])
+       kw[i] = cfg.stringToKeyword(s[i])
    }

    return kw
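Putting the new related API together end to end, outside of Hugo's own wiring: build a Config with a basic and a fragments index, fill the inverted index, then search. This is a sketch under the assumption that the docs passed in satisfy related.Document (and FragmentProvider where fragments should be indexed); the index names, weights and the "ref1" fragment keyword are invented:

```go
package docs

import (
	"context"

	"github.com/gohugoio/hugo/related"
)

// findRelated indexes docs and returns the documents related to self.
func findRelated(ctx context.Context, self related.Document, docs ...related.Document) ([]related.Document, error) {
	cfg := related.Config{
		Threshold: 80,
		Indices: related.IndexConfigs{
			{Name: "keywords", Weight: 100, Type: related.TypeBasic},
			{Name: "pagerefs", Weight: 90, Type: related.TypeFragments, ApplyFilter: true},
		},
	}

	idx := related.NewInvertedIndex(cfg)
	if err := idx.Add(ctx, docs...); err != nil {
		return nil, err
	}

	return idx.Search(ctx, related.SearchOpts{
		Document:  self,
		Fragments: []string{"ref1"}, // extra fragment keywords, e.g. from a front matter slice
	})
}
```
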
@@ -14,6 +14,7 @@
package related

import (
+   "context"
    "fmt"
    "math/rand"
    "testing"
@@ -105,7 +106,7 @@ func TestSearch(t *testing.T) {
        newTestDoc("tags", "g", "h").addKeywords("keywords", "a", "b"),
    }

-   idx.Add(docs...)
+   idx.Add(context.Background(), docs...)

    t.Run("count", func(t *testing.T) {
        c := qt.New(t)
@@ -122,7 +123,8 @@ func TestSearch(t *testing.T) {

    t.Run("search-tags", func(t *testing.T) {
        c := qt.New(t)
-       m, err := idx.search(newQueryElement("tags", StringsToKeywords("a", "b", "d", "z")...))
+       var cfg IndexConfig
+       m, err := idx.search(context.Background(), newQueryElement("tags", cfg.StringsToKeywords("a", "b", "d", "z")...))
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 2)
        c.Assert(m[0], qt.Equals, docs[0])
@@ -131,9 +133,10 @@ func TestSearch(t *testing.T) {

    t.Run("search-tags-and-keywords", func(t *testing.T) {
        c := qt.New(t)
-       m, err := idx.search(
-           newQueryElement("tags", StringsToKeywords("a", "b", "z")...),
-           newQueryElement("keywords", StringsToKeywords("a", "b")...))
+       var cfg IndexConfig
+       m, err := idx.search(context.Background(),
+           newQueryElement("tags", cfg.StringsToKeywords("a", "b", "z")...),
+           newQueryElement("keywords", cfg.StringsToKeywords("a", "b")...))
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 3)
        c.Assert(m[0], qt.Equals, docs[3])
@@ -144,7 +147,7 @@ func TestSearch(t *testing.T) {
    t.Run("searchdoc-all", func(t *testing.T) {
        c := qt.New(t)
        doc := newTestDoc("tags", "a").addKeywords("keywords", "a")
-       m, err := idx.SearchDoc(doc)
+       m, err := idx.Search(context.Background(), SearchOpts{Document: doc})
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 2)
        c.Assert(m[0], qt.Equals, docs[3])
@@ -154,7 +157,7 @@ func TestSearch(t *testing.T) {
    t.Run("searchdoc-tags", func(t *testing.T) {
        c := qt.New(t)
        doc := newTestDoc("tags", "a", "b", "d", "z").addKeywords("keywords", "a", "b")
-       m, err := idx.SearchDoc(doc, "tags")
+       m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"tags"}})
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 2)
        c.Assert(m[0], qt.Equals, docs[0])
@@ -166,9 +169,9 @@ func TestSearch(t *testing.T) {
        doc := newTestDoc("tags", "a", "b", "d", "z").addKeywords("keywords", "a", "b")
        // This will get a date newer than the others.
        newDoc := newTestDoc("keywords", "a", "b")
-       idx.Add(newDoc)
+       idx.Add(context.Background(), newDoc)

-       m, err := idx.SearchDoc(doc, "keywords")
+       m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"keywords"}})
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 2)
        c.Assert(m[0], qt.Equals, docs[3])
@@ -186,10 +189,10 @@ func TestSearch(t *testing.T) {
        for i := 0; i < 10; i++ {
            docc := *doc
            docc.name = fmt.Sprintf("doc%d", i)
-           idx.Add(&docc)
+           idx.Add(context.Background(), &docc)
        }

-       m, err := idx.SearchDoc(doc, "keywords")
+       m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"keywords"}})
        c.Assert(err, qt.IsNil)
        c.Assert(len(m), qt.Equals, 10)
        for i := 0; i < 10; i++ {
@@ -265,7 +268,7 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
        for i := 0; i < b.N; i++ {
            idx := NewInvertedIndex(cfg)
            for _, doc := range pages {
-               idx.Add(doc)
+               idx.Add(context.Background(), doc)
            }
        }
    })
@@ -277,14 +280,15 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
            for i := 0; i < len(pages); i++ {
                docs[i] = pages[i]
            }
-           idx.Add(docs...)
+           idx.Add(context.Background(), docs...)
        }
    })
}

func BenchmarkRelatedMatchesIn(b *testing.B) {
-   q1 := newQueryElement("tags", StringsToKeywords("keyword2", "keyword5", "keyword32", "asdf")...)
-   q2 := newQueryElement("keywords", StringsToKeywords("keyword3", "keyword4")...)
+   var icfg IndexConfig
+   q1 := newQueryElement("tags", icfg.StringsToKeywords("keyword2", "keyword5", "keyword32", "asdf")...)
+   q2 := newQueryElement("keywords", icfg.StringsToKeywords("keyword3", "keyword4")...)

    docs := make([]*testDoc, 1000)
    numkeywords := 20
@@ -315,15 +319,16 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
            index = "keywords"
        }

-       idx.Add(newTestDoc(index, allKeywords[start:end]...))
+       idx.Add(context.Background(), newTestDoc(index, allKeywords[start:end]...))
    }

    b.ResetTimer()
+   ctx := context.Background()
    for i := 0; i < b.N; i++ {
        if i%10 == 0 {
-           idx.search(q2)
+           idx.search(ctx, q2)
        } else {
-           idx.search(q1)
+           idx.search(ctx, q1)
        }
    }
}

@@ -14,6 +14,7 @@
package resources

import (
+   "context"
    "image"

    "github.com/gohugoio/hugo/common/hugio"
@@ -55,7 +56,7 @@ func (e *errorResource) ReadSeekCloser() (hugio.ReadSeekCloser, error) {
    panic(e.ResourceError)
}

-func (e *errorResource) Content() (any, error) {
+func (e *errorResource) Content(context.Context) (any, error) {
    panic(e.ResourceError)
}

@@ -14,6 +14,7 @@
package resources

import (
+   "context"
    "fmt"
    "image"
    "image/gif"
@@ -436,7 +437,7 @@ func TestSVGImageContent(t *testing.T) {
    svg := fetchResourceForSpec(spec, c, "circle.svg")
    c.Assert(svg, qt.Not(qt.IsNil))

-   content, err := svg.Content()
+   content, err := svg.Content(context.Background())
    c.Assert(err, qt.IsNil)
    c.Assert(content, hqt.IsSameType, "")
    c.Assert(content.(string), qt.Contains, `<svg height="100" width="100">`)

@@ -16,10 +16,12 @@
package page

import (
+   "context"
    "html/template"

    "github.com/gohugoio/hugo/identity"
    "github.com/gohugoio/hugo/markup/converter"
+   "github.com/gohugoio/hugo/markup/tableofcontents"

    "github.com/gohugoio/hugo/config"
    "github.com/gohugoio/hugo/tpl"
@@ -76,40 +78,40 @@ type ChildCareProvider interface {

// ContentProvider provides the content related values for a Page.
type ContentProvider interface {
-   Content() (any, error)
+   Content(context.Context) (any, error)

    // Plain returns the Page Content stripped of HTML markup.
-   Plain() string
+   Plain(context.Context) string

    // PlainWords returns a string slice from splitting Plain using https://pkg.go.dev/strings#Fields.
-   PlainWords() []string
+   PlainWords(context.Context) []string

    // Summary returns a generated summary of the content.
    // The breakpoint can be set manually by inserting a summary separator in the source file.
-   Summary() template.HTML
+   Summary(context.Context) template.HTML

    // Truncated returns whether the Summary is truncated or not.
-   Truncated() bool
+   Truncated(context.Context) bool

    // FuzzyWordCount returns the approximate number of words in the content.
-   FuzzyWordCount() int
+   FuzzyWordCount(context.Context) int

    // WordCount returns the number of words in the content.
-   WordCount() int
+   WordCount(context.Context) int

    // ReadingTime returns the reading time based on the length of plain text.
-   ReadingTime() int
+   ReadingTime(context.Context) int

    // Len returns the length of the content.
    // This is for internal use only.
-   Len() int
+   Len(context.Context) int
}

// ContentRenderer provides the content rendering methods for some content.
type ContentRenderer interface {
    // RenderContent renders the given content.
    // For internal use only.
-   RenderContent(content []byte, renderTOC bool) (converter.Result, error)
+   RenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.Result, error)
}

// FileProvider provides the source file.
@@ -167,6 +169,11 @@ type Page interface {
    PageWithoutContent
}

+type PageFragment interface {
+   resource.ResourceLinksProvider
+   resource.ResourceMetaProvider
+}
+
// PageMetaProvider provides page metadata, typically provided via front matter.
type PageMetaProvider interface {
    // The 4 page dates
@@ -252,7 +259,7 @@ type PageMetaProvider interface {
// PageRenderProvider provides a way for a Page to render content.
type PageRenderProvider interface {
    // Render renders the given layout with this Page as context.
-   Render(layout ...string) (template.HTML, error)
+   Render(ctx context.Context, layout ...string) (template.HTML, error)
    // RenderString renders the first value in args with tPaginatorhe content renderer defined
    // for this Page.
    // It takes an optional map as a second argument:
@@ -260,7 +267,7 @@ type PageRenderProvider interface {
    // display (“inline”):
    // - inline or block. If inline (default), surrounding <p></p> on short snippets will be trimmed.
    // markup (defaults to the Page’s markup)
-   RenderString(args ...any) (template.HTML, error)
+   RenderString(ctx context.Context, args ...any) (template.HTML, error)
}

// PageWithoutContent is the Page without any of the content methods.
@@ -323,6 +330,14 @@ type PageWithoutContent interface {
    // Used in change/dependency tracking.
    identity.Provider

+   // Fragments returns the fragments for this page.
+   Fragments(context.Context) *tableofcontents.Fragments
+
+   // Headings returns the headings for this page when a filter is set.
+   // This is currently only triggered with the Related content feature
+   // and the "fragments" type of index.
+   HeadingsFiltered(context.Context) tableofcontents.Headings
+
    DeprecatedWarningPageMethods
}

@@ -387,7 +402,7 @@ type SitesProvider interface {
// TableOfContentsProvider provides the table of contents for a Page.
type TableOfContentsProvider interface {
    // TableOfContents returns the table of contents for the page rendered as HTML.
-   TableOfContents() template.HTML
+   TableOfContents(context.Context) template.HTML
}

// TranslationsProvider provides access to any translations.
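Every content-derived Page method now takes a context.Context. A small, hedged sketch of a caller adapting to the new signatures; the helper itself is illustrative, and the import path follows the package's location under resources/page:

```go
package docs

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/resources/page"
)

// summarize reads a few content-derived values from a Page using the new
// context-aware signatures.
func summarize(ctx context.Context, p page.Page) (string, error) {
	content, err := p.Content(ctx)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%v (%d words, ~%d min read)",
		content, p.WordCount(ctx), p.ReadingTime(ctx)), nil
}
```
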
@@ -14,6 +14,7 @@
package page

import (
+   "context"
    "html/template"

    "github.com/gohugoio/hugo/lazy"
@@ -57,7 +58,7 @@ func NewLazyContentProvider(f func() (OutputFormatContentProvider, error)) *Lazy
        init: lazy.New(),
        cp:   NopCPageContentRenderer,
    }
-   lcp.init.Add(func() (any, error) {
+   lcp.init.Add(func(context.Context) (any, error) {
        cp, err := f()
        if err != nil {
            return nil, err
@@ -72,67 +73,67 @@ func (lcp *LazyContentProvider) Reset() {
    lcp.init.Reset()
}

-func (lcp *LazyContentProvider) Content() (any, error) {
-   lcp.init.Do()
-   return lcp.cp.Content()
+func (lcp *LazyContentProvider) Content(ctx context.Context) (any, error) {
+   lcp.init.Do(ctx)
+   return lcp.cp.Content(ctx)
}

-func (lcp *LazyContentProvider) Plain() string {
-   lcp.init.Do()
-   return lcp.cp.Plain()
+func (lcp *LazyContentProvider) Plain(ctx context.Context) string {
+   lcp.init.Do(ctx)
+   return lcp.cp.Plain(ctx)
}

-func (lcp *LazyContentProvider) PlainWords() []string {
-   lcp.init.Do()
-   return lcp.cp.PlainWords()
+func (lcp *LazyContentProvider) PlainWords(ctx context.Context) []string {
+   lcp.init.Do(ctx)
+   return lcp.cp.PlainWords(ctx)
}

-func (lcp *LazyContentProvider) Summary() template.HTML {
-   lcp.init.Do()
-   return lcp.cp.Summary()
+func (lcp *LazyContentProvider) Summary(ctx context.Context) template.HTML {
+   lcp.init.Do(ctx)
+   return lcp.cp.Summary(ctx)
}

-func (lcp *LazyContentProvider) Truncated() bool {
-   lcp.init.Do()
-   return lcp.cp.Truncated()
+func (lcp *LazyContentProvider) Truncated(ctx context.Context) bool {
+   lcp.init.Do(ctx)
+   return lcp.cp.Truncated(ctx)
}

-func (lcp *LazyContentProvider) FuzzyWordCount() int {
-   lcp.init.Do()
-   return lcp.cp.FuzzyWordCount()
+func (lcp *LazyContentProvider) FuzzyWordCount(ctx context.Context) int {
+   lcp.init.Do(ctx)
+   return lcp.cp.FuzzyWordCount(ctx)
}

-func (lcp *LazyContentProvider) WordCount() int {
-   lcp.init.Do()
-   return lcp.cp.WordCount()
+func (lcp *LazyContentProvider) WordCount(ctx context.Context) int {
+   lcp.init.Do(ctx)
+   return lcp.cp.WordCount(ctx)
}

-func (lcp *LazyContentProvider) ReadingTime() int {
-   lcp.init.Do()
-   return lcp.cp.ReadingTime()
+func (lcp *LazyContentProvider) ReadingTime(ctx context.Context) int {
+   lcp.init.Do(ctx)
+   return lcp.cp.ReadingTime(ctx)
}

-func (lcp *LazyContentProvider) Len() int {
-   lcp.init.Do()
-   return lcp.cp.Len()
+func (lcp *LazyContentProvider) Len(ctx context.Context) int {
+   lcp.init.Do(ctx)
+   return lcp.cp.Len(ctx)
}

-func (lcp *LazyContentProvider) Render(layout ...string) (template.HTML, error) {
-   lcp.init.Do()
-   return lcp.cp.Render(layout...)
+func (lcp *LazyContentProvider) Render(ctx context.Context, layout ...string) (template.HTML, error) {
+   lcp.init.Do(context.TODO())
+   return lcp.cp.Render(ctx, layout...)
}

-func (lcp *LazyContentProvider) RenderString(args ...any) (template.HTML, error) {
-   lcp.init.Do()
-   return lcp.cp.RenderString(args...)
+func (lcp *LazyContentProvider) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
+   lcp.init.Do(ctx)
+   return lcp.cp.RenderString(ctx, args...)
}

-func (lcp *LazyContentProvider) TableOfContents() template.HTML {
-   lcp.init.Do()
-   return lcp.cp.TableOfContents()
+func (lcp *LazyContentProvider) TableOfContents(ctx context.Context) template.HTML {
+   lcp.init.Do(ctx)
+   return lcp.cp.TableOfContents(ctx)
}

-func (lcp *LazyContentProvider) RenderContent(content []byte, renderTOC bool) (converter.Result, error) {
-   lcp.init.Do()
-   return lcp.cp.RenderContent(content, renderTOC)
+func (lcp *LazyContentProvider) RenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.Result, error) {
+   lcp.init.Do(ctx)
+   return lcp.cp.RenderContent(ctx, content, renderTOC)
}
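The lazy initializer now threads the context through: callbacks registered with Add receive the context that is later handed to Do. A rough sketch of that pattern, using the lazy package only as it appears in this file (the printed message is illustrative):

```go
package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/lazy"
)

func main() {
	initializer := lazy.New()

	// The callback receives the context that is later passed to Do.
	initializer.Add(func(ctx context.Context) (any, error) {
		fmt.Println("expensive init running, ctx err:", ctx.Err())
		return nil, nil
	})

	// Do runs the registered callbacks once, threading the context through.
	initializer.Do(context.Background())
}
```
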
@@ -25,24 +25,10 @@ import (
    "github.com/gohugoio/hugo/media"
    "github.com/gohugoio/hugo/navigation"
    "github.com/gohugoio/hugo/source"
-   "html/template"
    "time"
)

func MarshalPageToJSON(p Page) ([]byte, error) {
-   content, err := p.Content()
-   if err != nil {
-       return nil, err
-   }
-   plain := p.Plain()
-   plainWords := p.PlainWords()
-   summary := p.Summary()
-   truncated := p.Truncated()
-   fuzzyWordCount := p.FuzzyWordCount()
-   wordCount := p.WordCount()
-   readingTime := p.ReadingTime()
-   length := p.Len()
-   tableOfContents := p.TableOfContents()
    rawContent := p.RawContent()
    resourceType := p.ResourceType()
    mediaType := p.MediaType()
@@ -93,16 +79,6 @@ func MarshalPageToJSON(p Page) ([]byte, error) {
    getIdentity := p.GetIdentity()

    s := struct {
-       Content         interface{}
-       Plain           string
-       PlainWords      []string
-       Summary         template.HTML
-       Truncated       bool
-       FuzzyWordCount  int
-       WordCount       int
-       ReadingTime     int
-       Len             int
-       TableOfContents template.HTML
        RawContent   string
        ResourceType string
        MediaType    media.Type
@@ -152,16 +128,6 @@ func MarshalPageToJSON(p Page) ([]byte, error) {
        Store       *maps.Scratch
        GetIdentity identity.Identity
    }{
-       Content:         content,
-       Plain:           plain,
-       PlainWords:      plainWords,
-       Summary:         summary,
-       Truncated:       truncated,
-       FuzzyWordCount:  fuzzyWordCount,
-       WordCount:       wordCount,
-       ReadingTime:     readingTime,
-       Len:             length,
-       TableOfContents: tableOfContents,
        RawContent:   rawContent,
        ResourceType: resourceType,
        MediaType:    mediaType,

@@ -17,11 +17,13 @@ package page

 import (
 	"bytes"
+	"context"
 	"html/template"
 	"time"

 	"github.com/gohugoio/hugo/identity"
 	"github.com/gohugoio/hugo/markup/converter"
+	"github.com/gohugoio/hugo/markup/tableofcontents"

 	"github.com/gohugoio/hugo/hugofs/files"
 	"github.com/gohugoio/hugo/tpl"
@@ -105,7 +107,7 @@ func (p *nopPage) BundleType() files.ContentClass {
 	return ""
 }

-func (p *nopPage) Content() (any, error) {
+func (p *nopPage) Content(context.Context) (any, error) {
 	return "", nil
 }

@@ -179,7 +181,7 @@ func (p *nopPage) FirstSection() Page {
 	return nil
 }

-func (p *nopPage) FuzzyWordCount() int {
+func (p *nopPage) FuzzyWordCount(context.Context) int {
 	return 0
 }

@@ -279,7 +281,7 @@ func (p *nopPage) Lastmod() (t time.Time) {
 	return
 }

-func (p *nopPage) Len() int {
+func (p *nopPage) Len(context.Context) int {
 	return 0
 }

@@ -363,11 +365,11 @@ func (p *nopPage) Permalink() string {
 	return ""
 }

-func (p *nopPage) Plain() string {
+func (p *nopPage) Plain(context.Context) string {
 	return ""
 }

-func (p *nopPage) PlainWords() []string {
+func (p *nopPage) PlainWords(context.Context) []string {
 	return nil
 }

@@ -399,7 +401,7 @@ func (p *nopPage) RawContent() string {
 	return ""
 }

-func (p *nopPage) ReadingTime() int {
+func (p *nopPage) ReadingTime(context.Context) int {
 	return 0
 }

@@ -415,11 +417,11 @@ func (p *nopPage) RelRef(argsm map[string]any) (string, error) {
 	return "", nil
 }

-func (p *nopPage) Render(layout ...string) (template.HTML, error) {
+func (p *nopPage) Render(ctx context.Context, layout ...string) (template.HTML, error) {
 	return "", nil
 }

-func (p *nopPage) RenderString(args ...any) (template.HTML, error) {
+func (p *nopPage) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
 	return "", nil
 }

@@ -475,11 +477,11 @@ func (p *nopPage) String() string {
 	return "nopPage"
 }

-func (p *nopPage) Summary() template.HTML {
+func (p *nopPage) Summary(context.Context) template.HTML {
 	return ""
 }

-func (p *nopPage) TableOfContents() template.HTML {
+func (p *nopPage) TableOfContents(context.Context) template.HTML {
 	return ""
 }

@@ -499,7 +501,7 @@ func (p *nopPage) Translations() Pages {
 	return nil
 }

-func (p *nopPage) Truncated() bool {
+func (p *nopPage) Truncated(context.Context) bool {
 	return false
 }

@@ -519,7 +521,7 @@ func (p *nopPage) Weight() int {
 	return 0
 }

-func (p *nopPage) WordCount() int {
+func (p *nopPage) WordCount(context.Context) int {
 	return 0
 }

@@ -527,9 +529,16 @@ func (p *nopPage) GetIdentity() identity.Identity {
 	return identity.NewPathIdentity("content", "foo/bar.md")
 }

+func (p *nopPage) Fragments(context.Context) *tableofcontents.Fragments {
+	return nil
+}
+
+func (p *nopPage) HeadingsFiltered(context.Context) tableofcontents.Headings {
+	return nil
+}

 type nopContentRenderer int

-func (r *nopContentRenderer) RenderContent(content []byte, renderTOC bool) (converter.Result, error) {
+func (r *nopContentRenderer) RenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.Result, error) {
 	b := &bytes.Buffer{}
 	return b, nil
 }

@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"errors"
 	"fmt"
 	"reflect"
@@ -110,7 +111,7 @@ var (

 // GroupBy groups by the value in the given field or method name and with the given order.
 // Valid values for order is asc, desc, rev and reverse.
-func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
+func (p Pages) GroupBy(ctx context.Context, key string, order ...string) (PagesGroup, error) {
 	if len(p) < 1 {
 		return nil, nil
 	}
@@ -158,7 +159,12 @@ func (p Pages) GroupBy(key string, order ...string) (PagesGroup, error) {
 		case reflect.StructField:
 			fv = ppv.Elem().FieldByName(key)
 		case reflect.Method:
-			fv = hreflect.GetMethodByName(ppv, key).Call([]reflect.Value{})[0]
+			var args []reflect.Value
+			fn := hreflect.GetMethodByName(ppv, key)
+			if fn.Type().NumIn() > 0 && fn.Type().In(0).Implements(hreflect.ContextInterface) {
+				args = []reflect.Value{reflect.ValueOf(ctx)}
+			}
+			fv = fn.Call(args)[0]
 		}
 		if !fv.IsValid() {
 			continue

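A minimal sketch of the updated call pattern, assuming an already-populated `page.Pages` value named `pages` and the `resources/page` import path; method-valued grouping keys whose first parameter is a `context.Context` receive the context through the reflection branch above, and the grouped results keep the `Key`/`Pages` shape used in the tests that follow:

```go
package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/resources/page"
)

// groupByWeight groups a hypothetical, already-populated Pages slice by weight.
// GroupBy now takes a context as its first argument.
func groupByWeight(pages page.Pages) error {
	groups, err := pages.GroupBy(context.Background(), "Weight", "desc")
	if err != nil {
		return err
	}
	for _, g := range groups {
		fmt.Println(g.Key, len(g.Pages))
	}
	return nil
}
```
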
@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"reflect"
 	"strings"
 	"testing"
@@ -68,7 +69,7 @@ func TestGroupByWithFieldNameArg(t *testing.T) {
 		{Key: 3, Pages: Pages{pages[0], pages[1]}},
 	}

-	groups, err := pages.GroupBy("Weight")
+	groups, err := pages.GroupBy(context.Background(), "Weight")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
@@ -85,7 +86,7 @@ func TestGroupByWithMethodNameArg(t *testing.T) {
 		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
 	}

-	groups, err := pages.GroupBy("Type")
+	groups, err := pages.GroupBy(context.Background(), "Type")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
@@ -102,7 +103,7 @@ func TestGroupByWithSectionArg(t *testing.T) {
 		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
 	}

-	groups, err := pages.GroupBy("Section")
+	groups, err := pages.GroupBy(context.Background(), "Section")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
@@ -120,7 +121,7 @@ func TestGroupByInReverseOrder(t *testing.T) {
 		{Key: 1, Pages: Pages{pages[3], pages[4]}},
 	}

-	groups, err := pages.GroupBy("Weight", "desc")
+	groups, err := pages.GroupBy(context.Background(), "Weight", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
@@ -132,7 +133,7 @@ func TestGroupByInReverseOrder(t *testing.T) {
 func TestGroupByCalledWithEmptyPages(t *testing.T) {
 	t.Parallel()
 	var pages Pages
-	groups, err := pages.GroupBy("Weight")
+	groups, err := pages.GroupBy(context.Background(), "Weight")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}
@@ -154,12 +155,12 @@ func TestReverse(t *testing.T) {
 	t.Parallel()
 	pages := preparePageGroupTestPages(t)

-	groups1, err := pages.GroupBy("Weight", "desc")
+	groups1, err := pages.GroupBy(context.Background(), "Weight", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}

-	groups2, err := pages.GroupBy("Weight")
+	groups2, err := pages.GroupBy(context.Background(), "Weight")
 	if err != nil {
 		t.Fatalf("Unable to make PagesGroup array: %s", err)
 	}

@@ -132,21 +132,6 @@ func (pages Pages) ProbablyEq(other any) bool {
 	return true
 }

-func (ps Pages) removeFirstIfFound(p Page) Pages {
-	ii := -1
-	for i, pp := range ps {
-		if p.Eq(pp) {
-			ii = i
-			break
-		}
-	}
-
-	if ii != -1 {
-		ps = append(ps[:ii], ps[ii+1:]...)
-	}
-	return ps
-}
-
 // PagesFactory somehow creates some Pages.
 // We do a lot of lazy Pages initialization in Hugo, so we need a type.
 type PagesFactory func() Pages

@@ -14,11 +14,13 @@
 package page

 import (
+	"context"
 	"fmt"
 	"sync"

 	"github.com/gohugoio/hugo/common/types"
 	"github.com/gohugoio/hugo/related"
+	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
 )

@@ -34,74 +36,90 @@ type PageGenealogist interface {

 	// Template example:
 	// {{ $related := .RegularPages.Related . }}
-	Related(doc related.Document) (Pages, error)
+	Related(ctx context.Context, opts any) (Pages, error)

 	// Template example:
 	// {{ $related := .RegularPages.RelatedIndices . "tags" "date" }}
-	RelatedIndices(doc related.Document, indices ...any) (Pages, error)
+	// Deprecated: Use Related instead.
+	RelatedIndices(ctx context.Context, doc related.Document, indices ...any) (Pages, error)

 	// Template example:
 	// {{ $related := .RegularPages.RelatedTo ( keyVals "tags" "hugo", "rocks") ( keyVals "date" .Date ) }}
-	RelatedTo(args ...types.KeyValues) (Pages, error)
+	// Deprecated: Use Related instead.
+	RelatedTo(ctx context.Context, args ...types.KeyValues) (Pages, error)
 }

 // Related searches all the configured indices with the search keywords from the
 // supplied document.
-func (p Pages) Related(doc related.Document) (Pages, error) {
-	result, err := p.searchDoc(doc)
+func (p Pages) Related(ctx context.Context, optsv any) (Pages, error) {
+	if len(p) == 0 {
+		return nil, nil
+	}
+
+	var opts related.SearchOpts
+	switch v := optsv.(type) {
+	case related.Document:
+		opts.Document = v
+	case map[string]any:
+		if err := mapstructure.WeakDecode(v, &opts); err != nil {
+			return nil, err
+		}
+	default:
+		return nil, fmt.Errorf("invalid argument type %T", optsv)
+	}
+
+	result, err := p.search(ctx, opts)
 	if err != nil {
 		return nil, err
 	}

-	if page, ok := doc.(Page); ok {
-		return result.removeFirstIfFound(page), nil
-	}
-
 	return result, nil

 }

 // RelatedIndices searches the given indices with the search keywords from the
 // supplied document.
-func (p Pages) RelatedIndices(doc related.Document, indices ...any) (Pages, error) {
+// Deprecated: Use Related instead.
+func (p Pages) RelatedIndices(ctx context.Context, doc related.Document, indices ...any) (Pages, error) {
 	indicesStr, err := cast.ToStringSliceE(indices)
 	if err != nil {
 		return nil, err
 	}

-	result, err := p.searchDoc(doc, indicesStr...)
-	if err != nil {
-		return nil, err
+	opts := related.SearchOpts{
+		Document: doc,
+		Indices:  indicesStr,
 	}

-	if page, ok := doc.(Page); ok {
-		return result.removeFirstIfFound(page), nil
+	result, err := p.search(ctx, opts)
+	if err != nil {
+		return nil, err
 	}

 	return result, nil
 }

 // RelatedTo searches the given indices with the corresponding values.
-func (p Pages) RelatedTo(args ...types.KeyValues) (Pages, error) {
+// Deprecated: Use Related instead.
+func (p Pages) RelatedTo(ctx context.Context, args ...types.KeyValues) (Pages, error) {
 	if len(p) == 0 {
 		return nil, nil
 	}

-	return p.search(args...)
+	opts := related.SearchOpts{
+		NamedSlices: args,
+	}
+
+	return p.search(ctx, opts)
 }

-func (p Pages) search(args ...types.KeyValues) (Pages, error) {
-	return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
-		return idx.SearchKeyValues(args...)
+func (p Pages) search(ctx context.Context, opts related.SearchOpts) (Pages, error) {
+	return p.withInvertedIndex(ctx, func(idx *related.InvertedIndex) ([]related.Document, error) {
+		return idx.Search(ctx, opts)
 	})
 }

-func (p Pages) searchDoc(doc related.Document, indices ...string) (Pages, error) {
-	return p.withInvertedIndex(func(idx *related.InvertedIndex) ([]related.Document, error) {
-		return idx.SearchDoc(doc, indices...)
-	})
-}
-
-func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]related.Document, error)) (Pages, error) {
+func (p Pages) withInvertedIndex(ctx context.Context, search func(idx *related.InvertedIndex) ([]related.Document, error)) (Pages, error) {
 	if len(p) == 0 {
 		return nil, nil
 	}
@@ -113,7 +131,7 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela

 	cache := d.GetRelatedDocsHandler()

-	searchIndex, err := cache.getOrCreateIndex(p)
+	searchIndex, err := cache.getOrCreateIndex(ctx, p)
 	if err != nil {
 		return nil, err
 	}
@@ -164,8 +182,7 @@ func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
 	}
 	return nil
 }
-
-func (s *RelatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex, error) {
+func (s *RelatedDocsHandler) getOrCreateIndex(ctx context.Context, p Pages) (*related.InvertedIndex, error) {
 	s.mu.RLock()
 	cachedIndex := s.getIndex(p)
 	if cachedIndex != nil {
@@ -184,7 +201,7 @@ func (s *RelatedDocsHandler) getOrCreateIndex(p Pages) (*related.InvertedIndex,
 	searchIndex := related.NewInvertedIndex(s.cfg)

 	for _, page := range p {
-		if err := searchIndex.Add(page); err != nil {
+		if err := searchIndex.Add(ctx, page); err != nil {
 			return nil, err
 		}
 	}

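The consolidated `Related` accepts either a `related.Document` (typically another page) or an options map that is weakly decoded into `related.SearchOpts`. A minimal sketch of both call styles, assuming existing `pages` and `p` values from a built site; the map keys mirror the test file that follows:

```go
package main

import (
	"context"

	"github.com/gohugoio/hugo/resources/page"
)

// findRelated shows the two argument forms accepted by the consolidated Related.
// "pages" and "p" are assumed to be existing values.
func findRelated(ctx context.Context, pages page.Pages, p page.Page) (page.Pages, error) {
	// Form 1: pass the document (a Page) directly.
	if _, err := pages.Related(ctx, p); err != nil {
		return nil, err
	}

	// Form 2: pass an options map; it is decoded into related.SearchOpts.
	return pages.Related(ctx, map[string]any{
		"document": p,
		"indices":  []string{"keywords"},
	})
}
```
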
@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"testing"
 	"time"

@@ -51,26 +52,42 @@ func TestRelated(t *testing.T) {
 		},
 	}

-	result, err := pages.RelatedTo(types.NewKeyValuesStrings("keywords", "hugo", "rocks"))
+	ctx := context.Background()
+	opts := map[string]any{
+		"namedSlices": types.NewKeyValuesStrings("keywords", "hugo", "rocks"),
+	}
+	result, err := pages.Related(ctx, opts)

 	c.Assert(err, qt.IsNil)
 	c.Assert(len(result), qt.Equals, 2)
 	c.Assert(result[0].Title(), qt.Equals, "Page 2")
 	c.Assert(result[1].Title(), qt.Equals, "Page 1")

-	result, err = pages.Related(pages[0])
+	result, err = pages.Related(ctx, pages[0])
 	c.Assert(err, qt.IsNil)
 	c.Assert(len(result), qt.Equals, 2)
 	c.Assert(result[0].Title(), qt.Equals, "Page 2")
 	c.Assert(result[1].Title(), qt.Equals, "Page 3")

-	result, err = pages.RelatedIndices(pages[0], "keywords")
+	opts = map[string]any{
+		"document": pages[0],
+		"indices":  []string{"keywords"},
+	}
+	result, err = pages.Related(ctx, opts)
 	c.Assert(err, qt.IsNil)
 	c.Assert(len(result), qt.Equals, 2)
 	c.Assert(result[0].Title(), qt.Equals, "Page 2")
 	c.Assert(result[1].Title(), qt.Equals, "Page 3")

-	result, err = pages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks"))
+	opts = map[string]any{
+		"namedSlices": []types.KeyValues{
+			{
+				Key:    "keywords",
+				Values: []any{"bep", "rocks"},
+			},
+		},
+	}
+	result, err = pages.Related(context.Background(), opts)
 	c.Assert(err, qt.IsNil)
 	c.Assert(len(result), qt.Equals, 2)
 	c.Assert(result[0].Title(), qt.Equals, "Page 2")

@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"sort"

 	"github.com/gohugoio/hugo/common/collections"
@@ -299,7 +300,7 @@ func (p Pages) ByLastmod() Pages {
 // Adjacent invocations on the same receiver will return a cached result.
 //
 // This may safely be executed in parallel.
-func (p Pages) ByLength() Pages {
+func (p Pages) ByLength(ctx context.Context) Pages {
 	const key = "pageSort.ByLength"

 	length := func(p1, p2 Page) bool {
@@ -314,7 +315,7 @@ func (p Pages) ByLength() Pages {
 			return false
 		}

-		return p1l.Len() < p2l.Len()
+		return p1l.Len(ctx) < p2l.Len(ctx)
 	}

 	pages, _ := spc.get(key, pageBy(length).Sort, p)

@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"fmt"
 	"testing"
 	"time"
@@ -104,6 +105,12 @@ func TestSortByN(t *testing.T) {
 	d4 := d1.Add(-20 * time.Hour)

 	p := createSortTestPages(4)
+	ctx := context.Background()
+
+	byLen := func(p Pages) Pages {
+		return p.ByLength(ctx)
+
+	}

 	for i, this := range []struct {
 		sortFunc func(p Pages) Pages
@@ -116,7 +123,7 @@ func TestSortByN(t *testing.T) {
 		{(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }},
 		{(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }},
 		{(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }},
-		{(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len(p[0].(*testPage).content) }},
+		{byLen, func(p Pages) bool { return p[0].(resource.LengthProvider).Len(ctx) == len(p[0].(*testPage).content) }},
 	} {
 		setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p)

@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"fmt"
 	"html/template"
 	"testing"
@@ -43,7 +44,7 @@ func TestSplitPageGroups(t *testing.T) {
 	t.Parallel()
 	c := qt.New(t)
 	pages := createTestPages(21)
-	groups, _ := pages.GroupBy("Weight", "desc")
+	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")
 	chunks := splitPageGroups(groups, 5)
 	c.Assert(len(chunks), qt.Equals, 5)

@@ -56,7 +57,7 @@ func TestSplitPageGroups(t *testing.T) {
 			// first group 10 in weight
 			c.Assert(pg.Key, qt.Equals, 10)
 			for _, p := range pg.Pages {
-				c.Assert(p.FuzzyWordCount()%2 == 0, qt.Equals, true) // magic test
+				c.Assert(p.FuzzyWordCount(context.Background())%2 == 0, qt.Equals, true) // magic test
 			}
 		}
 	} else {
@@ -71,7 +72,7 @@ func TestSplitPageGroups(t *testing.T) {
 			// last should have 5 in weight
 			c.Assert(pg.Key, qt.Equals, 5)
 			for _, p := range pg.Pages {
-				c.Assert(p.FuzzyWordCount()%2 != 0, qt.Equals, true) // magic test
+				c.Assert(p.FuzzyWordCount(context.Background())%2 != 0, qt.Equals, true) // magic test
 			}
 		}
 	} else {
@@ -83,7 +84,7 @@ func TestPager(t *testing.T) {
 	t.Parallel()
 	c := qt.New(t)
 	pages := createTestPages(21)
-	groups, _ := pages.GroupBy("Weight", "desc")
+	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")

 	urlFactory := func(page int) string {
 		return fmt.Sprintf("page/%d/", page)
@@ -149,7 +150,7 @@ func TestPagerNoPages(t *testing.T) {
 	t.Parallel()
 	c := qt.New(t)
 	pages := createTestPages(0)
-	groups, _ := pages.GroupBy("Weight", "desc")
+	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")

 	urlFactory := func(page int) string {
 		return fmt.Sprintf("page/%d/", page)
@@ -249,9 +250,9 @@ func TestProbablyEqualPageLists(t *testing.T) {
 	t.Parallel()
 	fivePages := createTestPages(5)
 	zeroPages := createTestPages(0)
-	zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc")
-	fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc")
-	ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc")
+	zeroPagesByWeight, _ := createTestPages(0).GroupBy(context.Background(), "Weight", "asc")
+	fivePagesByWeight, _ := createTestPages(5).GroupBy(context.Background(), "Weight", "asc")
+	ninePagesByWeight, _ := createTestPages(9).GroupBy(context.Background(), "Weight", "asc")

 	for i, this := range []struct {
 		v1 any
@@ -287,7 +288,7 @@ func TestPaginationPage(t *testing.T) {
 	}

 	fivePages := createTestPages(7)
-	fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
+	fivePagesFuzzyWordCount, _ := createTestPages(7).GroupBy(context.Background(), "FuzzyWordCount", "asc")

 	p1, _ := newPaginatorFromPages(fivePages, 2, urlFactory)
 	p2, _ := newPaginatorFromPageGroups(fivePagesFuzzyWordCount, 2, urlFactory)
@@ -301,10 +302,10 @@ func TestPaginationPage(t *testing.T) {
 	page21, _ := f2.page(1)
 	page2Nil, _ := f2.page(3)

-	c.Assert(page11.FuzzyWordCount(), qt.Equals, 3)
+	c.Assert(page11.FuzzyWordCount(context.Background()), qt.Equals, 3)
 	c.Assert(page1Nil, qt.IsNil)

 	c.Assert(page21, qt.Not(qt.IsNil))
-	c.Assert(page21.FuzzyWordCount(), qt.Equals, 3)
+	c.Assert(page21.FuzzyWordCount(context.Background()), qt.Equals, 3)
 	c.Assert(page2Nil, qt.IsNil)
 }

@@ -14,6 +14,7 @@
 package page

 import (
+	"context"
 	"fmt"
 	"html/template"
 	"path"
@@ -22,6 +23,7 @@ import (

 	"github.com/gohugoio/hugo/hugofs/files"
 	"github.com/gohugoio/hugo/identity"
+	"github.com/gohugoio/hugo/markup/tableofcontents"
 	"github.com/gohugoio/hugo/tpl"

 	"github.com/gohugoio/hugo/modules"
@@ -153,7 +155,7 @@ func (p *testPage) BundleType() files.ContentClass {
 	panic("not implemented")
 }

-func (p *testPage) Content() (any, error) {
+func (p *testPage) Content(context.Context) (any, error) {
 	panic("not implemented")
 }

@@ -225,7 +227,7 @@ func (p *testPage) FirstSection() Page {
 	panic("not implemented")
 }

-func (p *testPage) FuzzyWordCount() int {
+func (p *testPage) FuzzyWordCount(context.Context) int {
 	return p.fuzzyWordCount
 }

@@ -329,11 +331,19 @@ func (p *testPage) LanguagePrefix() string {
 	return ""
 }

+func (p *testPage) Fragments(context.Context) *tableofcontents.Fragments {
+	return nil
+}
+
+func (p *testPage) HeadingsFiltered(context.Context) tableofcontents.Headings {
+	return nil
+}
+
 func (p *testPage) Lastmod() time.Time {
 	return p.lastMod
 }

-func (p *testPage) Len() int {
+func (p *testPage) Len(context.Context) int {
 	return len(p.content)
 }

@@ -431,11 +441,11 @@ func (p *testPage) Permalink() string {
 	panic("not implemented")
 }

-func (p *testPage) Plain() string {
+func (p *testPage) Plain(context.Context) string {
 	panic("not implemented")
 }

-func (p *testPage) PlainWords() []string {
+func (p *testPage) PlainWords(context.Context) []string {
 	panic("not implemented")
 }

@@ -463,7 +473,7 @@ func (p *testPage) RawContent() string {
 	panic("not implemented")
 }

-func (p *testPage) ReadingTime() int {
+func (p *testPage) ReadingTime(context.Context) int {
 	panic("not implemented")
 }

@@ -487,11 +497,11 @@ func (p *testPage) RelRefFrom(argsm map[string]any, source any) (string, error)
 	return "", nil
 }

-func (p *testPage) Render(layout ...string) (template.HTML, error) {
+func (p *testPage) Render(ctx context.Context, layout ...string) (template.HTML, error) {
 	panic("not implemented")
 }

-func (p *testPage) RenderString(args ...any) (template.HTML, error) {
+func (p *testPage) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
 	panic("not implemented")
 }

@@ -552,11 +562,11 @@ func (p *testPage) String() string {
 	return p.path
 }

-func (p *testPage) Summary() template.HTML {
+func (p *testPage) Summary(context.Context) template.HTML {
 	panic("not implemented")
 }

-func (p *testPage) TableOfContents() template.HTML {
+func (p *testPage) TableOfContents(context.Context) template.HTML {
 	panic("not implemented")
 }

@@ -576,7 +586,7 @@ func (p *testPage) Translations() Pages {
 	panic("not implemented")
 }

-func (p *testPage) Truncated() bool {
+func (p *testPage) Truncated(context.Context) bool {
 	panic("not implemented")
 }

@@ -596,7 +606,7 @@ func (p *testPage) Weight() int {
 	return p.weight
 }

-func (p *testPage) WordCount() int {
+func (p *testPage) WordCount(context.Context) int {
 	panic("not implemented")
 }

@@ -14,6 +14,7 @@
 package postpub

 import (
+	"context"
 	"fmt"
 	"reflect"
 	"strconv"
@@ -101,7 +102,7 @@ func (r *PostPublishResource) GetFieldString(pattern string) (string, bool) {
 	case fieldAccessor == "ResourceType":
 		return d.ResourceType(), true
 	case fieldAccessor == "Content":
-		content, err := d.(resource.ContentProvider).Content()
+		content, err := d.(resource.ContentProvider).Content(context.Background())
 		if err != nil {
 			return "", true
 		}
@@ -172,7 +173,7 @@ func (r *PostPublishResource) Params() maps.Params {
 	panic(r.fieldNotSupported("Params"))
 }

-func (r *PostPublishResource) Content() (any, error) {
+func (r *PostPublishResource) Content(context.Context) (any, error) {
 	return r.field("Content"), nil
 }

@@ -14,6 +14,7 @@
 package resources

 import (
+	"context"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -256,7 +257,7 @@ func (l *genericResource) cloneTo(targetPath string) resource.Resource {

 }

-func (l *genericResource) Content() (any, error) {
+func (l *genericResource) Content(context.Context) (any, error) {
 	if err := l.initContent(); err != nil {
 		return nil, err
 	}

@@ -14,6 +14,8 @@
 package resource

 import (
+	"context"
+
 	"github.com/gohugoio/hugo/common/maps"
 	"github.com/gohugoio/hugo/langs"
 	"github.com/gohugoio/hugo/media"
@@ -162,7 +164,7 @@ type ContentProvider interface {
 	// * Page: template.HTML
 	// * JSON: String
 	// * Etc.
-	Content() (any, error)
+	Content(context.Context) (any, error)
 }

 // OpenReadSeekCloser allows setting some other way (than reading from a filesystem)
@@ -178,7 +180,7 @@ type ReadSeekCloserResource interface {
 // LengthProvider is a Resource that provides a length
 // (typically the length of the content).
 type LengthProvider interface {
-	Len() int
+	Len(context.Context) int
 }

 // LanguageProvider is a Resource in a language.

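With `Content` and `Len` now taking a `context.Context`, call sites that have no request-scoped context pass `context.Background()`, as the updated call sites in this commit do. A minimal sketch against `resource.ContentProvider`, assuming an existing `Resource` value:

```go
package main

import (
	"context"
	"fmt"

	"github.com/gohugoio/hugo/resources/resource"
)

// printContent reads the content of a resource, if it provides any.
// The plain background context mirrors the updated call sites in this commit.
func printContent(r resource.Resource) error {
	cp, ok := r.(resource.ContentProvider)
	if !ok {
		return fmt.Errorf("resource %q provides no content", r.Name())
	}
	content, err := cp.Content(context.Background())
	if err != nil {
		return err
	}
	fmt.Println(content)
	return nil
}
```
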
@@ -14,6 +14,7 @@
 package integrity

 import (
+	"context"
 	"html/template"
 	"testing"

@@ -63,7 +64,7 @@ func TestTransform(t *testing.T) {
 	c.Assert(err, qt.IsNil)
 	c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.a5ad1c6961214a55de53c1ce6e60d27b6b761f54851fa65e33066460dfa6a0db.txt")
 	c.Assert(transformed.Data(), qt.DeepEquals, map[string]any{"Integrity": template.HTMLAttr("sha256-pa0caWEhSlXeU8HObmDSe2t2H1SFH6ZeMwZkYN+moNs=")})
-	content, err := transformed.(resource.ContentProvider).Content()
+	content, err := transformed.(resource.ContentProvider).Content(context.Background())
 	c.Assert(err, qt.IsNil)
 	c.Assert(content, qt.Equals, "Hugo Rocks!")
 }

@@ -14,6 +14,7 @@
 package minifier

 import (
+	"context"
 	"testing"

 	"github.com/gohugoio/hugo/resources/resource"
@@ -36,7 +37,7 @@ func TestTransform(t *testing.T) {
 	c.Assert(err, qt.IsNil)

 	c.Assert(transformed.RelPermalink(), qt.Equals, "/hugo.min.html")
-	content, err := transformed.(resource.ContentProvider).Content()
+	content, err := transformed.(resource.ContentProvider).Content(context.Background())
 	c.Assert(err, qt.IsNil)
 	c.Assert(content, qt.Equals, "<h1>Hugo Rocks!</h1>")
 }

@@ -15,6 +15,7 @@ package resources

 import (
 	"bytes"
+	"context"
 	"fmt"
 	"image"
 	"io"
@@ -159,12 +160,12 @@ type resourceAdapter struct {
 	*resourceAdapterInner
 }

-func (r *resourceAdapter) Content() (any, error) {
+func (r *resourceAdapter) Content(context.Context) (any, error) {
 	r.init(false, true)
 	if r.transformationsErr != nil {
 		return nil, r.transformationsErr
 	}
-	return r.target.Content()
+	return r.target.Content(context.Background())
 }

 func (r *resourceAdapter) Err() resource.ResourceError {

@@ -14,6 +14,7 @@
 package resources

 import (
+	"context"
 	"encoding/base64"
 	"fmt"
 	"io"
@@ -115,7 +116,7 @@ func TestTransform(t *testing.T) {

 		tr, err := r.Transform(transformation)
 		c.Assert(err, qt.IsNil)
-		content, err := tr.(resource.ContentProvider).Content()
+		content, err := tr.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(content, qt.Equals, "color is green")
@@ -149,7 +150,7 @@ func TestTransform(t *testing.T) {

 		tr, err := r.Transform(transformation)
 		c.Assert(err, qt.IsNil)
-		content, err := tr.(resource.ContentProvider).Content()
+		content, err := tr.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(content, qt.Equals, "color is blue")
@@ -184,7 +185,7 @@ func TestTransform(t *testing.T) {
 		for i, transformation := range []ResourceTransformation{t1, t2} {
 			r := createTransformer(spec, "f1.txt", "color is blue")
 			tr, _ := r.Transform(transformation)
-			content, err := tr.(resource.ContentProvider).Content()
+			content, err := tr.(resource.ContentProvider).Content(context.Background())
 			c.Assert(err, qt.IsNil)
 			c.Assert(content, qt.Equals, "color is green", qt.Commentf("i=%d", i))

@@ -237,7 +238,7 @@ func TestTransform(t *testing.T) {

 			tr, _ := r.Transform(transformation)
 			c.Assert(tr.RelPermalink(), qt.Equals, "/f1.cached.txt", msg)
-			content, err := tr.(resource.ContentProvider).Content()
+			content, err := tr.(resource.ContentProvider).Content(context.Background())
 			c.Assert(err, qt.IsNil)
 			c.Assert(content, qt.Equals, "color is green", msg)
 			c.Assert(tr.MediaType(), eq, media.CSVType)
@@ -264,7 +265,7 @@ func TestTransform(t *testing.T) {

 		relPermalink := tr.RelPermalink()

-		content, err := tr.(resource.ContentProvider).Content()
+		content, err := tr.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(relPermalink, qt.Equals, "/f1.t1.txt")
@@ -286,7 +287,7 @@ func TestTransform(t *testing.T) {
 		r := createTransformer(spec, "f1.txt", "color is blue")

 		tr, _ := r.Transform(t1, t2)
-		content, err := tr.(resource.ContentProvider).Content()
+		content, err := tr.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(content, qt.Equals, "car is green")
@@ -308,9 +309,9 @@ func TestTransform(t *testing.T) {
 		tr1, _ := r.Transform(t1)
 		tr2, _ := tr1.Transform(t2)

-		content1, err := tr1.(resource.ContentProvider).Content()
+		content1, err := tr1.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)
-		content2, err := tr2.(resource.ContentProvider).Content()
+		content2, err := tr2.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(content1, qt.Equals, "color is green")
@@ -339,7 +340,7 @@ func TestTransform(t *testing.T) {
 		r := createTransformer(spec, "f1.txt", countstr.String())

 		tr, _ := r.Transform(transformations...)
-		content, err := tr.(resource.ContentProvider).Content()
+		content, err := tr.(resource.ContentProvider).Content(context.Background())
 		c.Assert(err, qt.IsNil)

 		c.Assert(content, qt.Equals, "ABCDEFGHIJKLMNOPQRSTUVWXYZ")
@@ -417,7 +418,7 @@ func TestTransform(t *testing.T) {
 				id := (i + j) % 10
 				tr, err := transformers[id].Transform(transformations[id])
 				c.Assert(err, qt.IsNil)
-				content, err := tr.(resource.ContentProvider).Content()
+				content, err := tr.(resource.ContentProvider).Content(context.Background())
 				c.Assert(err, qt.IsNil)
 				c.Assert(content, qt.Equals, "color is blue")
 				c.Assert(tr.RelPermalink(), qt.Equals, fmt.Sprintf("/f%d.test.txt", id))

@@ -62,6 +62,7 @@ func NewExecuter(helper ExecHelper) Executer {
 type (
 	dataContextKeyType string
 	hasLockContextKeyType string
+	stackContextKeyType string
 )

 const (

@@ -15,6 +15,7 @@
 package transform

 import (
+	"context"
 	"html"
 	"html/template"

@@ -118,13 +119,13 @@ func (ns *Namespace) HTMLUnescape(s any) (string, error) {
 }

 // Markdownify renders s from Markdown to HTML.
-func (ns *Namespace) Markdownify(s any) (template.HTML, error) {
+func (ns *Namespace) Markdownify(ctx context.Context, s any) (template.HTML, error) {

 	home := ns.deps.Site.Home()
 	if home == nil {
 		panic("home must not be nil")
 	}
-	ss, err := home.RenderString(s)
+	ss, err := home.RenderString(ctx, s)
 	if err != nil {
 		return "", err
 	}

@@ -14,6 +14,7 @@
 package transform_test

 import (
+	"context"
 	"html/template"
 	"strings"
 	"testing"
@@ -185,7 +186,7 @@ func TestMarkdownify(t *testing.T) {
 		{tstNoStringer{}, false},
 	} {

-		result, err := ns.Markdownify(test.s)
+		result, err := ns.Markdownify(context.Background(), test.s)

 		if bb, ok := test.expect.(bool); ok && !bb {
 			b.Assert(err, qt.Not(qt.IsNil))
@@ -218,7 +219,7 @@ This is some more text.
 And then some.
 `

-	result, err := ns.Markdownify(text)
+	result, err := ns.Markdownify(context.Background(), text)
 	b.Assert(err, qt.IsNil)
 	b.Assert(result, qt.Equals, template.HTML(
 		"<p>#First</p>\n<p>This is some <em>bold</em> text.</p>\n<h2 id=\"second\">Second</h2>\n<p>This is some more text.</p>\n<p>And then some.</p>\n"))