mirror of https://github.com/gohugoio/hugo.git
Add Node.Site.Pages to all nodes and ensure all pages from the site exist in the list before processing shortcodes.
Node.Site.Recent is not really just recent pages but all pages, so I figured it was better to add a new field with a more informative name. I also changed the code slightly so that all pages are added to the site's page list before we start rendering shortcodes; this way you can use a shortcode to refer to another page. Previously this was broken, because the list of pages was not fully populated while the shortcodes were being processed. The code that builds the list does not read from disk or do any rendering, so it shouldn't take any more time.
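In practical terms, the change splits page creation into two passes: first read every source file and collect the pages into the site's list, and only then process shortcodes and convert to HTML, so every shortcode sees the complete page list. Below is a minimal, self-contained sketch of that read-then-render pattern; it is illustrative only, and the page type and helper names are stand-ins, not Hugo's actual hugolib API.

// Minimal sketch of the two-pass read-then-render pipeline (illustrative only;
// the page type and names below are stand-ins, not Hugo's real API).
package main

import (
	"fmt"
	"sync"
)

type page struct {
	name     string
	rendered bool
}

func main() {
	files := []string{"a.md", "b.md", "c.md"}

	// Pass 1: read every source file and collect the resulting pages into the
	// shared site list before any shortcode processing happens.
	var (
		mu        sync.Mutex
		sitePages []*page
		readWG    sync.WaitGroup
	)
	filechan := make(chan string)
	for i := 0; i < 4; i++ {
		readWG.Add(1)
		go func() {
			defer readWG.Done()
			for name := range filechan {
				p := &page{name: name} // stand-in for NewPage + ReadFrom
				mu.Lock()
				sitePages = append(sitePages, p)
				mu.Unlock()
			}
		}()
	}
	for _, f := range files {
		filechan <- f
	}
	close(filechan)
	readWG.Wait()

	// Pass 2: process shortcodes and convert, now that sitePages is complete,
	// so a shortcode can safely refer to any other page in the site.
	var renderWG sync.WaitGroup
	pagechan := make(chan *page)
	for i := 0; i < 4; i++ {
		renderWG.Add(1)
		go func() {
			defer renderWG.Done()
			for p := range pagechan {
				p.rendered = true // stand-in for ProcessShortcodes + Convert
			}
		}()
	}
	for _, p := range sitePages {
		pagechan <- p
	}
	close(pagechan)
	renderWG.Wait()

	fmt.Printf("read and rendered %d pages\n", len(sitePages))
}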
parent b97c6c7082
commit 2ca615c551
1 changed file with 86 additions and 28 deletions
hugolib/site.go (+86 −28)

@@ -80,7 +80,8 @@ type SiteInfo struct {
 	Taxonomies TaxonomyList
 	Indexes    *TaxonomyList // legacy, should be identical to Taxonomies
 	Sections   Taxonomy
-	Recent     *Pages
+	Pages      *Pages
+	Recent     *Pages // legacy, should be identical to Pages
 	Menus      *Menus
 	Title      string
 	Author     map[string]string
@@ -278,6 +279,7 @@ func (s *Site) initializeSiteInfo() {
 		LanguageCode:    viper.GetString("languagecode"),
 		Copyright:       viper.GetString("copyright"),
 		DisqusShortname: viper.GetString("DisqusShortname"),
+		Pages:           &s.Pages,
 		Recent:          &s.Pages,
 		Menus:           &s.Menus,
 		Params:          params,
@@ -312,7 +314,7 @@ func (s *Site) checkDirectories() (err error) {
 	return
 }
 
-type pageRenderResult struct {
+type pageResult struct {
 	page *Page
 	err  error
 }
@@ -327,8 +329,8 @@ func (s *Site) CreatePages() error {
 	files := s.Source.Files()
 
-	results := make(chan pageRenderResult)
-	input := make(chan *source.File)
+	results := make(chan pageResult)
+	filechan := make(chan *source.File)
 
 	procs := getGoMaxProcs()
 
@@ -336,59 +338,114 @@ func (s *Site) CreatePages() error {
 	for i := 0; i < procs*4; i++ {
 		wg.Add(1)
-		go pageRenderer(s, input, results, wg)
+		go pageReader(s, filechan, results, wg)
 	}
 
 	errs := make(chan error)
 
 	// we can only have exactly one result collator, since it makes changes that
 	// must be synchronized.
-	go resultCollator(s, results, errs)
+	go readCollator(s, results, errs)
 
-	for _, fi := range files {
-		input <- fi
+	for _, file := range files {
+		filechan <- file
 	}
 
-	close(input)
+	close(filechan)
 
 	wg.Wait()
 
 	close(results)
 
-	return <-errs
+	readErrs := <-errs
+
+	results = make(chan pageResult)
+	pagechan := make(chan *Page)
+
+	wg = &sync.WaitGroup{}
+
+	for i := 0; i < procs*4; i++ {
+		wg.Add(1)
+		go pageRenderer(s, pagechan, results, wg)
+	}
+
+	go renderCollator(s, results, errs)
+
+	for _, p := range s.Pages {
+		pagechan <- p
+	}
+
+	close(pagechan)
+
+	wg.Wait()
+
+	close(results)
+
+	renderErrs := <-errs
+
+	if renderErrs == nil && readErrs == nil {
+		return nil
+	}
+	if renderErrs == nil {
+		return readErrs
+	}
+	if readErrs == nil {
+		return renderErrs
+	}
+	return fmt.Errorf("%s\n%s", readErrs, renderErrs)
 }
 
-func pageRenderer(s *Site, input <-chan *source.File, results chan<- pageRenderResult, wg *sync.WaitGroup) {
-	for file := range input {
+func pageReader(s *Site, files <-chan *source.File, results chan<- pageResult, wg *sync.WaitGroup) {
+	for file := range files {
 		page, err := NewPage(file.LogicalName)
 		if err != nil {
-			results <- pageRenderResult{nil, err}
-			continue
-		}
-		err = page.ReadFrom(file.Contents)
-		if err != nil {
-			results <- pageRenderResult{nil, err}
+			results <- pageResult{nil, err}
 			continue
 		}
 		page.Site = &s.Info
 		page.Tmpl = s.Tmpl
 		page.Section = file.Section
 		page.Dir = file.Dir
-
-		//Handling short codes prior to Conversion to HTML
-		page.ProcessShortcodes(s.Tmpl)
-
-		err = page.Convert()
-		if err != nil {
-			results <- pageRenderResult{nil, err}
+		if err := page.ReadFrom(file.Contents); err != nil {
+			results <- pageResult{nil, err}
 			continue
 		}
-		results <- pageRenderResult{page, nil}
+		results <- pageResult{page, nil}
 	}
 	wg.Done()
 }
 
-func resultCollator(s *Site, results <-chan pageRenderResult, errs chan<- error) {
+func pageRenderer(s *Site, pages <-chan *Page, results chan<- pageResult, wg *sync.WaitGroup) {
+	for page := range pages {
+		//Handling short codes prior to Conversion to HTML
+		page.ProcessShortcodes(s.Tmpl)
+
+		err := page.Convert()
+		if err != nil {
+			results <- pageResult{nil, err}
+			continue
+		}
+		results <- pageResult{page, nil}
+	}
+	wg.Done()
+}
+
+func renderCollator(s *Site, results <-chan pageResult, errs chan<- error) {
+	errMsgs := []string{}
+	for r := range results {
+		if r.err != nil {
+			errMsgs = append(errMsgs, r.err.Error())
+			continue
+		}
+	}
+	if len(errMsgs) == 0 {
+		errs <- nil
+		return
+	}
+	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
+}
+
+func readCollator(s *Site, results <-chan pageResult, errs chan<- error) {
 	errMsgs := []string{}
 	for r := range results {
 		if r.err != nil {
@@ -411,8 +468,9 @@ func resultCollator(s *Site, results <-chan pageRenderResult, errs chan<- error)
 	s.Pages.Sort()
 	if len(errMsgs) == 0 {
 		errs <- nil
+		return
 	}
-	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
+	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
 }
 
 func (s *Site) BuildSiteMeta() (err error) {