shithub: hugo

ref: 2ca615c5514f6d32232daeb8028b4f0992b18d10
parent: b97c6c7082070352b53acf9d8d311f73836d423f
author: Nate Finch <[email protected]>
date: Fri Sep 5 02:57:32 EDT 2014

Add Node.Site.Pages to all nodes and ensure all pages from the site exist in the list before processing shortcodes.

Node.Site.Recent is not really just recent pages but all pages, so I figured it was better to add a new field, Node.Site.Pages, with a more informative name; Recent stays as a legacy alias that points at the same list.
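
The backward-compatibility trick is that both fields are pointers to the same slice, so .Site.Recent and .Site.Pages can never diverge. Here is a self-contained sketch of the aliasing (the Page, Pages, and SiteInfo types below are simplified stand-ins for Hugo's real types, not the actual definitions):

	package main

	import "fmt"

	type Page struct{ Title string }
	type Pages []*Page

	// SiteInfo mirrors the shape of the patched struct: two pointer
	// fields aliasing one underlying slice variable.
	type SiteInfo struct {
		Pages  *Pages
		Recent *Pages // legacy alias, identical to Pages
	}

	func main() {
		var all Pages
		info := SiteInfo{Pages: &all, Recent: &all}

		// Appending through the shared variable updates both views,
		// because each field points at the variable, not at a copy
		// of the slice header.
		all = append(all, &Page{Title: "hello"})
		fmt.Println(len(*info.Pages), len(*info.Recent)) // 1 1
	}

Because the fields are *Pages rather than Pages, a reader of either name always sees the list as it exists after collation, even though append may reallocate the backing array.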

I also changed the code slightly so that every page is added to the site's page list before we start rendering shortcodes; this way a shortcode can refer to another page. Previously this was broken, because the list of pages was not fully populated while the shortcodes were being processed. The collation step does no disk reads and no rendering, so the reordering shouldn't add any measurable time.
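
The result is a two-stage worker pipeline: a read stage that collates every page into s.Pages, then a render stage that starts only once that list is complete. A self-contained toy sketch of the pattern follows (the file and page types are stand-ins, and the mutex here replaces the single collator goroutine the real patch uses for synchronization):

	package main

	import (
		"fmt"
		"sync"
	)

	type file struct{ name string }
	type page struct{ name string }

	func main() {
		files := []file{{"a.md"}, {"b.md"}, {"c.md"}}

		// Stage 1: read every source file into the shared page list.
		var (
			pages []page
			mu    sync.Mutex
			wg    sync.WaitGroup
		)
		filechan := make(chan file)
		for i := 0; i < 4; i++ {
			wg.Add(1)
			go func() {
				defer wg.Done()
				for f := range filechan {
					mu.Lock()
					pages = append(pages, page{f.name})
					mu.Unlock()
				}
			}()
		}
		for _, f := range files {
			filechan <- f
		}
		close(filechan)
		wg.Wait()

		// Stage 2: render only after the list is complete, so work
		// done here (shortcodes, in Hugo's case) can safely look up
		// any other page.
		pagechan := make(chan page)
		for i := 0; i < 4; i++ {
			wg.Add(1)
			go func() {
				defer wg.Done()
				for p := range pagechan {
					fmt.Printf("render %s (%d pages visible)\n", p.name, len(pages))
				}
			}()
		}
		for _, p := range pages {
			pagechan <- p
		}
		close(pagechan)
		wg.Wait()
	}

Splitting read from render is what makes cross-page shortcodes safe: by the time any ProcessShortcodes call runs, s.Pages is fully populated.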

--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -80,7 +80,8 @@
 	Taxonomies      TaxonomyList
 	Indexes         *TaxonomyList // legacy, should be identical to Taxonomies
 	Sections        Taxonomy
-	Recent          *Pages
+	Pages           *Pages
+	Recent          *Pages // legacy, should be identical to Pages
 	Menus           *Menus
 	Title           string
 	Author          map[string]string
@@ -278,6 +279,7 @@
 		LanguageCode:    viper.GetString("languagecode"),
 		Copyright:       viper.GetString("copyright"),
 		DisqusShortname: viper.GetString("DisqusShortname"),
+		Pages:           &s.Pages,
 		Recent:          &s.Pages,
 		Menus:           &s.Menus,
 		Params:          params,
@@ -312,7 +314,7 @@
 	return
 }
 
-type pageRenderResult struct {
+type pageResult struct {
 	page *Page
 	err  error
 }
@@ -327,8 +329,8 @@
 
 	files := s.Source.Files()
 
-	results := make(chan pageRenderResult)
-	input := make(chan *source.File)
+	results := make(chan pageResult)
+	filechan := make(chan *source.File)
 
 	procs := getGoMaxProcs()
 
@@ -336,7 +338,7 @@
 
 	for i := 0; i < procs*4; i++ {
 		wg.Add(1)
-		go pageRenderer(s, input, results, wg)
+		go pageReader(s, filechan, results, wg)
 	}
 
 	errs := make(chan error)
@@ -343,52 +345,92 @@
 
 	// we can only have exactly one result collator, since it makes changes that
 	// must be synchronized.
-	go resultCollator(s, results, errs)
+	go readCollator(s, results, errs)
 
-	for _, fi := range files {
-		input <- fi
+	for _, file := range files {
+		filechan <- file
 	}
 
-	close(input)
+	close(filechan)
 
 	wg.Wait()
 
 	close(results)
 
-	return <-errs
+	readErrs := <-errs
+
+	results = make(chan pageResult)
+	pagechan := make(chan *Page)
+
+	wg = &sync.WaitGroup{}
+
+	for i := 0; i < procs*4; i++ {
+		wg.Add(1)
+		go pageRenderer(s, pagechan, results, wg)
+	}
+
+	go renderCollator(s, results, errs)
+
+	for _, p := range s.Pages {
+		pagechan <- p
+	}
+
+	close(pagechan)
+
+	wg.Wait()
+
+	close(results)
+
+	renderErrs := <-errs
+
+	if renderErrs == nil && readErrs == nil {
+		return nil
+	}
+	if renderErrs == nil {
+		return readErrs
+	}
+	if readErrs == nil {
+		return renderErrs
+	}
+	return fmt.Errorf("%s\n%s", readErrs, renderErrs)
 }
 
-func pageRenderer(s *Site, input <-chan *source.File, results chan<- pageRenderResult, wg *sync.WaitGroup) {
-	for file := range input {
+func pageReader(s *Site, files <-chan *source.File, results chan<- pageResult, wg *sync.WaitGroup) {
+	for file := range files {
 		page, err := NewPage(file.LogicalName)
 		if err != nil {
-			results <- pageRenderResult{nil, err}
+			results <- pageResult{nil, err}
 			continue
 		}
-		err = page.ReadFrom(file.Contents)
-		if err != nil {
-			results <- pageRenderResult{nil, err}
-			continue
-		}
 		page.Site = &s.Info
 		page.Tmpl = s.Tmpl
 		page.Section = file.Section
 		page.Dir = file.Dir
+		if err := page.ReadFrom(file.Contents); err != nil {
+			results <- pageResult{nil, err}
+			continue
+		}
+		results <- pageResult{page, nil}
+	}
+	wg.Done()
+}
 
+func pageRenderer(s *Site, pages <-chan *Page, results chan<- pageResult, wg *sync.WaitGroup) {
+	for page := range pages {
 		//Handling short codes prior to Conversion to HTML
 		page.ProcessShortcodes(s.Tmpl)
 
-		err = page.Convert()
+		err := page.Convert()
 		if err != nil {
-			results <- pageRenderResult{nil, err}
+			results <- pageResult{nil, err}
 			continue
 		}
-		results <- pageRenderResult{page, nil}
+		results <- pageResult{page, nil}
 	}
 	wg.Done()
 }
 
-func resultCollator(s *Site, results <-chan pageRenderResult, errs chan<- error) {
+func renderCollator(s *Site, results <-chan pageResult, errs chan<- error) {
 	errMsgs := []string{}
 	for r := range results {
 		if r.err != nil {
@@ -395,7 +437,22 @@
 			errMsgs = append(errMsgs, r.err.Error())
 			continue
 		}
+	}
+	if len(errMsgs) == 0 {
+		errs <- nil
+		return
+	}
+	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
+}
 
+func readCollator(s *Site, results <-chan pageResult, errs chan<- error) {
+	errMsgs := []string{}
+	for r := range results {
+		if r.err != nil {
+			errMsgs = append(errMsgs, r.err.Error())
+			continue
+		}
+
 		if r.page.ShouldBuild() {
 			s.Pages = append(s.Pages, r.page)
 		}
@@ -411,8 +468,9 @@
 	s.Pages.Sort()
 	if len(errMsgs) == 0 {
 		errs <- nil
+		return
 	}
-	errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
+	errs <- fmt.Errorf("Errors reading pages: %s", strings.Join(errMsgs, "\n"))
 }
 
 func (s *Site) BuildSiteMeta() (err error) {