shithub: hugo

ref: a843d5d3bbc833f00b82647295ac8a7e50fa7da2
parent: 99d11386a7de544f044f30782310175cf2df4004
author: Bjørn Erik Pedersen <[email protected]>
date: Wed Nov 16 16:06:10 EST 2016

node to page: Final TODO-fixes

Updates #2297
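
For context: this series folds the old Node type into Page, with the Kind field distinguishing list-style pages from regular content. A minimal sketch inside package hugolib, using the Kind constants that appear in the hunks below (describeKind itself is hypothetical):

	func describeKind(p *Page) string {
		switch p.Kind {
		case KindHome:
			return "home page"
		case KindSection:
			return "section list"
		case KindTaxonomy:
			return "taxonomy list"
		case KindTaxonomyTerm:
			return "taxonomy terms list"
		case KindPage:
			return "regular content page"
		default:
			return "internal kind, e.g. kindRSS"
		}
	}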

--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -15,9 +15,7 @@
 
 import (
 	"fmt"
-	"html/template"
 	"os"
-	"path"
 	"strings"
 	"sync"
 
@@ -25,7 +23,6 @@
 
 	"github.com/spf13/viper"
 
-	"github.com/bep/inflect"
 	"github.com/spf13/hugo/source"
 	"github.com/spf13/hugo/tpl"
 	jww "github.com/spf13/jwalterweatherman"
@@ -298,84 +295,6 @@
 		}
 	}
 	return nil
-}
-
-// TODO(bep) np move
-// Move the new* methods after cleanup in site.go
-func (s *Site) newNodePage(typ string) *Page {
-	return &Page{
-		pageInit: &pageInit{},
-		Kind:     typ,
-		Data:     make(map[string]interface{}),
-		Site:     &s.Info,
-		language: s.Language,
-		site:     s}
-}
-
-func (s *Site) newHomePage() *Page {
-	p := s.newNodePage(KindHome)
-	p.Title = s.Info.Title
-	pages := Pages{}
-	p.Data["Pages"] = pages
-	p.Pages = pages
-	s.setPageURLs(p, "/")
-	return p
-}
-
-func (s *Site) setPageURLs(p *Page, in string) {
-	p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
-	p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
-	p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
-}
-
-func (s *Site) newTaxonomyPage(plural, key string) *Page {
-
-	p := s.newNodePage(KindTaxonomy)
-
-	p.sections = []string{plural, key}
-
-	if s.Info.preserveTaxonomyNames {
-		key = s.Info.pathSpec.MakePathSanitized(key)
-	}
-
-	if s.Info.preserveTaxonomyNames {
-		// keep as is in the title
-		p.Title = key
-	} else {
-		p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
-	}
-
-	s.setPageURLs(p, path.Join(plural, key))
-
-	return p
-}
-
-func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
-
-	p := s.newNodePage(KindSection)
-	p.sections = []string{name}
-
-	sectionName := name
-	if !s.Info.preserveTaxonomyNames && len(section) > 0 {
-		sectionName = section[0].Page.Section()
-	}
-
-	sectionName = helpers.FirstUpper(sectionName)
-	if viper.GetBool("pluralizeListTitles") {
-		p.Title = inflect.Pluralize(sectionName)
-	} else {
-		p.Title = sectionName
-	}
-	s.setPageURLs(p, name)
-	return p
-}
-
-func (s *Site) newTaxonomyTermsPage(plural string) *Page {
-	p := s.newNodePage(KindTaxonomyTerm)
-	p.sections = []string{plural}
-	p.Title = strings.Title(plural)
-	s.setPageURLs(p, plural)
-	return p
 }
 
 func (h *HugoSites) setupTranslations() {
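
The new* constructors and setPageURLs removed above are re-added unchanged to hugolib/site.go later in this patch, closing the "Move the new* methods after cleanup in site.go" TODO. A minimal usage sketch, assuming a configured *Site s inside package hugolib:

	home := s.newHomePage()                // Kind == KindHome, URL "/"
	tags := s.newTaxonomyTermsPage("tags") // Kind == KindTaxonomyTerm, Title "Tags"
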
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -175,7 +175,7 @@
 
 	site *Site
 
-	// Pulled over from Node. TODO(bep) np reorg and group (embed)
+	// Pulled over from old Node. TODO(bep) reorg and group (embed)
 
 	Site *SiteInfo `json:"-"`
 
@@ -1458,7 +1458,6 @@
 	return nil
 }
 
-// TODO(bep) np naming, move some
 func (p *Page) prepareData(s *Site) error {
 
 	var pages Pages
@@ -1507,7 +1506,7 @@
 }
 
 func (p *Page) updatePageDates() {
-	// TODO(bep) np there is a potential issue with page sorting for home pages
+	// TODO(bep) there is a potential issue with page sorting for home pages
 	// etc. without front matter dates set, but let us wrap our heads around
 	// that another time.
 	if !p.IsNode() {
@@ -1552,8 +1551,6 @@
 	c.pageInit = &pageInit{}
 	return &c
 }
-
-// TODO(bep) np these are pulled over from Node. Needs regrouping / embed
 
 func (p *Page) Now() time.Time {
 	return time.Now()
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -13,10 +13,6 @@
 
 package hugolib
 
-import (
-	"fmt"
-)
-
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
 	// Includes only pages of all types, and only pages in the current language.
@@ -34,6 +30,9 @@
 	// This is for the current language only.
 	RegularPages Pages
 
+	// A convenience cache for all the regular pages.
+	AllRegularPages Pages
+
 	// Includes absolute all pages (of all types), including drafts etc.
 	rawAllPages Pages
 }
@@ -41,13 +40,7 @@
 func (c *PageCollections) refreshPageCaches() {
 	c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
 	c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
-
-	// TODO(bep) np remove eventually
-	for _, n := range c.Pages {
-		if n.Kind == kindUnknown {
-			panic(fmt.Sprintf("Got unknown type %s", n.Title))
-		}
-	}
+	c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
 }
 
 func newPageCollections() *PageCollections {
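
refreshPageCaches now also fills the new AllRegularPages cache, which the ref/relref lookups in site.go below switch to, so they scan only regular content pages rather than all pages. A minimal sketch of such a lookup (findByPath is a hypothetical helper, inside package hugolib):

	func findByPath(c *PageCollections, path string) *Page {
		// AllRegularPages holds pages of Kind == KindPage across all languages.
		for _, p := range c.AllRegularPages {
			if p.Source.Path() == path {
				return p
			}
		}
		return nil
	}
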
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -27,6 +27,8 @@
 	"sync"
 	"time"
 
+	"github.com/bep/inflect"
+
 	"sync/atomic"
 
 	"github.com/fsnotify/fsnotify"
@@ -285,8 +287,7 @@
 	var link string
 
 	if refURL.Path != "" {
-		// TODO(bep) np relRef
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			refPath := filepath.FromSlash(refURL.Path)
 			if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
 				target = page
@@ -357,8 +358,7 @@
 			}
 		}
 
-		// TODO(bep) np sourceRelativeLink
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			if page.Source.Path() == refPath {
 				target = page
 				break
@@ -367,7 +367,7 @@
 		// need to exhaust the test, then try with the others :/
 		// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
 		mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			if page.Source.Path() == mdPath {
 				target = page
 				break
@@ -374,7 +374,7 @@
 			}
 		}
 		indexPath := filepath.Join(refPath, "index.md")
-		for _, page := range s.AllPages {
+		for _, page := range s.AllRegularPages {
 			if page.Source.Path() == indexPath {
 				target = page
 				break
@@ -1525,7 +1525,7 @@
 func (s *Site) assembleSections() {
 	s.Sections = make(Taxonomy)
 	s.Info.Sections = s.Sections
-	// TODO(bep) np check these vs the caches
+
 	regularPages := s.findPagesByKind(KindPage)
 	sectionPages := s.findPagesByKind(KindSection)
 
@@ -1716,7 +1716,6 @@
 
 	var pageTarget target.Output
 
-	// TODO(bep) np ugly urls vs frontmatter
 	if p, ok := d.(*Page); ok && p.IsPage() && path.Ext(p.URLPath.URL) != "" {
 		// user has explicitly set a URL with extension for this page
 		// make sure it sticks even if "ugly URLs" are turned off.
@@ -1988,4 +1987,80 @@
 		}
 	}
 	return 1
+}
+
+func (s *Site) newNodePage(typ string) *Page {
+	return &Page{
+		pageInit: &pageInit{},
+		Kind:     typ,
+		Data:     make(map[string]interface{}),
+		Site:     &s.Info,
+		language: s.Language,
+		site:     s}
+}
+
+func (s *Site) newHomePage() *Page {
+	p := s.newNodePage(KindHome)
+	p.Title = s.Info.Title
+	pages := Pages{}
+	p.Data["Pages"] = pages
+	p.Pages = pages
+	s.setPageURLs(p, "/")
+	return p
+}
+
+func (s *Site) setPageURLs(p *Page, in string) {
+	p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
+	p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
+	p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
+}
+
+func (s *Site) newTaxonomyPage(plural, key string) *Page {
+
+	p := s.newNodePage(KindTaxonomy)
+
+	p.sections = []string{plural, key}
+
+	if s.Info.preserveTaxonomyNames {
+		key = s.Info.pathSpec.MakePathSanitized(key)
+	}
+
+	if s.Info.preserveTaxonomyNames {
+		// keep as is in the title
+		p.Title = key
+	} else {
+		p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
+	}
+
+	s.setPageURLs(p, path.Join(plural, key))
+
+	return p
+}
+
+func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
+
+	p := s.newNodePage(KindSection)
+	p.sections = []string{name}
+
+	sectionName := name
+	if !s.Info.preserveTaxonomyNames && len(section) > 0 {
+		sectionName = section[0].Page.Section()
+	}
+
+	sectionName = helpers.FirstUpper(sectionName)
+	if viper.GetBool("pluralizeListTitles") {
+		p.Title = inflect.Pluralize(sectionName)
+	} else {
+		p.Title = sectionName
+	}
+	s.setPageURLs(p, name)
+	return p
+}
+
+func (s *Site) newTaxonomyTermsPage(plural string) *Page {
+	p := s.newNodePage(KindTaxonomyTerm)
+	p.sections = []string{plural}
+	p.Title = strings.Title(plural)
+	s.setPageURLs(p, plural)
+	return p
 }
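
A small worked example of the title logic in newTaxonomyPage as moved here, assuming preserveTaxonomyNames is off:

	key := "open-source"
	title := strings.Replace(strings.Title(key), "-", " ", -1)
	// title == "Open Source"; with preserveTaxonomyNames on, the key is kept verbatim.
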
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -87,7 +87,6 @@
 }
 
 // renderPaginator must be run after the owning Page has been rendered.
-// TODO(bep) np
 func (s *Site) renderPaginator(p *Page) error {
 	if p.paginator != nil {
 		jww.DEBUG.Printf("Render paginator for page %q", p.Path())
@@ -95,10 +94,8 @@
 
 		// write alias for page 1
 		// TODO(bep) ml all of these n.addLang ... fix.
-		// TODO(bep) np URL
 
 		aliasPath := p.addLangPathPrefix(helpers.PaginateAliasPath(path.Join(p.sections...), 1))
-		//TODO(bep) np node.permalink
 		link := p.Permalink()
 		s.writeDestAlias(aliasPath, link, nil)
 
@@ -141,16 +138,18 @@
 		return nil
 	}
 
-	// TODO(bep) np check RSS titles
-	// TODO(bep) np check RSS page limit, 50?
-	rssNode := p.copy()
-	rssNode.Kind = kindRSS
-
-	// TODO(bep) np todelido URL
+	rssPage := p.copy()
+	rssPage.Kind = kindRSS
+	high := 50
+	if len(rssPage.Pages) > high {
+		rssPage.Pages = rssPage.Pages[:high]
+		rssPage.Data["Pages"] = rssPage.Pages
+	}
 	rssURI := s.Language.GetString("rssURI")
-	rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI)
+	rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+	s.setPageURLs(rssPage, rssPath)
 
-	if err := s.renderAndWriteXML(rssNode.Title, rssNode.addLangFilepathPrefix(rssNode.URLPath.URL), rssNode, s.appendThemeTemplates(layouts)...); err != nil {
+	if err := s.renderAndWriteXML(rssPage.Title, rssPage.addLangFilepathPrefix(rssPath), rssPage, s.appendThemeTemplates(layouts)...); err != nil {
 		return err
 	}
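
The rewritten RSS rendering above resolves two of the dropped TODOs: it caps the feed at 50 entries and derives the feed URL through setPageURLs instead of mutating URLPath directly. A sketch of the resulting path, assuming the default rssURI of "index.xml" and a section list at "/blog/":

	rssPath := path.Join("/blog/", "index.xml") // "/blog/index.xml"
	s.setPageURLs(rssPage, rssPath)             // sets URLPath.URL, Permalink and RSSLink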