ref: 9fba2a30a9ab5e17a699920079b3ca192686afd4
parent: 96f71141bd4b9e0264bce725f6fec0555c46b1c3
author: Bjørn Erik Pedersen <[email protected]>
date: Sun Nov 13 06:43:23 EST 2016
node to page: Rename PageType to Kind and embed that on Page. Updates #2297
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -191,8 +191,8 @@
func (h *HugoSites) assignMissingTranslations() error {
// This looks heavy, but it should be a small number of nodes by now.
- allPages := h.findAllPagesByNodeTypeNotIn(PagePage)
- for _, nodeType := range []PageType{PageHome, PageSection, PageTaxonomy, PageTaxonomyTerm} {
+ allPages := h.findAllPagesByNodeTypeNotIn(KindPage)
+ for _, nodeType := range []Kind{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} {
nodes := h.findPagesByNodeTypeIn(nodeType, allPages)
// Assign translations
@@ -218,7 +218,7 @@
for _, s := range h.Sites {
// home pages
- home := s.findPagesByNodeType(PageHome)
+ home := s.findPagesByNodeType(KindHome)
if len(home) > 1 {
panic("Too many homes")
}
@@ -231,8 +231,8 @@
// taxonomy list and terms pages
taxonomies := s.Language.GetStringMapString("taxonomies")
if len(taxonomies) > 0 {
- taxonomyPages := s.findPagesByNodeType(PageTaxonomy)
- taxonomyTermsPages := s.findPagesByNodeType(PageTaxonomyTerm)
+ taxonomyPages := s.findPagesByNodeType(KindTaxonomy)
+ taxonomyTermsPages := s.findPagesByNodeType(KindTaxonomyTerm)
for _, plural := range taxonomies {
tax := s.Taxonomies[plural]
foundTaxonomyPage := false
@@ -266,7 +266,7 @@
}
}
- sectionPages := s.findPagesByNodeType(PageSection)
+ sectionPages := s.findPagesByNodeType(KindSection)
if len(sectionPages) < len(s.Sections) {
for name, section := range s.Sections {
// A section may be created for the root content folder if a
@@ -304,9 +304,9 @@
// TODO(bep) np move
// Move the new* methods after cleanup in site.go
-func (s *Site) newNodePage(typ PageType) *Page {
+func (s *Site) newNodePage(typ Kind) *Page {
return &Page{
- PageType: typ,
+ Kind: typ,
Node: Node{
Data: make(map[string]interface{}),
Site: &s.Info,
@@ -315,7 +315,7 @@
}
func (s *Site) newHomePage() *Page {
- p := s.newNodePage(PageHome)
+ p := s.newNodePage(KindHome)
p.Title = s.Info.Title
pages := Pages{}
p.Data["Pages"] = pages
@@ -334,7 +334,7 @@
func (s *Site) newTaxonomyPage(plural, key string) *Page {
- p := s.newNodePage(PageTaxonomy)
+ p := s.newNodePage(KindTaxonomy)
p.sections = []string{plural, key}
@@ -356,7 +356,7 @@
func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
- p := s.newNodePage(PageSection)
+ p := s.newNodePage(KindSection)
p.sections = []string{name}
sectionName := name
@@ -375,7 +375,7 @@
}
func (s *Site) newTaxonomyTermsPage(plural string) *Page {
- p := s.newNodePage(PageTaxonomyTerm)
+ p := s.newNodePage(KindTaxonomyTerm)
p.sections = []string{plural}
p.Title = strings.Title(plural)
s.setPageURLs(p, plural)
@@ -566,19 +566,19 @@
}
// TODO(bep) np remove
-func (h *HugoSites) findAllPagesByNodeType(n PageType) Pages {
+func (h *HugoSites) findAllPagesByNodeType(n Kind) Pages {
return h.Sites[0].findAllPagesByNodeType(n)
}
-func (h *HugoSites) findPagesByNodeTypeNotIn(n PageType, inPages Pages) Pages {
+func (h *HugoSites) findPagesByNodeTypeNotIn(n Kind, inPages Pages) Pages {
return h.Sites[0].findPagesByNodeTypeNotIn(n, inPages)
}
-func (h *HugoSites) findPagesByNodeTypeIn(n PageType, inPages Pages) Pages {
+func (h *HugoSites) findPagesByNodeTypeIn(n Kind, inPages Pages) Pages {
return h.Sites[0].findPagesByNodeTypeIn(n, inPages)
}
-func (h *HugoSites) findAllPagesByNodeTypeNotIn(n PageType) Pages {
+func (h *HugoSites) findAllPagesByNodeTypeNotIn(n Kind) Pages {
return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllPages)
}
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -296,7 +296,7 @@
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello")
// Check node translations
- homeEn := enSite.getPage(PageHome)
+ homeEn := enSite.getPage(KindHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
@@ -306,7 +306,7 @@
require.Equal(t, "På bokmål", homeEn.Translations()[2].Title, configSuffix)
require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)
- sectFr := frSite.getPage(PageSection, "sect")
+ sectFr := frSite.getPage(KindSection, "sect")
require.NotNil(t, sectFr)
require.Equal(t, "fr", sectFr.Lang())
@@ -316,12 +316,12 @@
nnSite := sites.Sites[2]
require.Equal(t, "nn", nnSite.Language.Lang)
- taxNn := nnSite.getPage(PageTaxonomyTerm, "lag")
+ taxNn := nnSite.getPage(KindTaxonomyTerm, "lag")
require.NotNil(t, taxNn)
require.Len(t, taxNn.Translations(), 1)
require.Equal(t, "nb", taxNn.Translations()[0].Lang())
- taxTermNn := nnSite.getPage(PageTaxonomy, "lag", "sogndal")
+ taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal")
require.NotNil(t, taxTermNn)
require.Len(t, taxTermNn.Translations(), 1)
require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
@@ -514,7 +514,7 @@
docFr := readDestination(t, "public/fr/sect/doc1/index.html")
require.True(t, strings.Contains(docFr, "Salut"), "No Salut")
- homeEn := enSite.getPage(PageHome)
+ homeEn := enSite.getPage(KindHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
@@ -621,7 +621,7 @@
require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
- homeEn := enSite.getPage(PageHome)
+ homeEn := enSite.getPage(KindHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -415,11 +415,11 @@
fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages))
- nodeFirst := s.getPage(PageSection, "first")
+ nodeFirst := s.getPage(KindSection, "first")
require.NotNil(t, nodeFirst)
- nodeSecond := s.getPage(PageSection, "second-section")
+ nodeSecond := s.getPage(KindSection, "second-section")
require.NotNil(t, nodeSecond)
- nodeFishy := s.getPage(PageSection, "fish-and-chips")
+ nodeFishy := s.getPage(KindSection, "fish-and-chips")
require.Equal(t, "fish-and-chips", nodeFishy.sections[0])
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
@@ -555,7 +555,7 @@
s := setupMenuTests(t, menuPageSources)
- home := s.getPage(PageHome)
+ home := s.getPage(KindHome)
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -176,7 +176,7 @@
}
func (p *Page) isTranslation(candidate *Page) bool {
- if p == candidate || p.PageType != candidate.PageType {
+ if p == candidate || p.Kind != candidate.Kind {
return false
}
@@ -184,7 +184,7 @@
return false
}
- if p.PageType == PagePage || p.PageType == pageUnknown {
+ if p.Kind == KindPage || p.Kind == kindUnknown {
panic("Node type not currently supported for this op")
}
@@ -291,41 +291,41 @@
}
// TODO(bep) np node identificator
-func nodeTypeFromFilename(filename string) PageType {
+func nodeTypeFromFilename(filename string) Kind {
if !strings.Contains(filename, "_index") {
- return PagePage
+ return KindPage
}
if strings.HasPrefix(filename, "_index") {
- return PageHome
+ return KindHome
}
// We don't know enough yet to determine the type.
- return pageUnknown
+ return kindUnknown
}
func (p *Page) setNodeTypeVars(s *Site) {
// TODO(bep) np taxonomies etc.
- if p.PageType == pageUnknown {
+ if p.Kind == kindUnknown {
// This is either a taxonomy list, taxonomy term or a section
nodeType := s.nodeTypeFromSections(p.sections)
- if nodeType == pageUnknown {
+ if nodeType == kindUnknown {
panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
}
- p.PageType = nodeType
+ p.Kind = nodeType
}
// TODO(bep) np node URL
// Set Node URL
- switch p.PageType {
- case PageHome:
+ switch p.Kind {
+ case KindHome:
p.URLPath.URL = ""
- case PageSection:
+ case KindSection:
p.URLPath.URL = p.sections[0]
- case PageTaxonomy:
+ case KindTaxonomy:
p.URLPath.URL = path.Join(p.sections...)
- case PageTaxonomyTerm:
+ case KindTaxonomyTerm:
p.URLPath.URL = path.Join(p.sections...)
}
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -76,7 +76,7 @@
assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
h := s.owner
- nodes := h.findAllPagesByNodeTypeNotIn(PagePage)
+ nodes := h.findAllPagesByNodeTypeNotIn(KindPage)
require.Len(t, nodes, 6)
home := nodes[5] // oldest
@@ -88,7 +88,7 @@
section2 := nodes[3]
require.Equal(t, "Section2", section2.Title)
- pages := h.findAllPagesByNodeType(PagePage)
+ pages := h.findAllPagesByNodeType(KindPage)
require.Len(t, pages, 4)
first := pages[0]
@@ -118,7 +118,7 @@
assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
"Pag: Page 02")
- sections := h.findAllPagesByNodeType(PageSection)
+ sections := h.findAllPagesByNodeType(KindSection)
require.Len(t, sections, 2)
@@ -179,7 +179,7 @@
}
// Home page
- homePages := s.findIndexNodesByNodeType(PageHome)
+ homePages := s.findIndexNodesByNodeType(KindHome)
require.Len(t, homePages, 1)
homePage := homePages[0]
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -49,39 +49,54 @@
cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
)
-// PageType is the discriminator that identifies the different page types
+// Kind is the discriminator that identifies the different page types
// in the different page collections. This can, as an example, be used
// to filter regular pages, find sections etc.
// NOTE: The exported constants below are used to filter pages from
// templates in the wild, so do not change the values!
-type PageType string
+type Kind string
const (
- PagePage PageType = "page"
+ KindPage Kind = "page"
// The rest are node types; home page, sections etc.
- PageHome PageType = "home"
- PageSection PageType = "section"
- PageTaxonomy PageType = "taxonomy"
- PageTaxonomyTerm PageType = "taxonomyTerm"
+ KindHome Kind = "home"
+ KindSection Kind = "section"
+ KindTaxonomy Kind = "taxonomy"
+ KindTaxonomyTerm Kind = "taxonomyTerm"
// Temporary state.
- pageUnknown PageType = "unknown"
+ kindUnknown Kind = "unknown"
// The following are (currently) temporary nodes,
// i.e. nodes we create just to render in isolation.
- pageSitemap PageType = "sitemap"
- pageRobotsTXT PageType = "robotsTXT"
- page404 PageType = "404"
+ kindSitemap Kind = "sitemap"
+ kindRobotsTXT Kind = "robotsTXT"
+ kind404 Kind = "404"
)
-func (p PageType) IsNode() bool {
- return p != PagePage
+// IsNode returns whether this is an item of one of the list types in Hugo,
+// i.e. not a regular content page.
+func (k Kind) IsNode() bool {
+ return k != KindPage
}
+// IsHome returns whether this is the home page.
+func (k Kind) IsHome() bool {
+ return k == KindHome
+}
+
+// IsPage returns whether this is a regular content page.
+func (k Kind) IsPage() bool {
+ return k == KindPage
+}
+
type Page struct {
- PageType PageType
+ // Kind will, for the pages available to the templates, be one of:
+ // page, home, section, taxonomy and taxonomyTerm.
+ Kind
+
// Since Hugo 0.18 we got rid of the Node type. So now all pages are ...
// pages (regular pages, home page, sections etc.).
// Sections etc. will have child pages. These were earlier placed in .Data.Pages,
@@ -185,18 +200,6 @@
return 0
}
-func (p *Page) IsNode() bool {
- return p.PageType.IsNode()
-}
-
-func (p *Page) IsHome() bool {
- return p.PageType == PageHome
-}
-
-func (p *Page) IsPage() bool {
- return p.PageType == PagePage
-}
-
type Position struct {
Prev *Page
Next *Page
@@ -482,7 +485,7 @@
func newPage(filename string) *Page {
page := Page{
- PageType: nodeTypeFromFilename(filename),
+ Kind: nodeTypeFromFilename(filename),
contentType: "",
Source: Source{File: *source.NewFile(filename)},
Node: Node{Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
@@ -521,16 +524,16 @@
}
// TODO(bep) np taxonomy etc.
- switch p.PageType {
- case PageHome:
+ switch p.Kind {
+ case KindHome:
return []string{"index.html", "_default/list.html"}
- case PageSection:
+ case KindSection:
section := p.sections[0]
return []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}
- case PageTaxonomy:
+ case KindTaxonomy:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}
- case PageTaxonomyTerm:
+ case KindTaxonomyTerm:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
}
@@ -555,18 +558,18 @@
// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
// if no RSS should be rendered.
func (p *Page) rssLayouts() []string {
- switch p.PageType {
- case PageHome:
+ switch p.Kind {
+ case KindHome:
return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
- case PageSection:
+ case KindSection:
section := p.sections[0]
return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
- case PageTaxonomy:
+ case KindTaxonomy:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
- case PageTaxonomyTerm:
+ case KindTaxonomyTerm:
// No RSS for taxonomy terms
- case PagePage:
+ case KindPage:
// No RSS for regular pages
}
@@ -776,7 +779,7 @@
func (p *Page) Permalink() (string, error) {
// TODO(bep) np permalink
- if p.PageType.IsNode() {
+ if p.IsNode() {
return p.Node.Permalink(), nil
}
link, err := p.permalink()
@@ -1014,7 +1017,7 @@
func (p *Page) HasMenuCurrent(menu string, me *MenuEntry) bool {
// TODO(bep) np menu
- if p.PageType.IsNode() {
+ if p.IsNode() {
return p.Node.HasMenuCurrent(menu, me)
}
menus := p.Menus()
@@ -1044,7 +1047,7 @@
func (p *Page) IsMenuCurrent(menu string, inme *MenuEntry) bool {
// TODO(bep) np menu
- if p.PageType.IsNode() {
+ if p.IsNode() {
return p.Node.IsMenuCurrent(menu, inme)
}
menus := p.Menus()
@@ -1253,14 +1256,14 @@
func (p *Page) TargetPath() (outfile string) {
// TODO(bep) np
- switch p.PageType {
- case PageHome:
+ switch p.Kind {
+ case KindHome:
return p.addLangFilepathPrefix(helpers.FilePathSeparator)
- case PageSection:
+ case KindSection:
return p.addLangFilepathPrefix(p.sections[0])
- case PageTaxonomy:
+ case KindTaxonomy:
return p.addLangFilepathPrefix(filepath.Join(p.sections...))
- case PageTaxonomyTerm:
+ case KindTaxonomyTerm:
return p.addLangFilepathPrefix(filepath.Join(p.sections...))
}
@@ -1305,7 +1308,7 @@
func (p *Page) prepareLayouts() error {
// TODO(bep): Check the IsRenderable logic.
- if p.PageType == PagePage {
+ if p.Kind == KindPage {
var layouts []string
if !p.IsRenderable() {
self := "__" + p.TargetPath()
@@ -1329,17 +1332,17 @@
var pages Pages
p.Data = make(map[string]interface{})
- switch p.PageType {
- case PagePage:
- case PageHome:
- pages = s.findPagesByNodeTypeNotIn(PageHome, s.Pages)
- case PageSection:
+ switch p.Kind {
+ case KindPage:
+ case KindHome:
+ pages = s.findPagesByNodeTypeNotIn(KindHome, s.Pages)
+ case KindSection:
sectionData, ok := s.Sections[p.sections[0]]
if !ok {
return fmt.Errorf("Data for section %s not found", p.Section())
}
pages = sectionData.Pages()
- case PageTaxonomy:
+ case KindTaxonomy:
plural := p.sections[0]
term := p.sections[1]
@@ -1350,7 +1353,7 @@
p.Data["Singular"] = singular
p.Data["Plural"] = plural
pages = taxonomy.Pages()
- case PageTaxonomyTerm:
+ case KindTaxonomyTerm:
plural := p.sections[0]
singular := s.taxonomiesPluralSingular[plural]
@@ -1375,7 +1378,7 @@
// TODO(bep) np there is a potential issue with page sorting for home pages
// etc. without front matter dates set, but let us wrap the head around
// that in another time.
- if !p.PageType.IsNode() {
+ if !p.Kind.IsNode() {
return
}
@@ -1415,7 +1418,7 @@
// the paginators etc., we do it manually here.
// TODO(bep) np do better
func (p *Page) copy() *Page {
- c := &Page{PageType: p.PageType, Node: Node{Site: p.Site}}
+ c := &Page{Kind: p.Kind, Node: Node{Site: p.Site}}
c.Title = p.Title
c.Data = p.Data
c.Date = p.Date
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -40,12 +40,12 @@
}
func (c *PageCollections) refreshPageCaches() {
- c.indexPages = c.findPagesByNodeTypeNotIn(PagePage, c.Pages)
- c.regularPages = c.findPagesByNodeTypeIn(PagePage, c.Pages)
+ c.indexPages = c.findPagesByNodeTypeNotIn(KindPage, c.Pages)
+ c.regularPages = c.findPagesByNodeTypeIn(KindPage, c.Pages)
// TODO(bep) np remove eventually
for _, n := range c.Pages {
- if n.PageType == pageUnknown {
+ if n.Kind == kindUnknown {
panic(fmt.Sprintf("Got unknown type %s", n.Title))
}
}
@@ -61,11 +61,11 @@
// TODO(bep) np clean and remove finders
-func (c *PageCollections) findPagesByNodeType(n PageType) Pages {
+func (c *PageCollections) findPagesByNodeType(n Kind) Pages {
return c.findPagesByNodeTypeIn(n, c.Pages)
}
-func (c *PageCollections) getPage(typ PageType, path ...string) *Page {
+func (c *PageCollections) getPage(typ Kind, path ...string) *Page {
pages := c.findPagesByNodeTypeIn(typ, c.Pages)
if len(pages) == 0 {
@@ -94,14 +94,14 @@
return nil
}
-func (c *PageCollections) findIndexNodesByNodeType(n PageType) Pages {
+func (c *PageCollections) findIndexNodesByNodeType(n Kind) Pages {
return c.findPagesByNodeTypeIn(n, c.indexPages)
}
-func (*PageCollections) findPagesByNodeTypeIn(n PageType, inPages Pages) Pages {
+func (*PageCollections) findPagesByNodeTypeIn(n Kind, inPages Pages) Pages {
var pages Pages
for _, p := range inPages {
- if p.PageType == n {
+ if p.Kind == n {
pages = append(pages, p)
}
}
@@ -108,10 +108,10 @@
return pages
}
-func (*PageCollections) findPagesByNodeTypeNotIn(n PageType, inPages Pages) Pages {
+func (*PageCollections) findPagesByNodeTypeNotIn(n Kind, inPages Pages) Pages {
var pages Pages
for _, p := range inPages {
- if p.PageType != n {
+ if p.Kind != n {
pages = append(pages, p)
}
}
@@ -118,11 +118,11 @@
return pages
}
-func (c *PageCollections) findAllPagesByNodeType(n PageType) Pages {
+func (c *PageCollections) findAllPagesByNodeType(n Kind) Pages {
return c.findPagesByNodeTypeIn(n, c.Pages)
}
-func (c *PageCollections) findRawAllPagesByNodeType(n PageType) Pages {
+func (c *PageCollections) findRawAllPagesByNodeType(n Kind) Pages {
return c.findPagesByNodeTypeIn(n, c.rawAllPages)
}
--- a/hugolib/page_permalink_test.go
+++ b/hugolib/page_permalink_test.go
@@ -66,7 +66,7 @@
info := newSiteInfo(siteBuilderCfg{baseURL: string(test.base), language: helpers.NewDefaultLanguage()})
p := &Page{
- PageType: PagePage,
+ Kind: KindPage,
Node: Node{
URLPath: URLPath{
Section: "z",
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -1269,7 +1269,7 @@
{func(n *Page) bool { return n.Now().Unix() == time.Now().Unix() }},
} {
- n := &Page{PageType: PageHome}
+ n := &Page{Kind: KindHome}
n.RSSLink = "rssLink"
if !this.assertFunc(n) {
@@ -1281,14 +1281,14 @@
func TestPageType(t *testing.T) {
// Add tests for these constants to make sure they don't change
- require.Equal(t, PageType("page"), PagePage)
- require.Equal(t, PageType("home"), PageHome)
- require.Equal(t, PageType("section"), PageSection)
- require.Equal(t, PageType("taxonomy"), PageTaxonomy)
- require.Equal(t, PageType("taxonomyTerm"), PageTaxonomyTerm)
+ require.Equal(t, Kind("page"), KindPage)
+ require.Equal(t, Kind("home"), KindHome)
+ require.Equal(t, Kind("section"), KindSection)
+ require.Equal(t, Kind("taxonomy"), KindTaxonomy)
+ require.Equal(t, Kind("taxonomyTerm"), KindTaxonomyTerm)
- require.False(t, PagePage.IsNode())
- require.True(t, PageHome.IsNode())
+ require.False(t, KindPage.IsNode())
+ require.True(t, KindHome.IsNode())
}
func TestChompBOM(t *testing.T) {
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -260,8 +260,8 @@
// Paginator gets this Node's paginator if it's already created.
// If it's not, one will be created with all pages in Data["Pages"].
func (n *Page) Paginator(options ...interface{}) (*Pager, error) {
- if !n.PageType.IsNode() {
- return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.PageType, n.Title)
+ if !n.Kind.IsNode() {
+ return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.Kind, n.Title)
}
pagerSize, err := resolvePagerSize(options...)
@@ -303,8 +303,8 @@
// If it's not, one will be created with the given sequence.
// Note that repeated calls will return the same result, even if the sequence is different.
func (n *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
- if !n.PageType.IsNode() {
- return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.PageType, n.Title)
+ if !n.Kind.IsNode() {
+ return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.Kind, n.Title)
}
pagerSize, err := resolvePagerSize(options...)
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1533,8 +1533,8 @@
s.Sections = make(Taxonomy)
s.Info.Sections = s.Sections
// TODO(bep) np check these vs the caches
- regularPages := s.findPagesByNodeType(PagePage)
- sectionPages := s.findPagesByNodeType(PageSection)
+ regularPages := s.findPagesByNodeType(KindPage)
+ sectionPages := s.findPagesByNodeType(KindSection)
for i, p := range regularPages {
s.Sections.add(p.Section(), WeightedPage{regularPages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
@@ -1561,14 +1561,14 @@
}
}
-func (s *Site) nodeTypeFromSections(sections []string) PageType {
+func (s *Site) nodeTypeFromSections(sections []string) Kind {
if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
if len(sections) == 1 {
- return PageTaxonomyTerm
+ return KindTaxonomyTerm
}
- return PageTaxonomy
+ return KindTaxonomy
}
- return PageSection
+ return KindSection
}
func (s *Site) preparePages() error {
@@ -1662,7 +1662,7 @@
// This will return nil when no page could be found.
//
// The valid page types are: home, section, taxonomy and taxonomyTerm
-func (s *SiteInfo) GetPage(typ PageType, path ...string) *Page {
+func (s *SiteInfo) GetPage(typ Kind, path ...string) *Page {
return s.getPage(typ, path...)
}
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -67,7 +67,7 @@
for p := range pages {
targetPath := p.TargetPath()
layouts := p.layouts()
- jww.DEBUG.Printf("Render %s to %q with layouts %q", p.PageType, targetPath, layouts)
+ jww.DEBUG.Printf("Render %s to %q with layouts %q", p.Kind, targetPath, layouts)
if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
results <- err
@@ -74,7 +74,7 @@
}
// Taxonomy terms have no page set to paginate, so skip that for now.
- if p.PageType.IsNode() && p.PageType != PageTaxonomyTerm {
+ if p.IsNode() && p.Kind != KindTaxonomyTerm {
if err := s.renderPaginator(p); err != nil {
results <- err
}
@@ -160,7 +160,7 @@
return nil
}
- p := s.newNodePage(page404)
+ p := s.newNodePage(kind404)
p.Title = "404 Page not found"
p.Data["Pages"] = s.Pages
p.Pages = s.Pages
@@ -181,12 +181,12 @@
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap"))
- n := s.newNodePage(pageSitemap)
+ n := s.newNodePage(kindSitemap)
// Include all pages (regular, home page, taxonomies etc.)
pages := s.Pages
- page := s.newNodePage(pageSitemap)
+ page := s.newNodePage(kindSitemap)
page.URLPath.URL = ""
page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
page.Sitemap.Priority = sitemapDefault.Priority
@@ -224,7 +224,7 @@
return nil
}
- n := s.newNodePage(pageRobotsTXT)
+ n := s.newNodePage(kindRobotsTXT)
n.Data["Pages"] = s.Pages
n.Pages = s.Pages