ref: 063b78d2ece39b82d0eb32ac5559097c2780cef8
parent: 88972b0d5b62861128f101256027a52ab48eae86
author: Bjørn Erik Pedersen <[email protected]>
date: Fri Nov 11 04:01:47 EST 2016

node to page: Make Nodes into Pages

* `.Site.Pages` now contains all page types, including sections etc.
* `.Data.Pages` will also contain "node type" pages where relevant.

Updates #2297
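
The practical upshot inside `hugolib`: `s.Pages` is no longer the regular-content list; that view moves to the new `regularPages` cache, while home, section and taxonomy pages are cached in `indexPages`. Below is a minimal sketch of how the renamed collections relate, assuming a fully built *Site as in the tests in this patch (illustration only, not part of the commit; both filter helpers are defined in page_collections.go):

// Sketch only: s.Pages now holds pages of every NodeType in the
// current language, so the old "regular pages" view becomes a filter.
func pageCollectionsSketch(s *Site) (regular, indexes Pages) {
	// Regular content pages, i.e. what s.regularPages caches:
	regular = s.findPagesByNodeTypeIn(NodePage, s.Pages)
	// Home, section and taxonomy pages, i.e. what s.indexPages caches:
	indexes = s.findPagesByNodeTypeNotIn(NodePage, s.Pages)
	return regular, indexes
}

Templates that range over `.Site.Pages` expecting only regular content will now also see these index pages, which is why the page-count assertions in the tests below jump (for example from 9 to 28 in hugo_sites_test.go).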

--- a/hugolib/embedded_shortcodes_test.go
+++ b/hugolib/embedded_shortcodes_test.go
@@ -66,9 +66,9 @@
 	require.NoError(t, err)
 
 	require.NoError(t, sites.Build(BuildCfg{}))
-	require.Len(t, sites.Sites[0].Pages, 1)
+	require.Len(t, sites.Sites[0].regularPages, 1)
 
-	output := string(sites.Sites[0].Pages[0].Content)
+	output := string(sites.Sites[0].regularPages[0].Content)
 
 	if !strings.Contains(output, expected) {
 		t.Errorf("Got\n%q\nExpected\n%q", output, expected)
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -196,9 +196,9 @@
 
 func (h *HugoSites) assignMissingTranslations() error {
 	// This looks heavy, but it should be a small number of nodes by now.
-	allNodes := h.findAllPagesByNodeTypeNotIn(NodePage)
+	allPages := h.findAllPagesByNodeTypeNotIn(NodePage)
 	for _, nodeType := range []NodeType{NodeHome, NodeSection, NodeTaxonomy, NodeTaxonomyTerms} {
-		nodes := h.findPagesByNodeTypeIn(nodeType, allNodes)
+		nodes := h.findPagesByNodeTypeIn(nodeType, allPages)
 
 		// Assign translations
 		for _, t1 := range nodes {
@@ -213,14 +213,12 @@
 
 }
 
-// createMissingNodes creates home page, taxonomies etc. that isnt't created as an
+// createMissingPages creates home page, taxonomies etc. that aren't created as an
 // effect of having a content file.
-func (h *HugoSites) createMissingNodes() error {
-	// TODO(bep) np revisit this on languages -- as this is currently run after the page language distribution (due to taxonomies)
-	// TODO(bep) np re above, Pages vs.
+func (h *HugoSites) createMissingPages() error {
 	// TODO(bep) np check node title etc.
 
-	var newNodes Pages
+	var newPages Pages
 
 	for _, s := range h.Sites {
 
@@ -231,8 +229,8 @@
 		}
 		if len(home) == 0 {
 			n := s.newHomePage()
-			s.Nodes = append(s.Nodes, n)
-			newNodes = append(newNodes, n)
+			s.Pages = append(s.Pages, n)
+			newPages = append(newPages, n)
 		}
 
 		// taxonomy list and terms pages
@@ -259,15 +257,15 @@
 					}
 					if !foundTaxonomyPage {
 						n := s.newTaxonomyPage(plural, key)
-						s.Nodes = append(s.Nodes, n)
-						newNodes = append(newNodes, n)
+						s.Pages = append(s.Pages, n)
+						newPages = append(newPages, n)
 					}
 
 					if !foundTaxonomyTermsPage {
 						foundTaxonomyTermsPage = true
 						n := s.newTaxonomyTermsPage(plural)
-						s.Nodes = append(s.Nodes, n)
-						newNodes = append(newNodes, n)
+						s.Pages = append(s.Pages, n)
+						newPages = append(newPages, n)
 					}
 				}
 			}
@@ -292,18 +290,18 @@
 				}
 				if !foundSection {
 					n := s.newSectionPage(name, section)
-					s.Nodes = append(s.Nodes, n)
-					newNodes = append(newNodes, n)
+					s.Pages = append(s.Pages, n)
+					newPages = append(newPages, n)
 				}
 			}
 		}
 	}
 
-	if len(newNodes) > 0 {
+	if len(newPages) > 0 {
 		first := h.Sites[0]
-		first.AllNodes = append(first.AllNodes, newNodes...)
+		first.AllPages = append(first.AllPages, newPages...)
 		for i := 1; i < len(h.Sites); i++ {
-			h.Sites[i].AllNodes = first.AllNodes
+			h.Sites[i].AllPages = first.AllPages
 		}
 	}
 	return nil
@@ -404,7 +402,7 @@
 			if strings.HasPrefix(site.Language.Lang, p.Lang()) {
 				site.updateBuildStats(p)
 				if shouldBuild {
-					site.Nodes = append(site.Nodes, p)
+					site.Pages = append(site.Pages, p)
 					p.Site = &site.Info
 				}
 			}
@@ -414,7 +412,7 @@
 			}
 
 			if i == 0 {
-				site.AllNodes = append(site.AllNodes, p)
+				site.AllPages = append(site.AllPages, p)
 			}
 		}
 
@@ -422,12 +420,12 @@
 
 	// Pull over the collections from the master site
 	for i := 1; i < len(h.Sites); i++ {
-		h.Sites[i].AllNodes = h.Sites[0].AllNodes
+		h.Sites[i].AllPages = h.Sites[0].AllPages
 		h.Sites[i].Data = h.Sites[0].Data
 	}
 
 	if len(h.Sites) > 1 {
-		pages := h.Sites[0].AllNodes
+		pages := h.Sites[0].AllPages
 		allTranslations := pagesToTranslationsMap(h.multilingual, pages)
 		assignTranslationsToPages(allTranslations, pages)
 	}
@@ -524,7 +522,7 @@
 		}(pageChan, wg)
 	}
 
-	for _, p := range s.Nodes {
+	for _, p := range s.Pages {
 		pageChan <- p
 	}
 
@@ -586,7 +584,7 @@
 }
 
 func (h *HugoSites) findAllPagesByNodeTypeNotIn(n NodeType) Pages {
-	return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllNodes)
+	return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllPages)
 }
 
 // Convenience func used in tests to build a single site/language excluding render phase.
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -156,7 +156,7 @@
 		}
 	}
 
-	if err := h.createMissingNodes(); err != nil {
+	if err := h.createMissingPages(); err != nil {
 		return err
 	}
 
--- a/hugolib/hugo_sites_test.go
+++ b/hugolib/hugo_sites_test.go
@@ -84,8 +84,8 @@
 
 	require.Equal(t, "/blog/en/foo", enSite.Info.pathSpec.RelURL("foo", true))
 
-	doc1en := enSite.Pages[0]
-	doc1fr := frSite.Pages[0]
+	doc1en := enSite.regularPages[0]
+	doc1fr := frSite.regularPages[0]
 
 	enPerm, _ := doc1en.Permalink()
 	enRelPerm, _ := doc1en.RelPermalink()
@@ -217,24 +217,24 @@
 
 	assert.Equal(t, "en", enSite.Language.Lang)
 
-	if len(enSite.Pages) != 4 {
+	if len(enSite.regularPages) != 4 {
 		t.Fatal("Expected 4 english pages")
 	}
 	assert.Len(t, enSite.Source.Files(), 14, "should have 14 source files")
-	assert.Len(t, enSite.AllPages, 9, "should have 8 total pages (including translations)")
+	assert.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")
 
-	doc1en := enSite.Pages[0]
+	doc1en := enSite.regularPages[0]
 	permalink, err := doc1en.Permalink()
 	assert.NoError(t, err, "permalink call failed")
 	assert.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
 	assert.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
 
-	doc2 := enSite.Pages[1]
+	doc2 := enSite.regularPages[1]
 	permalink, err = doc2.Permalink()
 	assert.NoError(t, err, "permalink call failed")
 	assert.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
 
-	doc3 := enSite.Pages[2]
+	doc3 := enSite.regularPages[2]
 	permalink, err = doc3.Permalink()
 	assert.NoError(t, err, "permalink call failed")
 	// Note that /superbob is a custom URL set in frontmatter.
@@ -277,10 +277,10 @@
 	frSite := sites.Sites[1]
 
 	assert.Equal(t, "fr", frSite.Language.Lang)
-	assert.Len(t, frSite.Pages, 3, "should have 3 pages")
-	assert.Len(t, frSite.AllPages, 9, "should have 8 total pages (including translations)")
+	assert.Len(t, frSite.regularPages, 3, "should have 3 pages")
+	assert.Len(t, frSite.AllPages, 28, "should have 28 total pages (including translations and nodes)")
 
-	for _, frenchPage := range frSite.Pages {
+	for _, frenchPage := range frSite.regularPages {
 		assert.Equal(t, "fr", frenchPage.Lang())
 	}
 
@@ -387,8 +387,8 @@
 	enSite := sites.Sites[0]
 	frSite := sites.Sites[1]
 
-	require.Len(t, enSite.Pages, 4)
-	require.Len(t, frSite.Pages, 3)
+	require.Len(t, enSite.regularPages, 4)
+	require.Len(t, frSite.regularPages, 3)
 
 	// Verify translations
 	assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Hello")
@@ -414,7 +414,7 @@
 			nil,
 			[]fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 3, "1 en removed")
+				require.Len(t, enSite.regularPages, 3, "1 en removed")
 
 				// Check build stats
 				require.Equal(t, 1, enSite.draftCount, "Draft")
@@ -437,12 +437,12 @@
 				{Name: "content/new1.fr.md", Op: fsnotify.Create},
 			},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5)
-				require.Len(t, enSite.AllPages, 11)
-				require.Len(t, frSite.Pages, 4)
-				require.Equal(t, "new_fr_1", frSite.Pages[3].Title)
-				require.Equal(t, "new_en_2", enSite.Pages[0].Title)
-				require.Equal(t, "new_en_1", enSite.Pages[1].Title)
+				require.Len(t, enSite.regularPages, 5)
+				require.Len(t, enSite.AllPages, 30)
+				require.Len(t, frSite.regularPages, 4)
+				require.Equal(t, "new_fr_1", frSite.regularPages[3].Title)
+				require.Equal(t, "new_en_2", enSite.regularPages[0].Title)
+				require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
 
 				rendered := readDestination(t, "public/en/new1/index.html")
 				require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@@ -457,7 +457,7 @@
 			},
 			[]fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5)
+				require.Len(t, enSite.regularPages, 5)
 				doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
 
@@ -475,8 +475,8 @@
 				{Name: "content/new1.en.md", Op: fsnotify.Rename},
 			},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5, "Rename")
-				require.Equal(t, "new_en_1", enSite.Pages[1].Title)
+				require.Len(t, enSite.regularPages, 5, "Rename")
+				require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
 				rendered := readDestination(t, "public/en/new1renamed/index.html")
 				require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
 			}},
@@ -490,9 +490,9 @@
 			},
 			[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5)
-				require.Len(t, enSite.AllPages, 11)
-				require.Len(t, frSite.Pages, 4)
+				require.Len(t, enSite.regularPages, 5)
+				require.Len(t, enSite.AllPages, 30)
+				require.Len(t, frSite.regularPages, 4)
 				doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
 			},
@@ -507,9 +507,9 @@
 			},
 			[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5)
-				require.Len(t, enSite.AllPages, 11)
-				require.Len(t, frSite.Pages, 4)
+				require.Len(t, enSite.regularPages, 5)
+				require.Len(t, enSite.AllPages, 30)
+				require.Len(t, frSite.regularPages, 4)
 				docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
 				require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
 				docFr := readDestination(t, "public/fr/sect/doc1/index.html")
@@ -531,9 +531,9 @@
 				{Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write},
 			},
 			func(t *testing.T) {
-				require.Len(t, enSite.Pages, 5)
-				require.Len(t, enSite.AllPages, 11)
-				require.Len(t, frSite.Pages, 4)
+				require.Len(t, enSite.regularPages, 5)
+				require.Len(t, enSite.AllPages, 30)
+				require.Len(t, frSite.regularPages, 4)
 				assertFileContent(t, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut")
 				assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello")
 			},
@@ -627,12 +627,12 @@
 	require.Len(t, homeEn.Translations(), 4)
 	require.Equal(t, "sv", homeEn.Translations()[0].Lang())
 
-	require.Len(t, enSite.Pages, 4)
-	require.Len(t, frSite.Pages, 3)
+	require.Len(t, enSite.regularPages, 4)
+	require.Len(t, frSite.regularPages, 3)
 
 	// Verify Swedish site
-	require.Len(t, svSite.Pages, 1)
-	svPage := svSite.Pages[0]
+	require.Len(t, svSite.regularPages, 1)
+	svPage := svSite.regularPages[0]
 	require.Equal(t, "Swedish Contentfile", svPage.Title)
 	require.Equal(t, "sv", svPage.Lang())
 	require.Len(t, svPage.Translations(), 2)
--- a/hugolib/menu_test.go
+++ b/hugolib/menu_test.go
@@ -208,7 +208,7 @@
 
 	s := setupMenuTests(t, menuPageSources)
 
-	assert.Equal(t, 3, len(s.Pages), "Not enough pages")
+	assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
 
 	me1 := findTestMenuEntryByID(s, "m1", "i1")
 	me2 := findTestMenuEntryByID(s, "m1", "i2")
@@ -246,7 +246,7 @@
 
 	s := setupMenuTests(t, menuPageSources)
 
-	assert.Equal(t, 3, len(s.Pages), "Not enough pages")
+	assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
 
 	me1 := findTestMenuEntryByName(s, "m1", "n1")
 	me2 := findTestMenuEntryByName(s, "m1", "n2")
@@ -264,13 +264,13 @@
 
 	s := setupMenuTests(t, menuPageSources)
 
-	if len(s.Pages) != 3 {
-		t.Fatalf("Posts not created, expected 3 got %d", len(s.Pages))
+	if len(s.regularPages) != 3 {
+		t.Fatalf("Posts not created, expected 3 got %d", len(s.regularPages))
 	}
 
-	first := s.Pages[0]
-	second := s.Pages[1]
-	third := s.Pages[2]
+	first := s.regularPages[0]
+	second := s.regularPages[1]
+	third := s.regularPages[2]
 
 	pOne := findTestMenuEntryByName(s, "p_one", "One")
 	pTwo := findTestMenuEntryByID(s, "p_two", "Two")
@@ -358,9 +358,9 @@
 		{Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1},
 		{Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}})
 
-	p1 := s.Pages[0]
+	p1 := s.regularPages[0]
 	assert.Len(t, p1.Menus(), 2, "List YAML")
-	p2 := s.Pages[1]
+	p2 := s.regularPages[1]
 	assert.Len(t, p2.Menus(), 2, "Map YAML")
 
 }
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -103,21 +103,6 @@
 	lang         string
 }
 
-// The Nodes type is temporary until we get https://github.com/spf13/hugo/issues/2297 fixed.
-type Nodes []*Node
-
-func (n Nodes) Len() int {
-	return len(n)
-}
-
-func (n Nodes) Less(i, j int) bool {
-	return n[i].language.Weight < n[j].language.Weight
-}
-
-func (n Nodes) Swap(i, j int) {
-	n[i], n[j] = n[j], n[i]
-}
-
 func (n *Node) Now() time.Time {
 	return time.Now()
 }
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -79,7 +79,7 @@
 	assertFileContent(t, filepath.Join("public", "index.html"), false,
 		"Index Title: Home Sweet Home!",
 		"Home <strong>Content!</strong>",
-		"# Pages: 4")
+		"# Pages: 9")
 
 	assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
 
@@ -170,7 +170,7 @@
 	require.Len(t, homePages, 1)
 
 	homePage := homePages[0]
-	require.Len(t, homePage.Data["Pages"], 4)
+	require.Len(t, homePage.Data["Pages"], 9)
 
 	assertFileContent(t, filepath.Join("public", "index.html"), false,
 		"Index Title: Hugo Rocks!")
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -1277,7 +1277,7 @@
 	switch p.NodeType {
 	case NodePage:
 	case NodeHome:
-		p.Data["Pages"] = s.Pages
+		p.Data["Pages"] = s.findPagesByNodeTypeNotIn(NodeHome, s.Pages)
 	case NodeSection:
 		sectionData, ok := s.Sections[p.sections[0]]
 		if !ok {
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -17,26 +17,23 @@
 	"fmt"
 )
 
-// TODO(bep) np pages names
-// TODO(bep) np this is a somewhat breaking change and should be doc. + release notes: See AllPages vs. "this language only". Looks like it is like this alread, check.
+// PageCollections contains the page collections for a site.
 type PageCollections struct {
-	// Includes only pages of NodePage type, and only pages in the current language.
+	// Includes pages of all types, but only pages in the current language.
 	Pages Pages
 
 	// Includes all pages in all languages, including the current one.
-	// Only pages of NodePage type.
+	// Includes pages of all types.
 	AllPages Pages
 
-	// Includes pages of all types, but only pages in the current language.
-	Nodes Pages
+	// A convenience cache for the traditional index types, taxonomies, home page etc.
+	// This is for the current language only.
+	indexPages Pages
 
-	// Includes all pages in all languages, including the current one.
-	// Includes pages of all types.
-	AllNodes Pages
-
-	// A convenience cache for the traditional node types, taxonomies, home page etc.
+	// A convenience cache for the regular pages.
 	// This is for the current language only.
-	indexNodes Pages
+	// TODO(bep) np consider exporting this
+	regularPages Pages
 
 	// Includes absolute all pages (of all types), including drafts etc.
 	rawAllPages Pages
@@ -43,12 +40,11 @@
 }
 
 func (c *PageCollections) refreshPageCaches() {
-	// All pages are stored in AllNodes and Nodes. Filter from those.
-	c.Pages = c.findPagesByNodeTypeIn(NodePage, c.Nodes)
-	c.indexNodes = c.findPagesByNodeTypeNotIn(NodePage, c.Nodes)
-	c.AllPages = c.findPagesByNodeTypeIn(NodePage, c.AllNodes)
+	c.indexPages = c.findPagesByNodeTypeNotIn(NodePage, c.Pages)
+	c.regularPages = c.findPagesByNodeTypeIn(NodePage, c.Pages)
 
-	for _, n := range c.Nodes {
+	// TODO(bep) np remove eventually
+	for _, n := range c.Pages {
 		if n.NodeType == NodeUnknown {
 			panic(fmt.Sprintf("Got unknown type %s", n.Title))
 		}
@@ -66,11 +62,11 @@
 // TODO(bep) np clean and remove finders
 
 func (c *PageCollections) findPagesByNodeType(n NodeType) Pages {
-	return c.findPagesByNodeTypeIn(n, c.Nodes)
+	return c.findPagesByNodeTypeIn(n, c.Pages)
 }
 
 func (c *PageCollections) getPage(n NodeType, path ...string) *Page {
-	pages := c.findPagesByNodeTypeIn(n, c.Nodes)
+	pages := c.findPagesByNodeTypeIn(n, c.Pages)
 
 	if len(pages) == 0 {
 		return nil
@@ -99,7 +95,7 @@
 }
 
 func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages {
-	return c.findPagesByNodeTypeIn(n, c.indexNodes)
+	return c.findPagesByNodeTypeIn(n, c.indexPages)
 }
 
 func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
@@ -123,7 +119,7 @@
 }
 
 func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages {
-	return c.findPagesByNodeTypeIn(n, c.Nodes)
+	return c.findPagesByNodeTypeIn(n, c.Pages)
 }
 
 func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages {
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -630,9 +630,9 @@
 			t.Fatalf("Failed to build site: %s", err)
 		}
 
-		require.Len(t, s.Pages, len(pageSources))
+		require.Len(t, s.regularPages, len(pageSources))
 
-		assertFunc(t, e.ext, s.Pages)
+		assertFunc(t, e.ext, s.regularPages)
 
 	}
 
@@ -740,9 +740,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 
 	if p.Summary != template.HTML("<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a rel=\"footnote\" href=\"#fn:1\">1</a></sup>\n</p>") {
 		t.Fatalf("Got summary:\n%q", p.Summary)
@@ -788,9 +788,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 
 	checkPageContent(t, p, "<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>\n")
 }
@@ -802,9 +802,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 
 	checkPageContent(t, p, "\n\n<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath&ndash;as swift as the passage of light&ndash;would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>&ldquo;You&rsquo;re a great Granser,&rdquo; he cried delightedly, &ldquo;always making believe them little marks mean something.&rdquo;</p>\n")
 	checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n<ul>\n<li>\n<ul>\n<li><a href=\"#aa\">AA</a>\n<ul>\n<li><a href=\"#aaa\">AAA</a></li>\n<li><a href=\"#bbb\">BBB</a></li>\n</ul></li>\n</ul></li>\n</ul>\n</nav>")
@@ -832,9 +832,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 	d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z")
 
 	checkPageDate(t, p, d)
@@ -1266,9 +1266,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 
 	checkPageTitle(t, p, "Simple")
 }
--- a/hugolib/shortcode_test.go
+++ b/hugolib/shortcode_test.go
@@ -69,9 +69,9 @@
 		t.Fatalf("No error from shortcode")
 	}
 
-	require.Len(t, h.Sites[0].Pages, 1)
+	require.Len(t, h.Sites[0].regularPages, 1)
 
-	output := strings.TrimSpace(string(h.Sites[0].Pages[0].Content))
+	output := strings.TrimSpace(string(h.Sites[0].regularPages[0].Content))
 	if strings.HasPrefix(output, "<p>") {
 		output = output[3:]
 	}
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1327,7 +1327,7 @@
 
 	s.assembleMenus()
 
-	if len(s.Nodes) == 0 {
+	if len(s.Pages) == 0 {
 		return
 	}
 
@@ -1334,7 +1334,7 @@
 	s.assembleTaxonomies()
 
 	// TODO(bep) np
-	for _, p := range s.AllNodes {
+	for _, p := range s.AllPages {
 		// setNodeTypeVars needs taxonomies
 		p.setNodeTypeVars(s)
 	}
@@ -1343,7 +1343,7 @@
 	s.assembleSections()
 
 	// TODO(bep) np Site.LastMod
-	pages := s.Nodes
+	pages := s.Pages
 	s.Info.LastChange = pages[0].Lastmod
 
 	return
@@ -1418,7 +1418,7 @@
 	sectionPagesMenu := s.Language.GetString("SectionPagesMenu")
 	sectionPagesMenus := make(map[string]interface{})
 	//creating flat hash
-	pages := s.Nodes
+	pages := s.Pages
 	for _, p := range pages {
 
 		if sectionPagesMenu != "" {
@@ -1486,7 +1486,7 @@
 		s.Taxonomies[plural] = make(Taxonomy)
 		s.taxonomiesPluralSingular[plural] = singular
 
-		for _, p := range s.Nodes {
+		for _, p := range s.Pages {
 			vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
 			weight := p.GetParam(plural + "_weight")
 			if weight == nil {
@@ -1574,7 +1574,7 @@
 func (s *Site) preparePages() error {
 	var errors []error
 
-	for _, p := range s.Nodes {
+	for _, p := range s.Pages {
 		if err := p.prepareLayouts(); err != nil {
 			errors = append(errors, err)
 		}
--- a/hugolib/siteJSONEncode_test.go
+++ b/hugolib/siteJSONEncode_test.go
@@ -30,7 +30,7 @@
 	_, err := json.Marshal(s)
 	check(t, err)
 
-	_, err = json.Marshal(s.Pages[0])
+	_, err = json.Marshal(s.regularPages[0])
 	check(t, err)
 }
 
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -45,7 +45,7 @@
 		go pageRenderer(s, pages, results, wg)
 	}
 
-	for _, page := range s.Nodes {
+	for _, page := range s.Pages {
 		pages <- page
 	}
 
@@ -183,7 +183,7 @@
 	n := s.newNodePage(NodeSitemap)
 
 	// Include all pages (regular, home page, taxonomies etc.)
-	pages := s.Nodes
+	pages := s.Pages
 
 	page := s.newNodePage(NodeSitemap)
 	page.URLPath.URL = ""
@@ -239,7 +239,7 @@
 
 // renderAliases renders shell pages that simply have a redirect in the header.
 func (s *Site) renderAliases() error {
-	for _, p := range s.Nodes {
+	for _, p := range s.Pages {
 		if len(p.Aliases) == 0 {
 			continue
 		}
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -91,9 +91,9 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	require.Len(t, s.Pages, 1)
+	require.Len(t, s.regularPages, 1)
 
-	p := s.Pages[0]
+	p := s.regularPages[0]
 
 	err := s.renderThing(p, "foobar", nil)
 	if err == nil {
@@ -142,7 +142,7 @@
 
 	// Testing Defaults.. Only draft:true and publishDate in the past should be rendered
 	s := siteSetup(t)
-	if len(s.AllPages) != 1 {
+	if len(s.regularPages) != 1 {
 		t.Fatal("Draft or Future dated content published unexpectedly")
 	}
 
@@ -149,7 +149,7 @@
 	// only publishDate in the past should be rendered
 	viper.Set("buildDrafts", true)
 	s = siteSetup(t)
-	if len(s.AllPages) != 2 {
+	if len(s.regularPages) != 2 {
 		t.Fatal("Future Dated Posts published unexpectedly")
 	}
 
@@ -157,7 +157,7 @@
 	viper.Set("buildDrafts", false)
 	viper.Set("buildFuture", true)
 	s = siteSetup(t)
-	if len(s.AllPages) != 2 {
+	if len(s.regularPages) != 2 {
 		t.Fatal("Draft posts published unexpectedly")
 	}
 
@@ -165,7 +165,7 @@
 	viper.Set("buildDrafts", true)
 	viper.Set("buildFuture", true)
 	s = siteSetup(t)
-	if len(s.AllPages) != 4 {
+	if len(s.regularPages) != 4 {
 		t.Fatal("Drafts or Future posts not included as expected")
 	}
 
@@ -201,11 +201,11 @@
 	s := siteSetup(t)
 
-	if len(s.AllPages) != 1 {
+	if len(s.regularPages) != 1 {
-		if len(s.AllPages) > 1 {
+		if len(s.regularPages) > 1 {
 			t.Fatal("Expired content published unexpectedly")
 		}
 
-		if len(s.AllPages) < 1 {
+		if len(s.regularPages) < 1 {
 			t.Fatal("Valid content expired unexpectedly")
 		}
 	}
@@ -285,7 +285,7 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	if len(s.AllPages) != 3 {
+	if len(s.regularPages) != 3 {
 		t.Fatalf("Expected 3 got %d pages", len(s.AllPages))
 	}
 
@@ -377,7 +377,7 @@
 		{filepath.FromSlash("public/ugly.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>doc2 <em>content</em></p>\n"},
 	}
 
-	for _, p := range s.Pages {
+	for _, p := range s.regularPages {
 		assert.False(t, p.IsHome())
 	}
 
@@ -649,7 +649,7 @@
 		t.Errorf("Pages in unexpected order. Second should be '%s', got '%s'", "Three", s.Sections["sect"][1].Page.Title)
 	}
 
-	bydate := s.Pages.ByDate()
+	bydate := s.regularPages.ByDate()
 
 	if bydate[0].Title != "One" {
 		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title)
@@ -660,7 +660,7 @@
 		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title)
 	}
 
-	bypubdate := s.Pages.ByPublishDate()
+	bypubdate := s.regularPages.ByPublishDate()
 
 	if bypubdate[0].Title != "One" {
 		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title)
@@ -671,7 +671,7 @@
 		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title)
 	}
 
-	bylength := s.Pages.ByLength()
+	bylength := s.regularPages.ByLength()
 	if bylength[0].Title != "One" {
 		t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title)
 	}
@@ -710,7 +710,7 @@
 		t.Fatalf("Failed to build site: %s", err)
 	}
 
-	rbysection, err := s.Pages.GroupBy("Section", "desc")
+	rbysection, err := s.regularPages.GroupBy("Section", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -730,7 +730,7 @@
 		t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))
 	}
 
-	bytype, err := s.Pages.GroupBy("Type", "asc")
+	bytype, err := s.regularPages.GroupBy("Type", "asc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -750,7 +750,7 @@
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages))
 	}
 
-	bydate, err := s.Pages.GroupByDate("2006-01", "asc")
+	bydate, err := s.regularPages.GroupByDate("2006-01", "asc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -770,7 +770,7 @@
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bydate[2].Pages))
 	}
 
-	bypubdate, err := s.Pages.GroupByPublishDate("2006")
+	bypubdate, err := s.regularPages.GroupByPublishDate("2006")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -787,7 +787,7 @@
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))
 	}
 
-	byparam, err := s.Pages.GroupByParam("my_param", "desc")
+	byparam, err := s.regularPages.GroupByParam("my_param", "desc")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -807,12 +807,12 @@
 		t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))
 	}
 
-	_, err = s.Pages.GroupByParam("not_exist")
+	_, err = s.regularPages.GroupByParam("not_exist")
 	if err == nil {
 		t.Errorf("GroupByParam didn't return an expected error")
 	}
 
-	byOnlyOneParam, err := s.Pages.GroupByParam("only_one")
+	byOnlyOneParam, err := s.regularPages.GroupByParam("only_one")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}
@@ -823,7 +823,7 @@
 		t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key)
 	}
 
-	byParamDate, err := s.Pages.GroupByParamDate("my_date", "2006-01")
+	byParamDate, err := s.regularPages.GroupByParamDate("my_date", "2006-01")
 	if err != nil {
 		t.Fatalf("Unable to make PageGroup array: %s", err)
 	}