shithub: hugo

ref: 640b8bed21eabfd6e256814eab4b3ab3ad2e3354
parent: 2d91b17c04c58e6186219bdbc81b711803c91e32
author: Bjørn Erik Pedersen <[email protected]>
date: Thu Nov 10 07:26:23 EST 2016

node to page: Handle sections with only _index.md

Updates #2297
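
Note: before this change, a section containing only an _index.md (a section content page) and no regular content pages never made it into Site.Sections, so nothing was rendered for it. The new test below writes a single content/sect/_index.md and expects public/sect/index.html to be produced; the site.go change registers such sections with an empty page list in assembleSections.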

--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -116,6 +116,7 @@
 		"Pag: Page 02")
 
 	sections := h.findAllPagesByNodeType(NodeSection)
+
 	require.Len(t, sections, 2)
 
 	// Check taxonomy lists
@@ -352,8 +353,6 @@
 }
 
 func TestNodesWithAlias(t *testing.T) {
-	//jww.SetStdoutThreshold(jww.LevelDebug)
-	//defer jww.SetStdoutThreshold(jww.LevelFatal)
 	testCommonResetState()
 
 	writeLayoutsForNodeAsPageTests(t)
@@ -377,6 +376,30 @@
 
 	assertFileContent(t, filepath.Join("public", "index.html"), true, "Home With Alias")
 	assertFileContent(t, filepath.Join("public", "my", "new", "home.html"), true, "content=\"0; url=/")
+
+}
+
+func TestNodesWithSectionWithIndexPageOnly(t *testing.T) {
+	testCommonResetState()
+
+	writeLayoutsForNodeAsPageTests(t)
+
+	writeSource(t, filepath.Join("content", "sect", "_index.md"), `---
+title: MySection
+---
+My Section Content
+`)
+
+	viper.Set("paginate", 1)
+	viper.Set("title", "Hugo Rocks!")
+
+	s := newSiteDefaultLang()
+
+	if err := buildAndRenderSite(s); err != nil {
+		t.Fatalf("Failed to build site: %s", err)
+	}
+
+	assertFileContent(t, filepath.Join("public", "sect", "index.html"), true, "My Section")
 
 }
 
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1351,8 +1351,8 @@
 	// assembleSections: Needs pages (temp lookup)
 	s.assembleSections()
 
-	// TODO(bep) np
-	pages := s.findPagesByNodeType(NodePage)
+	// TODO(bep) np Site.LastMod
+	pages := s.Nodes
 	s.Info.LastChange = pages[0].Lastmod
 
 	return
@@ -1541,9 +1541,19 @@
 func (s *Site) assembleSections() {
 	s.Sections = make(Taxonomy)
 	s.Info.Sections = s.Sections
+	// TODO(bep) np check these vs the caches
 	regularPages := s.findPagesByNodeType(NodePage)
+	sectionPages := s.findPagesByNodeType(NodeSection)
+
 	for i, p := range regularPages {
 		s.Sections.add(p.Section(), WeightedPage{regularPages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
+	}
+
+	// Add sections without regular pages, but with a content page
+	for _, sectionPage := range sectionPages {
+		if _, ok := s.Sections[sectionPage.sections[0]]; !ok {
+			s.Sections[sectionPage.sections[0]] = WeightedPages{}
+		}
 	}
 
 	for k := range s.Sections {
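
For context, here is a minimal, self-contained sketch of the idea behind the assembleSections change. It uses simplified stand-in types rather than Hugo's real Site, Page and WeightedPages; the names and fields are illustrative only.

package main

import "fmt"

// page is a simplified stand-in for a Hugo page.
type page struct {
	section string // first path element under content/
	weight  int
	kind    string // "page" for a regular content page, "section" for _index.md
}

type weightedPage struct {
	weight int
	page   *page
}

type weightedPages []weightedPage

// assembleSections mirrors the patched Site.assembleSections: first add every
// regular page to its section, then make sure sections that only have a
// content page (_index.md) still get an (empty) entry, so the section is
// known to the site and can be rendered.
func assembleSections(pages []*page) map[string]weightedPages {
	sections := make(map[string]weightedPages)

	for _, p := range pages {
		if p.kind != "page" {
			continue
		}
		sections[p.section] = append(sections[p.section], weightedPage{p.weight, p})
	}

	// Sections with only an _index.md would otherwise be missing here.
	for _, p := range pages {
		if p.kind != "section" {
			continue
		}
		if _, ok := sections[p.section]; !ok {
			sections[p.section] = weightedPages{}
		}
	}

	return sections
}

func main() {
	pages := []*page{
		{section: "blog", weight: 1, kind: "page"},
		{section: "sect", kind: "section"}, // only an _index.md, no regular pages
	}
	for name, wp := range assembleSections(pages) {
		fmt.Printf("section %q has %d regular page(s)\n", name, len(wp))
	}
}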