shithub: hugo

ref: c2f3cb2d7aef86597e9fb61aaa872308a591f8c2
parent: 734b6508a12b29444ec78fc07d3f3805cf06ea3c
author: Bjørn Erik Pedersen <[email protected]>
date: Mon Oct 31 14:03:02 EDT 2016

node to page: Handle sections

Updates #2297
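
In short: the node-to-page refactor had so far only treated the home page as a Page; this change extends it to sections. It adds a NodeSection node type, classifies it from the content filename, gives it a layout lookup order, a URL and target path, section page data, and working pagination, and disables the old renderSectionLists path while the feature flag is on.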

--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -34,6 +34,7 @@
 
 	// The rest are node types; home page, sections etc.
 	NodeHome
+	NodeSection
 )
 
 func (p NodeType) IsNode() bool {
@@ -338,8 +339,13 @@
 
 func nodeTypeFromFilename(filename string) NodeType {
 	// TODO(bep) np
-	if !strings.HasPrefix(filename, "_node") {
+	if !strings.Contains(filename, "_node") {
 		return NodePage
 	}
-	return NodeHome
+
+	if strings.HasPrefix(filename, "_node") {
+		return NodeHome
+	}
+
+	return NodeSection
 }
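
A rough illustration of the classification above, assuming (as the test fixtures below suggest) that the function receives a path relative to the content root:

	nodeTypeFromFilename("about.md")       // NodePage:    no "_node" marker
	nodeTypeFromFilename("_node.md")       // NodeHome:    "_node" as prefix, i.e. at the content root
	nodeTypeFromFilename("sect1/_node.md") // NodeSection: contains "_node", but not as a prefix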
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -18,6 +18,7 @@
 	"path/filepath"
 	"testing"
 
+	jww "github.com/spf13/jwalterweatherman"
 	"github.com/spf13/viper"
 	"github.com/stretchr/testify/require"
 )
@@ -29,7 +30,10 @@
 
 */
 
-func TestHomeAsPage(t *testing.T) {
+func TestNodesAsPage(t *testing.T) {
+	//jww.SetStdoutThreshold(jww.LevelDebug)
+	jww.SetStdoutThreshold(jww.LevelFatal)
+
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
 
@@ -51,6 +55,18 @@
 Home **Content!**
 `)
 
+	writeSource(t, filepath.Join("content", "sect1", "_node.md"), `---
+title: Section1
+---
+Section1 **Content!**
+`)
+
+	writeSource(t, filepath.Join("content", "sect2", "_node.md"), `---
+title: Section2
+---
+Section2 **Content!**
+`)
+
 	writeSource(t, filepath.Join("layouts", "index.html"), `
 Index Title: {{ .Title }}
 Index Content: {{ .Content }}
@@ -65,16 +81,30 @@
 Single Content: {{ .Content }}
 `)
 
+	writeSource(t, filepath.Join("layouts", "_default", "section.html"), `
+Section Title: {{ .Title }}
+Section Content: {{ .Content }}
+# Pages: {{ len .Data.Pages }}
+{{ range .Paginator.Pages }}
+	Pag: {{ .Title }}
+{{ end }}
+`)
+
 	// Add some regular pages
-	for i := 0; i < 10; i++ {
-		writeSource(t, filepath.Join("content", fmt.Sprintf("regular%d.md", i)), fmt.Sprintf(`---
-title: Page %d
+	for i := 1; i <= 4; i++ {
+		sect := "sect1"
+		if i > 2 {
+			sect = "sect2"
+		}
+		writeSource(t, filepath.Join("content", sect, fmt.Sprintf("regular%d.md", i)), fmt.Sprintf(`---
+title: Page %02d
+categories: Hugo
 ---
-Content Page %d
+Content Page %02d
 `, i, i))
 	}
 
-	viper.Set("paginate", 3)
+	viper.Set("paginate", 1)
 
 	s := newSiteDefaultLang()
 
@@ -85,9 +115,9 @@
 	assertFileContent(t, filepath.Join("public", "index.html"), false,
 		"Index Title: Home Sweet Home!",
 		"Home <strong>Content!</strong>",
-		"# Pages: 10")
+		"# Pages: 4")
 
-	assertFileContent(t, filepath.Join("public", "regular1", "index.html"), false, "Single Title: Page 1", "Content Page 1")
+	assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
 
 	h := s.owner
 	nodes := h.findPagesByNodeType(NodeHome)
@@ -100,7 +130,7 @@
 	require.False(t, home.IsPage())
 
 	pages := h.findPagesByNodeType(NodePage)
-	require.Len(t, pages, 10)
+	require.Len(t, pages, 4)
 
 	first := pages[0]
 	require.False(t, first.IsHome())
@@ -109,9 +139,19 @@
 
 	first.Paginator()
 
-	// Check paginator
-	assertFileContent(t, filepath.Join("public", "page", "3", "index.html"), false,
-		"Pag: Page 6",
-		"Pag: Page 7")
+	// Check Home paginator
+	assertFileContent(t, filepath.Join("public", "page", "2", "index.html"), false,
+		"Pag: Page 02")
+
+	// Check Sections
+	assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false, "Section Title: Section", "Section1 <strong>Content!</strong>")
+	assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false, "Section Title: Section", "Section2 <strong>Content!</strong>")
+
+	// Check Sections paginator
+	assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
+		"Pag: Page 02")
+
+	sections := h.findPagesByNodeType(NodeSection)
+	require.Len(t, sections, 2)
 
 }
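
With paginate set to 1 and two regular pages per section (sect1 gets pages 01 and 02, sect2 gets 03 and 04), public/sect1/index.html should come out roughly as:

	Section Title: Section1
	Section Content: <p>Section1 <strong>Content!</strong></p>
	# Pages: 2
	Pag: Page 01

while the second paginator page, public/sect1/page/2/index.html, carries "Pag: Page 02", which is what the assertions check.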
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -450,8 +450,12 @@
 	}
 
 	// TODO(bep) np
-	if p.NodeType == NodeHome {
+	switch p.NodeType {
+	case NodeHome:
 		return []string{"index.html", "_default/list.html"}
+	case NodeSection:
+		section := p.Section()
+		return []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}
 	}
 
 	if p.Layout != "" {
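
So for a section named sect1, the lookup order introduced here is, in effect:

	section/sect1.html
	_default/section.html
	_default/list.html
	indexes/sect1.html
	_default/indexes.html

mirroring Hugo's existing section-template rules, with the indexes/ variants presumably kept for backwards compatibility.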
@@ -858,6 +862,15 @@
 		}
 	}
 
+	// TODO(bep) np node URL
+	// Set Node URL
+	switch p.NodeType {
+	case NodeHome:
+		p.URLPath.URL = ""
+	case NodeSection:
+		p.URLPath.URL = p.Section()
+	}
+
 	return nil
 
 }
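
The node URL set here is what TargetPath and the paginator below build on; for a section named sect1:

	NodeHome:    p.URLPath.URL = ""        // paginates under /page/N
	NodeSection: p.URLPath.URL = "sect1"   // paginates under /sect1/page/N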
@@ -1133,10 +1146,15 @@
 }
 
 func (p *Page) TargetPath() (outfile string) {
-	// TODO(bep) ml
-	if p.NodeType == NodeHome {
+
+	// TODO(bep) np
+	switch p.NodeType {
+	case NodeHome:
 		return "index.html"
+	case NodeSection:
+		return filepath.Join(p.Section(), "index.html")
 	}
+
 	// Always use URL if it's specified
 	if len(strings.TrimSpace(p.URLPath.URL)) > 2 {
 		outfile = strings.TrimSpace(p.URLPath.URL)
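
Continuing the same example, the output targets become:

	NodeHome    -> index.html
	NodeSection -> sect1/index.html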
@@ -1196,14 +1214,20 @@
 	return nil
 }
 
-func (p *Page) prepareData() error {
+func (p *Page) prepareData(s *Site) error {
+	p.Data = make(map[string]interface{})
 	switch p.NodeType {
 	case NodePage:
 	case NodeHome:
-		p.Data = make(map[string]interface{})
 		// TODO(bep) np cache the below
 		// TODO(bep) np
-		p.Data["Pages"] = p.Site.owner.findPagesByNodeType(NodePage)
+		p.Data["Pages"] = s.owner.findPagesByNodeType(NodePage)
+	case NodeSection:
+		sectionData, ok := s.Sections[p.Section()]
+		if !ok {
+			return fmt.Errorf("Data for section %s not found", p.Section())
+		}
+		p.Data["Pages"] = sectionData
 	}
 
 	return nil
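
Two things change here: p.Data is now allocated for every node type (regular pages included), and the home/section data is resolved through the passed-in *Site rather than p.Site.owner. A section's template then sees its own page list, e.g. in the test layout above:

	# Pages: {{ len .Data.Pages }}   <!-- 2 for sect1, backed by s.Sections["sect1"] -->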
@@ -1215,12 +1239,11 @@
 	if p.paginator != nil {
 		paginatePath := helpers.Config().GetString("paginatePath")
 
-		{
-			// write alias for page 1
-			// TODO(bep) ml all of these n.addLang ... fix.
-			permaLink, _ := p.Permalink()
-			s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil)
-		}
+		// write alias for page 1
+		// TODO(bep) ml all of these n.addLang ... fix.
+		//permaLink, _ := p.Permalink()
+		// TODO(bep) np fix
+		//s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil)
 
 		pagers := p.paginator.Pagers()
 
@@ -1240,7 +1263,7 @@
 			}
 
 			pageNumber := i + 1
-			htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
+			htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
 			htmlBase = p.addLangPathPrefix(htmlBase)
 			if err := s.renderAndWritePage(pagerNode.Title,
 				filepath.FromSlash(htmlBase), pagerNode, p.layouts()...); err != nil {
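
The path.Join with the node URL set earlier is what moves section paginators under their section:

	path.Join("", "/page/2")      // "/page/2"       (home)
	path.Join("sect1", "/page/2") // "sect1/page/2"  (section)

Note the page-1 alias is commented out for now, so the page/1 alias pages are not written until the TODO is resolved.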
--- a/hugolib/permalinks.go
+++ b/hugolib/permalinks.go
@@ -164,6 +164,7 @@
 func pageToPermalinkSlugElseTitle(p *Page, a string) (string, error) {
 	if p.Slug != "" {
 		// Don't start or end with a -
+		// TODO(bep) this doesn't look good... Set the Slug once.
 		if strings.HasPrefix(p.Slug, "-") {
 			p.Slug = p.Slug[1:len(p.Slug)]
 		}
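
The new TODO flags that this trimming mutates p.Slug on every permalink evaluation. A non-mutating sketch (hypothetical, not part of this patch) would be:

	slug := strings.Trim(p.Slug, "-") // trims all leading and trailing dashes, not just one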
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1633,7 +1633,7 @@
 		if err := p.prepareLayouts(); err != nil {
 			errors = append(errors, err)
 		}
-		if err := p.prepareData(); err != nil {
+		if err := p.prepareData(s); err != nil {
 			errors = append(errors, err)
 		}
 	}
@@ -1683,7 +1683,10 @@
 func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
 	defer wg.Done()
 	for p := range pages {
-		if err := s.renderAndWritePage("page "+p.FullFilePath(), p.TargetPath(), p, s.appendThemeTemplates(p.layouts())...); err != nil {
+		targetPath := p.TargetPath()
+		layouts := p.layouts()
+		jww.DEBUG.Printf("Render Page to %q with layouts %q", targetPath, layouts)
+		if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
 			results <- err
 		}
 
@@ -1962,6 +1965,9 @@
 
 // renderSectionLists renders a page for each section
 func (s *Site) renderSectionLists(prepare bool) error {
+	if nodePageFeatureFlag {
+		return nil
+	}
 	for section, data := range s.Sections {
 		// section keys can be lower case (depending on site.pathifyTaxonomyKeys)
 		// extract the original casing from the first page to get sensible titles.
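
With the feature flag on, the early return above skips the old section-list rendering entirely: section nodes are now ordinary pages and go through pageRenderer, which is also why that function gained the jww.DEBUG line logging the target path and layouts.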