shithub: hugo

ref: f8bda16e154465c74a2cc42dd8149369e19f7833
parent: 3737c9bcb39527298744ca287d1bd5b1dd530f52
author: Bjørn Erik Pedersen <[email protected]>
date: Tue Nov 1 12:47:15 EDT 2016

node to page: Handle taxonomy terms

Updates #2297
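
For readers skimming the patch: the core of the change is the new node type
classification in site.go, restated below with explanatory comments (the
comments are mine, not part of the patch).

	// Restated from the site.go hunk below; comments added for clarity.
	func (s *Site) nodeTypeFromSections(sections []string) NodeType {
		if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
			if len(sections) == 1 {
				// e.g. content/categories/_node.md -> the taxonomy terms overview
				return NodeTaxonomyTerms
			}
			// e.g. content/categories/hugo/_node.md -> a single term's page list
			return NodeTaxonomy
		}
		// anything else, e.g. content/sect1/_node.md -> a regular section
		return NodeSection
	}

A NodeTaxonomyTerms page then picks up the new _default/terms.html lookup (see
the page.go hunk) and exposes the taxonomy via .Data.Terms, while single-term
pages keep the existing _default/taxonomy.html lookup.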

--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"fmt"
 	"html/template"
 	"path"
 	"path/filepath"
@@ -39,6 +40,7 @@
 	NodeHome
 	NodeSection
 	NodeTaxonomy
+	NodeTaxonomyTerms
 )
 
 func (p NodeType) IsNode() bool {
@@ -343,7 +345,9 @@
 
 func sectionsFromFilename(filename string) []string {
 	dir, _ := filepath.Split(filename)
-	return strings.Split(dir, helpers.FilePathSeparator)
+	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
+	sections := strings.Split(dir, helpers.FilePathSeparator)
+	return sections
 }
 
 // TODO(bep) np node identificator
@@ -364,13 +368,15 @@
 func (p *Page) setNodeTypeVars(s *Site) {
 	// TODO(bep) np taxonomies etc.
 	if p.NodeType == NodeUnknown {
-		// This is either a taxonomy or a section
-		if s.isTaxonomy(p.Section()) {
-			p.NodeType = NodeTaxonomy
-		} else {
-			p.NodeType = NodeSection
+		// This is either a taxonomy list, taxonomy term or a section
+		nodeType := s.nodeTypeFromSections(p.sections)
+
+		if nodeType == NodeUnknown {
+			panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
 		}
 
+		p.NodeType = nodeType
+
 	}
 	// TODO(bep) np node URL
 	// Set Node URL
@@ -380,6 +386,8 @@
 	case NodeSection:
 		p.URLPath.URL = p.Section()
 	case NodeTaxonomy:
+		p.URLPath.URL = path.Join(p.sections...)
+	case NodeTaxonomyTerms:
 		p.URLPath.URL = path.Join(p.sections...)
 	}
 
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -31,8 +31,8 @@
 */
 
 func TestNodesAsPage(t *testing.T) {
-	jww.SetStdoutThreshold(jww.LevelDebug)
-	//jww.SetStdoutThreshold(jww.LevelFatal)
+	//jww.SetStdoutThreshold(jww.LevelDebug)
+	jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -73,6 +73,18 @@
 Taxonomy Hugo **Content!**
 `)
 
+	writeSource(t, filepath.Join("content", "categories", "web", "_node.md"), `---
+title: Taxonomy Web
+---
+Taxonomy Web **Content!**
+`)
+
+	writeSource(t, filepath.Join("content", "categories", "_node.md"), `---
+title: Taxonomy Term Categories
+---
+Taxonomy Term Categories **Content!**
+`)
+
 	writeSource(t, filepath.Join("layouts", "index.html"), `
 Index Title: {{ .Title }}
 Index Content: {{ .Content }}
@@ -96,6 +108,7 @@
 {{ end }}
 `)
 
+	// Taxonomy lists
 	writeSource(t, filepath.Join("layouts", "_default", "taxonomy.html"), `
 Taxonomy Title: {{ .Title }}
 Taxonomy Content: {{ .Content }}
@@ -105,6 +118,15 @@
 {{ end }}
 `)
 
+	// Taxonomy terms
+	writeSource(t, filepath.Join("layouts", "_default", "terms.html"), `
+Taxonomy Terms Title: {{ .Title }}
+Taxonomy Terms Content: {{ .Content }}
+{{ range $key, $value := .Data.Terms }}
+	k/v: {{ $key }} / {{ printf "%=v" $value }}
+{{ end }}
+`)
+
 	// Add some regular pages
 	for i := 1; i <= 4; i++ {
 		sect := "sect1"
@@ -113,7 +135,10 @@
 		}
 		writeSource(t, filepath.Join("content", sect, fmt.Sprintf("regular%d.md", i)), fmt.Sprintf(`---
 title: Page %02d
-categories: Hugo
+categories:  [
+        "Hugo",
+		"Web"
+]
 ---
 Content Page %02d
 `, i, i))
@@ -169,13 +194,22 @@
 	sections := h.findAllPagesByNodeType(NodeSection)
 	require.Len(t, sections, 2)
 
-	// Check taxonomy list
+	// Check taxonomy lists
 	assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
 		"Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>")
 
+	assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+		"Taxonomy Title: Taxonomy Web", "Taxonomy Web <strong>Content!</strong>")
+
 	// Check taxonomy list paginator
 	assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
 		"Taxonomy Title: Taxonomy Hugo",
 		"Pag: Page 02")
+
+	// Check taxonomy terms
+	assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+		"Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo")
+
+	// There are no pages to paginate over in the taxonomy terms.
 
 }
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -475,6 +475,9 @@
 	case NodeTaxonomy:
 		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
 		return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}
+	case NodeTaxonomyTerms:
+		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+		return []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
 	}
 
 	// Regular Page handled below
@@ -1167,6 +1170,8 @@
 		return filepath.Join(p.Section(), "index.html")
 	case NodeTaxonomy:
 		return filepath.Join(append(p.sections, "index.html")...)
+	case NodeTaxonomyTerms:
+		return filepath.Join(append(p.sections, "index.html")...)
 	}
 
 	// Always use URL if it's specified
@@ -1253,7 +1258,16 @@
 		p.Data["Singular"] = singular
 		p.Data["Plural"] = plural
 		p.Data["Pages"] = taxonomy.Pages()
+	case NodeTaxonomyTerms:
+		plural := p.sections[0]
+		singular := s.taxonomiesPluralSingular[plural]
 
+		p.Data["Singular"] = singular
+		p.Data["Plural"] = plural
+		p.Data["Terms"] = s.Taxonomies[plural]
+		// keep the following just for legacy reasons
+		p.Data["OrderedIndex"] = p.Data["Terms"]
+		p.Data["Index"] = p.Data["Terms"]
 	}
 
 	return nil
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1578,11 +1578,7 @@
 	s.Info.Sections = s.Sections
 	regularPages := s.findPagesByNodeType(NodePage)
 	for i, p := range regularPages {
-		section := p.Section()
-		if s.isTaxonomy(section) {
-			continue
-		}
-		s.Sections.add(section, WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
+		s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
 	}
 
 	for k := range s.Sections {
@@ -1599,11 +1595,14 @@
 	}
 }
 
-func (s *Site) isTaxonomy(section string) bool {
-	if _, isTaxonomy := s.Taxonomies[section]; isTaxonomy {
-		return true
+func (s *Site) nodeTypeFromSections(sections []string) NodeType {
+	if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
+		if len(sections) == 1 {
+			return NodeTaxonomyTerms
+		}
+		return NodeTaxonomy
 	}
-	return false
+	return NodeSection
 }
 
 func (s *Site) findPagesByNodeType(n NodeType) Pages {
@@ -1891,6 +1890,9 @@
 
 // renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
 func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
+	if nodePageFeatureFlag {
+		return nil
+	}
 	taxonomies := s.Language.GetStringMapString("Taxonomies")
 	for singular, plural := range taxonomies {
 		n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -70,7 +70,8 @@
 			results <- err
 		}
 
-		if p.NodeType.IsNode() {
+		// Taxonomy terms have no page set to paginate, so skip that for now.
+		if p.NodeType.IsNode() && p.NodeType != NodeTaxonomyTerms {
 			if err := s.renderPaginator(p); err != nil {
 				results <- err
 			}