shithub: hugo

ref: b39689393ccb8434d9a57658a64b77568c718e99
parent: bef5048580b38b0c29edef4eb8c67915033120e9
author: Bjørn Erik Pedersen <[email protected]>
date: Sun Apr 9 06:33:04 EDT 2017

hugolib: Enable nested sections

Fixes #465
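
This change replaces the flat Site.Sections taxonomy with a tree of section pages: content-less section pages are created where needed, every regular page and section gets a parent, and sections (and the home page) expose their subsections. A minimal sketch of how the new tree can be walked from a template, assuming a root section named "blog" (the name is illustrative):

{{ with $sect := .Site.GetPage "section" "blog" }}
  Parent: {{ .Parent.Title }}
  {{ range .Sections }}Subsection: {{ .Title }}{{ end }}
  {{ range .Pages }}Page: {{ .Title }} (in section: {{ .InSection $sect }}){{ end }}
{{ end }}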

--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -337,6 +337,11 @@
 			}
 		}
 
+		// Will create content-less root sections.
+		newSections := s.assembleSections()
+		s.Pages = append(s.Pages, newSections...)
+		newPages = append(newPages, newSections...)
+
 		// taxonomy list and terms pages
 		taxonomies := s.Language.GetStringMapString("taxonomies")
 		if len(taxonomies) > 0 {
@@ -380,33 +385,6 @@
 							s.Pages = append(s.Pages, n)
 							newPages = append(newPages, n)
 						}
-					}
-				}
-			}
-		}
-
-		if s.isEnabled(KindSection) {
-			sectionPages := s.findPagesByKind(KindSection)
-			if len(sectionPages) < len(s.Sections) {
-				for name, section := range s.Sections {
-					// A section may be created for the root content folder if a
-					// content file is placed there.
-					// We cannot create a section node for that, because
-					// that would overwrite the home page.
-					if name == "" {
-						continue
-					}
-					foundSection := false
-					for _, sectionPage := range sectionPages {
-						if sectionPage.sections[0] == name {
-							foundSection = true
-							break
-						}
-					}
-					if !foundSection {
-						n := s.newSectionPage(name, section)
-						s.Pages = append(s.Pages, n)
-						newPages = append(newPages, n)
 					}
 				}
 			}
--- a/hugolib/menu_old_test.go
+++ b/hugolib/menu_old_test.go
@@ -402,13 +402,15 @@
 		"canonifyURLs", canonifyURLs,
 	)
 
-	require.Equal(t, 3, len(s.Sections))
+	sects := s.getPage(KindHome).Sections()
 
-	firstSectionPages := s.Sections["first"]
+	require.Equal(t, 3, len(sects))
+
+	firstSectionPages := s.getPage(KindSection, "first").Pages
 	require.Equal(t, 2, len(firstSectionPages))
-	secondSectionPages := s.Sections["second-section"]
+	secondSectionPages := s.getPage(KindSection, "second-section").Pages
 	require.Equal(t, 1, len(secondSectionPages))
-	fishySectionPages := s.Sections["fish-and-chips"]
+	fishySectionPages := s.getPage(KindSection, "Fish and Chips").Pages
 	require.Equal(t, 1, len(fishySectionPages))
 
 	nodeFirst := s.getPage(KindSection, "first")
@@ -415,12 +417,12 @@
 	require.NotNil(t, nodeFirst)
 	nodeSecond := s.getPage(KindSection, "second-section")
 	require.NotNil(t, nodeSecond)
-	nodeFishy := s.getPage(KindSection, "fish-and-chips")
-	require.Equal(t, "fish-and-chips", nodeFishy.sections[0])
+	nodeFishy := s.getPage(KindSection, "Fish and Chips")
+	require.Equal(t, "Fish and Chips", nodeFishy.sections[0])
 
 	firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
 	secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
-	fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "fish-and-chips")
+	fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
 
 	require.NotNil(t, firstSectionMenuEntry)
 	require.NotNil(t, secondSectionMenuEntry)
@@ -436,19 +438,19 @@
 	require.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)
 
 	for _, p := range firstSectionPages {
-		require.True(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
-		require.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
+		require.True(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
+		require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
 	}
 
 	for _, p := range secondSectionPages {
-		require.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
-		require.True(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
+		require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
+		require.True(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
 	}
 
 	for _, p := range fishySectionPages {
-		require.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
-		require.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
-		require.True(t, p.Page.HasMenuCurrent("spm", fishySectionMenuEntry))
+		require.False(t, p.HasMenuCurrent("spm", firstSectionMenuEntry))
+		require.False(t, p.HasMenuCurrent("spm", secondSectionMenuEntry))
+		require.True(t, p.HasMenuCurrent("spm", fishySectionMenuEntry))
 	}
 }
 
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -175,6 +175,16 @@
 	// isn't accompanied by one.
 	sections []string
 
+	// Will only be set for sections and regular pages.
+	parent *Page
+
+	// When we create paginator pages, we create a copy of the original,
+	// but keep track of it here.
+	origOnCopy *Page
+
+	// Will only be set for section pages and the home page.
+	subSections Pages
+
 	s *Site
 
 	// Pulled over from old Node. TODO(bep) reorg and group (embed)
@@ -228,6 +238,9 @@
 
 	switch p.Kind {
 	case KindSection:
+		// In Hugo 0.22 we introduce nested sections, but we still only
+		// use the first level to pick the correct template. This may change in
+		// the future.
 		section = p.sections[0]
 	case KindTaxonomy, KindTaxonomyTerm:
 		section = p.s.taxonomiesPluralSingular[p.sections[0]]
@@ -265,6 +278,11 @@
 	return p.Kind == KindHome
 }
 
+// IsSection returns whether this is a section page.
+func (p *Page) IsSection() bool {
+	return p.Kind == KindSection
+}
+
 // IsPage returns whether this is a regular content page.
 func (p *Page) IsPage() bool {
 	return p.Kind == KindPage
@@ -667,6 +685,9 @@
 	return "page"
 }
 
+// Section returns the first path element below the content root. Note that
+// since Hugo 0.22 we support nested sections, but this will always be the first
+// element of any nested path.
 func (p *Page) Section() string {
 	if p.Kind == KindSection {
 		return p.sections[0]
@@ -1100,10 +1121,6 @@
 	if sectionPagesMenu != "" {
 		section := p.Section()
 
-		if !p.s.Info.preserveTaxonomyNames {
-			section = p.s.PathSpec.MakePathSanitized(section)
-		}
-
 		if section != "" && sectionPagesMenu == menuID && section == me.Identifier {
 			return true
 		}
@@ -1415,60 +1432,55 @@
 }
 
 func (p *Page) prepareData(s *Site) error {
+	if p.Kind != KindSection {
+		var pages Pages
+		p.Data = make(map[string]interface{})
 
-	var pages Pages
+		switch p.Kind {
+		case KindPage:
+		case KindHome:
+			pages = s.RegularPages
+		case KindTaxonomy:
+			plural := p.sections[0]
+			term := p.sections[1]
 
-	p.Data = make(map[string]interface{})
-	switch p.Kind {
-	case KindPage:
-	case KindHome:
-		pages = s.RegularPages
-	case KindSection:
-		sectionData, ok := s.Sections[p.Section()]
-		if !ok {
-			return fmt.Errorf("Data for section %s not found", p.Section())
-		}
-		pages = sectionData.Pages()
-	case KindTaxonomy:
-		plural := p.sections[0]
-		term := p.sections[1]
-
-		if s.Info.preserveTaxonomyNames {
-			if v, ok := s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, term)]; ok {
-				term = v
+			if s.Info.preserveTaxonomyNames {
+				if v, ok := s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, term)]; ok {
+					term = v
+				}
 			}
-		}
 
-		singular := s.taxonomiesPluralSingular[plural]
-		taxonomy := s.Taxonomies[plural].Get(term)
+			singular := s.taxonomiesPluralSingular[plural]
+			taxonomy := s.Taxonomies[plural].Get(term)
 
-		p.Data[singular] = taxonomy
-		p.Data["Singular"] = singular
-		p.Data["Plural"] = plural
-		p.Data["Term"] = term
-		pages = taxonomy.Pages()
-	case KindTaxonomyTerm:
-		plural := p.sections[0]
-		singular := s.taxonomiesPluralSingular[plural]
+			p.Data[singular] = taxonomy
+			p.Data["Singular"] = singular
+			p.Data["Plural"] = plural
+			p.Data["Term"] = term
+			pages = taxonomy.Pages()
+		case KindTaxonomyTerm:
+			plural := p.sections[0]
+			singular := s.taxonomiesPluralSingular[plural]
 
-		p.Data["Singular"] = singular
-		p.Data["Plural"] = plural
-		p.Data["Terms"] = s.Taxonomies[plural]
-		// keep the following just for legacy reasons
-		p.Data["OrderedIndex"] = p.Data["Terms"]
-		p.Data["Index"] = p.Data["Terms"]
+			p.Data["Singular"] = singular
+			p.Data["Plural"] = plural
+			p.Data["Terms"] = s.Taxonomies[plural]
+			// keep the following just for legacy reasons
+			p.Data["OrderedIndex"] = p.Data["Terms"]
+			p.Data["Index"] = p.Data["Terms"]
 
-		// A list of all KindTaxonomy pages with matching plural
-		for _, p := range s.findPagesByKind(KindTaxonomy) {
-			if p.sections[0] == plural {
-				pages = append(pages, p)
+			// A list of all KindTaxonomy pages with matching plural
+			for _, p := range s.findPagesByKind(KindTaxonomy) {
+				if p.sections[0] == plural {
+					pages = append(pages, p)
+				}
 			}
 		}
+
+		p.Data["Pages"] = pages
+		p.Pages = pages
 	}
 
-	p.Data["Pages"] = pages
-	p.Pages = pages
-
 	// Now we know enough to set missing dates on home page etc.
 	p.updatePageDates()
 
@@ -1736,11 +1748,8 @@
 	switch p.Kind {
 	case KindHome:
 		p.URLPath.URL = "/"
-	case KindSection:
-		p.URLPath.URL = "/" + p.sections[0] + "/"
-	case KindTaxonomy:
-		p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
-	case KindTaxonomyTerm:
+	case KindPage:
+	default:
 		p.URLPath.URL = "/" + path.Join(p.sections...) + "/"
 	}
 }
--- a/hugolib/page_collections.go
+++ b/hugolib/page_collections.go
@@ -101,7 +101,6 @@
 		key = path.Join(sections...)
 	}
 
-	// TODO(bep) section error
 	p, _ := c.pageCache.Get(typ, key)
 	if p == nil {
 		return nil
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1,4 +1,4 @@
-// Copyright 2016 The Hugo Authors. All rights reserved.
+// Copyright 2017 The Hugo Authors. All rights reserved.
 //
 // Licensed under the Apache License, Version 2.0 (the "License");
 // you may not use this file except in compliance with the License.
@@ -323,7 +323,6 @@
 	Taxonomies TaxonomyList
 	Authors    AuthorList
 	Social     SiteSocial
-	Sections   Taxonomy
 	*PageCollections
 	Files                 *[]*source.File
 	Menus                 *Menus
@@ -1512,8 +1511,6 @@
 		p.setValuesForKind(s)
 	}
 
-	s.assembleSections()
-
 	return
 }
 
@@ -1590,6 +1587,9 @@
 	if sectionPagesMenu != "" {
 		for _, p := range pages {
 			if p.Kind == KindSection {
+				// From Hugo 0.22 we have nested sections, but until we get a
+				// feel for how that works in this setting, we keep this menu
+				// for the top level only.
 				id := p.Section()
 				if _, ok := flat[twoD{sectionPagesMenu, id}]; ok {
 					continue
@@ -1724,72 +1724,11 @@
 
 	for _, p := range s.rawAllPages {
 		p.scratch = newScratch()
+		p.subSections = Pages{}
+		p.parent = nil
 	}
 }
 
-func (s *Site) assembleSections() {
-	s.Sections = make(Taxonomy)
-	s.Info.Sections = s.Sections
-
-	regularPages := s.findPagesByKind(KindPage)
-	sectionPages := s.findPagesByKind(KindSection)
-
-	for i, p := range regularPages {
-		section := s.getTaxonomyKey(p.Section())
-		s.Sections.add(section, WeightedPage{regularPages[i].Weight, regularPages[i]})
-	}
-
-	// Add sections without regular pages, but with a content page
-	for _, sectionPage := range sectionPages {
-		if _, ok := s.Sections[sectionPage.sections[0]]; !ok {
-			s.Sections[sectionPage.sections[0]] = WeightedPages{}
-		}
-	}
-
-	for k := range s.Sections {
-		s.Sections[k].Sort()
-
-		for i, wp := range s.Sections[k] {
-			if i > 0 {
-				wp.Page.NextInSection = s.Sections[k][i-1].Page
-			}
-			if i < len(s.Sections[k])-1 {
-				wp.Page.PrevInSection = s.Sections[k][i+1].Page
-			}
-		}
-
-	}
-
-	var (
-		sectionsParamId      = "mainSections"
-		sectionsParamIdLower = strings.ToLower(sectionsParamId)
-		mainSections         interface{}
-		found                bool
-	)
-
-	if mainSections, found = s.Info.Params[sectionsParamIdLower]; !found {
-		// Pick the section with most regular pages
-		var (
-			chosenSection string
-			pageCount     int
-		)
-
-		for sect, pages := range s.Sections {
-			if pages.Count() >= pageCount {
-				chosenSection = sect
-				pageCount = pages.Count()
-			}
-		}
-		mainSections = []string{chosenSection}
-
-		// Try to make this as backwards compatible as possible.
-		s.Info.Params[sectionsParamId] = mainSections
-		s.Info.Params[sectionsParamIdLower] = mainSections
-	} else {
-		s.Info.Params[sectionsParamId] = mainSections
-	}
-}
-
 func (s *Site) kindFromSections(sections []string) string {
 	if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
 		if len(sections) == 1 {
@@ -1891,10 +1830,11 @@
 
 }
 
-// GetPage looks up a index page of a given type in the path given.
+// GetPage looks up a page of a given type in the path given.
 //    {{ with .Site.GetPage "section" "blog" }}{{ .Title }}{{ end }}
 //
-// This will return nil when no page could be found.
+// This will return nil when no page could be found, and will return the
+// first page found if the key is ambiguous.
 func (s *SiteInfo) GetPage(typ string, path ...string) (*Page, error) {
 	return s.getPage(typ, path...), nil
 }
@@ -2163,15 +2103,10 @@
 	return p
 }
 
-func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
+func (s *Site) newSectionPage(name string) *Page {
 	p := s.newNodePage(KindSection, name)
 
-	sectionName := name
-	if !s.Info.preserveTaxonomyNames && len(section) > 0 {
-		sectionName = section[0].Page.Section()
-	}
-
-	sectionName = helpers.FirstUpper(sectionName)
+	sectionName := helpers.FirstUpper(name)
 	if s.Cfg.GetBool("pluralizeListTitles") {
 		p.Title = inflect.Pluralize(sectionName)
 	} else {
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -167,6 +167,8 @@
 				return err
 			}
 
+			pagerNode.origOnCopy = p.Page
+
 			pagerNode.paginator = pager
 			if pager.TotalPages() > 0 {
 				first, _ := pager.page(0)
--- /dev/null
+++ b/hugolib/site_sections.go
@@ -1,0 +1,295 @@
+// Copyright 2017 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"path"
+	"strconv"
+	"strings"
+
+	radix "github.com/hashicorp/go-immutable-radix"
+	"github.com/spf13/hugo/helpers"
+)
+
+// Deprecated: Use .Site.Home.Sections.
+// TODO(bep) Hugo 0.23: reuse as an alias for the home page's sections.
+func (s *SiteInfo) Sections() Taxonomy {
+
+	helpText := `In Hugo 0.22 we introduced nested sections, so this method now returns an empty taxonomy.
+	
+To list sections with their pages, you can do something like this:
+
+{{ range $.Site.Home.Sections }}
+  Section: {{ .Title }}
+  {{ range .Pages }}
+     Section Page: {{ .Title }}
+  {{ end }}
+{{ end }}
+
+To get a specific section, you can do this:
+
+{{ $section := $.Site.GetPage "section" "blog" }}
+`
+
+	helpers.Deprecated("Site", "Sections", helpText, true)
+
+	return Taxonomy{}
+}
+
+// Home is a shortcut to the home page, equivalent to .Site.GetPage "home".
+func (s *SiteInfo) Home() (*Page, error) {
+	return s.GetPage(KindHome)
+}
+
+// Parent returns a section's parent section or a page's section.
+// To get a section's subsections, see Page's Sections method.
+func (p *Page) Parent() *Page {
+	return p.parent
+}
+
+// current returns the page's current section.
+// Note that this will return nil for pages that are not regular, home or section pages.
+// Note that for paginated sections and home pages, this will return the original page pointer.
+func (p *Page) current() *Page {
+	v := p
+	if v.origOnCopy != nil {
+		v = v.origOnCopy
+	}
+	if v.IsHome() || v.IsSection() {
+		return v
+	}
+
+	return v.parent
+}
+
+// InSection returns whether the given page is in the current section.
+// Note that this will always return false for pages that are not
+// regular, home or section pages.
+func (p *Page) InSection(other interface{}) (bool, error) {
+	if p == nil || other == nil {
+		return false, nil
+	}
+
+	if po, ok := other.(*PageOutput); ok {
+		other = po.Page
+	}
+
+	pp, ok := other.(*Page)
+	if !ok {
+		return false, fmt.Errorf("%T not supported in InSection", other)
+	}
+
+	if pp == nil {
+		return false, nil
+	}
+
+	return pp.current() == p.current(), nil
+}
+
+// Sections returns this section's subsections, if any.
+// Note that this will only be non-empty for section pages and the home page.
+func (p *Page) Sections() Pages {
+	return p.subSections
+}
+
+func (s *Site) assembleSections() Pages {
+	var newPages Pages
+
+	if !s.isEnabled(KindSection) {
+		return newPages
+	}
+
+	// Maps a section path, e.g. "my/section", to its section page.
+	sectionPages := make(map[string]*Page)
+
+	// The sections with content files will already have been created.
+	for _, sect := range s.findPagesByKind(KindSection) {
+		sectionPages[path.Join(sect.sections...)] = sect
+	}
+
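+	// These keys shape the ordered radix walk below: a section page is stored
+	// under <section path>/<sectSectKey> and each of its regular pages under
+	// <section path>/<sectPageKey>/<page index>. Because sectSectKey sorts
+	// before sectPageKey, a section node is always visited before the pages
+	// that belong to it.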
+	const (
+		sectKey     = "__hs"
+		sectSectKey = "_a" + sectKey
+		sectPageKey = "_b" + sectKey
+	)
+
+	var (
+		home       *Page
+		inPages    = radix.New().Txn()
+		inSections = radix.New().Txn()
+		undecided  Pages
+	)
+
+	for i, p := range s.Pages {
+		if p.Kind != KindPage {
+			if p.Kind == KindHome {
+				home = p
+			}
+			continue
+		}
+
+		if len(p.sections) == 0 {
+			// Root level pages. These will have the home page as their Parent.
+			p.parent = home
+			continue
+		}
+
+		sectionKey := path.Join(p.sections...)
+		sect, found := sectionPages[sectionKey]
+
+		if !found && len(p.sections) == 1 {
+			// We only create content-file-less sections for the root sections.
+			sect = s.newSectionPage(p.sections[0])
+			sectionPages[sectionKey] = sect
+			newPages = append(newPages, sect)
+		} else if !found {
+			// A page in a nested section with no section page yet; decide below.
+			undecided = append(undecided, p)
+		}
+
+		pagePath := path.Join(sectionKey, sectPageKey, strconv.Itoa(i))
+		inPages.Insert([]byte(pagePath), p)
+	}
+
+	// Create any missing sections in the tree.
+	// A sub-section needs a content file, but to create a complete navigational
+	// tree we also need its ancestor sections: given a content file in
+	// /content/a/b/c/_index.md, we cannot create just the c section.
+	for _, sect := range sectionPages {
+		for i := len(sect.sections); i > 0; i-- {
+			sectionPath := sect.sections[:i]
+			sectionKey := path.Join(sectionPath...)
+			sect, found := sectionPages[sectionKey]
+			if !found {
+				sect = s.newSectionPage(sectionPath[len(sectionPath)-1])
+				sect.sections = sectionPath
+				sectionPages[sectionKey] = sect
+				newPages = append(newPages, sect)
+			}
+		}
+	}
+
+	// Create any missing root sections.
+	for _, p := range undecided {
+		sectionKey := p.sections[0]
+		sect, found := sectionPages[sectionKey]
+		if !found {
+			sect = s.newSectionPage(sectionKey)
+			sectionPages[sectionKey] = sect
+			newPages = append(newPages, sect)
+		}
+	}
+
+	for k, sect := range sectionPages {
+		inPages.Insert([]byte(path.Join(k, sectSectKey)), sect)
+		inSections.Insert([]byte(k), sect)
+	}
+
+	var (
+		currentSection *Page
+		children       Pages
+		rootPages      = inPages.Commit().Root()
+		rootSections   = inSections.Commit().Root()
+	)
+
+	rootPages.Walk(func(path []byte, v interface{}) bool {
+		p := v.(*Page)
+
+		if p.Kind == KindSection {
+			if currentSection != nil {
+				// A new section
+				currentSection.setPagePages(children)
+			}
+
+			currentSection = p
+			children = make(Pages, 0)
+
+			return false
+
+		}
+
+		// Regular page
+		p.parent = currentSection
+		children = append(children, p)
+		return false
+	})
+
+	if currentSection != nil {
+		currentSection.setPagePages(children)
+	}
+
+	// Build the sections hierarchy
+	for _, sect := range sectionPages {
+		if len(sect.sections) == 1 {
+			sect.parent = home
+		} else {
+			parentSearchKey := path.Join(sect.sections[:len(sect.sections)-1]...)
+			_, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
+			p := v.(*Page)
+			sect.parent = p
+		}
+
+		if sect.parent != nil {
+			sect.parent.subSections = append(sect.parent.subSections, sect)
+		}
+	}
+
+	var (
+		sectionsParamId      = "mainSections"
+		sectionsParamIdLower = strings.ToLower(sectionsParamId)
+		mainSections         interface{}
+		mainSectionsFound    bool
+		maxSectionWeight     int
+	)
+
+	mainSections, mainSectionsFound = s.Info.Params[sectionsParamIdLower]
+
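+	// When "mainSections" is not set in the site config, the loop below picks
+	// the section with the most content as the default, counting each
+	// subsection as five regular pages.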
+	for _, sect := range sectionPages {
+		if sect.parent != nil {
+			sect.parent.subSections.Sort()
+		}
+
+		for i, p := range sect.Pages {
+			if i > 0 {
+				p.NextInSection = sect.Pages[i-1]
+			}
+			if i < len(sect.Pages)-1 {
+				p.PrevInSection = sect.Pages[i+1]
+			}
+		}
+
+		if !mainSectionsFound {
+			weight := len(sect.Pages) + (len(sect.Sections()) * 5)
+			if weight >= maxSectionWeight {
+				mainSections = []string{sect.Section()}
+				maxSectionWeight = weight
+			}
+		}
+	}
+
+	// Try to make this as backwards compatible as possible.
+	s.Info.Params[sectionsParamId] = mainSections
+	s.Info.Params[sectionsParamIdLower] = mainSections
+
+	return newPages
+
+}
+
+func (p *Page) setPagePages(pages Pages) {
+	pages.Sort()
+	p.Pages = pages
+	p.Data = make(map[string]interface{})
+	p.Data["Pages"] = pages
+}
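
Since each section and regular page now records its parent, a breadcrumb trail can be built by recursing on .Parent; a minimal sketch (the template name and separator are illustrative, not part of this patch):

{{ define "breadcrumb" }}{{ with .Parent }}{{ template "breadcrumb" . }} / {{ end }}{{ .Title }}{{ end }}
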
--- /dev/null
+++ b/hugolib/site_sections_test.go
@@ -1,0 +1,224 @@
+// Copyright 2017-present The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"path/filepath"
+	"strings"
+	"testing"
+
+	"github.com/spf13/hugo/deps"
+	"github.com/stretchr/testify/require"
+)
+
+func TestNestedSections(t *testing.T) {
+	t.Parallel()
+
+	var (
+		assert  = require.New(t)
+		cfg, fs = newTestCfg()
+		th      = testHelper{cfg, fs, t}
+	)
+
+	pageTemplate := `---
+title: T%d_%d
+---
+Content
+`
+
+	// Home page
+	writeSource(t, fs, filepath.Join("content", "_index.md"), fmt.Sprintf(pageTemplate, -1, -1))
+
+	// Top level content page
+	writeSource(t, fs, filepath.Join("content", "mypage.md"), fmt.Sprintf(pageTemplate, 1234, 5))
+
+	// Top level section without index content page
+	writeSource(t, fs, filepath.Join("content", "top", "mypage2.md"), fmt.Sprintf(pageTemplate, 12345, 6))
+	// Just a page in a subfolder, i.e. not a section.
+	writeSource(t, fs, filepath.Join("content", "top", "folder", "mypage3.md"), fmt.Sprintf(pageTemplate, 12345, 67))
+
+	for level1 := 1; level1 < 3; level1++ {
+		writeSource(t, fs, filepath.Join("content", "l1", fmt.Sprintf("page_1_%d.md", level1)),
+			fmt.Sprintf(pageTemplate, 1, level1))
+	}
+
+	// Empty nested section, i.e. no regular content pages.
+	writeSource(t, fs, filepath.Join("content", "empty1", "b", "c", "_index.md"), fmt.Sprintf(pageTemplate, 33, -1))
+	// Index content file at the end and in the middle.
+	writeSource(t, fs, filepath.Join("content", "empty2", "b", "_index.md"), fmt.Sprintf(pageTemplate, 40, -1))
+	writeSource(t, fs, filepath.Join("content", "empty2", "b", "c", "d", "_index.md"), fmt.Sprintf(pageTemplate, 41, -1))
+
+	// Empty with content file in the middle.
+	writeSource(t, fs, filepath.Join("content", "empty3", "b", "c", "d", "_index.md"), fmt.Sprintf(pageTemplate, 41, -1))
+	writeSource(t, fs, filepath.Join("content", "empty3", "b", "empty3.md"), fmt.Sprintf(pageTemplate, 3, -1))
+
+	writeSource(t, fs, filepath.Join("content", "l1", "l2", "_index.md"), fmt.Sprintf(pageTemplate, 2, -1))
+	writeSource(t, fs, filepath.Join("content", "l1", "l2_2", "_index.md"), fmt.Sprintf(pageTemplate, 22, -1))
+	writeSource(t, fs, filepath.Join("content", "l1", "l2", "l3", "_index.md"), fmt.Sprintf(pageTemplate, 3, -1))
+
+	for level2 := 1; level2 < 4; level2++ {
+		writeSource(t, fs, filepath.Join("content", "l1", "l2", fmt.Sprintf("page_2_%d.md", level2)),
+			fmt.Sprintf(pageTemplate, 2, level2))
+	}
+	for level2 := 1; level2 < 3; level2++ {
+		writeSource(t, fs, filepath.Join("content", "l1", "l2_2", fmt.Sprintf("page_2_2_%d.md", level2)),
+			fmt.Sprintf(pageTemplate, 2, level2))
+	}
+	for level3 := 1; level3 < 3; level3++ {
+		writeSource(t, fs, filepath.Join("content", "l1", "l2", "l3", fmt.Sprintf("page_3_%d.md", level3)),
+			fmt.Sprintf(pageTemplate, 3, level3))
+	}
+
+	writeSource(t, fs, filepath.Join("content", "Spaces in Section", "page100.md"), fmt.Sprintf(pageTemplate, 10, 0))
+
+	writeSource(t, fs, filepath.Join("layouts", "_default", "single.html"), "<html>Single|{{ .Title }}</html>")
+	writeSource(t, fs, filepath.Join("layouts", "_default", "list.html"),
+		`
+{{ $sect := (.Site.GetPage "section" "l1" "l2") }}
+<html>List|{{ .Title }}|L1/l2-IsActive: {{ .InSection $sect }}
+{{ range .Paginator.Pages }}
+PAG|{{ .Title }}|{{ $sect.InSection . }}
+{{ end }}
+</html>`)
+
+	cfg.Set("paginate", 2)
+
+	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
+	require.Len(t, s.RegularPages, 14)
+
+	tests := []struct {
+		sections string
+		verify   func(p *Page)
+	}{
+		{"empty1", func(p *Page) {
+			// > b,c
+			assert.NotNil(p.s.getPage(KindSection, "empty1", "b"))
+			assert.NotNil(p.s.getPage(KindSection, "empty1", "b", "c"))
+
+		}},
+		{"empty2", func(p *Page) {
+			// > b,c,d where b and d have content files.
+			b := p.s.getPage(KindSection, "empty2", "b")
+			assert.NotNil(b)
+			assert.Equal("T40_-1", b.Title)
+			c := p.s.getPage(KindSection, "empty2", "b", "c")
+			assert.NotNil(c)
+			assert.Equal("Cs", c.Title)
+			d := p.s.getPage(KindSection, "empty2", "b", "c", "d")
+			assert.NotNil(d)
+			assert.Equal("T41_-1", d.Title)
+		}},
+		{"empty3", func(p *Page) {
+			// b,c,d with regular page in b
+			b := p.s.getPage(KindSection, "empty3", "b")
+			assert.NotNil(b)
+			assert.Len(b.Pages, 1)
+			assert.Equal("empty3.md", b.Pages[0].File.LogicalName())
+
+		}},
+		{"top", func(p *Page) {
+			assert.Equal("Tops", p.Title)
+			assert.Len(p.Pages, 2)
+			assert.Equal("mypage2.md", p.Pages[0].LogicalName())
+			assert.Equal("mypage3.md", p.Pages[1].LogicalName())
+			home := p.Parent()
+			assert.True(home.IsHome())
+			assert.Len(p.Sections(), 0)
+			assert.Equal(home, home.current())
+			active, err := home.InSection(home)
+			assert.NoError(err)
+			assert.True(active)
+		}},
+		{"l1", func(p *Page) {
+			assert.Equal("L1s", p.Title)
+			assert.Len(p.Pages, 2)
+			assert.True(p.Parent().IsHome())
+			assert.Len(p.Sections(), 2)
+		}},
+		{"l1,l2", func(p *Page) {
+			assert.Equal("T2_-1", p.Title)
+			assert.Len(p.Pages, 3)
+			assert.Equal(p, p.Pages[0].Parent())
+			assert.Equal("L1s", p.Parent().Title)
+			assert.Equal("/l1/l2/", p.URLPath.URL)
+			assert.Equal("/l1/l2/", p.RelPermalink())
+			assert.Len(p.Sections(), 1)
+
+			for _, child := range p.Pages {
+				assert.Equal(p, child.current())
+				active, err := child.InSection(p)
+				assert.NoError(err)
+				assert.True(active)
+				active, err = p.InSection(child)
+				assert.NoError(err)
+				assert.True(active)
+				active, err = p.InSection(p.s.getPage(KindHome))
+				assert.NoError(err)
+				assert.False(active)
+			}
+
+			assert.Equal(p, p.current())
+
+		}},
+		{"l1,l2_2", func(p *Page) {
+			assert.Equal("T22_-1", p.Title)
+			assert.Len(p.Pages, 2)
+			assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].Path())
+			assert.Equal("L1s", p.Parent().Title)
+			assert.Len(p.Sections(), 0)
+		}},
+		{"l1,l2,l3", func(p *Page) {
+			assert.Equal("T3_-1", p.Title)
+			assert.Len(p.Pages, 2)
+			assert.Equal("T2_-1", p.Parent().Title)
+			assert.Len(p.Sections(), 0)
+		}},
+	}
+
+	for _, test := range tests {
+		sections := strings.Split(test.sections, ",")
+		p := s.getPage(KindSection, sections...)
+		assert.NotNil(p, fmt.Sprint(sections))
+
+		if p.Pages != nil {
+			assert.Equal(p.Pages, p.Data["Pages"])
+		}
+		assert.NotNil(p.Parent(), fmt.Sprintf("Parent nil: %q", test.sections))
+		test.verify(p)
+	}
+
+	home := s.getPage(KindHome)
+
+	assert.NotNil(home)
+
+	assert.Len(home.Sections(), 6)
+
+	rootPage := s.getPage(KindPage, "mypage.md")
+	assert.NotNil(rootPage)
+	assert.True(rootPage.Parent().IsHome())
+
+	// Add an odd test for this as this looks a little bit off, but I'm not in the mood
+	// to think too hard about this right now. It works, but people will have to spell
+	// out the directory name as is.
+	// If we later decide to do something about this, we will have to do some normalization in
+	// getPage.
+	// TODO(bep)
+	sectionWithSpace := s.getPage(KindSection, "Spaces in Section")
+	require.NotNil(t, sectionWithSpace)
+	require.Equal(t, "/spaces-in-section/", sectionWithSpace.RelPermalink())
+
+	th.assertFileContent("public/l1/l2/page/2/index.html", "L1/l2-IsActive: true", "PAG|T2_3|true")
+
+}
--- a/hugolib/site_test.go
+++ b/hugolib/site_test.go
@@ -438,7 +438,7 @@
 
 	mainSections, err := s.Info.Param("mainSections")
 	require.NoError(t, err)
-	require.Equal(t, mainSections, []string{"sect"})
+	require.Equal(t, []string{"sect"}, mainSections)
 
 	th := testHelper{s.Cfg, s.Fs, t}
 	tests := []struct {
@@ -635,12 +635,8 @@
 
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true})
 
-	if s.Sections["sect"][0].Weight != 2 || s.Sections["sect"][3].Weight != 6 {
-		t.Errorf("Pages in unexpected order. First should be '%d', got '%d'", 2, s.Sections["sect"][0].Weight)
-	}
-
-	if s.Sections["sect"][1].Page.Title != "Three" || s.Sections["sect"][2].Page.Title != "Four" {
-		t.Errorf("Pages in unexpected order. Second should be '%s', got '%s'", "Three", s.Sections["sect"][1].Page.Title)
+	if s.getPage(KindSection, "sect").Pages[1].Title != "Three" || s.getPage(KindSection, "sect").Pages[2].Title != "Four" {
+		t.Error("Pages in unexpected order.")
 	}
 
 	bydate := s.RegularPages.ByDate()
--- a/hugolib/site_url_test.go
+++ b/hugolib/site_url_test.go
@@ -69,7 +69,7 @@
 	cfg.Set("uglyURLs", false)
 	cfg.Set("paginate", 10)
 
-	writeSourcesToSource(t, "content", fs, urlFakeSource...)
+	writeSourcesToSource(t, "", fs, urlFakeSource...)
 	s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{})
 
 	_, err := s.Fs.Destination.Open("public/blue")
--- a/vendor/vendor.json
+++ b/vendor/vendor.json
@@ -88,6 +88,18 @@
 			"revisionTime": "2017-03-19T17:27:27Z"
 		},
 		{
+			"checksumSHA1": "zvmksNyW6g+Fd/bywd4vcn8rp+M=",
+			"path": "github.com/hashicorp/go-immutable-radix",
+			"revision": "30664b879c9a771d8d50b137ab80ee0748cb2fcc",
+			"revisionTime": "2017-02-14T02:52:36Z"
+		},
+		{
+			"checksumSHA1": "9hffs0bAIU6CquiRhKQdzjHnKt0=",
+			"path": "github.com/hashicorp/golang-lru/simplelru",
+			"revision": "0a025b7e63adc15a622f29b0b2c4c3848243bbf6",
+			"revisionTime": "2016-08-13T22:13:03Z"
+		},
+		{
 			"checksumSHA1": "7JBkp3EZoc0MSbiyWfzVhO4RYoY=",
 			"path": "github.com/hashicorp/hcl",
 			"revision": "392dba7d905ed5d04a5794ba89f558b27e2ba1ca",