ref: 1a94148d062beb6e45d5eeef3447d0308e496718
parent: 2c22a8b79a92685f018340128634d8e7db03d321
author: Bjørn Erik Pedersen <[email protected]>
date: Thu Jul 16 09:17:54 EDT 2015
Add paginator support for page groups

Fixed #1274
--- a/docs/content/extras/pagination.md
+++ b/docs/content/extras/pagination.md
@@ -35,11 +35,16 @@
For a given **Node**, it's one of the options above. The `.Paginator` is static and cannot change once created.
-
The global page size setting (`Paginate`) can be overridden by providing a positive integer as the last argument. The examples below will give five items per page:
* `{{ range (.Paginator 5).Pages }}`
* `{{ $paginator := .Paginate (where .Data.Pages "Type" "post") 5 }}`
+
+It is also possible to use the `GroupBy` functions in combination with pagination:
+
+```
+{{ range (.Paginate (.Data.Pages.GroupByDate "2006")).PageGroups }}
+```
## Build the navigation
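As a fuller illustration of what the new grouped pagination enables in templates, a sketch along these lines should work; `.Key` and `.Pages` are the standard page-group fields, and the final partial is only a placeholder for whatever pager navigation the site already uses:

```
{{ range (.Paginate (.Data.Pages.GroupByDate "2006")).PageGroups }}
  <h3>{{ .Key }}</h3>
  <ul>
    {{ range .Pages }}
      <li><a href="{{ .Permalink }}">{{ .Title }}</a></li>
    {{ end }}
  </ul>
{{ end }}
{{ partial "pagination.html" . }}
```

Note that a pager built from plain pages exposes its elements through `.Pages` (with `.PageGroups` empty), while one built from grouped pages exposes them through `.PageGroups` (with `.Pages` empty).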
--- a/hugolib/pagination.go
+++ b/hugolib/pagination.go
@@ -31,12 +31,29 @@
*paginator
}
+type paginatedElement interface {
+ Len() int
+}
+
+func (p Pages) Len() int {
+ return len(p)
+}
+
+func (psg PagesGroup) Len() int {
+ l := 0
+ for _, pg := range psg {
+ l += len(pg.Pages)
+ }
+ return l
+}
+
type pagers []*Pager
var paginatorEmptyPages Pages
+var paginatorEmptyPageGroups PagesGroup
type paginator struct {
- paginatedPages []Pages
+ paginatedElements []paginatedElement
pagers
paginationURLFactory
total int
@@ -63,17 +80,71 @@
return p.URL()
}
-// Pages returns the elements on this page.
+// Pages returns the Pages on this page.
+// Note: If this returns a non-empty result, then PageGroups() will return an empty result.
func (p *Pager) Pages() Pages {
- if len(p.paginatedPages) == 0 {
+ if len(p.paginatedElements) == 0 {
return paginatorEmptyPages
}
- return p.paginatedPages[p.PageNumber()-1]
+
+ if pages, ok := p.element().(Pages); ok {
+ return pages
+ }
+
+ return paginatorEmptyPages
}
+// PageGroups returns the page groups for this page.
+// Note: If this returns a non-empty result, then Pages() will return an empty result.
+func (p *Pager) PageGroups() PagesGroup {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPageGroups
+ }
+
+ if groups, ok := p.element().(PagesGroup); ok {
+ return groups
+ }
+
+ return paginatorEmptyPageGroups
+}
+
+func (p *Pager) element() paginatedElement {
+ if len(p.paginatedElements) == 0 {
+ return paginatorEmptyPages
+ }
+ return p.paginatedElements[p.PageNumber()-1]
+}
+
+// page returns the Page at the given index.
+func (p *Pager) page(index int) (*Page, error) {
+
+ if pages, ok := p.element().(Pages); ok {
+ if pages != nil && len(pages) > index {
+ return pages[index], nil
+ }
+ return nil, nil
+ }
+
+ // must be PagesGroup
+ // this construction looks clumsy, but ...
+ // ... it is the difference between 99.5% and 100% test coverage :-)
+ groups := p.element().(PagesGroup)
+
+ i := 0
+ for _, v := range groups {
+ for _, page := range v.Pages {
+ if i == index {
+ return page, nil
+ }
+ i++
+ }
+ }
+ return nil, nil
+}
+
// NumberOfElements gets the number of elements on this page.
func (p *Pager) NumberOfElements() int {
- return len(p.Pages())
+ return p.element().Len()
}
// HasPrev tests whether there are page(s) before the current.
@@ -91,7 +162,7 @@
// HasNext tests whether there are page(s) after the current.
func (p *Pager) HasNext() bool {
- return p.PageNumber() < len(p.paginatedPages)
+ return p.PageNumber() < len(p.paginatedElements)
}
// Next returns the pager for the next page.
@@ -124,7 +195,7 @@
// TotalPages returns the number of pages in the paginator.
func (p *paginator) TotalPages() int {
- return len(p.paginatedPages)
+ return len(p.paginatedElements)
}
// TotalNumberOfElements returns the number of elements on all pages in this paginator.
@@ -132,8 +203,8 @@
return p.total
}
-func splitPages(pages Pages, size int) []Pages {
- var split []Pages
+func splitPages(pages Pages, size int) []paginatedElement {
+ var split []paginatedElement
for low, j := 0, len(pages); low < j; low += size {
high := int(math.Min(float64(low+size), float64(len(pages))))
split = append(split, pages[low:high])
@@ -142,6 +213,44 @@
return split
}
+func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
+ var split []paginatedElement
+
+ type keyPage struct {
+ key interface{}
+ page *Page
+ }
+
+ var flattened []keyPage
+
+ for _, g := range pageGroups {
+ for _, p := range g.Pages {
+ flattened = append(flattened, keyPage{g.Key, p})
+ }
+ }
+
+ numPages := len(flattened)
+
+ for low, j := 0, numPages; low < j; low += size {
+ high := int(math.Min(float64(low+size), float64(numPages)))
+ var pg PagesGroup
+ var key interface{} = nil
+ var groupIndex = -1
+ for k := low; k < high; k++ {
+ kp := flattened[k]
+ if key == nil || key != kp.key {
+ key = kp.key
+ pg = append(pg, PageGroup{Key: key})
+ groupIndex++
+ }
+ pg[groupIndex].Pages = append(pg[groupIndex].Pages, kp.page)
+ }
+ split = append(split, pg)
+ }
+
+ return split
+}
+
// Paginator gets this Node's paginator if it's already created.
// If it's not, one will be created with all pages in Data["Pages"].
func (n *Node) Paginator(options ...interface{}) (*Pager, error) {
@@ -264,15 +373,21 @@
return nil, errors.New("'paginate' configuration setting must be positive to paginate")
}
- pages, err := toPages(seq)
- if err != nil {
- return nil, err
- }
-
section = strings.TrimSuffix(section, ".html")
-
urlFactory := newPaginationURLFactory(section)
- paginator, _ := newPaginator(pages, pagerSize, urlFactory)
+
+ var paginator *paginator
+
+ if groups, ok := seq.(PagesGroup); ok {
+ paginator, _ = newPaginatorFromPageGroups(groups, pagerSize, urlFactory)
+ } else {
+ pages, err := toPages(seq)
+ if err != nil {
+ return nil, err
+ }
+ paginator, _ = newPaginatorFromPages(pages, pagerSize, urlFactory)
+ }
+
pagers := paginator.Pagers()
return pagers, nil
@@ -303,6 +418,28 @@
return a1 == a2
}
+ t1 := reflect.TypeOf(a1)
+ t2 := reflect.TypeOf(a2)
+
+ if t1 != t2 {
+ return false
+ }
+
+ if g1, ok := a1.(PagesGroup); ok {
+ g2 := a2.(PagesGroup)
+ if len(g1) != len(g2) {
+ return false
+ }
+ if len(g1) == 0 {
+ return true
+ }
+ if g1.Len() != g2.Len() {
+ return false
+ }
+
+ return g1[0].Pages[0] == g2[0].Pages[0]
+ }
+
p1, err1 := toPages(a1)
p2, err2 := toPages(a2)
@@ -311,10 +448,6 @@
return true
}
- if err1 != nil || err2 != nil {
- return false
- }
-
if len(p1) != len(p2) {
return false
}
@@ -326,7 +459,7 @@
return p1[0] == p2[0]
}
-func newPaginator(pages Pages, size int, urlFactory paginationURLFactory) (*paginator, error) {
+func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*paginator, error) {
if size <= 0 {
return nil, errors.New("Paginator size must be positive")
@@ -334,13 +467,28 @@
split := splitPages(pages, size)
- p := &paginator{total: len(pages), paginatedPages: split, size: size, paginationURLFactory: urlFactory}
+ return newPaginator(split, len(pages), size, urlFactory)
+}
+func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*paginator, error) {
+
+ if size <= 0 {
+ return nil, errors.New("Paginator size must be positive")
+ }
+
+ split := splitPageGroups(pageGroups, size)
+
+ return newPaginator(split, pageGroups.Len(), size, urlFactory)
+}
+
+func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*paginator, error) {
+ p := &paginator{total: total, paginatedElements: elements, size: size, paginationURLFactory: urlFactory}
+
var ps pagers
- if len(split) > 0 {
- ps = make(pagers, len(split))
- for i := range p.paginatedPages {
+ if len(elements) > 0 {
+ ps = make(pagers, len(elements))
+ for i := range p.paginatedElements {
ps[i] = &Pager{number: (i + 1), paginator: p}
}
} else {
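The chunking strategy in `splitPageGroups` above — flatten the groups into (key, page) pairs, cut the flat list into pager-sized chunks, then rebuild the group structure inside each chunk so a key can repeat across chunk boundaries — can be illustrated with a small self-contained sketch (using simplified stand-in types rather than Hugo's real `Page` and `PagesGroup`):

```go
package main

import "fmt"

// Simplified stand-ins for Hugo's Page, PageGroup and PagesGroup types,
// used only to illustrate the flatten-then-regroup chunking.
type page struct{ title string }

type pageGroup struct {
	Key   interface{}
	Pages []page
}

type pagesGroup []pageGroup

// splitPageGroups flattens the groups into (key, page) pairs, cuts the flat
// list into chunks of `size`, and rebuilds the group structure inside each
// chunk. A group that spans a chunk boundary reappears, with the same key,
// at the start of the next chunk.
func splitPageGroups(groups pagesGroup, size int) []pagesGroup {
	type keyPage struct {
		key interface{}
		pg  page
	}
	var flat []keyPage
	for _, g := range groups {
		for _, p := range g.Pages {
			flat = append(flat, keyPage{g.Key, p})
		}
	}

	var split []pagesGroup
	for low := 0; low < len(flat); low += size {
		high := low + size
		if high > len(flat) {
			high = len(flat)
		}
		var chunk pagesGroup
		var key interface{}
		for _, kp := range flat[low:high] {
			if key == nil || key != kp.key {
				// new key (or first page of the chunk): start a new group
				key = kp.key
				chunk = append(chunk, pageGroup{Key: key})
			}
			last := len(chunk) - 1
			chunk[last].Pages = append(chunk[last].Pages, kp.pg)
		}
		split = append(split, chunk)
	}
	return split
}

func main() {
	groups := pagesGroup{
		{Key: "2015", Pages: []page{{"a"}, {"b"}, {"c"}}},
		{Key: "2014", Pages: []page{{"d"}, {"e"}}},
	}
	for i, chunk := range splitPageGroups(groups, 2) {
		fmt.Printf("pager %d: %v\n", i+1, chunk)
	}
	// Output:
	// pager 1: [{2015 [{a} {b}]}]
	// pager 2: [{2015 [{c}]} {2014 [{d}]}]
	// pager 3: [{2014 [{e}]}]
}
```

Running the sketch shows the "2015" key appearing on both the first and second pager, which is exactly what `PageGroups()` exposes to templates.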
--- a/hugolib/pagination_test.go
+++ b/hugolib/pagination_test.go
@@ -18,25 +18,86 @@
assert.Equal(t, 5, len(chunks))
for i := 0; i < 4; i++ {
- assert.Equal(t, 5, len(chunks[i]))
+ assert.Equal(t, 5, chunks[i].Len())
}
lastChunk := chunks[4]
- assert.Equal(t, 1, len(lastChunk))
+ assert.Equal(t, 1, lastChunk.Len())
}
-func TestPager(t *testing.T) {
+func TestSplitPageGroups(t *testing.T) {
pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+ chunks := splitPageGroups(groups, 5)
+ assert.Equal(t, 5, len(chunks))
+
+ firstChunk := chunks[0]
+
+	// createTestPages alternates between weight 5 and weight 10
+ if groups, ok := firstChunk.(PagesGroup); ok {
+ assert.Equal(t, 5, groups.Len())
+ for _, pg := range groups {
+			// the first chunk should contain only the weight 10 group
+ assert.Equal(t, 10, pg.Key)
+ for _, p := range pg.Pages {
+				assert.True(t, p.FuzzyWordCount%2 == 0) // weight 10 pages have an even FuzzyWordCount (see createTestPages)
+ }
+ }
+ } else {
+		t.Fatal("Expected PagesGroup")
+ }
+
+ lastChunk := chunks[4]
+
+ if groups, ok := lastChunk.(PagesGroup); ok {
+ assert.Equal(t, 1, groups.Len())
+ for _, pg := range groups {
+			// the last chunk should contain only the weight 5 group
+ assert.Equal(t, 5, pg.Key)
+ for _, p := range pg.Pages {
+				assert.True(t, p.FuzzyWordCount%2 != 0) // weight 5 pages have an odd FuzzyWordCount (see createTestPages)
+ }
+ }
+ } else {
+		t.Fatal("Expected PagesGroup")
+ }
+
+}
+
+func TestPager(t *testing.T) {
+ pages := createTestPages(21)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page)
}
- _, err := newPaginator(pages, -1, urlFactory)
+ _, err := newPaginatorFromPages(pages, -1, urlFactory)
assert.NotNil(t, err)
- paginator, _ := newPaginator(pages, 5, urlFactory)
+ _, err = newPaginatorFromPageGroups(groups, -1, urlFactory)
+ assert.NotNil(t, err)
+
+ pag, err := newPaginatorFromPages(pages, 5, urlFactory)
+ assert.Nil(t, err)
+ doTestPages(t, pag)
+ first := pag.Pagers()[0].First()
+ assert.NotEmpty(t, first.Pages())
+ assert.Empty(t, first.PageGroups())
+
+ pag, err = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ assert.Nil(t, err)
+ doTestPages(t, pag)
+ first = pag.Pagers()[0].First()
+ assert.NotEmpty(t, first.PageGroups())
+ assert.Empty(t, first.Pages())
+
+}
+
+func doTestPages(t *testing.T, paginator *paginator) {
+
paginatorPages := paginator.Pagers()
assert.Equal(t, 5, len(paginatorPages))
@@ -72,11 +133,29 @@
func TestPagerNoPages(t *testing.T) {
pages := createTestPages(0)
+ groups, _ := pages.GroupBy("Weight", "desc")
+
urlFactory := func(page int) string {
return fmt.Sprintf("page/%d/", page)
}
- paginator, _ := newPaginator(pages, 5, urlFactory)
+ paginator, _ := newPaginatorFromPages(pages, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first := paginator.Pagers()[0].First()
+ assert.Empty(t, first.PageGroups())
+ assert.Empty(t, first.Pages())
+
+ paginator, _ = newPaginatorFromPageGroups(groups, 5, urlFactory)
+ doTestPagerNoPages(t, paginator)
+
+ first = paginator.Pagers()[0].First()
+ assert.Empty(t, first.PageGroups())
+ assert.Empty(t, first.Pages())
+
+}
+
+func doTestPagerNoPages(t *testing.T, paginator *paginator) {
paginatorPages := paginator.Pagers()
assert.Equal(t, 1, len(paginatorPages))
@@ -91,7 +170,7 @@
assert.False(t, pageOne.HasPrev())
assert.Nil(t, pageOne.Next())
assert.Equal(t, 1, len(pageOne.Pagers()))
- assert.Equal(t, 0, len(pageOne.Pages()))
+ assert.Equal(t, 0, pageOne.Pages().Len())
assert.Equal(t, 0, pageOne.NumberOfElements())
assert.Equal(t, 0, pageOne.TotalNumberOfElements())
assert.Equal(t, 0, pageOne.TotalPages())
@@ -250,7 +329,8 @@
}
func TestPaginatePages(t *testing.T) {
- for i, seq := range []interface{}{createTestPages(11), WeightedPages{}, PageGroup{}, &Pages{}} {
+ groups, _ := createTestPages(31).GroupBy("Weight", "desc")
+ for i, seq := range []interface{}{createTestPages(11), groups, WeightedPages{}, PageGroup{}, &Pages{}} {
v, err := paginatePages(seq, 11, "t")
assert.NotNil(t, v, "Val %d", i)
assert.Nil(t, err, "Err %d", i)
@@ -308,6 +388,10 @@
func TestProbablyEqualPageLists(t *testing.T) {
fivePages := createTestPages(5)
zeroPages := createTestPages(0)
+ zeroPagesByWeight, _ := createTestPages(0).GroupBy("Weight", "asc")
+ fivePagesByWeight, _ := createTestPages(5).GroupBy("Weight", "asc")
+ ninePagesByWeight, _ := createTestPages(9).GroupBy("Weight", "asc")
+
for i, this := range []struct {
v1 interface{}
v2 interface{}
@@ -320,6 +404,11 @@
{fivePages, createTestPages(2), false},
{fivePages, fivePages, true},
{zeroPages, zeroPages, true},
+ {fivePagesByWeight, fivePagesByWeight, true},
+ {zeroPagesByWeight, fivePagesByWeight, false},
+ {zeroPagesByWeight, zeroPagesByWeight, true},
+ {fivePagesByWeight, fivePages, false},
+ {fivePagesByWeight, ninePagesByWeight, false},
} {
result := probablyEqualPageLists(this.v1, this.v2)
@@ -330,6 +419,33 @@
}
}
+func TestPage(t *testing.T) {
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+	sevenPages := createTestPages(7)
+	sevenPagesFuzzyWordCount, _ := createTestPages(7).GroupBy("FuzzyWordCount", "asc")
+
+	p1, _ := newPaginatorFromPages(sevenPages, 2, urlFactory)
+	p2, _ := newPaginatorFromPageGroups(sevenPagesFuzzyWordCount, 2, urlFactory)
+
+ f1 := p1.pagers[0].First()
+ f2 := p2.pagers[0].First()
+
+ page11, _ := f1.page(1)
+ page1Nil, _ := f1.page(3)
+
+ page21, _ := f2.page(1)
+ page2Nil, _ := f2.page(3)
+
+ assert.Equal(t, 1, page11.FuzzyWordCount)
+ assert.Nil(t, page1Nil)
+
+ assert.Equal(t, 1, page21.FuzzyWordCount)
+ assert.Nil(t, page2Nil)
+}
+
func createTestPages(num int) Pages {
pages := make(Pages, num)
@@ -338,14 +454,20 @@
Node: Node{
URLPath: URLPath{
Section: "z",
- URL: fmt.Sprintf("http://base/x/y/p%d.html", num),
+ URL: fmt.Sprintf("http://base/x/y/p%d.html", i),
},
Site: &SiteInfo{
BaseURL: "http://base/",
},
},
- Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", num)))},
+ Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
}
+ w := 5
+ if i%2 == 0 {
+ w = 10
+ }
+ pages[i].FuzzyWordCount = i
+ pages[i].Weight = w
}
return pages
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1118,8 +1118,9 @@
taxonomyPagerNode, _ := s.newTaxonomyNode(t)
taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 {
- taxonomyPagerNode.Date = pager.Pages()[0].Date
- taxonomyPagerNode.Lastmod = pager.Pages()[0].Lastmod
+ first, _ := pager.page(0)
+ taxonomyPagerNode.Date = first.Date
+ taxonomyPagerNode.Lastmod = first.Lastmod
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", base, paginatePath, pageNumber)
@@ -1224,8 +1225,9 @@
sectionPagerNode := s.newSectionListNode(sectionName, section, data)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
- sectionPagerNode.Date = pager.Pages()[0].Date
- sectionPagerNode.Lastmod = pager.Pages()[0].Lastmod
+ first, _ := pager.page(0)
+ sectionPagerNode.Date = first.Date
+ sectionPagerNode.Lastmod = first.Lastmod
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", section, paginatePath, pageNumber)
@@ -1283,8 +1285,9 @@
homePagerNode := s.newHomeNode()
homePagerNode.paginator = pager
if pager.TotalPages() > 0 {
- homePagerNode.Date = pager.Pages()[0].Date
- homePagerNode.Lastmod = pager.Pages()[0].Lastmod
+ first, _ := pager.page(0)
+ homePagerNode.Date = first.Date
+ homePagerNode.Lastmod = first.Lastmod
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)