Two new configuration properties, `Paginate` (default `0`) and `PaginatePath` (default `page`) are added.
Setting `paginate` to a positive value will split the list pages for the home page, sections and taxonomies into chunks of the size given by the `paginate` property.
A `.Paginator` is provided to help building a pager menu.
There are two ways to configure a `.Paginator`:
1. The simplest way is just to call `.Paginator.Pages` from a template. It will contain the pages for "that page" (`.Data.Pages` will (like today) contain all the pages).
2. Select a sub-set of the pages with the available template functions and pass the slice to `.Paginate` : `{{ range (.Paginate (where .Data.Pages "Type" "post")).Pages }}`
**NOTE:** For a given Node, only one of the two options above may be used. It's perfectly legitimate to iterate over the same pager more than once, but the paginator is static and cannot change once created.
The `.Paginator` contains enough information to build a full-blown paginator interface.
The pages are built in the following form (note: BLANK means no value, i.e. the home page):
```
[SECTION/TAXONOMY/BLANK]/index.html
[SECTION/TAXONOMY/BLANK]/page/1/index.html => redirect to [SECTION/TAXONOMY/BLANK]/index.html
[SECTION/TAXONOMY/BLANK]/page/2/index.html
....
```
Fixes #96
viper.SetDefault("FootnoteAnchorPrefix", "")
viper.SetDefault("FootnoteReturnLinkContents", "")
viper.SetDefault("NewContentEditor", "")
+ viper.SetDefault("Paginate", 0)
+ viper.SetDefault("PaginatePath", "page")
viper.SetDefault("Blackfriday", new(helpers.Blackfriday))
if hugoCmdV.PersistentFlags().Lookup("buildDrafts").Changed {
import (
"fmt"
"github.com/PuerkitoBio/purell"
+ "github.com/spf13/viper"
"net/url"
"path"
"strings"
return newPath
}
+// UrlizeAndPrep makes the given string URL-safe via Urlize and then applies
+// the configured UglyUrls preference via UrlPrep.
+func UrlizeAndPrep(in string) string {
+ return UrlPrep(viper.GetBool("UglyUrls"), Urlize(in))
+}
+
func UrlPrep(ugly bool, in string) string {
if ugly {
x := Uglify(SanitizeUrl(in))
import (
"html/template"
+ "sync"
"time"
)
Date time.Time
Sitemap Sitemap
UrlPath
+ paginator *pager
+ paginatorInit sync.Once
}
func (n *Node) Now() time.Time {
--- /dev/null
+// Copyright © 2013-14 Steve Francia <spf@spf13.com>.
+//
+// Licensed under the Simple Public License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://opensource.org/licenses/Simple-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+ "errors"
+ "fmt"
+ "github.com/spf13/hugo/helpers"
+ "github.com/spf13/viper"
+ "html/template"
+ "math"
+ "path"
+)
+
+// pager represents one page in the pager sequence. It embeds the shared
+// *paginator so each pager can reach its siblings (Prev/Next/First/Last).
+type pager struct {
+ number int
+ *paginator
+}
+
+// pagers is the ordered list of pagers for one paginator.
+type pagers []*pager
+
+// paginator holds the paginated page chunks plus the pagers that navigate
+// them and the url factory used to address each page.
+type paginator struct {
+ paginatedPages []Pages
+ pagers
+ paginationUrlFactory
+ total int
+ size int
+}
+
+// paginationUrlFactory builds the url for the given (1-based) page number.
+type paginationUrlFactory func(int) string
+
+// PageNumber returns the current page's number in the pager sequence (1-based).
+func (p *pager) PageNumber() int {
+ return p.number
+}
+
+// Url returns the url to the current page.
+func (p *pager) Url() template.HTML {
+ return template.HTML(p.paginationUrlFactory(p.PageNumber()))
+}
+
+// Pages returns the elements on this page.
+func (p *pager) Pages() Pages {
+ return p.paginatedPages[p.PageNumber()-1]
+}
+
+// NumberOfElements gets the number of elements on this page.
+func (p *pager) NumberOfElements() int {
+ return len(p.Pages())
+}
+
+// HasPrev tests whether there are page(s) before the current.
+func (p *pager) HasPrev() bool {
+ return p.PageNumber() > 1
+}
+
+// Prev returns the pager for the previous page, or nil on the first page.
+func (p *pager) Prev() *pager {
+ if !p.HasPrev() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()-2]
+}
+
+// HasNext tests whether there are page(s) after the current.
+func (p *pager) HasNext() bool {
+ return p.PageNumber() < len(p.paginatedPages)
+}
+
+// Next returns the pager for the next page, or nil on the last page.
+func (p *pager) Next() *pager {
+ if !p.HasNext() {
+ return nil
+ }
+ return p.pagers[p.PageNumber()]
+}
+
+// First returns the pager for the first page, or nil if the paginator is empty.
+func (p *pager) First() *pager {
+ if p.TotalPages() == 0 {
+ return nil
+ }
+
+ return p.pagers[0]
+}
+
+// Last returns the pager for the last page, or nil if the paginator is empty.
+func (p *pager) Last() *pager {
+ if p.TotalPages() == 0 {
+ return nil
+ }
+
+ return p.pagers[len(p.pagers)-1]
+}
+
+// Pagers returns a list of pagers that can be used to build a pagination menu.
+func (p *paginator) Pagers() pagers {
+ return p.pagers
+}
+
+// PageSize returns the size of each paginator page (the last page may hold fewer).
+func (p *paginator) PageSize() int {
+ return p.size
+}
+
+// TotalPages returns the number of pages in the paginator.
+func (p *paginator) TotalPages() int {
+ return len(p.paginatedPages)
+}
+
+// TotalNumberOfElements returns the number of elements on all pages in this paginator.
+func (p *paginator) TotalNumberOfElements() int {
+ return p.total
+}
+
+// splitPages partitions pages into consecutive chunks of at most size
+// elements; the final chunk holds any remainder. Empty input yields nil.
+func splitPages(pages Pages, size int) []Pages {
+	var chunks []Pages
+	for start := 0; start < len(pages); start += size {
+		end := int(math.Min(float64(start+size), float64(len(pages))))
+		chunks = append(chunks, pages[start:end])
+	}
+
+	return chunks
+}
+
+// Paginator gets this Node's paginator if it's already created.
+// If it's not, one will be created with all pages in Data["Pages"].
+// Pagination is not supported for regular content pages.
+func (n *Node) Paginator() (*pager, error) {
+
+	if n.IsPage() {
+		return nil, errors.New("Paginators aren't supported for content pages.")
+	}
+
+	var initError error
+
+	// sync.Once guarantees the paginator is built at most once per Node.
+	n.paginatorInit.Do(func() {
+		if n.paginator != nil {
+			return
+		}
+
+		pagers, err := paginatePages(n.Data["Pages"], n.Url)
+
+		if err != nil {
+			initError = err
+		}
+
+		if len(pagers) > 0 {
+			// only the first pager is attached here; the rest of the nodes
+			// will be created later
+			n.paginator = pagers[0]
+			n.Site.addToPaginationPageCount(uint64(n.paginator.TotalPages()))
+		}
+
+	})
+
+	if initError != nil {
+		return nil, initError
+	}
+
+	return n.paginator, nil
+}
+
+// Paginate gets this Node's paginator if it's already created.
+// If it's not, one will be created with the given sequence.
+// Note that repeated calls will return the same result, even if the sequence is different.
+// Pagination is not supported for regular content pages.
+func (n *Node) Paginate(seq interface{}) (*pager, error) {
+
+	if n.IsPage() {
+		return nil, errors.New("Paginators aren't supported for content pages.")
+	}
+
+	var initError error
+
+	// sync.Once guarantees the paginator is built at most once per Node.
+	n.paginatorInit.Do(func() {
+		if n.paginator != nil {
+			return
+		}
+		pagers, err := paginatePages(seq, n.Url)
+
+		if err != nil {
+			initError = err
+		}
+
+		if len(pagers) > 0 {
+			// only the first pager is attached here; the rest of the nodes
+			// will be created later
+			n.paginator = pagers[0]
+			n.Site.addToPaginationPageCount(uint64(n.paginator.TotalPages()))
+		}
+
+	})
+
+	if initError != nil {
+		return nil, initError
+	}
+
+	return n.paginator, nil
+}
+
+// paginatePages chops the given page sequence into pagers, using the
+// configured 'paginate' page size and urls rooted at section.
+// Supported sequence types: Pages, *Pages, WeightedPages and PageGroup.
+func paginatePages(seq interface{}, section string) (pagers, error) {
+	paginateSize := viper.GetInt("paginate")
+
+	if paginateSize <= 0 {
+		return nil, errors.New("'paginate' configuration setting must be positive to paginate")
+	}
+
+	// bind the switched value once instead of re-asserting in every case
+	var pages Pages
+	switch v := seq.(type) {
+	case Pages:
+		pages = v
+	case *Pages:
+		pages = *v
+	case WeightedPages:
+		pages = v.Pages()
+	case PageGroup:
+		pages = v.Pages
+	default:
+		return nil, fmt.Errorf("unsupported type in paginate, got %T", seq)
+	}
+
+	urlFactory := newPaginationUrlFactory(section)
+	paginator := newPaginator(pages, paginateSize, urlFactory)
+
+	return paginator.Pagers(), nil
+}
+
+// newPaginator creates a paginator over pages with the given positive page
+// size, using urlFactory to build each pager's url. Panics on size <= 0.
+func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) *paginator {
+
+	if size <= 0 {
+		panic("Paginator size must be positive")
+	}
+
+	chunks := splitPages(pages, size)
+
+	p := &paginator{
+		total:                len(pages),
+		paginatedPages:       chunks,
+		size:                 size,
+		paginationUrlFactory: urlFactory,
+	}
+
+	// one pager per chunk, numbered from 1, all sharing the paginator
+	ps := make(pagers, len(chunks))
+	for i := range chunks {
+		ps[i] = &pager{number: i + 1, paginator: p}
+	}
+	p.pagers = ps
+
+	return p
+}
+
+// newPaginationUrlFactory returns a factory building the relative url for a
+// given page number beneath pathElements. Page 1 maps to the base list page
+// itself; later pages live under the configured paginatePath.
+func newPaginationUrlFactory(pathElements ...string) paginationUrlFactory {
+	paginatePath := viper.GetString("paginatePath")
+	// pathElements never change after creation, so join them once up front
+	basePath := path.Join(pathElements...)
+
+	return func(page int) string {
+		var rel string
+		if page == 1 {
+			rel = fmt.Sprintf("/%s/", basePath)
+		} else {
+			rel = fmt.Sprintf("/%s/%s/%d/", basePath, paginatePath, page)
+		}
+
+		return helpers.UrlizeAndPrep(rel)
+	}
+}
--- /dev/null
+package hugolib
+
+import (
+ "fmt"
+ "github.com/spf13/hugo/source"
+ "github.com/spf13/viper"
+ "github.com/stretchr/testify/assert"
+ "path/filepath"
+ "testing"
+)
+
+func TestSplitPages(t *testing.T) {
+
+	chunks := splitPages(createTestPages(21), 5)
+	assert.Equal(t, 5, len(chunks))
+
+	// the first four chunks are full...
+	for _, chunk := range chunks[:4] {
+		assert.Equal(t, 5, len(chunk))
+	}
+
+	// ...and the single remaining element lands in the last one
+	assert.Equal(t, 1, len(chunks[4]))
+
+}
+
+func TestPaginator(t *testing.T) {
+
+ pages := createTestPages(21)
+ urlFactory := func(page int) string {
+ return fmt.Sprintf("page/%d/", page)
+ }
+
+ paginator := newPaginator(pages, 5, urlFactory)
+ paginatorPages := paginator.Pagers()
+
+ // 21 elements at page size 5 => 5 pages, the last one partial
+ assert.Equal(t, 5, len(paginatorPages))
+ assert.Equal(t, 21, paginator.TotalNumberOfElements())
+ assert.Equal(t, 5, paginator.PageSize())
+ assert.Equal(t, 5, paginator.TotalPages())
+
+ first := paginatorPages[0]
+ // NOTE(review): Url() returns template.HTML but is compared to a plain
+ // string here — this relies on the assertion library's comparison
+ // semantics; confirm it stays true if testify is upgraded.
+ assert.Equal(t, "page/1/", first.Url())
+ assert.Equal(t, first, first.First())
+ assert.Equal(t, true, first.HasNext())
+ assert.Equal(t, false, first.HasPrev())
+ assert.Equal(t, 5, first.NumberOfElements())
+ assert.Equal(t, 1, first.PageNumber())
+
+ // a middle page has neighbours on both sides
+ third := paginatorPages[2]
+ assert.Equal(t, true, third.HasNext())
+ assert.Equal(t, true, third.HasPrev())
+
+ last := paginatorPages[4]
+ assert.Equal(t, "page/5/", last.Url())
+ assert.Equal(t, last, last.Last())
+ assert.Equal(t, false, last.HasNext())
+ assert.Equal(t, true, last.HasPrev())
+ assert.Equal(t, 1, last.NumberOfElements())
+ assert.Equal(t, 5, last.PageNumber())
+
+}
+
+func TestPaginationUrlFactory(t *testing.T) {
+ // NOTE(review): viper.Set mutates process-global config and is not reset
+ // after this test — it can leak into other tests; confirm ordering is safe.
+ viper.Set("PaginatePath", "zoo")
+ unicode := newPaginationUrlFactory("новости проекта")
+ fooBar := newPaginationUrlFactory("foo", "bar")
+
+ // page 1 resolves to the (percent-encoded) base path, no page segment
+ assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/", unicode(1))
+ assert.Equal(t, "/foo/bar/", fooBar(1))
+ // later pages get the configured paginate path ("zoo") plus the number
+ assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/", unicode(4))
+ assert.Equal(t, "/foo/bar/zoo/12345/", fooBar(12345))
+
+}
+
+// createTestPages builds num stub pages with distinct urls and source
+// filenames for use in the pagination tests.
+func createTestPages(num int) Pages {
+	pages := make(Pages, num)
+
+	for i := 0; i < num; i++ {
+		pages[i] = &Page{
+			Node: Node{
+				UrlPath: UrlPath{
+					Section: "z",
+					// use the loop index, not num, so every page is unique
+					Url: fmt.Sprintf("http://base/x/y/p%d.html", i),
+				},
+				Site: &SiteInfo{
+					BaseUrl: "http://base/",
+				},
+			},
+			Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
+		}
+	}
+
+	return pages
+}
"io"
"net/url"
"os"
+ "path/filepath"
"strconv"
"strings"
"sync"
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/nitro"
"github.com/spf13/viper"
+ "sync/atomic"
)
var _ = transform.AbsURL
}
type SiteInfo struct {
- BaseUrl template.URL
- Taxonomies TaxonomyList
- Authors AuthorList
- Social SiteSocial
- Indexes *TaxonomyList // legacy, should be identical to Taxonomies
- Sections Taxonomy
- Pages *Pages
- Files []*source.File
- Recent *Pages // legacy, should be identical to Pages
- Menus *Menus
- Hugo *HugoInfo
- Title string
- Author map[string]interface{}
- LanguageCode string
- DisqusShortname string
- Copyright string
- LastChange time.Time
- Permalinks PermalinkOverrides
- Params map[string]interface{}
- BuildDrafts bool
- canonifyUrls bool
+ BaseUrl template.URL
+ Taxonomies TaxonomyList
+ Authors AuthorList
+ Social SiteSocial
+ Indexes *TaxonomyList // legacy, should be identical to Taxonomies
+ Sections Taxonomy
+ Pages *Pages
+ Files []*source.File
+ Recent *Pages // legacy, should be identical to Pages
+ Menus *Menus
+ Hugo *HugoInfo
+ Title string
+ Author map[string]interface{}
+ LanguageCode string
+ DisqusShortname string
+ Copyright string
+ LastChange time.Time
+ Permalinks PermalinkOverrides
+ Params map[string]interface{}
+ BuildDrafts bool
+ canonifyUrls bool
+ paginationPageCount uint64 // total paginator pages created; updated atomically via addToPaginationPageCount
}
// SiteSocial is a place to put social details on a site level. These are the
return s.refLink(ref, page, true)
}
+// addToPaginationPageCount adds cnt to the site-wide paginator page counter.
+// Uses an atomic add so concurrent render workers can call it safely.
+func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
+ atomic.AddUint64(&s.paginationPageCount, cnt)
+}
+
type runmode struct {
Watching bool
}
if strings.HasPrefix(menuEntry.Url, "/") {
// make it match the nodes
menuEntryUrl := menuEntry.Url
- menuEntryUrl = s.prepUrl(menuEntryUrl)
+ menuEntryUrl = helpers.UrlizeAndPrep(menuEntryUrl)
if !s.Info.canonifyUrls {
menuEntryUrl = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryUrl)
}
func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
 defer wg.Done()
+
+ var n *Node
+
 for t := range taxes {
- n, base := s.newTaxonomyNode(t)
- layouts := []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"}
- b, err := s.renderPage("taxononomy "+t.singular, n, s.appendThemeTemplates(layouts)...)
+
+ var base string
+ layouts := s.appendThemeTemplates(
+ []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
+
+ n, base = s.newTaxonomyNode(t)
+
+ // NOTE(review): "taxononomy" in the page name string looks like a typo
+ // for "taxonomy" — it also appears below; confirm whether anything keys
+ // off this name before fixing.
+ b, err := s.renderPage("taxononomy "+t.singular, n, layouts...)
 if err != nil {
 results <- err
 continue
- } else {
- err := s.WriteDestPage(base+".html", b)
- if err != nil {
- results <- err
+ }
+
+ err = s.WriteDestPage(base, b)
+ if err != nil {
+ results <- err
+ continue
+ }
+
+ if n.paginator != nil {
+
+ paginatePath := viper.GetString("paginatePath")
+
+ // write alias for page 1
+ // NOTE(review): this alias path appends "/index.html" while the section
+ // and home-page variants do not — confirm the difference is intentional.
+ s.WriteDestAlias(fmt.Sprintf("%s/%s/%d/index.html", base, paginatePath, 1), s.permalink(base))
+
+ pagers := n.paginator.Pagers()
+
+ for i, pager := range pagers {
+ if i == 0 {
+ // page 1 was already rendered above; only render pages 2..n here
+ continue
+ }
+
+ // each extra page gets a fresh node carrying its own pager slice
+ taxonomyPagerNode, _ := s.newTaxonomyNode(t)
+ taxonomyPagerNode.paginator = pager
+ if pager.TotalPages() > 0 {
+ taxonomyPagerNode.Date = pager.Pages()[0].Date
+ }
+ pageNumber := i + 1
+ htmlBase := fmt.Sprintf("/%s/%s/%d", base, paginatePath, pageNumber)
+ b, err := s.renderPage(fmt.Sprintf("taxononomy_%s_%d", t.singular, pageNumber), taxonomyPagerNode, layouts...)
+ if err != nil {
+ results <- err
+ continue
+ }
+
+ err = s.WriteDestPage(htmlBase, b)
+ if err != nil {
+ results <- err
+ continue
+ }
+
 }
 }
 return
}
+// newSectionListNode creates the list Node for a section: title according to
+// the PluralizeListTitles setting, urls set from the section name, and the
+// section's pages stored in Data["Pages"].
+func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
+	n := s.NewNode()
+	if viper.GetBool("PluralizeListTitles") {
+		n.Title = strings.Title(inflect.Pluralize(section))
+	} else {
+		n.Title = strings.Title(section)
+	}
+	s.setUrls(n, section)
+	// guard against an empty section instead of panicking on data[0]
+	if len(data) > 0 {
+		n.Date = data[0].Page.Date
+	}
+	n.Data["Pages"] = data.Pages()
+
+	return n
+}
+
// Render a page for each section
func (s *Site) RenderSectionLists() error {
for section, data := range s.Sections {
- n := s.NewNode()
- if viper.GetBool("PluralizeListTitles") {
- n.Title = strings.Title(inflect.Pluralize(section))
- } else {
- n.Title = strings.Title(section)
- }
- s.setUrls(n, section)
- n.Date = data[0].Page.Date
- n.Data["Pages"] = data.Pages()
- layouts := []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}
- b, err := s.renderPage("section "+section, n, s.appendThemeTemplates(layouts)...)
+ layouts := s.appendThemeTemplates(
+ []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
+
+ n := s.newSectionListNode(section, data)
+
+ b, err := s.renderPage(fmt.Sprintf("section%s_%d", section, 1), n, s.appendThemeTemplates(layouts)...)
if err != nil {
return err
}
- if err := s.WriteDestPage(section, b); err != nil {
+ if err := s.WriteDestPage(fmt.Sprintf("/%s", section), b); err != nil {
return err
}
+ if n.paginator != nil {
+
+ paginatePath := viper.GetString("paginatePath")
+
+ // write alias for page 1
+ s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%s/%d", section, paginatePath, 1)), s.permalink(section))
+
+ pagers := n.paginator.Pagers()
+
+ for i, pager := range pagers {
+ if i == 0 {
+ // already created
+ continue
+ }
+
+ sectionPagerNode := s.newSectionListNode(section, data)
+ sectionPagerNode.paginator = pager
+ if pager.TotalPages() > 0 {
+ sectionPagerNode.Date = pager.Pages()[0].Date
+ }
+ pageNumber := i + 1
+ htmlBase := fmt.Sprintf("/%s/%s/%d", section, paginatePath, pageNumber)
+ b, err := s.renderPage(fmt.Sprintf("section_%s_%d", section, pageNumber), sectionPagerNode, layouts...)
+ if err != nil {
+ return err
+ }
+ if err := s.WriteDestPage(filepath.FromSlash(htmlBase), b); err != nil {
+ return err
+ }
+
+ }
+ }
+
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
n.Url = s.permalinkStr(section + "/index.xml")
n.Permalink = s.permalink(section)
rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
- b, err = s.renderXML("section "+section+" rss", n, s.appendThemeTemplates(rssLayouts)...)
+ b, err := s.renderXML("section "+section+" rss", n, s.appendThemeTemplates(rssLayouts)...)
if err != nil {
return err
}
func (s *Site) RenderHomePage() error {
n := s.newHomeNode()
- layouts := []string{"index.html", "_default/list.html", "_default/single.html"}
- b, err := s.renderPage("homepage", n, s.appendThemeTemplates(layouts)...)
+ layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html", "_default/single.html"})
+
+ b, err := s.renderPage("homepage", n, layouts...)
+
if err != nil {
return err
}
return err
}
+ if n.paginator != nil {
+
+ paginatePath := viper.GetString("paginatePath")
+
+ // write alias for page 1
+ s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%d", paginatePath, 1)), s.permalink("/"))
+
+ pagers := n.paginator.Pagers()
+
+ for i, pager := range pagers {
+ if i == 0 {
+ // already created
+ continue
+ }
+
+ homePagerNode := s.newHomeNode()
+ homePagerNode.paginator = pager
+ if pager.TotalPages() > 0 {
+ homePagerNode.Date = pager.Pages()[0].Date
+ }
+ pageNumber := i + 1
+ htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
+ b, err := s.renderPage(fmt.Sprintf("homepage_%d", pageNumber), homePagerNode, layouts...)
+ if err != nil {
+ return err
+ }
+ if err := s.WriteDestPage(filepath.FromSlash(htmlBase), b); err != nil {
+ return err
+ }
+
+ }
+ }
+
if !viper.GetBool("DisableRSS") {
// XML Feed
n.Url = s.permalinkStr("index.xml")
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
jww.FEEDBACK.Printf("%d pages created \n", len(s.Pages))
-
+ if viper.GetInt("paginate") > 0 {
+ jww.FEEDBACK.Printf("%d paginator pages created \n", s.Info.paginationPageCount)
+ }
taxonomies := viper.GetStringMapString("Taxonomies")
for _, pl := range taxonomies {
}
func (s *Site) setUrls(n *Node, in string) {
- n.Url = s.prepUrl(in)
+ // delegate to the shared helpers.UrlizeAndPrep so url cleanup behaves the
+ // same here as everywhere else that prepares urls
+ n.Url = helpers.UrlizeAndPrep(in)
 n.Permalink = s.permalink(n.Url)
 n.RSSLink = s.permalink(in + ".xml")
}
}
func (s *Site) permalinkStr(plink string) string {
- return helpers.MakePermalink(string(viper.GetString("BaseUrl")), s.prepUrl(plink)).String()
-}
-
-func (s *Site) prepUrl(in string) string {
- return s.PrettifyUrl(helpers.Urlize(in))
-}
-
-func (s *Site) PrettifyUrl(in string) string {
- return helpers.UrlPrep(viper.GetBool("UglyUrls"), in)
-}
-
-func (s *Site) PrettifyPath(in string) string {
- return helpers.PathPrep(viper.GetBool("UglyUrls"), in)
+ // NOTE(review): PrettifyUrl and PrettifyPath were exported methods —
+ // removing them may break callers outside this package; confirm nothing
+ // external used them before merging.
+ return helpers.MakePermalink(string(viper.GetString("BaseUrl")), helpers.UrlizeAndPrep(plink)).String()
}
func (s *Site) NewNode() *Node {
hugofs.DestinationFS = new(afero.MemMapFs)
viper.Set("uglyurls", false)
+ viper.Set("paginate", 10)
s := &Site{
Source: &source.InMemorySource{ByteSource: urlFakeSource},
}