hugolib: Add a cache to GetPage
author     Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
           Thu, 25 May 2017 18:13:03 +0000 (21:13 +0300)
committer  Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
           Fri, 26 May 2017 07:42:45 +0000 (10:42 +0300)
Looks to be slightly slower for the small number of section pages, but for the 1000 regular pages the cache clearly pays off.

```
benchmark                     old ns/op     new ns/op     delta
BenchmarkGetPage-4            97.7          145           +48.41%
BenchmarkGetPageRegular-4     7933          161           -97.97%

benchmark                     old allocs     new allocs     delta
BenchmarkGetPage-4            0              0              +0.00%
BenchmarkGetPageRegular-4     0              0              +0.00%

benchmark                     old bytes     new bytes     delta
BenchmarkGetPage-4            0             0             +0.00%
BenchmarkGetPageRegular-4     0             0             +0.00%
```

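For orientation, here is a minimal, self-contained sketch of the lazy, kind-partitioned cache pattern the page_collections.go hunk below relies on. The names (`partition`, `partitionedCache`) are illustrative stand-ins, not Hugo's actual `cache.PartitionedLazyCache` API: each partition builds its lookup map on first access and serves later lookups from memory.

```go
package main

import (
	"fmt"
	"sync"
)

// partition lazily builds its key/value map the first time it is queried.
// This mirrors the per-Kind partitions used for the pageCache in the diff
// below; the types here are only a sketch, not Hugo's cache package.
type partition struct {
	once sync.Once
	load func() map[string]interface{}
	data map[string]interface{}
}

type partitionedCache struct {
	partitions map[string]*partition
}

func newPartitionedCache(loaders map[string]func() map[string]interface{}) *partitionedCache {
	c := &partitionedCache{partitions: make(map[string]*partition)}
	for key, load := range loaders {
		c.partitions[key] = &partition{load: load}
	}
	return c
}

// Get builds the partition on first access, then answers lookups from memory.
func (c *partitionedCache) Get(partitionKey, key string) interface{} {
	p, found := c.partitions[partitionKey]
	if !found {
		return nil
	}
	p.once.Do(func() { p.data = p.load() })
	return p.data[key]
}

func main() {
	cache := newPartitionedCache(map[string]func() map[string]interface{}{
		"page": func() map[string]interface{} {
			// The real loader walks c.AllRegularPages keyed by source path.
			return map[string]interface{}{"sect/doc1.md": "a regular page"}
		},
	})

	fmt.Println(cache.Get("page", "sect/doc1.md")) // a regular page
	fmt.Println(cache.Get("page", "missing"))      // <nil>
}
```

Partitioning by page kind means that a lookup for, say, a section never has to pay for building the much larger regular-page map, which is why the section benchmark only loses a little while the regular-page lookup improves by orders of magnitude.
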
hugolib/page.go
hugolib/page_collections.go

index 8de4ad924f4e80abd693c269595c640a028d3293..6bf42cb24f6eaeac10a741044f325817ccd1b06d 100644 (file)
@@ -42,8 +42,9 @@ import (
 )
 
 var (
-       cjk      = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
-       allKinds = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm, kindRSS, kindSitemap, kindRobotsTXT, kind404}
+       cjk             = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
+       allKindsInPages = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm, kindRSS}
+       allKinds        = append(allKindsInPages, []string{kindSitemap, kindRobotsTXT, kind404}...)
 )
 
 const (
index 46491d59b3730dfdbf4cef9279e1b1fd148b692f..893286871ff2e684eaa22fb409bf12204251dfc1 100644 (file)
@@ -15,6 +15,9 @@ package hugolib
 
 import (
        "path"
+       "path/filepath"
+
+       "github.com/spf13/hugo/cache"
 )
 
 // PageCollections contains the page collections for a site.
@@ -39,81 +42,70 @@ type PageCollections struct {
 
        // Includes absolute all pages (of all types), including drafts etc.
        rawAllPages Pages
+
+       pageCache *cache.PartitionedLazyCache
 }
 
 func (c *PageCollections) refreshPageCaches() {
        c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
        c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
        c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
-}
-
-func newPageCollections() *PageCollections {
-       return &PageCollections{}
-}
-
-func newPageCollectionsFromPages(pages Pages) *PageCollections {
-       return &PageCollections{rawAllPages: pages}
-}
-
-func (c *PageCollections) getFirstPageMatchIn(pages Pages, typ string, pathElements ...string) *Page {
 
-       if len(pages) == 0 {
-               return nil
-       }
-
-       var filename string
-       if typ == KindPage {
-               filename = path.Join(pathElements...)
-       }
-
-       for _, p := range pages {
-               if p.Kind != typ {
-                       continue
-               }
-
-               if typ == KindHome {
-                       return p
-               }
-
-               if typ == KindPage {
-                       if p.Source.Path() == filename {
-                               return p
+       cacheLoader := func(kind string) func() (map[string]interface{}, error) {
+               return func() (map[string]interface{}, error) {
+                       cache := make(map[string]interface{})
+                       switch kind {
+                       case KindPage:
+                               // Note that we deliberately use the pages from all sites
+                               // in this cache, as we intend to use this in the ref and relref
+                               // shortcodes. If the user says "sect/doc1.en.md", he/she knows
+                               // what he/she is looking for.
+                               for _, p := range c.AllRegularPages {
+                                       // TODO(bep) section
+                                       cache[filepath.ToSlash(p.Source.Path())] = p
+                               }
+                       default:
+                               for _, p := range c.indexPages {
+                                       key := path.Join(p.sections...)
+                                       cache[key] = p
+                               }
                        }
-                       continue
-               }
 
-               match := false
-               for i := 0; i < len(pathElements); i++ {
-                       if len(p.sections) > i && pathElements[i] == p.sections[i] {
-                               match = true
-                       } else {
-                               match = false
-                               break
-                       }
-               }
-               if match {
-                       return p
+                       return cache, nil
                }
        }
 
-       return nil
+       var partitions []cache.Partition
 
-}
+       for _, kind := range allKindsInPages {
+               partitions = append(partitions, cache.Partition{Key: kind, Load: cacheLoader(kind)})
+       }
 
-func (c *PageCollections) getRegularPage(filename string) {
+       c.pageCache = cache.NewPartitionedLazyCache(partitions...)
+}
 
+func newPageCollections() *PageCollections {
+       return &PageCollections{}
 }
 
-func (c *PageCollections) getPage(typ string, path ...string) *Page {
-       var pages Pages
+func newPageCollectionsFromPages(pages Pages) *PageCollections {
+       return &PageCollections{rawAllPages: pages}
+}
 
-       if typ == KindPage {
-               pages = c.AllPages
+func (c *PageCollections) getPage(typ string, sections ...string) *Page {
+       var key string
+       if len(sections) == 1 {
+               key = filepath.ToSlash(sections[0])
        } else {
-               pages = c.indexPages
+               key = path.Join(sections...)
        }
 
-       return c.getFirstPageMatchIn(pages, typ, path...)
+       // TODO(bep) section error
+       p, _ := c.pageCache.Get(typ, key)
+       if p == nil {
+               return nil
+       }
+       return p.(*Page)
 
 }
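
For clarity, a small sketch of how the lookup keys are formed after this change; `lookupKey` is a hypothetical helper that just mirrors the key construction in `getPage` above.

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

// lookupKey mirrors the key construction in getPage; the helper name is
// only used for illustration.
func lookupKey(sections ...string) string {
	if len(sections) == 1 {
		// A single element is treated as a source file path; ToSlash
		// normalizes OS-specific separators so the key matches the
		// slash-separated keys the cache loader builds from p.Source.Path().
		return filepath.ToSlash(sections[0])
	}
	// Multiple elements are section segments joined into one key,
	// matching path.Join(p.sections...) in the cache loader.
	return path.Join(sections...)
}

func main() {
	fmt.Println(lookupKey("sect/doc1.en.md")) // sect/doc1.en.md
	fmt.Println(lookupKey("blog", "2017"))    // blog/2017
}
```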