runMode runmode
multilingual *Multilingual
+
+ // Maps internalID to a set of nodes.
+ nodeMap map[string]Nodes
+ nodeMapMu sync.Mutex
}
// NewHugoSites creates a new collection of sites given the input sites, building
return nil, err
}
- h := &HugoSites{multilingual: langConfig, Sites: sites}
+ h := &HugoSites{multilingual: langConfig, Sites: sites, nodeMap: make(map[string]Nodes)}
for _, s := range sites {
s.owner = h
return sites, nil
}
+// addNode registers node under nodeID in the cross-site node map,
+// appending to any nodes already registered for that ID.
+// Safe for concurrent use via nodeMapMu.
+func (h *HugoSites) addNode(nodeID string, node *Node) {
+	h.nodeMapMu.Lock()
+
+	if nodes, ok := h.nodeMap[nodeID]; ok {
+		h.nodeMap[nodeID] = append(nodes, node)
+	} else {
+		h.nodeMap[nodeID] = Nodes{node}
+	}
+	h.nodeMapMu.Unlock()
+}
+
+// getNodes returns the nodes registered under nodeID, or an empty
+// Nodes slice when nodeID is empty or unknown. It is intended to be
+// called after the prepare phase, when the map is no longer mutated.
+func (h *HugoSites) getNodes(nodeID string) Nodes {
+	// At this point it is read only, so no need to lock.
+	if nodeID != "" {
+		if nodes, ok := h.nodeMap[nodeID]; ok {
+			return nodes
+		}
+	}
+	// Paginator pages will not have related nodes.
+	return Nodes{}
+}
+
// reset resets the sites, making it ready for a full rebuild.
// The cross-site node map is cleared as well.
func (h *HugoSites) reset() {
+	h.nodeMap = make(map[string]Nodes)
	for i, s := range h.Sites {
		h.Sites[i] = s.reset()
	}
}
func (h *HugoSites) reCreateFromConfig() error {
+ h.nodeMap = make(map[string]Nodes)
+
sites, err := createSitesFromConfig()
if err != nil {
firstSite := h.Sites[0]
+ h.nodeMap = make(map[string]Nodes)
for _, s := range h.Sites {
s.resetBuildState()
}
// Shortcode handling is the main task in here.
// TODO(bep) We need to look at the whole handler-chain construct with the below in mind.
func (h *HugoSites) preRender() error {
+
+ for _, s := range h.Sites {
+ // Run "render prepare"
+ if err := s.renderHomePage(true); err != nil {
+ return err
+ }
+ if err := s.renderTaxonomiesLists(true); err != nil {
+ return err
+ }
+ if err := s.renderListsOfTaxonomyTerms(true); err != nil {
+ return err
+ }
+ if err := s.renderSectionLists(true); err != nil {
+ return err
+ }
+ }
+
pageChan := make(chan *Page)
wg := &sync.WaitGroup{}
}
// Pages returns all pages for all sites.
// It delegates to the first site's AllPages, which holds the merged
// collection across languages.
-func (h HugoSites) Pages() Pages {
+func (h *HugoSites) Pages() Pages {
	return h.Sites[0].AllPages
}
t.Fatalf("Failed to build sites: %s", err)
}
- require.Len(t, sites.Sites, 2)
+ require.Len(t, sites.Sites, 4)
enSite := sites.Sites[0]
frSite := sites.Sites[1]
if len(enSite.Pages) != 3 {
t.Fatal("Expected 3 english pages")
}
- assert.Len(t, enSite.Source.Files(), 11, "should have 11 source files")
- assert.Len(t, enSite.AllPages, 6, "should have 6 total pages (including translations)")
+ assert.Len(t, enSite.Source.Files(), 13, "should have 13 source files")
+ assert.Len(t, enSite.AllPages, 8, "should have 8 total pages (including translations)")
doc1en := enSite.Pages[0]
permalink, err := doc1en.Permalink()
assert.Equal(t, "fr", frSite.Language.Lang)
assert.Len(t, frSite.Pages, 3, "should have 3 pages")
- assert.Len(t, frSite.AllPages, 6, "should have 6 total pages (including translations)")
+ assert.Len(t, frSite.AllPages, 8, "should have 8 total pages (including translations)")
for _, frenchPage := range frSite.Pages {
assert.Equal(t, "fr", frenchPage.Lang())
languageRedirect := readDestination(t, "public/index.html")
require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect)
+ // Check node translations
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 3)
+ require.Equal(t, "fr", homeEn.Translations()[0].Lang())
+ require.Equal(t, "nn", homeEn.Translations()[1].Lang())
+ require.Equal(t, "Nynorsk", homeEn.Translations()[1].Title)
+ require.Equal(t, "nb", homeEn.Translations()[2].Lang())
+ require.Equal(t, "Bokmål", homeEn.Translations()[2].Title)
+
+ sectFr := frSite.getNode("sect-sect-0")
+ require.NotNil(t, sectFr)
+
+ require.Equal(t, "fr", sectFr.Lang())
+ require.Len(t, sectFr.Translations(), 1)
+ require.Equal(t, "en", sectFr.Translations()[0].Lang())
+ require.Equal(t, "Sects", sectFr.Translations()[0].Title)
+
+ nnSite := sites.Sites[2]
+ require.Equal(t, "nn", nnSite.Language.Lang)
+ taxNn := nnSite.getNode("taxlist-lag-0")
+ require.NotNil(t, taxNn)
+ require.Len(t, taxNn.Translations(), 1)
+ require.Equal(t, "nb", taxNn.Translations()[0].Lang())
+
+ taxTermNn := nnSite.getNode("tax-lag-sogndal-0")
+ require.NotNil(t, taxTermNn)
+ require.Len(t, taxTermNn.Translations(), 1)
+ require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
+
// Check sitemap(s)
sitemapIndex := readDestination(t, "public/sitemap.xml")
require.True(t, strings.Contains(sitemapIndex, "<loc>http:/example.com/blog/en/sitemap.xml</loc>"), sitemapIndex)
},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
assert.Equal(t, "new_fr_1", frSite.Pages[3].Title)
assert.Equal(t, "new_en_2", enSite.Pages[0].Title)
[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(doc1, "Template Changed"), doc1)
[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
func(t *testing.T) {
assert.Len(t, enSite.Pages, 4)
- assert.Len(t, enSite.AllPages, 8)
+ assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4)
docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, "public/fr/sect/doc1/index.html")
assert.True(t, strings.Contains(docFr, "Salut"), "No Salut")
+
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 3)
+ require.Equal(t, "fr", homeEn.Translations()[0].Lang())
+
},
},
} {
newConfig := multiSiteTomlConfig + `
-[Languages.no]
+[Languages.sv]
weight = 15
-title = "Norsk"
+title = "Svenska"
`
- writeNewContentFile(t, "Norwegian Contentfile", "2016-01-01", "content/sect/doc1.no.md", 10)
+ writeNewContentFile(t, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
// replace the config
writeSource(t, "multilangconfig.toml", newConfig)
t.Fatalf("Failed to rebuild sites: %s", err)
}
- require.Len(t, sites.Sites, 3, fmt.Sprintf("Len %d", len(sites.Sites)))
+ require.Len(t, sites.Sites, 5, fmt.Sprintf("Len %d", len(sites.Sites)))
- // The Norwegian site should be put in the middle (language weight=15)
+ // The Swedish site should be put in the middle (language weight=15)
enSite := sites.Sites[0]
- noSite := sites.Sites[1]
+ svSite := sites.Sites[1]
frSite := sites.Sites[2]
require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang)
- require.True(t, noSite.Language.Lang == "no", noSite.Language.Lang)
+ require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
+ homeEn := enSite.getNode("home-0")
+ require.NotNil(t, homeEn)
+ require.Len(t, homeEn.Translations(), 4)
+ require.Equal(t, "sv", homeEn.Translations()[0].Lang())
+
require.Len(t, enSite.Pages, 3)
require.Len(t, frSite.Pages, 3)
- // Veriy Norwegian site
- require.Len(t, noSite.Pages, 1)
- noPage := noSite.Pages[0]
- require.Equal(t, "Norwegian Contentfile", noPage.Title)
- require.Equal(t, "no", noPage.Lang())
- require.Len(t, noPage.Translations(), 2)
- require.Len(t, noPage.AllTranslations(), 3)
- require.Equal(t, "en", noPage.Translations()[0].Lang())
+	// Verify Swedish site
+ require.Len(t, svSite.Pages, 1)
+ svPage := svSite.Pages[0]
+ require.Equal(t, "Swedish Contentfile", svPage.Title)
+ require.Equal(t, "sv", svPage.Lang())
+ require.Len(t, svPage.Translations(), 2)
+ require.Len(t, svPage.AllTranslations(), 3)
+ require.Equal(t, "en", svPage.Translations()[0].Lang())
	//svFile := readDestination(t, "/public/sv/doc1/index.html")
	//require.True(t, strings.Contains("foo", svFile), svFile)
title = "Français"
[Languages.fr.Taxonomies]
plaque = "plaques"
+
+[Languages.nn]
+weight = 30
+title = "Nynorsk"
+[Languages.nn.Taxonomies]
+lag = "lag"
+
+[Languages.nb]
+weight = 40
+title = "Bokmål"
+[Languages.nb.Taxonomies]
+lag = "lag"
`
func createMultiTestSites(t *testing.T, tomlConfig string) *HugoSites {
draft: true
---
# Draft
+`)},
+ {filepath.FromSlash("stats/tax.nn.md"), []byte(`---
+title: Tax NN
+publishdate: "2000-01-06"
+weight: 1001
+lag:
+- Sogndal
+---
+# Tax NN
+`)},
+ {filepath.FromSlash("stats/tax.nb.md"), []byte(`---
+title: Tax NB
+publishdate: "2000-01-06"
+weight: 1002
+lag:
+- Sogndal
+---
+# Tax NB
`)},
}
t.Fatalf("Failed to create sites: %s", err)
}
- if len(sites.Sites) != 2 {
+ if len(sites.Sites) != 4 {
t.Fatalf("Got %d sites", len(sites.Sites))
}
"github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
)
const (
fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages))
- nodeFirst := s.newSectionListNode("First", "first", firstSectionPages)
- nodeSecond := s.newSectionListNode("Second Section", "second-section", secondSectionPages)
- nodeFishy := s.newSectionListNode("Fish and Chips", "fish-and-chips", fishySectionPages)
+ nodeFirst := s.getNode("sect-first-0")
+ require.NotNil(t, nodeFirst)
+ nodeSecond := s.getNode("sect-second-section-0")
+ require.NotNil(t, nodeSecond)
+ nodeFishy := s.getNode("sect-Fish and Chips-0")
+ require.NotNil(t, nodeFishy)
+
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
&MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
} {
- n, _ := s.newTaxonomyNode(this.taxInfo)
+ n, _ := s.newTaxonomyNode(true, this.taxInfo, i)
isMenuCurrent := n.IsMenuCurrent(this.menu, this.menuItem)
hasMenuCurrent := n.HasMenuCurrent(this.menu, this.menuItem)
s := setupMenuTests(t, menuPageSources)
- home := s.newHomeNode()
+ home := s.getNode("home-0")
+
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
for i, this := range []struct {
"html/template"
"path"
"path/filepath"
- "sort"
+ //"sort"
"strings"
"sync"
"time"
)
type Node struct {
+ // a natural key that should be unique for this site
+	// for the home page this will typically be "home", but it can be anything
+ // as long as it is the same for repeated builds.
+ nodeID string
+
RSSLink template.HTML
Site *SiteInfo `json:"-"`
// layout string
// initTranslations lazily resolves this node's translations by looking
// up all nodes registered under the same nodeID across sites.
// Guarded by sync.Once, so the lookup runs at most once per node.
func (n *Node) initTranslations() {
	n.translationsInit.Do(func() {
-		if n.translations != nil {
-			return
-		}
-		n.translations = make(Nodes, 0)
-		for _, l := range n.Site.Languages {
-			if l == n.language {
-				n.translations = append(n.translations, n)
-				continue
-			}
-
-			translation := *n
-			translation.language = l
-			translation.translations = n.translations
-			n.translations = append(n.translations, &translation)
-		}
-
-		sort.Sort(n.translations)
+		n.translations = n.Site.owner.getNodes(n.nodeID)
+		// NOTE(review): the result order now depends on node registration
+		// order across sites; confirm the disabled sort below is really
+		// no longer needed before removing it.
+		//sort.Sort(n.translations)
	})
}
}
pages := createTestPages(12)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
n1.Data["Pages"] = pages
var paginator1 *Pager
viper.Set("paginate", -1)
s := newSiteDefaultLang()
- _, err := s.newHomeNode().Paginator()
+ _, err := s.newHomeNode(true, 0).Paginator()
assert.NotNil(t, err)
}
pages := createTestPages(6)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
var paginator1, paginator2 *Pager
var err error
func TestInvalidOptions(t *testing.T) {
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
_, err := n1.Paginate(createTestPages(1), 1, 2)
assert.NotNil(t, err)
_, err = n1.Paginator(1, 2)
viper.Set("paginate", -1)
s := newSiteDefaultLang()
- _, err := s.newHomeNode().Paginate(createTestPages(2))
+ _, err := s.newHomeNode(true, 0).Paginate(createTestPages(2))
assert.NotNil(t, err)
}
viper.Set("paginate", 10)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
_, err := n1.Paginator()
assert.Nil(t, err)
viper.Set("paginate", 10)
s := newSiteDefaultLang()
- n1 := s.newHomeNode()
- n2 := s.newHomeNode()
+ n1 := s.newHomeNode(true, 0)
+ n2 := s.newHomeNode(true, 1)
p1 := createTestPages(2)
p2 := createTestPages(10)
distinctFeedbackLogger = helpers.NewDistinctFeedbackLogger()
)
+// nodeCache is a concurrency-safe cache of nodes, keyed by nodeID.
+// It is used internally to discover duplicate node IDs within a site.
+type nodeCache struct {
+	m map[string]*Node
+	sync.RWMutex
+}
+
+// reset drops all cached nodes. It does not take the lock, so callers
+// must ensure no concurrent access (used between builds).
+func (c *nodeCache) reset() {
+	c.m = make(map[string]*Node)
+}
+
// Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows:
//
//
// 5. The entire collection of files is written to disk.
type Site struct {
- owner *HugoSites
+ owner *HugoSites
+
+ // Used internally to discover duplicates.
+ nodeCache *nodeCache
+ nodeCacheInit sync.Once
+
Pages Pages
AllPages Pages
rawAllPages Pages
paginationPageCount uint64
Data *map[string]interface{}
+ owner *HugoSites
multilingual *Multilingual
Language *helpers.Language
LanguagePrefix string
return
}
s.timerStep("render and write aliases")
- if err = s.renderTaxonomiesLists(); err != nil {
+ if err = s.renderTaxonomiesLists(false); err != nil {
return
}
s.timerStep("render and write taxonomies")
- s.renderListsOfTaxonomyTerms()
+ if err = s.renderListsOfTaxonomyTerms(false); err != nil {
+ return
+ }
s.timerStep("render & write taxonomy lists")
- if err = s.renderSectionLists(); err != nil {
+ if err = s.renderSectionLists(false); err != nil {
return
}
s.timerStep("render and write lists")
return
}
s.timerStep("render and write pages")
- if err = s.renderHomePage(); err != nil {
+ if err = s.renderHomePage(false); err != nil {
return
}
s.timerStep("render and write homepage")
Params: params,
Permalinks: permalinks,
Data: &s.Data,
+ owner: s.owner,
}
}
// Prepare site for a new full build.
func (s *Site) resetBuildState() {
+ s.nodeCache.reset()
+
s.Pages = make(Pages, 0)
s.AllPages = make(Pages, 0)
// renderTaxonomiesLists renders the listing pages based on the meta data
// each unique term within a taxonomy will have a page created
-func (s *Site) renderTaxonomiesLists() error {
+func (s *Site) renderTaxonomiesLists(prepare bool) error {
wg := &sync.WaitGroup{}
taxes := make(chan taxRenderInfo)
for i := 0; i < procs*4; i++ {
wg.Add(1)
- go taxonomyRenderer(s, taxes, results, wg)
+ go taxonomyRenderer(prepare, s, taxes, results, wg)
}
errs := make(chan error)
return nil
}
-func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
+func (s *Site) newTaxonomyNode(prepare bool, t taxRenderInfo, counter int) (*Node, string) {
key := t.key
- n := s.newNode()
+ n := s.nodeLookup(fmt.Sprintf("tax-%s-%s", t.plural, key), counter, prepare)
+
+ if s.Info.preserveTaxonomyNames {
+ key = helpers.MakePathSanitized(key)
+ }
+ base := t.plural + "/" + key
+
+ if !prepare {
+ return n, base
+ }
+
if s.Info.preserveTaxonomyNames {
key = helpers.MakePathSanitized(key)
// keep as is in the title
} else {
n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
}
- base := t.plural + "/" + key
s.setURLs(n, base)
if len(t.pages) > 0 {
n.Date = t.pages[0].Page.Date
return n, base
}
-func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
+func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done()
var n *Node
for t := range taxes {
- var base string
- layouts := s.appendThemeTemplates(
- []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
+ var (
+ base string
+ baseWithLanguagePrefix string
+ paginatePath string
+ layouts []string
+ )
- n, base = s.newTaxonomyNode(t)
- baseWithLanguagePrefix := n.addLangPathPrefix(base)
+ n, base = s.newTaxonomyNode(prepare, t, 0)
+
+ if prepare {
+ continue
+ }
+
+ baseWithLanguagePrefix = n.addLangPathPrefix(base)
+
+ layouts = s.appendThemeTemplates(
+ []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
dest := base
if viper.GetBool("UglyURLs") {
if n.paginator != nil {
- paginatePath := viper.GetString("paginatePath")
+ paginatePath = viper.GetString("paginatePath")
// write alias for page 1
s.writeDestAlias(helpers.PaginateAliasPath(baseWithLanguagePrefix, 1), n.Permalink())
continue
}
- taxonomyPagerNode, _ := s.newTaxonomyNode(t)
+ taxonomyPagerNode, _ := s.newTaxonomyNode(true, t, i)
+
taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
taxonomyPagerNode.Date = first.Date
taxonomyPagerNode.Lastmod = first.Lastmod
}
+
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", baseWithLanguagePrefix, paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("taxonomy %s", t.singular), htmlBase, taxonomyPagerNode, layouts...); err != nil {
}
}
+ if prepare {
+ continue
+ }
+
if !viper.GetBool("DisableRSS") {
// XML Feed
+ c := *n
+ rssNode := &c
+ rssNode.nodeID = ""
rssuri := viper.GetString("RSSUri")
- s.setURLs(n, base+"/"+rssuri)
+ s.setURLs(rssNode, base+"/"+rssuri)
+
rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
- if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, n, s.appendThemeTemplates(rssLayouts)...); err != nil {
+ if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
results <- err
continue
}
}
// renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
-func (s *Site) renderListsOfTaxonomyTerms() (err error) {
+func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
taxonomies := s.Language.GetStringMapString("Taxonomies")
for singular, plural := range taxonomies {
- n := s.newNode()
- n.Title = strings.Title(plural)
- s.setURLs(n, plural)
- n.Data["Singular"] = singular
- n.Data["Plural"] = plural
- n.Data["Terms"] = s.Taxonomies[plural]
- // keep the following just for legacy reasons
- n.Data["OrderedIndex"] = n.Data["Terms"]
- n.Data["Index"] = n.Data["Terms"]
+ n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
+
+ if prepare {
+ n.Title = strings.Title(plural)
+ s.setURLs(n, plural)
+ n.Data["Singular"] = singular
+ n.Data["Plural"] = plural
+ n.Data["Terms"] = s.Taxonomies[plural]
+ // keep the following just for legacy reasons
+ n.Data["OrderedIndex"] = n.Data["Terms"]
+ n.Data["Index"] = n.Data["Terms"]
+
+ continue
+ }
+
layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
layouts = s.appendThemeTemplates(layouts)
if s.layoutExists(layouts...) {
return
}
-func (s *Site) newSectionListNode(sectionName, section string, data WeightedPages) *Node {
- n := s.newNode()
+func (s *Site) newSectionListNode(prepare bool, sectionName, section string, data WeightedPages, counter int) *Node {
+ n := s.nodeLookup(fmt.Sprintf("sect-%s", sectionName), counter, prepare)
+
+ if !prepare {
+ return n
+ }
+
sectionName = helpers.FirstUpper(sectionName)
if viper.GetBool("PluralizeListTitles") {
n.Title = inflect.Pluralize(sectionName)
}
// renderSectionLists renders a page for each section
-func (s *Site) renderSectionLists() error {
+func (s *Site) renderSectionLists(prepare bool) error {
for section, data := range s.Sections {
// section keys can be lower case (depending on site.pathifyTaxonomyKeys)
// extract the original casing from the first page to get sensible titles.
if !s.Info.preserveTaxonomyNames && len(data) > 0 {
sectionName = data[0].Page.Section()
}
+
+ n := s.newSectionListNode(prepare, sectionName, section, data, 0)
+
+ if prepare {
+ continue
+ }
+
layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
section = helpers.MakePathSanitized(section)
}
- n := s.newSectionListNode(sectionName, section, data)
base := n.addLangPathPrefix(section)
if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), base, n, s.appendThemeTemplates(layouts)...); err != nil {
continue
}
- sectionPagerNode := s.newSectionListNode(sectionName, section, data)
+ sectionPagerNode := s.newSectionListNode(true, sectionName, section, data, i)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
}
}
+ if prepare {
+ return nil
+ }
+
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
rssuri := viper.GetString("RSSUri")
return nil
}
-func (s *Site) renderHomePage() error {
+func (s *Site) renderHomePage(prepare bool) error {
- n := s.newHomeNode()
+ n := s.newHomeNode(prepare, 0)
+ if prepare {
+ return nil
+ }
layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html"})
base := n.addLangFilepathPrefix("")
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
- // write alias for page 1
- // TODO(bep) ml all of these n.addLang ... fix.
- s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
+ {
+ // write alias for page 1
+ // TODO(bep) ml all of these n.addLang ... fix.
+ s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
+ }
pagers := n.paginator.Pagers()
continue
}
- homePagerNode := s.newHomeNode()
+ homePagerNode := s.newHomeNode(true, i)
+
homePagerNode.paginator = pager
if pager.TotalPages() > 0 {
first, _ := pager.page(0)
homePagerNode.Date = first.Date
homePagerNode.Lastmod = first.Lastmod
}
+
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
htmlBase = n.addLangPathPrefix(htmlBase)
- if err := s.renderAndWritePage(fmt.Sprintf("homepage"), filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
+ if err := s.renderAndWritePage(fmt.Sprintf("homepage"),
+ filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
return err
}
+
}
}
if !viper.GetBool("DisableRSS") {
// XML Feed
- s.setURLs(n, viper.GetString("RSSUri"))
- n.Title = ""
+ rssNode := s.newNode("rss-home")
+ s.setURLs(rssNode, viper.GetString("RSSUri"))
+ rssNode.Title = ""
high := 50
if len(s.Pages) < high {
high = len(s.Pages)
}
- n.Data["Pages"] = s.Pages[:high]
+ rssNode.Data["Pages"] = s.Pages[:high]
if len(s.Pages) > 0 {
- n.Date = s.Pages[0].Date
- n.Lastmod = s.Pages[0].Lastmod
+ rssNode.Date = s.Pages[0].Date
+ rssNode.Lastmod = s.Pages[0].Lastmod
}
rssLayouts := []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
- if err := s.renderAndWriteXML("homepage rss", n.addLangPathPrefix(viper.GetString("RSSUri")), n, s.appendThemeTemplates(rssLayouts)...); err != nil {
+ if err := s.renderAndWriteXML("homepage rss", rssNode.addLangPathPrefix(viper.GetString("RSSUri")), rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
return err
}
}
return nil
}
- // TODO(bep) reusing the Home Node smells trouble
- n.URLPath.URL = helpers.URLize("404.html")
- n.IsHome = false
- n.Title = "404 Page not found"
- n.URLPath.Permalink = permalink("404.html")
- n.scratch = newScratch()
+ node404 := s.newNode("404")
+ node404.Title = "404 Page not found"
+ s.setURLs(node404, "404.html")
nfLayouts := []string{"404.html"}
- if nfErr := s.renderAndWritePage("404 page", "404.html", n, s.appendThemeTemplates(nfLayouts)...); nfErr != nil {
+ if nfErr := s.renderAndWritePage("404 page", "404.html", node404, s.appendThemeTemplates(nfLayouts)...); nfErr != nil {
return nfErr
}
return nil
}
-func (s *Site) newHomeNode() *Node {
- n := s.newNode()
+func (s *Site) newHomeNode(prepare bool, counter int) *Node {
+ n := s.nodeLookup("home", counter, prepare)
n.Title = n.Site.Title
n.IsHome = true
s.setURLs(n, "/")
sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap"))
- n := s.newNode()
+ n := s.newNode("sitemap")
// Prepend homepage to the list of pages
pages := make(Pages, 0)
return nil
}
- n := s.newNode()
+ n := s.newNode("robots")
n.Data["Pages"] = s.Pages
rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
func permalinkStr(plink string) string {
return helpers.MakePermalink(viper.GetString("BaseURL"), helpers.URLizeAndPrep(plink)).String()
}
+// newNode creates and registers a new node with the given ID.
+// It panics (via getOrAddNode) if the ID is already in use.
+func (s *Site) newNode(nodeID string) *Node {
+	return s.nodeLookup(nodeID, 0, true)
+}
-func (s *Site) newNode() *Node {
-	return &Node{
+// getNode returns the cached node with the given ID, or nil when not
+// found.
+func (s *Site) getNode(nodeID string) *Node {
+	return s.getOrAddNode(nodeID, false)
+}
+
+// getOrAddNode returns the node with the given ID. When add is true a
+// new node is created and cached, panicking if the ID is already in
+// use; when add is false a cache miss prints the cached keys (a test
+// aid) and returns nil.
+func (s *Site) getOrAddNode(nodeID string, add bool) *Node {
+	s.nodeCacheInit.Do(func() {
+		s.nodeCache = &nodeCache{m: make(map[string]*Node)}
+	})
+
+	s.nodeCache.RLock()
+	if n, ok := s.nodeCache.m[nodeID]; ok {
+		s.nodeCache.RUnlock()
+		if !add {
+			return n
+		}
+		panic(fmt.Sprintf("Node with ID %q in use", nodeID))
+	}
+
+	s.nodeCache.RUnlock()
+	s.nodeCache.Lock()
+
+	if !add {
+		// this is a test type error, print the keys
+		for k := range s.nodeCache.m {
+			fmt.Println("Node:", k)
+		}
+		// Fix: the write lock was previously held on this early return,
+		// deadlocking every subsequent node lookup.
+		s.nodeCache.Unlock()
+		return nil
+	}
+
+	// Double check under the write lock in case another goroutine added
+	// the node between RUnlock and Lock.
+	if _, ok := s.nodeCache.m[nodeID]; ok {
+		s.nodeCache.Unlock()
+		panic(fmt.Sprintf("Node with ID %q in use", nodeID))
+	}
+
+	n := &Node{
+		nodeID:   nodeID,
		Data:     make(map[string]interface{}),
		Site:     &s.Info,
		language: s.Language,
	}
+
+	s.nodeCache.m[nodeID] = n
+	s.nodeCache.Unlock()
+	return n
+}
+
+// nodeLookup fetches (add == false) or creates (add == true) the node
+// with ID "<nodeIDPrefix>-<counter>". Newly added base nodes
+// (counter == 0) are also registered in the owner's cross-site map so
+// they can later be matched up as translations.
+func (s *Site) nodeLookup(nodeIDPrefix string, counter int, add bool) *Node {
+
+	nodeID := fmt.Sprintf("%s-%d", nodeIDPrefix, counter)
+
+	n := s.getOrAddNode(nodeID, add)
+
+	// Paginator nodes (counter > 0) get created during rendering and cannot
+	// take part in any global translations mapping.
+	if add && s.owner != nil && counter == 0 {
+		s.owner.addNode(nodeID, n)
+	}
+
+	return n
+}
func (s *Site) layoutExists(layouts ...string) bool {