Simplify page tree logic
author	Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
Sat, 3 Aug 2019 15:27:40 +0000 (17:27 +0200)
committer	Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
Thu, 8 Aug 2019 18:13:39 +0000 (20:13 +0200)
This is preparation for #6041.

For historical reasons, the code for building the section tree and the taxonomies was very much separate.

This works, but it is hard to extend and maintain, and possibly not as fast as it could be.

This simplification also introduces three slightly breaking changes, which I suspect most people will be pleased about; see the referenced issues below.

This commit also switches the radix tree dependency to a mutable implementation: github.com/armon/go-radix.
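
For reference, a minimal sketch of the mutable go-radix API this switches to (string keys, in-place mutation, no Txn/Commit step); the keys and values are made up for illustration:

package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	t := radix.New()

	// Insert mutates the tree in place; no transaction needed.
	t.Insert("content/blog", "blog root")
	t.Insert("content/docs", "docs root")

	// Longest-prefix match, e.g. to resolve which root a path belongs to.
	if k, v, ok := t.LongestPrefix("content/blog/post.md"); ok {
		fmt.Println(k, "=>", v)
	}

	// Walk all entries below a prefix.
	t.WalkPrefix("content", func(k string, v interface{}) bool {
		fmt.Println(k, v)
		return false // keep walking
	})
}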

Fixes #6154
Fixes #6153
Fixes #6152

33 files changed:
common/herrors/errors.go
common/maps/maps_get.go [new file with mode: 0644]
go.mod
go.sum
hugofs/rootmapping_fs.go
hugolib/hugo_sites.go
hugolib/hugo_sites_build.go
hugolib/hugo_sites_build_test.go
hugolib/hugo_sites_rebuild_test.go
hugolib/hugo_smoke_test.go
hugolib/page.go
hugolib/page__common.go
hugolib/page__data.go
hugolib/page__paginator.go
hugolib/page__per_output.go
hugolib/page__tree.go
hugolib/page_test.go
hugolib/pagebundler_test.go
hugolib/pagecollections.go
hugolib/pages_capture.go
hugolib/pages_map.go [new file with mode: 0644]
hugolib/site.go
hugolib/site_sections.go
hugolib/site_sections_test.go
hugolib/taxonomy.go
hugolib/taxonomy_test.go
hugolib/testhelpers_test.go
resources/page/page.go
resources/page/page_nop.go
resources/page/testhelpers_test.go
resources/page/weighted.go
tpl/tplimpl/embedded/templates.autogen.go
tpl/tplimpl/embedded/templates/_default/rss.xml

index 1a61070501a40a68883bbedc3f3baff713fe6c0a..e484ecb80025b52767b056d7a790d21b08501be6 100644 (file)
@@ -50,9 +50,10 @@ func FprintStackTrace(w io.Writer, err error) {
 // Recover is a helper function that can be used to capture panics.
 // Put this at the top of a method/function that crashes in a template:
 //     defer herrors.Recover()
-func Recover() {
+func Recover(args ...interface{}) {
        if r := recover(); r != nil {
-               fmt.Println("stacktrace from panic: \n" + string(debug.Stack()))
+               args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n")
+               fmt.Println(args...)
        }
 
 }
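
A small usage sketch of the new variadic Recover; any extra arguments are printed ahead of the stack trace when a panic is caught. The surrounding function and its argument are hypothetical:

package hugolib

import "github.com/gohugoio/hugo/common/herrors"

func renderSomething(name string) {
	// Extra context is printed together with the stack trace if this panics.
	defer herrors.Recover("while rendering", name)

	// ... work that may panic ...
}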
diff --git a/common/maps/maps_get.go b/common/maps/maps_get.go
new file mode 100644 (file)
index 0000000..9289991
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+       "github.com/spf13/cast"
+)
+
+// GetString tries to get a value with key from map m and convert it to a string.
+// It will return an empty string if not found or if it cannot be converted to a string.
+func GetString(m map[string]interface{}, key string) string {
+       if m == nil {
+               return ""
+       }
+       v, found := m[key]
+       if !found {
+               return ""
+       }
+       return cast.ToString(v)
+}
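
A minimal usage sketch for the new helper; the map contents are made up:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	params := map[string]interface{}{"author": "Jane Doe", "weight": 10}

	fmt.Println(maps.GetString(params, "author"))  // "Jane Doe"
	fmt.Println(maps.GetString(params, "missing")) // ""
	fmt.Println(maps.GetString(nil, "author"))     // ""
}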
diff --git a/go.mod b/go.mod
index 616dce102e08f05773706988105b250598045ca8..8daf2a89c9addd0e0596a7b020c046fcd03d7c63 100644 (file)
--- a/go.mod
+++ b/go.mod
@@ -8,6 +8,7 @@ require (
        github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
        github.com/alecthomas/chroma v0.6.4
        github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 // indirect
+       github.com/armon/go-radix v1.0.0
        github.com/aws/aws-sdk-go v1.19.40
        github.com/bep/debounce v1.2.0
        github.com/bep/gitmap v1.1.0
@@ -17,29 +18,25 @@ require (
        github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385
        github.com/fortytw2/leaktest v1.3.0
        github.com/fsnotify/fsnotify v1.4.7
-       github.com/go-errors/errors v1.0.1
        github.com/gobwas/glob v0.2.3
        github.com/gohugoio/testmodBuilder/mods v0.0.0-20190520184928-c56af20f2e95
        github.com/google/go-cmp v0.3.0
        github.com/gorilla/websocket v1.4.0
-       github.com/hashicorp/go-immutable-radix v1.0.0
-       github.com/hashicorp/go-uuid v1.0.1 // indirect
        github.com/jdkato/prose v1.1.0
        github.com/kyokomi/emoji v1.5.1
        github.com/magefile/mage v1.4.0
        github.com/magiconair/properties v1.8.1 // indirect
        github.com/markbates/inflect v1.0.0
        github.com/mattn/go-isatty v0.0.8
+       github.com/mattn/go-runewidth v0.0.4 // indirect
        github.com/miekg/mmark v1.3.6
        github.com/mitchellh/hashstructure v1.0.0
        github.com/mitchellh/mapstructure v1.1.2
        github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12
-       github.com/ncw/rclone v1.48.0
        github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 // indirect
        github.com/nicksnyder/go-i18n v1.10.0
        github.com/niklasfasching/go-org v0.1.2
        github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84
-       github.com/op/go-logging v0.0.0-20160315200505-970db520ece7
        github.com/pelletier/go-toml v1.4.0 // indirect
        github.com/pkg/errors v0.8.1
        github.com/rogpeppe/go-internal v1.3.0
@@ -58,6 +55,7 @@ require (
        go.opencensus.io v0.22.0 // indirect
        gocloud.dev v0.15.0
        golang.org/x/image v0.0.0-20190523035834-f03afa92d3ff
+       golang.org/x/net v0.0.0-20190606173856-1492cefac77f // indirect
        golang.org/x/oauth2 v0.0.0-20190523182746-aaccbc9213b0 // indirect
        golang.org/x/sync v0.0.0-20190423024810-112230192c58
        golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7 // indirect
diff --git a/go.sum b/go.sum
index 94249500f7c26aba326fddb0afe2a3ac15f62b4a..29346c1803354c48bd2e134b5c63f51f9fc52530 100644 (file)
--- a/go.sum
+++ b/go.sum
@@ -1,4 +1,3 @@
-bazil.org/fuse v0.0.0-20180421153158-65cc252bf669/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
@@ -20,7 +19,6 @@ github.com/Azure/azure-sdk-for-go v27.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9mo
 github.com/Azure/azure-service-bus-go v0.4.1/go.mod h1:d9ho9e/06euiTwGpKxmlbpPhFUsfCsq6a4tZ68r51qI=
 github.com/Azure/azure-storage-blob-go v0.6.0 h1:SEATKb3LIHcaSIX+E6/K4kJpwfuozFEsmt5rS56N6CE=
 github.com/Azure/azure-storage-blob-go v0.6.0/go.mod h1:oGfmITT1V6x//CswqY2gtAHND+xIP64/qL7a5QJix0Y=
-github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8=
 github.com/Azure/go-autorest v11.0.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
 github.com/Azure/go-autorest v11.1.1+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
 github.com/Azure/go-autorest v11.1.2+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
@@ -37,11 +35,6 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV
 github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
 github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
 github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI=
-github.com/Unknwon/goconfig v0.0.0-20181105214110-56bd8ab18619 h1:6X8iB881g299aNEv6KXrcjL31iLOH7yA6NXoQX+MbDg=
-github.com/Unknwon/goconfig v0.0.0-20181105214110-56bd8ab18619/go.mod h1:wngxua9XCNjvHjDiTiV26DaKDT+0c63QR6H5hjVUUxw=
-github.com/a8m/tree v0.0.0-20181222104329-6a0b80129de4/go.mod h1:FSdwKX97koS5efgm8WevNf7XS3PqtyFkKDDXrz778cg=
-github.com/abbot/go-http-auth v0.4.0 h1:QjmvZ5gSC7jm3Zg54DqWE/T5m1t2AfDu6QlXJT0EVT0=
-github.com/abbot/go-http-auth v0.4.0/go.mod h1:Cz6ARTIzApMJDzh5bRMSUou6UMSp0IEXg9km/ci7TJM=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
 github.com/alecthomas/chroma v0.6.4 h1:Gn37/7W4a1qkmKLzfUpDy2rt3jt4X8CWycb4Gm7L360=
@@ -55,12 +48,12 @@ github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 h1:GDQdwm/gAcJcLAK
 github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1/go.mod h1:xTS7Pm1pD1mvyM075QCDSRqH6qRLXylzS24ZTpRiSzQ=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
-github.com/anacrolix/dms v0.0.0-20180117034613-8af4925bffb5/go.mod h1:DGqLjaZ3ziKKNRt+U5Q9PLWJ52Q/4rxfaaH/b3QYKaE=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
+github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
 github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
 github.com/aws/aws-sdk-go v1.18.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
-github.com/aws/aws-sdk-go v1.19.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.19.16/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.19.40 h1:omRrS4bCM/IbzU6UEb8Ojg1PvlElZzYZkOh8vWWgFMc=
 github.com/aws/aws-sdk-go v1.19.40/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
@@ -72,7 +65,6 @@ github.com/bep/gitmap v1.1.0 h1:vOMIdVB+2gd1VbfJPNJdLqAmn0af6NK98t4fK/GoCdA=
 github.com/bep/gitmap v1.1.0/go.mod h1:g9VRETxFUXNWzMiuxOwcudo6DfZkW9jOsOW0Ft4kYaY=
 github.com/bep/go-tocss v0.6.0 h1:lJf+nIjsQDpifUr+NgHi9QMBnrr9cFvMvEBT+uV9Q9E=
 github.com/bep/go-tocss v0.6.0/go.mod h1:d9d3crzlTl+PUZLFzBUjfFCpp68K+ku10mzTlnqU/+A=
-github.com/billziss-gh/cgofuse v1.1.0/go.mod h1:LJjoaUojlVjgo5GQoEJTcJNqZJeRU0nCR84CyxKt2YM=
 github.com/census-instrumentation/opencensus-proto v0.2.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
@@ -86,8 +78,6 @@ github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8Nz
 github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
 github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
-github.com/cpuguy83/go-md2man v1.0.8 h1:DwoNytLphI8hzS2Af4D0dfaEaiSq2bN05mEm4R6vf8M=
-github.com/cpuguy83/go-md2man v1.0.8/go.mod h1:N6JayAiVKtlHSnuTCeuLSQVs75hb8q+dYQLjr7cDsKY=
 github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
 github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ=
@@ -100,10 +90,8 @@ github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8
 github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
 github.com/disintegration/imaging v1.6.0 h1:nVPXRUUQ36Z7MNf0O77UzgnOb1mkMMor7lmJMJXc/mA=
 github.com/disintegration/imaging v1.6.0/go.mod h1:xuIt+sRxDFrHS0drzXUlCJthkJ8k7lkkUojDSR247MQ=
-github.com/djherbis/times v1.2.0/go.mod h1:CGMZlo255K5r4Yw0b9RRfFQpM2y7uOmxg4jm9HsaVf8=
 github.com/dlclark/regexp2 v1.1.6 h1:CqB4MjHw0MFCDj+PHHjiESmHX+N7t0tJzKvC6M97BRg=
 github.com/dlclark/regexp2 v1.1.6/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
-github.com/dropbox/dropbox-sdk-go-unofficial v5.4.0+incompatible/go.mod h1:lr+LhMM3F6Y3lW1T9j2U5l7QeuWm87N9+PPXo3yH4qY=
 github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
 github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
 github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs=
@@ -111,7 +99,6 @@ github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1
 github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I=
 github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385 h1:clC1lXBpe2kTj2VHdaIu9ajZQe4kcEY9j0NsnDDBZ3o=
 github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM=
-github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
 github.com/fortytw2/leaktest v1.2.0 h1:cj6GCiwJDH7l3tMHLjZDo0QqPtrXJiWSI9JgpeQKw+Q=
 github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
 github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
@@ -119,7 +106,6 @@ github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHqu
 github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
-github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
 github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8=
 github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
 github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@@ -128,9 +114,6 @@ github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
 github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
 github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
-github.com/goftp/file-driver v0.0.0-20180502053751-5d604a0fc0c9/go.mod h1:GpOj6zuVBG3Inr9qjEnuVTgBlk2lZ1S9DcoFiXWyKss=
-github.com/goftp/server v0.0.0-20190304020633-eabccc535b5a/go.mod h1:k/SS6VWkxY7dHPhoMQ8IdRu8L4lQtmGbhyXGg+vCnXE=
-github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
@@ -153,7 +136,6 @@ github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
 github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=
@@ -167,8 +149,6 @@ github.com/googleapis/gax-go v2.0.2+incompatible h1:silFMLAnr330+NRuag/VjIGF7TLp
 github.com/googleapis/gax-go v2.0.2+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY=
 github.com/googleapis/gax-go/v2 v2.0.4 h1:hU4mGcQI4DaAYW+IbTun+2qEZVFxK0ySjQLTbS0VQKc=
 github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
-github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gopherjs/gopherjs v0.0.0-20190411002643-bd77b112433e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
 github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
 github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
 github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
@@ -177,12 +157,6 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmg
 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
 github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=
-github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
-github.com/hashicorp/go-uuid v1.0.0 h1:RS8zrF7PhGwyNPOtxSClXXj9HA8feRnJzgnI1RJCSnM=
-github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
-github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=
-github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
 github.com/hashicorp/golang-lru v0.5.0 h1:CL2msUPvZTLb5O648aiLNJw3hnBxN2+1Jq8rCOH9wdo=
 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
 github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU=
@@ -194,22 +168,16 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
 github.com/jdkato/prose v1.1.0 h1:LpvmDGwbKGTgdCH3a8VJL56sr7p/wOFPw/R4lM4PfFg=
 github.com/jdkato/prose v1.1.0/go.mod h1:jkF0lkxaX5PFSlk9l4Gh9Y+T57TqUZziWT7uZbW5ADg=
-github.com/jlaffaye/ftp v0.0.0-20190519203911-8f5b34ce006f/go.mod h1:lli8NYPQOFy3O++YmYbqVgOcQ1JPCwdOy+5zSjKJ9qY=
 github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
-github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
-github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8=
 github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/koofr/go-httpclient v0.0.0-20180104120329-03786175608a/go.mod h1:3xszwh+rNrYk1r9SStc4iJ326gne1OaBcrdB1ACsbzI=
-github.com/koofr/go-koofrclient v0.0.0-20190131164641-7f327592caff/go.mod h1:MRAz4Gsxd+OzrZ0owwrUHc0zLESL+1Y5syqK/sJxK2A=
-github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -219,7 +187,6 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kyokomi/emoji v1.5.1 h1:qp9dub1mW7C4MlvoRENH6EAENb9skEFOvIEbp1Waj38=
 github.com/kyokomi/emoji v1.5.1/go.mod h1:mZ6aGCD7yk8j6QY6KICwnZ2pxoszVseX1DNoGtU2tBA=
 github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
-github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
 github.com/magefile/mage v1.4.0 h1:RI7B1CgnPAuu2O9lWszwya61RLmfL0KCdo+QyyI/Bhk=
 github.com/magefile/mage v1.4.0/go.mod h1:IUDi13rsHje59lecXokTfGX0QIzO45uVPlXnJYsXepA=
 github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
@@ -232,12 +199,8 @@ github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O
 github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
 github.com/mattn/go-isatty v0.0.4 h1:bnP0vzxcAdeI1zdubAl5PjU6zsERjGZb7raWodagDYs=
 github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
-github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
 github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE=
 github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
-github.com/mattn/go-runewidth v0.0.3 h1:a+kO+98RDGEfo6asOGMmpodZq4FNtnGP54yps8BzLR4=
-github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y=
 github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
@@ -252,43 +215,29 @@ github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh
 github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12 h1:l0X/8IDy2UoK+oXcQFMRSIOcyuYb5iEPytPGplnM41Y=
 github.com/muesli/smartcrop v0.0.0-20180228075044-f6ebaa786a12/go.mod h1:i2fCI/UorTfgEpPPLWiFBv4pye+YAG78RwcQLUkocpI=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
-github.com/ncw/go-acd v0.0.0-20171120105400-887eb06ab6a2/go.mod h1:MLIrzg7gp/kzVBxRE1olT7CWYMCklcUWU+ekoxOD9x0=
-github.com/ncw/rclone v1.48.0 h1:Rc7A4YEQDeMPgnc1IzA6PsJ4YikyP+zS68rgGMYKJ7o=
-github.com/ncw/rclone v1.48.0/go.mod h1:CXDUKN1OQ3Y2ya1Ma6jTZ7m9ZarGzF3ZTHsdPLHWWzY=
-github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM=
 github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
 github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
 github.com/nicksnyder/go-i18n v1.10.0 h1:5AzlPKvXBH4qBzmZ09Ua9Gipyruv6uApMcrNZdo96+Q=
 github.com/nicksnyder/go-i18n v1.10.0/go.mod h1:HrK7VCrbOvQoUAQ7Vpy7i87N7JZZZ7R2xBGjv0j365Q=
 github.com/niklasfasching/go-org v0.1.2 h1:qdJM0O9MFWVoLU53h0rG0U1bNSoDM3zes06eyj3XxIs=
 github.com/niklasfasching/go-org v0.1.2/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
-github.com/nsf/termbox-go v0.0.0-20190325093121-288510b9734e/go.mod h1:IuKpRQcYE1Tfu+oAQqaLisqDeXgjyyltCfsaoYN18NQ=
 github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
-github.com/okzk/sdnotify v0.0.0-20180710141335-d9becc38acbd/go.mod h1:4soZNh0zW0LtYGdQ416i0jO0EIqMGcbtaspRS4BDvRQ=
 github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84 h1:fiKJgB4JDUd43CApkmCeTSQlWjtTtABrU2qsgbuP0BI=
 github.com/olekukonko/tablewriter v0.0.0-20180506121414-d4647c9c7a84/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo=
 github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
-github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
 github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
-github.com/op/go-logging v0.0.0-20160315200505-970db520ece7 h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=
-github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
 github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
 github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw=
-github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaRUnok+kx1WdO15EQc=
-github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
 github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
 github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
 github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
-github.com/pengsrc/go-shared v0.2.0/go.mod h1:jVblp62SafmidSkvWrXyxAme3gaTfEtWwRPGz5cpvHg=
 github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
 github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw=
 github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/pkg/sftp v1.10.1-0.20190523025818-e98a7bef6829/go.mod h1:NxmoDg/QLVWluQDUYG7XBZTLUpKeFa8e3aMf1BfjyHk=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@@ -305,8 +254,6 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R
 github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
 github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
 github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
-github.com/rfjakob/eme v0.0.0-20171028163933-2222dbd4ba46 h1:w2CpS5muK+jyydnmlkqpAhzKmHmMBzBkfYUDjQNS1Dk=
-github.com/rfjakob/eme v0.0.0-20171028163933-2222dbd4ba46/go.mod h1:U2bmx0hDj8EyDdcxmD5t3XHDnBFnyNNc22n1R4008eM=
 github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
 github.com/rogpeppe/go-internal v1.3.0 h1:RR9dF3JtopPvtkroDZuVD7qquD0bnHlKSqaQhgwt8yk=
 github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@@ -318,14 +265,7 @@ github.com/sanity-io/litter v1.1.0 h1:BllcKWa3VbZmOZbDCoszYLk7zCsKHz5Beossi8SUcT
 github.com/sanity-io/litter v1.1.0/go.mod h1:CJ0VCw2q4qKU7LaQr3n7UOSHzgEMgcGco7N/SkZQPjw=
 github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
 github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
-github.com/sevlyar/go-daemon v0.1.4/go.mod h1:6dJpPatBT9eUwM5VCw9Bt6CdX9Tk6UWvhW3MebLDRKE=
-github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
-github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
-github.com/skratchdot/open-golang v0.0.0-20190402232053-79abb63cd66e/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog=
-github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
-github.com/smartystreets/assertions v0.0.0-20190401211740-f487f9de1cd3/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
-github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
 github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
 github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
@@ -334,12 +274,8 @@ github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
 github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
 github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cobra v0.0.3 h1:ZlrZ4XsMRm04Fr5pSFxBgfND2EBVa1nLpiy1stUsX/8=
-github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
 github.com/spf13/cobra v0.0.4-0.20190321000552-67fc4837d267 h1:I9j1PLS64+NgCtkgbomGInboj1NFH1KF1tkVKlt3yF4=
 github.com/spf13/cobra v0.0.4-0.20190321000552-67fc4837d267/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
-github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05 h1:pQHm7pxjSgC54M1rtLSLmju25phy6RgYf3p4O6XanYE=
-github.com/spf13/fsync v0.0.0-20170320142552-12a01e648f05/go.mod h1:jdsEoy1w+v0NpuwXZEaRAH6ADTDmzfRnE2eVwshwFrM=
 github.com/spf13/fsync v0.9.0 h1:f9CEt3DOB2mnHxZaftmEOFWjABEvKM/xpf3cUwJrGOY=
 github.com/spf13/fsync v0.9.0/go.mod h1:fNtJEfG3HiltN3y4cPOz6MLjos9+2pIEqLIgszqhp/0=
 github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
@@ -358,7 +294,6 @@ github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
 github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
 github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
-github.com/t3rm1n4l/go-mega v0.0.0-20190430100803-72151b53bb44/go.mod h1:XWL4vDyd3JKmJx+hZWUVgCNmmhZ2dTBcaNDcxH465s0=
 github.com/tdewolff/minify/v2 v2.3.7 h1:nhk7MKYRdTDwTxqEQZKLDkLe04tDHht8mBI+VJrsYvk=
 github.com/tdewolff/minify/v2 v2.3.7/go.mod h1:DD1stRlSx6JsHfl1+E/HVMQeXiec9rD1UQ0epklIZLc=
 github.com/tdewolff/parse/v2 v2.3.5 h1:/uS8JfhwVJsNkEh769GM5ENv6L9LOh2Z9uW3tCdlhs0=
@@ -374,14 +309,12 @@ github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGr
 github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
 github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701 h1:9vG9vvVNVupO4Y7uwFkRgIMNe9rdaJMCINDe8vhAhLo=
 github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
-github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4=
 github.com/xdg/scram v0.0.0-20180814205039-7eeb5667e42c/go.mod h1:lB8K/P019DLNhemzwFU4jHLhdvlE6uDZjXFejJXr49I=
 github.com/xdg/stringprep v1.0.0/go.mod h1:Jhud4/sHMO4oL310DaZAKk9ZaJ08SJfe+sJh0HrGL1Y=
 github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
 github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
 github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA=
 github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0=
-github.com/yunify/qingstor-sdk-go v2.2.15+incompatible/go.mod h1:w6wqLDQ5bBTzxGJ55581UrSwLrsTAsdo9N6yX/8d9RY=
 go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
 go.mongodb.org/mongo-driver v1.0.1/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
 go.opencensus.io v0.15.0/go.mod h1:UffZAU+4sDEINUGP/B7UfBBkq4fqLu9zXAX7ke6CHW0=
@@ -400,12 +333,8 @@ gocloud.dev v0.15.0/go.mod h1:ShXCyJaGrJu9y/7a6+DSCyBb9MFGZ1P5wwPa0Wu6w34=
 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20181001203147-e3636079e1a4/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190131182504-b8fe1690c613/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/crypto v0.0.0-20190422183909-d864b10871cd/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
-golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5 h1:58fnuSXlxZmFdJyvtTFVmVhcMLU6v5fEb/ok4wyqtNU=
-golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=
 golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
@@ -462,14 +391,10 @@ golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5h
 golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1 h1:R4dVlxdmKenVdMRS/tTspEpSTRWINYrHD8ySIU9yCIU=
-golang.org/x/sys v0.0.0-20190530182044-ad28b68e88f1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20190606203320-7fc4e5ec1444/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7 h1:LepdCS8Gf/MVejFIt8lsiexZATdoGVyp5bcyS+rYoUI=
 golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
@@ -489,9 +414,7 @@ golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGm
 golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
 golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
 golang.org/x/tools v0.0.0-20190422233926-fe54fb35175b/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190606174628-0139d5756a7d/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
 golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -502,7 +425,6 @@ google.golang.org/api v0.5.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEt
 google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
 google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
 google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
-google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/appengine v1.6.0 h1:Tfd7cKwKbFRsI8RMAD3oqqw7JPFRrvFlOsfbgVkjOOw=
 google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
 google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
@@ -517,7 +439,6 @@ google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3
 google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8=
 google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
 google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
-google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
 google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
 google.golang.org/grpc v1.21.0 h1:G+97AoqBnmZIT91cLG/EkCoK9NSelj64P8bOHHNmGn0=
 google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
index 37b40a204b516542d62226fdab25de06bba29489..e5679e09b333323ca87cb7926ad878d2cfd87095 100644 (file)
@@ -23,7 +23,7 @@ import (
 
        "github.com/pkg/errors"
 
-       radix "github.com/hashicorp/go-immutable-radix"
+       radix "github.com/armon/go-radix"
        "github.com/spf13/afero"
 )
 
@@ -33,7 +33,7 @@ var filepathSeparator = string(filepath.Separator)
 // of root mappings with some optional metadata about the root.
 // Note that From represents a virtual root that maps to the actual filename in To.
 func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
-       rootMapToReal := radix.New().Txn()
+       rootMapToReal := radix.New()
 
        for _, rm := range rms {
                (&rm).clean()
@@ -58,7 +58,7 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
                // Extract "blog" from "content/blog"
                rm.path = strings.TrimPrefix(strings.TrimPrefix(rm.From, fromBase), filepathSeparator)
 
-               key := []byte(rm.rootKey())
+               key := rm.rootKey()
                var mappings []RootMapping
                v, found := rootMapToReal.Get(key)
                if found {
@@ -71,7 +71,7 @@ func NewRootMappingFs(fs afero.Fs, rms ...RootMapping) (*RootMappingFs, error) {
 
        rfs := &RootMappingFs{Fs: fs,
                virtualRoots:  rms,
-               rootMapToReal: rootMapToReal.Commit().Root()}
+               rootMapToReal: rootMapToReal}
 
        return rfs, nil
 }
@@ -119,7 +119,7 @@ func (r RootMapping) rootKey() string {
 // in the order given.
 type RootMappingFs struct {
        afero.Fs
-       rootMapToReal *radix.Node
+       rootMapToReal *radix.Tree
        virtualRoots  []RootMapping
        filter        func(r RootMapping) bool
 }
@@ -303,8 +303,8 @@ func (fs *RootMappingFs) isRoot(name string) bool {
 }
 
 func (fs *RootMappingFs) getRoots(name string) []RootMapping {
-       nameb := []byte(filepath.Clean(name))
-       _, v, found := fs.rootMapToReal.LongestPrefix(nameb)
+       name = filepath.Clean(name)
+       _, v, found := fs.rootMapToReal.LongestPrefix(name)
        if !found {
                return nil
        }
@@ -333,10 +333,10 @@ func (fs *RootMappingFs) getRootsWithPrefix(prefix string) []RootMapping {
        if fs.isRoot(prefix) {
                return fs.virtualRoots
        }
-       prefixb := []byte(filepath.Clean(prefix))
+       prefix = filepath.Clean(prefix)
        var roots []RootMapping
 
-       fs.rootMapToReal.WalkPrefix(prefixb, func(b []byte, v interface{}) bool {
+       fs.rootMapToReal.WalkPrefix(prefix, func(b string, v interface{}) bool {
                roots = append(roots, v.([]RootMapping)...)
                return false
        })
index 6ad8715645b7f86cd9c698dc1efc855f1a96c163..987144f1df15ad20e43e17ab7d4190d2f924efb1 100644 (file)
 package hugolib
 
 import (
-       "fmt"
        "io"
-       "path"
        "path/filepath"
        "sort"
        "strings"
        "sync"
 
-       radix "github.com/hashicorp/go-immutable-radix"
+       radix "github.com/armon/go-radix"
 
        "github.com/gohugoio/hugo/output"
        "github.com/gohugoio/hugo/parser/metadecoders"
@@ -623,118 +621,6 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
                s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
 }
 
-// createMissingPages creates home page, taxonomies etc. that isnt't created as an
-// effect of having a content file.
-func (h *HugoSites) createMissingPages() error {
-
-       for _, s := range h.Sites {
-               if s.isEnabled(page.KindHome) {
-                       // home pages
-                       homes := s.findWorkPagesByKind(page.KindHome)
-                       if len(homes) > 1 {
-                               panic("Too many homes")
-                       }
-                       var home *pageState
-                       if len(homes) == 0 {
-                               home = s.newPage(page.KindHome)
-                               s.workAllPages = append(s.workAllPages, home)
-                       } else {
-                               home = homes[0]
-                       }
-
-                       s.home = home
-               }
-
-               // Will create content-less root sections.
-               newSections := s.assembleSections()
-               s.workAllPages = append(s.workAllPages, newSections...)
-
-               taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
-               taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
-
-               // taxonomy list and terms pages
-               taxonomies := s.Language().GetStringMapString("taxonomies")
-               if len(taxonomies) > 0 {
-                       taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
-                       taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
-
-                       // Make them navigable from WeightedPage etc.
-                       for _, p := range taxonomyPages {
-                               ni := p.getTaxonomyNodeInfo()
-                               if ni == nil {
-                                       // This can be nil for taxonomies, e.g. an author,
-                                       // with a content file, but no actual usage.
-                                       // Create one.
-                                       sections := p.SectionsEntries()
-                                       if len(sections) < 2 {
-                                               // Invalid state
-                                               panic(fmt.Sprintf("invalid taxonomy state for %q with sections %v", p.pathOrTitle(), sections))
-                                       }
-                                       ni = p.s.taxonomyNodes.GetOrAdd(sections[0], path.Join(sections[1:]...))
-                               }
-                               ni.TransferValues(p)
-                       }
-                       for _, p := range taxonomyTermsPages {
-                               p.getTaxonomyNodeInfo().TransferValues(p)
-                       }
-
-                       for _, plural := range taxonomies {
-                               if taxonomyTermEnabled {
-                                       foundTaxonomyTermsPage := false
-                                       for _, p := range taxonomyTermsPages {
-                                               if p.SectionsPath() == plural {
-                                                       foundTaxonomyTermsPage = true
-                                                       break
-                                               }
-                                       }
-
-                                       if !foundTaxonomyTermsPage {
-                                               n := s.newPage(page.KindTaxonomyTerm, plural)
-                                               n.getTaxonomyNodeInfo().TransferValues(n)
-                                               s.workAllPages = append(s.workAllPages, n)
-                                       }
-                               }
-
-                               if taxonomyEnabled {
-                                       for termKey := range s.Taxonomies[plural] {
-
-                                               foundTaxonomyPage := false
-
-                                               for _, p := range taxonomyPages {
-                                                       sectionsPath := p.SectionsPath()
-
-                                                       if !strings.HasPrefix(sectionsPath, plural) {
-                                                               continue
-                                                       }
-
-                                                       singularKey := strings.TrimPrefix(sectionsPath, plural)
-                                                       singularKey = strings.TrimPrefix(singularKey, "/")
-
-                                                       if singularKey == termKey {
-                                                               foundTaxonomyPage = true
-                                                               break
-                                                       }
-                                               }
-
-                                               if !foundTaxonomyPage {
-                                                       info := s.taxonomyNodes.Get(plural, termKey)
-                                                       if info == nil {
-                                                               panic("no info found")
-                                                       }
-
-                                                       n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
-                                                       info.TransferValues(n)
-                                                       s.workAllPages = append(s.workAllPages, n)
-                                               }
-                                       }
-                               }
-                       }
-               }
-       }
-
-       return nil
-}
-
 func (h *HugoSites) removePageByFilename(filename string) {
        for _, s := range h.Sites {
                s.removePageFilename(filename)
@@ -742,23 +628,6 @@ func (h *HugoSites) removePageByFilename(filename string) {
 }
 
 func (h *HugoSites) createPageCollections() error {
-       for _, s := range h.Sites {
-               for _, p := range s.rawAllPages {
-                       if !s.isEnabled(p.Kind()) {
-                               continue
-                       }
-
-                       shouldBuild := s.shouldBuild(p)
-                       s.buildStats.update(p)
-                       if shouldBuild {
-                               if p.m.headless {
-                                       s.headlessPages = append(s.headlessPages, p)
-                               } else {
-                                       s.workAllPages = append(s.workAllPages, p)
-                               }
-                       }
-               }
-       }
 
        allPages := newLazyPagesFactory(func() page.Pages {
                var pages page.Pages
@@ -950,8 +819,7 @@ type contentChangeMap struct {
        mu sync.RWMutex
 
        // Holds directories with leaf bundles.
-       leafBundles    *radix.Tree
-       leafBundlesTxn *radix.Txn
+       leafBundles *radix.Tree
 
        // Holds directories with branch bundles.
        branchBundles map[string]bool
@@ -969,18 +837,6 @@ type contentChangeMap struct {
        symContent   map[string]map[string]bool
 }
 
-func (m *contentChangeMap) start() {
-       m.mu.Lock()
-       m.leafBundlesTxn = m.leafBundles.Txn()
-       m.mu.Unlock()
-}
-
-func (m *contentChangeMap) stop() {
-       m.mu.Lock()
-       m.leafBundles = m.leafBundlesTxn.Commit()
-       m.mu.Unlock()
-}
-
 func (m *contentChangeMap) add(filename string, tp bundleDirType) {
        m.mu.Lock()
        dir := filepath.Dir(filename) + helpers.FilePathSeparator
@@ -989,7 +845,7 @@ func (m *contentChangeMap) add(filename string, tp bundleDirType) {
        case bundleBranch:
                m.branchBundles[dir] = true
        case bundleLeaf:
-               m.leafBundlesTxn.Insert([]byte(dir), true)
+               m.leafBundles.Insert(dir, true)
        default:
                panic("invalid bundle type")
        }
@@ -1012,8 +868,8 @@ func (m *contentChangeMap) resolveAndRemove(filename string) (string, string, bu
                return dir, dir, bundleBranch
        }
 
-       if key, _, found := m.leafBundles.Root().LongestPrefix([]byte(dir)); found {
-               m.leafBundlesTxn.Delete(key)
+       if key, _, found := m.leafBundles.LongestPrefix(dir); found {
+               m.leafBundles.Delete(key)
                dir = string(key)
                return dir, dir, bundleLeaf
        }
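
Roughly how leaf-bundle tracking looks with the mutable tree: bundle directories are inserted directly, and a changed path is resolved to its owning bundle with a longest-prefix lookup. A simplified sketch with made-up paths, not the actual contentChangeMap code:

package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	leafBundles := radix.New()

	// Adding a leaf bundle: insert its directory directly, no transaction.
	leafBundles.Insert("content/posts/my-bundle/", true)

	// Resolving a change: find the bundle that owns the changed file's directory.
	changedDir := "content/posts/my-bundle/images/"
	if key, _, found := leafBundles.LongestPrefix(changedDir); found {
		fmt.Println("owning bundle:", key)
		leafBundles.Delete(key)
	}
}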
index d20932599c33c2c4766d8258ddbdb3321eae721d..82a189a50b223f266ca521f84b760034d78aa173 100644 (file)
@@ -18,7 +18,6 @@ import (
        "context"
        "fmt"
        "runtime/trace"
-       "sort"
 
        "github.com/gohugoio/hugo/output"
 
@@ -31,6 +30,7 @@ import (
 // Build builds all sites. If filesystem events are provided,
 // this is considered to be a potential partial rebuild.
 func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
+
        if h.running {
                // Make sure we don't trigger rebuilds in parallel.
                h.runningMu.Lock()
@@ -75,25 +75,29 @@ func (h *HugoSites) Build(config BuildCfg, events ...fsnotify.Event) error {
 
        if !config.PartialReRender {
                prepare := func() error {
-                       for _, s := range h.Sites {
-                               s.Deps.BuildStartListeners.Notify()
-                       }
-
-                       if len(events) > 0 {
-                               // Rebuild
-                               if err := h.initRebuild(conf); err != nil {
-                                       return errors.Wrap(err, "initRebuild")
+                       init := func(conf *BuildCfg) error {
+                               for _, s := range h.Sites {
+                                       s.Deps.BuildStartListeners.Notify()
                                }
-                       } else {
-                               if err := h.initSites(conf); err != nil {
-                                       return errors.Wrap(err, "initSites")
+
+                               if len(events) > 0 {
+                                       // Rebuild
+                                       if err := h.initRebuild(conf); err != nil {
+                                               return errors.Wrap(err, "initRebuild")
+                                       }
+                               } else {
+                                       if err := h.initSites(conf); err != nil {
+                                               return errors.Wrap(err, "initSites")
+                                       }
                                }
+
+                               return nil
                        }
 
                        var err error
 
                        f := func() {
-                               err = h.process(conf, events...)
+                               err = h.process(conf, init, events...)
                        }
                        trace.WithRegion(ctx, "process", f)
                        if err != nil {
@@ -195,7 +199,7 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
        }
 
        for _, s := range h.Sites {
-               s.resetBuildState()
+               s.resetBuildState(config.whatChanged.source)
        }
 
        h.reset(config)
@@ -205,7 +209,7 @@ func (h *HugoSites) initRebuild(config *BuildCfg) error {
        return nil
 }
 
-func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
+func (h *HugoSites) process(config *BuildCfg, init func(config *BuildCfg) error, events ...fsnotify.Event) error {
        // We should probably refactor the Site and pull up most of the logic from there to here,
        // but that seems like a daunting task.
        // So for now, if there are more than one site (language),
@@ -215,9 +219,7 @@ func (h *HugoSites) process(config *BuildCfg, events ...fsnotify.Event) error {
 
        if len(events) > 0 {
                // This is a rebuild
-               changed, err := firstSite.processPartial(events)
-               config.whatChanged = &changed
-               return err
+               return firstSite.processPartial(config, init, events)
        }
 
        return firstSite.process(*config)
@@ -235,26 +237,27 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
                }
        }
 
-       if err := h.createPageCollections(); err != nil {
-               return err
+       if !config.whatChanged.source {
+               return nil
        }
 
-       if config.whatChanged.source {
-               for _, s := range h.Sites {
-                       if err := s.assembleTaxonomies(); err != nil {
-                               return err
-                       }
+       for _, s := range h.Sites {
+               if err := s.assemblePagesMap(s); err != nil {
+                       return err
+               }
+
+               if err := s.pagesMap.assembleTaxonomies(s); err != nil {
+                       return err
+               }
+
+               if err := s.createWorkAllPages(); err != nil {
+                       return err
                }
-       }
 
-       // Create pagexs for the section pages etc. without content file.
-       if err := h.createMissingPages(); err != nil {
-               return err
        }
 
-       for _, s := range h.Sites {
-               s.setupSitePages()
-               sort.Stable(s.workAllPages)
+       if err := h.createPageCollections(); err != nil {
+               return err
        }
 
        return nil
index 876f21cfa6dfe742da63021a9ecf7e7000b4f8c3..123c27b9c7b0f3b2629a8e1a419ffac06e6afe45 100644 (file)
@@ -365,7 +365,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
        require.NotNil(t, enTags["tag1"])
        require.NotNil(t, frTags["FRtag1"])
        b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
-       b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")
 
        // Check Blackfriday config
        require.True(t, strings.Contains(content(doc1fr), "&laquo;"), content(doc1fr))
@@ -470,13 +469,6 @@ func TestMultiSitesRebuild(t *testing.T) {
                        func(t *testing.T) {
                                assert.Len(enSite.RegularPages(), 4, "1 en removed")
 
-                               // Check build stats
-                               require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
-                               require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
-                               require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
-                               require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
-                               require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
-                               require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
                        },
                },
                {
@@ -609,70 +601,6 @@ func TestMultiSitesRebuild(t *testing.T) {
 
 }
 
-func TestAddNewLanguage(t *testing.T) {
-       t.Parallel()
-       assert := require.New(t)
-
-       b := newMultiSiteTestDefaultBuilder(t)
-       b.CreateSites().Build(BuildCfg{})
-
-       fs := b.Fs
-
-       newConfig := multiSiteTOMLConfigTemplate + `
-
-[Languages.sv]
-weight = 15
-title = "Svenska"
-`
-
-       writeNewContentFile(t, fs.Source, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
-       // replace the config
-       b.WithNewConfig(newConfig)
-
-       sites := b.H
-
-       assert.NoError(b.LoadConfig())
-       err := b.H.Build(BuildCfg{NewConfig: b.Cfg})
-
-       if err != nil {
-               t.Fatalf("Failed to rebuild sites: %s", err)
-       }
-
-       require.Len(t, sites.Sites, 5, fmt.Sprintf("Len %d", len(sites.Sites)))
-
-       // The Swedish site should be put in the middle (language weight=15)
-       enSite := sites.Sites[0]
-       svSite := sites.Sites[1]
-       frSite := sites.Sites[2]
-       require.True(t, enSite.language.Lang == "en", enSite.language.Lang)
-       require.True(t, svSite.language.Lang == "sv", svSite.language.Lang)
-       require.True(t, frSite.language.Lang == "fr", frSite.language.Lang)
-
-       homeEn := enSite.getPage(page.KindHome)
-       require.NotNil(t, homeEn)
-       require.Len(t, homeEn.Translations(), 4)
-
-       require.Equal(t, "sv", homeEn.Translations()[0].Language().Lang)
-
-       require.Len(t, enSite.RegularPages(), 5)
-       require.Len(t, frSite.RegularPages(), 4)
-
-       // Veriy Swedish site
-       require.Len(t, svSite.RegularPages(), 1)
-       svPage := svSite.RegularPages()[0]
-
-       require.Equal(t, "Swedish Contentfile", svPage.Title())
-       require.Equal(t, "sv", svPage.Language().Lang)
-       require.Len(t, svPage.Translations(), 2)
-       require.Len(t, svPage.AllTranslations(), 3)
-       require.Equal(t, "en", svPage.Translations()[0].Language().Lang)
-
-       // Regular pages have no children
-       require.Len(t, svPage.Pages(), 0)
-       require.Len(t, svPage.Data().(page.Data).Pages(), 0)
-
-}
-
 // https://github.com/gohugoio/hugo/issues/4706
 func TestContentStressTest(t *testing.T) {
        b := newTestSitesBuilder(t)
@@ -775,13 +703,13 @@ END
 }
 
 func checkContent(s *sitesBuilder, filename string, matches ...string) {
+       s.T.Helper()
        content := readDestination(s.T, s.Fs, filename)
        for _, match := range matches {
                if !strings.Contains(content, match) {
                        s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
                }
        }
-
 }
 
 func TestTranslationsFromContentToNonContent(t *testing.T) {
index 4a81fe950f9a0a9d0e0de66502e89a060846a2fb..e36c1a1d4cb0e920b6f1011e782a35f4790424d0 100644 (file)
@@ -54,7 +54,7 @@ Content.
 {{ range (.Paginate .Site.RegularPages).Pages }}
 * Page Paginate: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
 {{ end }}
-{{ range .Pages }}
+{{ range .Site.RegularPages }}
 * Page Pages: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
 {{ end }}
 `)
index d5b8861ce6b260ef8012c61ffe0fcd868839ab01..a6a951fa7e78ff6243420ee2a7bc0adf5130b36b 100644 (file)
@@ -143,8 +143,8 @@ Some **Markdown** in JSON shortcode.
        const (
                commonPageTemplate            = `|{{ .Kind }}|{{ .Title }}|{{ .Path }}|{{ .Summary }}|{{ .Content }}|RelPermalink: {{ .RelPermalink }}|WordCount: {{ .WordCount }}|Pages: {{ .Pages }}|Data Pages: Pages({{ len .Data.Pages }})|Resources: {{ len .Resources }}|Summary: {{ .Summary }}`
                commonPaginatorTemplate       = `|Paginator: {{ with .Paginator }}{{ .PageNumber }}{{ else }}NIL{{ end }}`
-               commonListTemplateNoPaginator = `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
-               commonListTemplate            = commonPaginatorTemplate + `|{{ range $i, $e := (.Pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+               commonListTemplateNoPaginator = `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
+               commonListTemplate            = commonPaginatorTemplate + `|{{ $pages := .Pages }}{{ if .IsHome }}{{ $pages = .Site.RegularPages }}{{ end }}{{ range $i, $e := ($pages | first 1) }}|Render {{ $i }}: {{ .Kind }}|{{ .Render "li" }}|{{ end }}|Site params: {{ $.Site.Params.hugo }}|RelPermalink: {{ .RelPermalink }}`
                commonShortcodeTemplate       = `|{{ .Name }}|{{ .Ordinal }}|{{ .Page.Summary }}|{{ .Page.Content }}|WordCount: {{ .Page.WordCount }}`
                prevNextTemplate              = `|Prev: {{ with .Prev }}{{ .RelPermalink }}{{ end }}|Next: {{ with .Next }}{{ .RelPermalink }}{{ end }}`
                prevNextInSectionTemplate     = `|PrevInSection: {{ with .PrevInSection }}{{ .RelPermalink }}{{ end }}|NextInSection: {{ with .NextInSection }}{{ .RelPermalink }}{{ end }}`
@@ -193,7 +193,7 @@ Some **Markdown** in JSON shortcode.
        b.AssertFileContent("public/index.html",
                "home|In English",
                "Site params: Rules",
-               "Pages: Pages(18)|Data Pages: Pages(18)",
+               "Pages: Pages(6)|Data Pages: Pages(6)",
                "Paginator: 1",
                "First Site: In English",
                "RelPermalink: /",
index 676cba762ac0c7506aa05a81c636cf77e754e6af..8dda33009599b7cd3450042aa5a73058f0b9904c 100644 (file)
@@ -23,6 +23,8 @@ import (
        "sort"
        "strings"
 
+       "github.com/gohugoio/hugo/common/maps"
+
        "github.com/gohugoio/hugo/hugofs/files"
 
        "github.com/bep/gitmap"
@@ -121,31 +123,66 @@ func (p *pageState) MarshalJSON() ([]byte, error) {
        return page.MarshalPageToJSON(p)
 }
 
-func (p *pageState) Pages() page.Pages {
-       p.pagesInit.Do(func() {
-               if p.pages != nil {
-                       return
-               }
+func (p *pageState) getPages() page.Pages {
+       b := p.bucket
+       if b == nil {
+               return nil
+       }
+       return b.getPages()
+}
 
+func (p *pageState) getPagesAndSections() page.Pages {
+       b := p.bucket
+       if b == nil {
+               return nil
+       }
+       return b.getPagesAndSections()
+}
+
+// TODO(bep) cm add a test
+func (p *pageState) RegularPages() page.Pages {
+       p.regularPagesInit.Do(func() {
                var pages page.Pages
 
                switch p.Kind() {
                case page.KindPage:
-               case page.KindHome:
-                       pages = p.s.RegularPages()
+               case page.KindSection, page.KindHome, page.KindTaxonomyTerm:
+                       pages = p.getPages()
                case page.KindTaxonomy:
-                       termInfo := p.getTaxonomyNodeInfo()
-                       taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey)
-                       pages = taxonomy.Pages()
-               case page.KindTaxonomyTerm:
-                       plural := p.getTaxonomyNodeInfo().plural
-                       // A list of all page.KindTaxonomy pages with matching plural
-                       for _, p := range p.s.findPagesByKind(page.KindTaxonomy) {
-                               if p.SectionsEntries()[0] == plural {
+                       all := p.Pages()
+                       for _, p := range all {
+                               if p.IsPage() {
                                        pages = append(pages, p)
                                }
                        }
-               case kind404, kindSitemap, kindRobotsTXT:
+               default:
+                       pages = p.s.RegularPages()
+               }
+
+               p.regularPages = pages
+
+       })
+
+       return p.regularPages
+}
+
+func (p *pageState) Pages() page.Pages {
+       p.pagesInit.Do(func() {
+               var pages page.Pages
+
+               switch p.Kind() {
+               case page.KindPage:
+               case page.KindSection, page.KindHome:
+                       pages = p.getPagesAndSections()
+               case page.KindTaxonomy:
+                       termInfo := p.bucket
+                       plural := maps.GetString(termInfo.meta, "plural")
+                       term := maps.GetString(termInfo.meta, "termKey")
+                       taxonomy := p.s.Taxonomies[plural].Get(term)
+                       pages = taxonomy.Pages()
+               case page.KindTaxonomyTerm:
+                       pages = p.getPagesAndSections()
+               default:
                        pages = p.s.Pages()
                }
 
@@ -295,10 +332,9 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
                        if len(sections) > 0 {
                                section = sections[0]
                        }
-               case page.KindTaxonomyTerm:
-                       section = p.getTaxonomyNodeInfo().singular
-               case page.KindTaxonomy:
-                       section = p.getTaxonomyNodeInfo().parent.singular
+               case page.KindTaxonomyTerm, page.KindTaxonomy:
+                       section = maps.GetString(p.bucket.meta, "singular")
+
                default:
                }
 
@@ -359,11 +395,6 @@ func (p *pageState) initPage() error {
        return nil
 }
 
-func (p *pageState) setPages(pages page.Pages) {
-       page.SortByDefault(pages)
-       p.pages = pages
-}
-
 func (p *pageState) renderResources() (err error) {
        p.resourcesPublishInit.Do(func() {
                var toBeDeleted []int
@@ -489,13 +520,6 @@ func (p *pageState) addResources(r ...resource.Resource) {
        p.resources = append(p.resources, r...)
 }
 
-func (p *pageState) addSectionToParent() {
-       if p.parent == nil {
-               return
-       }
-       p.parent.subSections = append(p.parent.subSections, p)
-}
-
 func (p *pageState) mapContent(meta *pageMeta) error {
 
        s := p.shortcodeState
@@ -743,27 +767,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
        return nil
 }
 
-func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo {
-       info := p.s.taxonomyNodes.Get(p.SectionsEntries()...)
-
-       if info == nil {
-               // There can be unused content pages for taxonomies (e.g. author that
-               // has not written anything, yet), and these will not have a taxonomy
-               // node created in the assemble taxonomies step.
-               return nil
-       }
-
-       return info
-
-}
-
-func (p *pageState) sortParentSections() {
-       if p.parent == nil {
-               return
-       }
-       page.SortByDefault(p.parent.subSections)
-}
-
 // sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
 // this page. It is prefixed with a "/".
 //
index f9ceee8c9056918409762ba64e1e43a6e0aa1621..cf554bb40ad5a58193aa9b0f1642fad884f3a384 100644 (file)
@@ -30,6 +30,8 @@ type pageCommon struct {
        s *Site
        m *pageMeta
 
+       bucket *pagesMapBucket
+
        // Laziliy initialized dependencies.
        init *lazy.Init
 
@@ -101,17 +103,17 @@ type pageCommon struct {
        translationKey     string
        translationKeyInit sync.Once
 
-       // Will only be set for sections and regular pages.
+       // Will only be set for bundled pages.
        parent *pageState
 
-       // Will only be set for section pages and the home page.
-       subSections page.Pages
-
        // Set in fast render mode to force render a given page.
        forceRender bool
 }
 
 type pagePages struct {
-       pages     page.Pages
        pagesInit sync.Once
+       pages     page.Pages
+
+       regularPagesInit sync.Once
+       regularPages     page.Pages
 }
index 79a64931b4a33199402b44988f514e180f27317c..8bc818a00a0388fae0924eba6ff7364a67220318 100644 (file)
@@ -16,6 +16,8 @@ package hugolib
 import (
        "sync"
 
+       "github.com/gohugoio/hugo/common/maps"
+
        "github.com/gohugoio/hugo/resources/page"
 )
 
@@ -36,22 +38,22 @@ func (p *pageData) Data() interface{} {
 
                switch p.Kind() {
                case page.KindTaxonomy:
-                       termInfo := p.getTaxonomyNodeInfo()
-                       pluralInfo := termInfo.parent
+                       bucket := p.bucket
+                       meta := bucket.meta
+                       plural := maps.GetString(meta, "plural")
+                       singular := maps.GetString(meta, "singular")
 
-                       singular := pluralInfo.singular
-                       plural := pluralInfo.plural
-                       term := termInfo.term
-                       taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)
+                       taxonomy := p.s.Taxonomies[plural].Get(maps.GetString(meta, "termKey"))
 
                        p.data[singular] = taxonomy
-                       p.data["Singular"] = singular
+                       p.data["Singular"] = meta["singular"]
                        p.data["Plural"] = plural
-                       p.data["Term"] = term
+                       p.data["Term"] = meta["term"]
                case page.KindTaxonomyTerm:
-                       info := p.getTaxonomyNodeInfo()
-                       plural := info.plural
-                       singular := info.singular
+                       bucket := p.bucket
+                       meta := bucket.meta
+                       plural := maps.GetString(meta, "plural")
+                       singular := maps.GetString(meta, "singular")
 
                        p.data["Singular"] = singular
                        p.data["Plural"] = plural
index 026546742c7065fa8b963224f6ab4591df32d658..20476ecfa93375cdfcdd5e0c86aed2b0a0d34cd7 100644 (file)
@@ -80,7 +80,17 @@ func (p *pagePaginator) Paginator(options ...interface{}) (*page.Pager, error) {
 
                pd := p.source.targetPathDescriptor
                pd.Type = p.source.outputFormat()
-               paginator, err := page.Paginate(pd, p.source.Pages(), pagerSize)
+
+               var pages page.Pages
+               if p.source.IsHome() {
+                       // From Hugo 0.57 we made home.Pages() work like any other
+                       // section. To avoid the default paginators for the home page
+                       // changing in the wild, we make this a special case.
+                       pages = p.source.s.RegularPages()
+               } else {
+                       pages = p.source.Pages()
+               }
+               paginator, err := page.Paginate(pd, pages, pagerSize)
                if err != nil {
                        initErr = err
                        return
index 177e0420a8b347a8002d4e4fe81eaa15a8c54604..aa0fcd48835dbbc502dfe2962c05e369f9356977 100644 (file)
@@ -27,9 +27,8 @@ import (
        bp "github.com/gohugoio/hugo/bufferpool"
        "github.com/gohugoio/hugo/tpl"
 
-       "github.com/gohugoio/hugo/output"
-
        "github.com/gohugoio/hugo/helpers"
+       "github.com/gohugoio/hugo/output"
        "github.com/gohugoio/hugo/resources/page"
        "github.com/gohugoio/hugo/resources/resource"
 )
index bddfde7c865a16cdd490f2e6e48315531d01e2ca..7bd2874bfa83433f36522b18d95ef5cca7955cc4 100644 (file)
@@ -109,9 +109,21 @@ func (pt pageTree) Page() page.Page {
 }
 
 func (pt pageTree) Parent() page.Page {
-       return pt.p.parent
+       if pt.p.parent != nil {
+               return pt.p.parent
+       }
+
+       if pt.p.bucket == nil || pt.p.bucket.parent == nil {
+               return nil
+       }
+
+       return pt.p.bucket.parent.owner
 }
 
 func (pt pageTree) Sections() page.Pages {
-       return pt.p.subSections
+       if pt.p.bucket == nil {
+               return nil
+       }
+
+       return pt.p.bucket.getSections()
 }
index 05dacbe0ac9c209d92a5eec5c4cd6ff5e1f20b5d..bb34ccfb8480ec38ca93300128282aecf4876438 100644 (file)
@@ -531,7 +531,6 @@ date: 2018-01-15
        assert.Equal(2017, s.getPage("/no-index").Date().Year())
        assert.True(s.getPage("/with-index-no-date").Date().IsZero())
        assert.Equal(2018, s.getPage("/with-index-date").Date().Year())
-
 }
 
 func TestCreateNewPage(t *testing.T) {
index 5c21dc4725d8b164919cbae381e8c5940980f6fd..13f223eb50cd0a615de8ffb3d3fdb639e58afe0b 100644 (file)
@@ -1040,6 +1040,10 @@ slug: leaf
        b.WithContent("sv/b1/data2.json", "sv: data2")
        b.WithContent("nb/b1/data2.json", "nb: data2")
 
+       b.WithContent("en/b3/_index.md", createPage("en: branch"))
+       b.WithContent("en/b3/p1.md", createPage("en: page"))
+       b.WithContent("en/b3/data1.json", "en: data")
+
        b.Build(BuildCfg{})
 
        b.AssertFileContent("public/en/index.html",
index aedcf40901e686150bdc5cdf85116c583df0a438..1c8bed9d9ea9e4d42e13316f743c4db64985e655 100644 (file)
@@ -17,8 +17,12 @@ import (
        "fmt"
        "path"
        "path/filepath"
+       "sort"
        "strings"
        "sync"
+       "time"
+
+       "github.com/gohugoio/hugo/resources/resource"
 
        "github.com/pkg/errors"
 
@@ -32,6 +36,7 @@ var ambiguityFlag = &pageState{}
 
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
+       pagesMap *pagesMap
 
        // Includes absolute all pages (of all types), including drafts etc.
        rawAllPages pageStatePages
@@ -340,15 +345,6 @@ func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStat
        return pages
 }
 
-func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
-       for _, p := range c.workAllPages {
-               if p.Kind() == kind {
-                       return p
-               }
-       }
-       return nil
-}
-
 func (c *PageCollections) addPage(page *pageState) {
        c.rawAllPages = append(c.rawAllPages, page)
 }
@@ -389,3 +385,189 @@ func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
                page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
        }
 }
+
+func (c *PageCollections) assemblePagesMap(s *Site) error {
+       c.pagesMap = newPagesMap(s)
+
+       rootSections := make(map[string]bool)
+
+       // Add all branch nodes first.
+       for _, p := range c.rawAllPages {
+               rootSections[p.Section()] = true
+               if p.IsPage() {
+                       continue
+               }
+               c.pagesMap.addPage(p)
+       }
+
+       // Create the home page and any first-level sections that have no
+       // _index.md of their own.
+       s.home = c.pagesMap.getOrCreateHome()
+       for k := range rootSections {
+               c.pagesMap.createSectionIfNotExists(k)
+       }
+
+       // Attach the regular pages to their section.
+       for _, p := range c.rawAllPages {
+               if p.IsNode() {
+                       continue
+               }
+               c.pagesMap.addPage(p)
+       }
+
+       return nil
+}
+
+func (c *PageCollections) createWorkAllPages() error {
+       c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages))
+       c.headlessPages = make(pageStatePages, 0)
+
+       var (
+               homeDates    *resource.Dates
+               sectionDates *resource.Dates
+               siteLastmod  time.Time
+               siteLastDate time.Time
+
+               sectionsParamId      = "mainSections"
+               sectionsParamIdLower = strings.ToLower(sectionsParamId)
+       )
+
+       mainSections, mainSectionsFound := c.pagesMap.s.Info.Params()[sectionsParamIdLower]
+
+       var (
+               bucketsToRemove []string
+               rootBuckets     []*pagesMapBucket
+       )
+
+       c.pagesMap.r.Walk(func(s string, v interface{}) bool {
+               bucket := v.(*pagesMapBucket)
+               var parentBucket *pagesMapBucket
+
+               if s != "/" {
+                       _, parentv, found := c.pagesMap.r.LongestPrefix(path.Dir(s))
+                       if !found {
+                               panic(fmt.Sprintf("[BUG] parent bucket not found for %q", s))
+                       }
+                       parentBucket = parentv.(*pagesMapBucket)
+
+                       if !mainSectionsFound && strings.Count(s, "/") == 1 {
+                               // Root section
+                               rootBuckets = append(rootBuckets, bucket)
+                       }
+               }
+
+               if bucket.owner.IsHome() {
+                       if resource.IsZeroDates(bucket.owner) {
+                               // Calculate dates from the page tree.
+                               homeDates = &bucket.owner.m.Dates
+                       }
+               }
+
+               sectionDates = nil
+               if resource.IsZeroDates(bucket.owner) {
+                       sectionDates = &bucket.owner.m.Dates
+               }
+
+               if parentBucket != nil {
+                       bucket.parent = parentBucket
+                       if bucket.owner.IsSection() {
+                               parentBucket.bucketSections = append(parentBucket.bucketSections, bucket)
+                       }
+               }
+
+               tmp := bucket.pages[:0]
+               for _, x := range bucket.pages {
+                       if c.pagesMap.s.shouldBuild(x) {
+                               tmp = append(tmp, x)
+                       }
+               }
+               bucket.pages = tmp
+
+               if bucket.isEmpty() {
+                       if bucket.owner.IsSection() && bucket.owner.File().IsZero() {
+                               // Check for any nested section.
+                               var hasDescendant bool
+                               c.pagesMap.r.WalkPrefix(s, func(ss string, v interface{}) bool {
+                                       if s != ss {
+                                               hasDescendant = true
+                                               return true
+                                       }
+                                       return false
+                               })
+                               if !hasDescendant {
+                                       // This is an auto-created section that is now empty.
+                                       bucketsToRemove = append(bucketsToRemove, s)
+                                       return false
+                               }
+                       }
+               }
+
+               if !bucket.disabled {
+                       c.workAllPages = append(c.workAllPages, bucket.owner)
+               }
+
+               if !bucket.view {
+                       for _, p := range bucket.pages {
+                               ps := p.(*pageState)
+                               ps.parent = bucket.owner
+                               if ps.m.headless {
+                                       c.headlessPages = append(c.headlessPages, ps)
+                               } else {
+                                       c.workAllPages = append(c.workAllPages, ps)
+                               }
+
+                               if homeDates != nil {
+                                       homeDates.UpdateDateAndLastmodIfAfter(ps)
+                               }
+
+                               if sectionDates != nil {
+                                       sectionDates.UpdateDateAndLastmodIfAfter(ps)
+                               }
+
+                               if p.Lastmod().After(siteLastmod) {
+                                       siteLastmod = p.Lastmod()
+                               }
+                               if p.Date().After(siteLastDate) {
+                                       siteLastDate = p.Date()
+                               }
+                       }
+               }
+
+               return false
+       })
+
+       c.pagesMap.s.lastmod = siteLastmod
+
+       if !mainSectionsFound {
+
+               // Calculate the main sections.
+               var (
+                       maxRootBucketWeight int
+                       maxRootBucket       *pagesMapBucket
+               )
+
+               for _, b := range rootBuckets {
+                       weight := len(b.pages) + (len(b.bucketSections) * 5)
+                       if weight >= maxRootBucketWeight {
+                               maxRootBucket = b
+                               maxRootBucketWeight = weight
+                       }
+               }
+
+               if maxRootBucket != nil {
+                       // Try to make this as backwards compatible as possible.
+                       mainSections = []string{maxRootBucket.owner.Section()}
+               }
+       }
+
+       c.pagesMap.s.Info.Params()[sectionsParamId] = mainSections
+       c.pagesMap.s.Info.Params()[sectionsParamIdLower] = mainSections
+
+       for _, key := range bucketsToRemove {
+               c.pagesMap.r.Delete(key)
+       }
+
+       sort.Sort(c.workAllPages)
+
+       return nil
+}
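
When mainSections is not set in the site configuration, the fallback above weights each root section as one point per regular page plus five points per subsection; on a tie, the bucket visited later in the walk wins. A minimal standalone sketch of that heuristic, using a hypothetical rootSection type in place of the real buckets:

    package main

    import "fmt"

    // rootSection is a hypothetical stand-in for a root-level pagesMapBucket;
    // it carries only the counts the heuristic needs.
    type rootSection struct {
        name        string
        numPages    int
        numSections int
    }

    // pickMainSection mirrors the fallback: pages count 1, subsections count 5,
    // and ties go to the section visited last.
    func pickMainSection(sections []rootSection) string {
        var (
            best      string
            maxWeight int
        )
        for _, s := range sections {
            weight := s.numPages + s.numSections*5
            if weight >= maxWeight {
                best = s.name
                maxWeight = weight
            }
        }
        return best
    }

    func main() {
        fmt.Println(pickMainSection([]rootSection{
            {name: "blog", numPages: 12},                // weight 12
            {name: "docs", numPages: 4, numSections: 3}, // weight 19
        }))
        // Output: docs
    }
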
index 361b87e84c3933084bd8e85abec4c44af4eba456..f332e85a8687e2b745f3979faef7b72b947726c1 100644 (file)
@@ -36,9 +36,8 @@ import (
 
        "github.com/gohugoio/hugo/source"
 
-       "github.com/gohugoio/hugo/hugofs"
-
        "github.com/gohugoio/hugo/common/loggers"
+       "github.com/gohugoio/hugo/hugofs"
        "github.com/spf13/afero"
 )
 
@@ -109,10 +108,6 @@ type contentDirKey struct {
 // Collect.
 func (c *pagesCollector) Collect() error {
        c.proc.Start(context.Background())
-       if c.tracker != nil {
-               c.tracker.start()
-               defer c.tracker.stop()
-       }
 
        var collectErr error
        if len(c.filenames) == 0 {
@@ -125,7 +120,7 @@ func (c *pagesCollector) Collect() error {
                        dirs[contentDirKey{dir, filename, btype}] = true
                }
 
-               for dir, _ := range dirs {
+               for dir := range dirs {
                        switch dir.tp {
                        case bundleLeaf, bundleBranch:
                                collectErr = c.collectDir(dir.dirname, true, nil)
diff --git a/hugolib/pages_map.go b/hugolib/pages_map.go
new file mode 100644 (file)
index 0000000..26e937f
--- /dev/null
@@ -0,0 +1,367 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+       "fmt"
+       "path"
+       "path/filepath"
+       "strings"
+       "sync"
+
+       radix "github.com/armon/go-radix"
+       "github.com/spf13/cast"
+
+       "github.com/gohugoio/hugo/resources/page"
+)
+
+func newPagesMap(s *Site) *pagesMap {
+       return &pagesMap{
+               r: radix.New(),
+               s: s,
+       }
+}
+
+type pagesMap struct {
+       r *radix.Tree
+       s *Site
+}
+
+func (m *pagesMap) Get(key string) *pagesMapBucket {
+       key = m.cleanKey(key)
+       v, found := m.r.Get(key)
+       if !found {
+               return nil
+       }
+
+       return v.(*pagesMapBucket)
+}
+
+func (m *pagesMap) getKey(p *pageState) string {
+       if !p.File().IsZero() {
+               return m.cleanKey(p.File().Dir())
+       }
+       return m.cleanKey(p.SectionsPath())
+}
+
+func (m *pagesMap) getOrCreateHome() *pageState {
+       var home *pageState
+       b, found := m.r.Get("/")
+       if !found {
+               home = m.s.newPage(page.KindHome)
+               m.addBucketFor("/", home, nil)
+       } else {
+               home = b.(*pagesMapBucket).owner
+       }
+
+       return home
+}
+
+func (m *pagesMap) createSectionIfNotExists(section string) {
+       key := m.cleanKey(section)
+       _, found := m.r.Get(key)
+       if !found {
+               kind := m.s.kindFromSectionPath(section)
+               p := m.s.newPage(kind, section)
+               m.addBucketFor(key, p, nil)
+       }
+}
+
+func (m *pagesMap) addBucket(p *pageState) {
+       key := m.getKey(p)
+
+       m.addBucketFor(key, p, nil)
+}
+
+func (m *pagesMap) addBucketFor(key string, p *pageState, meta map[string]interface{}) *pagesMapBucket {
+       var isView bool
+       switch p.Kind() {
+       case page.KindTaxonomy, page.KindTaxonomyTerm:
+               isView = true
+       }
+
+       disabled := !m.s.isEnabled(p.Kind())
+
+       bucket := &pagesMapBucket{owner: p, view: isView, meta: meta, disabled: disabled}
+       p.bucket = bucket
+
+       m.r.Insert(key, bucket)
+
+       return bucket
+}
+
+func (m *pagesMap) addPage(p *pageState) {
+       if !p.IsPage() {
+               m.addBucket(p)
+               return
+       }
+
+       if !m.s.isEnabled(page.KindPage) {
+               return
+       }
+
+       key := m.getKey(p)
+
+       var bucket *pagesMapBucket
+
+       _, v, found := m.r.LongestPrefix(key)
+       if !found {
+               panic(fmt.Sprintf("[BUG] bucket with key %q not found", key))
+       }
+
+       bucket = v.(*pagesMapBucket)
+       p.bucket = bucket
+
+       bucket.pages = append(bucket.pages, p)
+}
+
+func (m *pagesMap) withEveryPage(f func(p *pageState)) {
+       m.r.Walk(func(k string, v interface{}) bool {
+               b := v.(*pagesMapBucket)
+               f(b.owner)
+               if !b.view {
+                       for _, p := range b.pages {
+                               f(p.(*pageState))
+                       }
+               }
+
+               return false
+       })
+}
+
+func (m *pagesMap) assembleTaxonomies(s *Site) error {
+       s.Taxonomies = make(TaxonomyList)
+
+       type bucketKey struct {
+               plural  string
+               termKey string
+       }
+
+       // Temporary cache.
+       taxonomyBuckets := make(map[bucketKey]*pagesMapBucket)
+
+       for singular, plural := range s.siteCfg.taxonomiesConfig {
+               s.Taxonomies[plural] = make(Taxonomy)
+               bkey := bucketKey{
+                       plural: plural,
+               }
+
+               bucket := m.Get(plural)
+
+               if bucket == nil {
+                       // Create the page and bucket
+                       n := s.newPage(page.KindTaxonomyTerm, plural)
+
+                       key := m.cleanKey(plural)
+                       bucket = m.addBucketFor(key, n, nil)
+               }
+
+               if bucket.meta == nil {
+                       bucket.meta = map[string]interface{}{
+                               "singular": singular,
+                               "plural":   plural,
+                       }
+               }
+
+               // Add it to the temporary cache.
+               taxonomyBuckets[bkey] = bucket
+
+               // Taxonomy entries used in page front matter will be picked up later,
+               // but there may be some yet to be used.
+               pluralPrefix := m.cleanKey(plural) + "/"
+               m.r.WalkPrefix(pluralPrefix, func(k string, v interface{}) bool {
+                       tb := v.(*pagesMapBucket)
+                       termKey := strings.TrimPrefix(k, pluralPrefix)
+                       if tb.meta == nil {
+                               tb.meta = map[string]interface{}{
+                                       "singular": singular,
+                                       "plural":   plural,
+                                       "term":     tb.owner.Title(),
+                                       "termKey":  termKey,
+                               }
+                       }
+
+                       bucket.pages = append(bucket.pages, tb.owner)
+                       bkey.termKey = termKey
+                       taxonomyBuckets[bkey] = tb
+
+                       return false
+               })
+
+       }
+
+       addTaxonomy := func(singular, plural, term string, weight int, p page.Page) {
+               bkey := bucketKey{
+                       plural: plural,
+               }
+
+               termKey := s.getTaxonomyKey(term)
+
+               b1 := taxonomyBuckets[bkey]
+
+               var b2 *pagesMapBucket
+               bkey.termKey = termKey
+               b, found := taxonomyBuckets[bkey]
+               if found {
+                       b2 = b
+               } else {
+
+                       // Create the page and bucket
+                       n := s.newTaxonomyPage(term, plural, termKey)
+                       meta := map[string]interface{}{
+                               "singular": singular,
+                               "plural":   plural,
+                               "term":     term,
+                               "termKey":  termKey,
+                       }
+
+                       key := m.cleanKey(path.Join(plural, termKey))
+                       b2 = m.addBucketFor(key, n, meta)
+                       b1.pages = append(b1.pages, b2.owner)
+                       taxonomyBuckets[bkey] = b2
+
+               }
+
+               w := page.NewWeightedPage(weight, p, b2.owner)
+
+               s.Taxonomies[plural].add(termKey, w)
+
+               b1.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
+               b2.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
+       }
+
+       m.r.Walk(func(k string, v interface{}) bool {
+               b := v.(*pagesMapBucket)
+               if b.view {
+                       return false
+               }
+
+               for singular, plural := range s.siteCfg.taxonomiesConfig {
+                       for _, p := range b.pages {
+
+                               vals := getParam(p, plural, false)
+
+                               w := getParamToLower(p, plural+"_weight")
+                               weight, err := cast.ToIntE(w)
+                               if err != nil {
+                                       m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.Path())
+                                       // weight will equal zero, so let the flow continue
+                               }
+
+                               if vals != nil {
+                                       if v, ok := vals.([]string); ok {
+                                               for _, idx := range v {
+                                                       addTaxonomy(singular, plural, idx, weight, p)
+                                               }
+                                       } else if v, ok := vals.(string); ok {
+                                               addTaxonomy(singular, plural, v, weight, p)
+                                       } else {
+                                               m.s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.Path())
+                                       }
+                               }
+
+                       }
+               }
+               return false
+       })
+
+       for _, plural := range s.siteCfg.taxonomiesConfig {
+               for k := range s.Taxonomies[plural] {
+                       s.Taxonomies[plural][k].Sort()
+               }
+       }
+
+       return nil
+}
+
+func (m *pagesMap) cleanKey(key string) string {
+       key = filepath.ToSlash(strings.ToLower(key))
+       key = strings.Trim(key, "/")
+       return "/" + key
+}
+
+func (m *pagesMap) dump() {
+       m.r.Walk(func(s string, v interface{}) bool {
+               b := v.(*pagesMapBucket)
+               fmt.Println("-------\n", s, ":", b.owner.Kind(), ":")
+               if b.owner != nil {
+                       fmt.Println("Owner:", b.owner.Path())
+               }
+               for _, p := range b.pages {
+                       fmt.Println(p.Path())
+               }
+               return false
+       })
+}
+
+type pagesMapBucket struct {
+       // Set if the pages in this bucket are also present in another bucket.
+       view bool
+
+       // Some additional metadata attached to this node.
+       meta map[string]interface{}
+
+       owner *pageState // The branch node
+
+       // Set if this node's Kind is disabled via disableKinds.
+       disabled bool
+
+       // Used to navigate the sections tree
+       parent         *pagesMapBucket
+       bucketSections []*pagesMapBucket
+
+       pagesInit sync.Once
+       pages     page.Pages
+
+       pagesAndSectionsInit sync.Once
+       pagesAndSections     page.Pages
+
+       sectionsInit sync.Once
+       sections     page.Pages
+}
+
+func (b *pagesMapBucket) isEmpty() bool {
+       return len(b.pages) == 0 && len(b.bucketSections) == 0
+}
+
+func (b *pagesMapBucket) getPages() page.Pages {
+       b.pagesInit.Do(func() {
+               page.SortByDefault(b.pages)
+       })
+       return b.pages
+}
+
+func (b *pagesMapBucket) getPagesAndSections() page.Pages {
+       b.pagesAndSectionsInit.Do(func() {
+               var pas page.Pages
+               pas = append(pas, b.pages...)
+               for _, p := range b.bucketSections {
+                       pas = append(pas, p.owner)
+               }
+               b.pagesAndSections = pas
+               page.SortByDefault(b.pagesAndSections)
+       })
+       return b.pagesAndSections
+}
+
+func (b *pagesMapBucket) getSections() page.Pages {
+       b.sectionsInit.Do(func() {
+               for _, p := range b.bucketSections {
+                       b.sections = append(b.sections, p.owner)
+               }
+               page.SortByDefault(b.sections)
+       })
+
+       return b.sections
+}
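
The new pagesMap keys every branch node (home, sections, taxonomy nodes) by a cleaned section path and attaches each regular page to the nearest enclosing bucket via a longest-prefix lookup. A small standalone sketch of that lookup pattern against github.com/armon/go-radix, with a simplified bucket type and made-up paths standing in for the real pagesMapBucket:

    package main

    import (
        "fmt"

        radix "github.com/armon/go-radix"
    )

    // bucket is a simplified stand-in for pagesMapBucket: a key plus the page
    // paths attached to it.
    type bucket struct {
        key   string
        pages []string
    }

    func main() {
        r := radix.New()

        // Branch nodes are inserted first, keyed by their cleaned section path.
        for _, key := range []string{"/", "/blog", "/blog/2019", "/tags"} {
            r.Insert(key, &bucket{key: key})
        }

        // A regular page is attached to the longest matching prefix,
        // i.e. its closest enclosing section.
        attach := func(pagePath string) {
            _, v, found := r.LongestPrefix(pagePath)
            if !found {
                panic("no bucket found for " + pagePath)
            }
            b := v.(*bucket)
            b.pages = append(b.pages, pagePath)
        }

        attach("/blog/2019/my-post") // lands in the /blog/2019 bucket
        attach("/about")             // no /about section: falls back to "/"

        // WalkPrefix visits a whole subtree, e.g. everything below /blog.
        r.WalkPrefix("/blog", func(key string, v interface{}) bool {
            fmt.Println(key, v.(*bucket).pages)
            return false // false means: keep walking
        })
    }
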
index 882874db9473afcbea41eb74076ec3e225354a87..2b8a7285a5216f22ce24968587f8d83e1ad57a6b 100644 (file)
@@ -58,7 +58,6 @@ import (
        "github.com/gohugoio/hugo/related"
        "github.com/gohugoio/hugo/resources"
        "github.com/gohugoio/hugo/resources/page/pagemeta"
-       "github.com/gohugoio/hugo/resources/resource"
        "github.com/gohugoio/hugo/source"
        "github.com/gohugoio/hugo/tpl"
 
@@ -94,15 +93,11 @@ type Site struct {
 
        Taxonomies TaxonomyList
 
-       taxonomyNodes *taxonomyNodeInfos
-
        Sections Taxonomy
        Info     SiteInfo
 
        layoutHandler *output.LayoutHandler
 
-       buildStats *buildStats
-
        language *langs.Language
 
        siteCfg siteConfigHolder
@@ -216,12 +211,13 @@ func (s *Site) prepareInits() {
 
        s.init.prevNextInSection = init.Branch(func() (interface{}, error) {
                var rootSection []int
+               // TODO(bep) cm attach this to the bucket.
                for i, p1 := range s.workAllPages {
                        if p1.IsPage() && p1.Section() == "" {
                                rootSection = append(rootSection, i)
                        }
                        if p1.IsSection() {
-                               sectionPages := p1.Pages()
+                               sectionPages := p1.RegularPages()
                                for i, p2 := range sectionPages {
                                        p2s := p2.(*pageState)
                                        if p2s.posNextPrevSection == nil {
@@ -263,28 +259,6 @@ func (s *Site) prepareInits() {
 
 }
 
-// Build stats for a given site.
-type buildStats struct {
-       draftCount   int
-       futureCount  int
-       expiredCount int
-}
-
-// TODO(bep) consolidate all site stats into this
-func (b *buildStats) update(p page.Page) {
-       if p.Draft() {
-               b.draftCount++
-       }
-
-       if resource.IsFuture(p) {
-               b.futureCount++
-       }
-
-       if resource.IsExpired(p) {
-               b.expiredCount++
-       }
-}
-
 type siteRenderingContext struct {
        output.Format
 }
@@ -355,9 +329,8 @@ func (s *Site) reset() *Site {
                publisher:              s.publisher,
                siteConfigConfig:       s.siteConfigConfig,
                enableInlineShortcodes: s.enableInlineShortcodes,
-               buildStats:             &buildStats{},
                init:                   s.init,
-               PageCollections:        newPageCollections(),
+               PageCollections:        s.PageCollections,
                siteCfg:                s.siteCfg,
        }
 
@@ -453,7 +426,6 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
                outputFormatsConfig:    siteOutputFormatsConfig,
                mediaTypesConfig:       siteMediaTypesConfig,
                frontmatterHandler:     frontMatterHandler,
-               buildStats:             &buildStats{},
                enableInlineShortcodes: cfg.Language.GetBool("enableInlineShortcodes"),
                siteCfg:                siteConfig,
        }
@@ -920,7 +892,7 @@ func (s *Site) translateFileEvents(events []fsnotify.Event) []fsnotify.Event {
 // reBuild partially rebuilds a site given the filesystem events.
 // It returns whetever the content source was changed.
 // TODO(bep) clean up/rewrite this method.
-func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
+func (s *Site) processPartial(config *BuildCfg, init func(config *BuildCfg) error, events []fsnotify.Event) error {
 
        events = s.filterFileEvents(events)
        events = s.translateFileEvents(events)
@@ -974,6 +946,18 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
                }
        }
 
+       changed := &whatChanged{
+               source: len(sourceChanged) > 0 || len(shortcodesChanged) > 0,
+               other:  len(tmplChanged) > 0 || len(i18nChanged) > 0 || len(dataChanged) > 0,
+               files:  sourceFilesChanged,
+       }
+
+       config.whatChanged = changed
+
+       if err := init(config); err != nil {
+               return err
+       }
+
        // These in memory resource caches will be rebuilt on demand.
        for _, s := range s.h.Sites {
                s.ResourceSpec.ResourceCache.DeletePartitions(cachePartitions...)
@@ -987,7 +971,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
 
                // TOD(bep) globals clean
                if err := first.Deps.LoadResources(); err != nil {
-                       return whatChanged{}, err
+                       return err
                }
 
                for i := 1; i < len(sites); i++ {
@@ -1003,7 +987,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
                                return nil
                        })
                        if err != nil {
-                               return whatChanged{}, err
+                               return err
                        }
                }
        }
@@ -1062,18 +1046,12 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) {
                filenamesChanged = helpers.UniqueStringsReuse(filenamesChanged)
 
                if err := s.readAndProcessContent(filenamesChanged...); err != nil {
-                       return whatChanged{}, err
+                       return err
                }
 
        }
 
-       changed := whatChanged{
-               source: len(sourceChanged) > 0 || len(shortcodesChanged) > 0,
-               other:  len(tmplChanged) > 0 || len(i18nChanged) > 0 || len(dataChanged) > 0,
-               files:  sourceFilesChanged,
-       }
-
-       return changed, nil
+       return nil
 
 }
 
@@ -1090,54 +1068,6 @@ func (s *Site) process(config BuildCfg) (err error) {
 
 }
 
-func (s *Site) setupSitePages() {
-       var homeDates *resource.Dates
-       if s.home != nil {
-               // If the home page has no dates set, we fall back to the site dates.
-               homeDates = &s.home.m.Dates
-       }
-
-       if !s.lastmod.IsZero() && (homeDates == nil || !resource.IsZeroDates(homeDates)) {
-               return
-       }
-
-       if homeDates != nil && !s.lastmod.IsZero() {
-               homeDates.FDate = s.lastmod
-               homeDates.FLastmod = s.lastmod
-               return
-
-       }
-
-       var siteLastmod time.Time
-       var siteLastDate time.Time
-
-       for _, page := range s.workAllPages {
-               if !page.IsPage() {
-                       continue
-               }
-               // Determine Site.Info.LastChange
-               // Note that the logic to determine which date to use for Lastmod
-               // is already applied, so this is *the* date to use.
-               // We cannot just pick the last page in the default sort, because
-               // that may not be ordered by date.
-               // TODO(bep) check if this can be done earlier
-               if page.Lastmod().After(siteLastmod) {
-                       siteLastmod = page.Lastmod()
-               }
-               if page.Date().After(siteLastDate) {
-                       siteLastDate = page.Date()
-               }
-       }
-
-       s.lastmod = siteLastmod
-
-       if homeDates != nil && resource.IsZeroDates(homeDates) {
-               homeDates.FDate = siteLastDate
-               homeDates.FLastmod = s.lastmod
-       }
-
-}
-
 func (s *Site) render(ctx *siteRenderContext) (err error) {
 
        if err := page.Clear(); err != nil {
@@ -1483,81 +1413,22 @@ func (s *Site) getTaxonomyKey(key string) string {
        return strings.ToLower(s.PathSpec.MakePath(key))
 }
 
-func (s *Site) assembleTaxonomies() error {
-       s.Taxonomies = make(TaxonomyList)
-       taxonomies := s.siteCfg.taxonomiesConfig
-       for _, plural := range taxonomies {
-               s.Taxonomies[plural] = make(Taxonomy)
-       }
-
-       s.taxonomyNodes = &taxonomyNodeInfos{
-               m:      make(map[string]*taxonomyNodeInfo),
-               getKey: s.getTaxonomyKey,
-       }
-
-       s.Log.INFO.Printf("found taxonomies: %#v\n", taxonomies)
-
-       for singular, plural := range taxonomies {
-               parent := s.taxonomyNodes.GetOrCreate(plural, "")
-               parent.singular = singular
-
-               addTaxonomy := func(plural, term string, weight int, p page.Page) {
-                       key := s.getTaxonomyKey(term)
-
-                       n := s.taxonomyNodes.GetOrCreate(plural, term)
-                       n.parent = parent
-
-                       w := page.NewWeightedPage(weight, p, n.owner)
-
-                       s.Taxonomies[plural].add(key, w)
-
-                       n.UpdateFromPage(w.Page)
-                       parent.UpdateFromPage(w.Page)
-               }
-
-               for _, p := range s.workAllPages {
-                       vals := getParam(p, plural, false)
-
-                       w := getParamToLower(p, plural+"_weight")
-                       weight, err := cast.ToIntE(w)
-                       if err != nil {
-                               s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.pathOrTitle())
-                               // weight will equal zero, so let the flow continue
-                       }
-
-                       if vals != nil {
-                               if v, ok := vals.([]string); ok {
-                                       for _, idx := range v {
-                                               addTaxonomy(plural, idx, weight, p)
-                                       }
-                               } else if v, ok := vals.(string); ok {
-                                       addTaxonomy(plural, v, weight, p)
-                               } else {
-                                       s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.pathOrTitle())
-                               }
-                       }
-               }
-
-               for k := range s.Taxonomies[plural] {
-                       s.Taxonomies[plural][k].Sort()
-               }
-       }
-
-       return nil
-}
-
 // Prepare site for a new full build.
-func (s *Site) resetBuildState() {
+func (s *Site) resetBuildState(sourceChanged bool) {
        s.relatedDocsHandler = s.relatedDocsHandler.Clone()
-       s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
-       s.buildStats = &buildStats{}
        s.init.Reset()
 
-       for _, p := range s.rawAllPages {
-               p.pagePages = &pagePages{}
-               p.subSections = page.Pages{}
-               p.parent = nil
-               p.Scratcher = maps.NewScratcher()
+       if sourceChanged {
+               s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
+               for _, p := range s.rawAllPages {
+                       p.pagePages = &pagePages{}
+                       p.parent = nil
+                       p.Scratcher = maps.NewScratcher()
+               }
+       } else {
+               s.pagesMap.withEveryPage(func(p *pageState) {
+                       p.Scratcher = maps.NewScratcher()
+               })
        }
 }
 
@@ -1759,8 +1630,11 @@ func (s *Site) kindFromSections(sections []string) string {
                return page.KindHome
        }
 
-       sectionPath := path.Join(sections...)
+       return s.kindFromSectionPath(path.Join(sections...))
+
+}
 
+func (s *Site) kindFromSectionPath(sectionPath string) string {
        for _, plural := range s.siteCfg.taxonomiesConfig {
                if plural == sectionPath {
                        return page.KindTaxonomyTerm
index 8fce43471fc3835575108097ec445517c7f3ff77..ae343716eaa9424effef996f5581135dcb045e93 100644 (file)
 package hugolib
 
 import (
-       "path"
-       "strconv"
-       "strings"
-
        "github.com/gohugoio/hugo/resources/page"
-       "github.com/gohugoio/hugo/resources/resource"
-
-       radix "github.com/hashicorp/go-immutable-radix"
 )
 
 // Sections returns the top level sections.
@@ -37,208 +30,3 @@ func (s *SiteInfo) Sections() page.Pages {
 func (s *SiteInfo) Home() (page.Page, error) {
        return s.s.home, nil
 }
-
-func (s *Site) assembleSections() pageStatePages {
-       var newPages pageStatePages
-
-       if !s.isEnabled(page.KindSection) {
-               return newPages
-       }
-
-       // Maps section kind pages to their path, i.e. "my/section"
-       sectionPages := make(map[string]*pageState)
-
-       // The sections with content files will already have been created.
-       for _, sect := range s.findWorkPagesByKind(page.KindSection) {
-               sectionPages[sect.SectionsPath()] = sect
-       }
-
-       const (
-               sectKey     = "__hs"
-               sectSectKey = "_a" + sectKey
-               sectPageKey = "_b" + sectKey
-       )
-
-       var (
-               inPages    = radix.New().Txn()
-               inSections = radix.New().Txn()
-               undecided  pageStatePages
-       )
-
-       home := s.findFirstWorkPageByKindIn(page.KindHome)
-
-       for i, p := range s.workAllPages {
-
-               if p.Kind() != page.KindPage {
-                       continue
-               }
-
-               sections := p.SectionsEntries()
-
-               if len(sections) == 0 {
-                       // Root level pages. These will have the home page as their Parent.
-                       p.parent = home
-                       continue
-               }
-
-               sectionKey := p.SectionsPath()
-               _, found := sectionPages[sectionKey]
-
-               if !found && len(sections) == 1 {
-
-                       // We only create content-file-less sections for the root sections.
-                       n := s.newPage(page.KindSection, sections[0])
-
-                       sectionPages[sectionKey] = n
-                       newPages = append(newPages, n)
-                       found = true
-               }
-
-               if len(sections) > 1 {
-                       // Create the root section if not found.
-                       _, rootFound := sectionPages[sections[0]]
-                       if !rootFound {
-                               sect := s.newPage(page.KindSection, sections[0])
-                               sectionPages[sections[0]] = sect
-                               newPages = append(newPages, sect)
-                       }
-               }
-
-               if found {
-                       pagePath := path.Join(sectionKey, sectPageKey, strconv.Itoa(i))
-                       inPages.Insert([]byte(pagePath), p)
-               } else {
-                       undecided = append(undecided, p)
-               }
-       }
-
-       // Create any missing sections in the tree.
-       // A sub-section needs a content file, but to create a navigational tree,
-       // given a content file in /content/a/b/c/_index.md, we cannot create just
-       // the c section.
-       for _, sect := range sectionPages {
-               sections := sect.SectionsEntries()
-               for i := len(sections); i > 0; i-- {
-                       sectionPath := sections[:i]
-                       sectionKey := path.Join(sectionPath...)
-                       _, found := sectionPages[sectionKey]
-                       if !found {
-                               sect = s.newPage(page.KindSection, sectionPath[len(sectionPath)-1])
-                               sect.m.sections = sectionPath
-                               sectionPages[sectionKey] = sect
-                               newPages = append(newPages, sect)
-                       }
-               }
-       }
-
-       for k, sect := range sectionPages {
-               inPages.Insert([]byte(path.Join(k, sectSectKey)), sect)
-               inSections.Insert([]byte(k), sect)
-       }
-
-       var (
-               currentSection *pageState
-               children       page.Pages
-               dates          *resource.Dates
-               rootSections   = inSections.Commit().Root()
-       )
-
-       for i, p := range undecided {
-               // Now we can decide where to put this page into the tree.
-               sectionKey := p.SectionsPath()
-
-               _, v, _ := rootSections.LongestPrefix([]byte(sectionKey))
-               sect := v.(*pageState)
-               pagePath := path.Join(path.Join(sect.SectionsEntries()...), sectSectKey, "u", strconv.Itoa(i))
-               inPages.Insert([]byte(pagePath), p)
-       }
-
-       var rootPages = inPages.Commit().Root()
-
-       rootPages.Walk(func(path []byte, v interface{}) bool {
-               p := v.(*pageState)
-
-               if p.Kind() == page.KindSection {
-                       if currentSection != nil {
-                               // A new section
-                               currentSection.setPages(children)
-                               if dates != nil {
-                                       currentSection.m.Dates = *dates
-                               }
-                       }
-
-                       currentSection = p
-                       children = make(page.Pages, 0)
-                       dates = nil
-                       // Use section's dates from front matter if set.
-                       if resource.IsZeroDates(currentSection) {
-                               dates = &resource.Dates{}
-                       }
-
-                       return false
-
-               }
-
-               // Regular page
-               p.parent = currentSection
-               children = append(children, p)
-               if dates != nil {
-                       dates.UpdateDateAndLastmodIfAfter(p)
-               }
-
-               return false
-       })
-
-       if currentSection != nil {
-               currentSection.setPages(children)
-               if dates != nil {
-                       currentSection.m.Dates = *dates
-               }
-       }
-
-       // Build the sections hierarchy
-       for _, sect := range sectionPages {
-               sections := sect.SectionsEntries()
-               if len(sections) == 1 {
-                       if home != nil {
-                               sect.parent = home
-                       }
-               } else {
-                       parentSearchKey := path.Join(sect.SectionsEntries()[:len(sections)-1]...)
-                       _, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
-                       p := v.(*pageState)
-                       sect.parent = p
-               }
-
-               sect.addSectionToParent()
-       }
-
-       var (
-               sectionsParamId      = "mainSections"
-               sectionsParamIdLower = strings.ToLower(sectionsParamId)
-               mainSections         interface{}
-               mainSectionsFound    bool
-               maxSectionWeight     int
-       )
-
-       mainSections, mainSectionsFound = s.Info.Params()[sectionsParamIdLower]
-
-       for _, sect := range sectionPages {
-               sect.sortParentSections()
-
-               if !mainSectionsFound {
-                       weight := len(sect.Pages()) + (len(sect.Sections()) * 5)
-                       if weight >= maxSectionWeight {
-                               mainSections = []string{sect.Section()}
-                               maxSectionWeight = weight
-                       }
-               }
-       }
-
-       // Try to make this as backwards compatible as possible.
-       s.Info.Params()[sectionsParamId] = mainSections
-       s.Info.Params()[sectionsParamIdLower] = mainSections
-
-       return newPages
-
-}
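
The block deleted above also guessed the site's "mainSections" parameter when it was not set in the configuration: each section is scored by its page count plus five times its subsection count, and the highest score wins. A minimal Go sketch of that heuristic, under illustrative names (sectionInfo and guessMainSection are not Hugo APIs):

package sketch

// sectionInfo is a stand-in for a section page's page and subsection counts.
type sectionInfo struct {
	name        string
	numPages    int
	numSections int
}

// guessMainSection scores each section as pages + 5*subsections, as the
// deleted loop did, and returns the highest-scoring one (a later section wins
// ties, mirroring the >= comparison above).
func guessMainSection(sections []sectionInfo) string {
	var (
		best      string
		maxWeight int
	)
	for _, s := range sections {
		weight := s.numPages + s.numSections*5
		if weight >= maxWeight {
			best, maxWeight = s.name, weight
		}
	}
	return best
}

As the deleted lines show, the result was then stored under both the "mainSections" and "mainsections" parameter keys for backwards compatibility.
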
index d4aa9d354b9e1fc7f255e599183a39ea9e3c7979..2e7ffdf0b033ade5937c422dc570e61111d5dc61 100644 (file)
@@ -137,21 +137,20 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
                }},
                {"empty1", func(assert *require.Assertions, p page.Page) {
                        // > b,c
-                       assert.NotNil(getPage(p, "/empty1/b"))
+                       assert.Nil(getPage(p, "/empty1/b")) // No _index.md page.
                        assert.NotNil(getPage(p, "/empty1/b/c"))
 
                }},
                {"empty2", func(assert *require.Assertions, p page.Page) {
-                       // > b,c,d where b and d have content files.
+                       // > b,c,d where b and d have _index.md files.
                        b := getPage(p, "/empty2/b")
                        assert.NotNil(b)
                        assert.Equal("T40_-1", b.Title())
+
                        c := getPage(p, "/empty2/b/c")
+                       assert.Nil(c) // No _index.md
 
-                       assert.NotNil(c)
-                       assert.Equal("Cs", c.Title())
                        d := getPage(p, "/empty2/b/c/d")
-
                        assert.NotNil(d)
                        assert.Equal("T41_-1", d.Title())
 
@@ -163,9 +162,10 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
                {"empty3", func(assert *require.Assertions, p page.Page) {
                        // b,c,d with regular page in b
                        b := getPage(p, "/empty3/b")
-                       assert.NotNil(b)
-                       assert.Len(b.Pages(), 1)
-                       assert.Equal("empty3.md", b.Pages()[0].File().LogicalName())
+                       assert.Nil(b) // No _index.md
+                       e3 := getPage(p, "/empty3/b/empty3")
+                       assert.NotNil(e3)
+                       assert.Equal("empty3.md", e3.File().LogicalName())
 
                }},
                {"empty3", func(assert *require.Assertions, p page.Page) {
@@ -188,19 +188,23 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
                }},
                {"l1", func(assert *require.Assertions, p page.Page) {
                        assert.Equal("L1s", p.Title())
-                       assert.Len(p.Pages(), 2)
+                       assert.Len(p.Pages(), 4) // 2 pages + 2 sections
                        assert.True(p.Parent().IsHome())
                        assert.Len(p.Sections(), 2)
                }},
                {"l1,l2", func(assert *require.Assertions, p page.Page) {
                        assert.Equal("T2_-1", p.Title())
-                       assert.Len(p.Pages(), 3)
+                       assert.Len(p.Pages(), 4) // 3 pages + 1 section
                        assert.Equal(p, p.Pages()[0].Parent())
                        assert.Equal("L1s", p.Parent().Title())
                        assert.Equal("/l1/l2/", p.RelPermalink())
                        assert.Len(p.Sections(), 1)
 
                        for _, child := range p.Pages() {
+                               if child.IsSection() {
+                                       assert.Equal(child, child.CurrentSection())
+                                       continue
+                               }
 
                                assert.Equal(p, child.CurrentSection())
                                active, err := child.InSection(p)
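
The updated assertions above reflect two behaviour changes: a directory only becomes a section page when it has an _index.md file, and a section's Pages list now also contains its subsections. A small sketch of the invariant the loop checks, assuming only the resources/page interface (the helper name is made up):

package sketch

import "github.com/gohugoio/hugo/resources/page"

// currentSectionOf states what the loop above asserts: a subsection listed in
// .Pages is its own current section, while a regular page's current section is
// the section that lists it.
func currentSectionOf(section, child page.Page) page.Page {
	if child.IsSection() {
		return child
	}
	return section
}
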
index a7965ec26cbc59212fe756abccbbaeba1ee924d6..e3f03310927520d7b7901c72b6c5002bd6879670 100644 (file)
@@ -15,13 +15,11 @@ package hugolib
 
 import (
        "fmt"
-       "path"
        "sort"
 
        "github.com/gohugoio/hugo/compare"
 
        "github.com/gohugoio/hugo/resources/page"
-       "github.com/gohugoio/hugo/resources/resource"
 )
 
 // The TaxonomyList is a list of all taxonomies and their values
@@ -156,95 +154,3 @@ func (s *orderedTaxonomySorter) Swap(i, j int) {
 func (s *orderedTaxonomySorter) Less(i, j int) bool {
        return s.by(&s.taxonomy[i], &s.taxonomy[j])
 }
-
-// taxonomyNodeInfo stores additional metadata about a taxonomy.
-type taxonomyNodeInfo struct {
-       plural string
-
-       // Maps "tags" to "tag".
-       singular string
-
-       // The term key as used in the taxonomy map, e.g "tag1".
-       // The value is normalized for paths, but may or not be lowercased
-       // depending on the disablePathToLower setting.
-       termKey string
-
-       // The original, unedited term name. Useful for titles etc.
-       term string
-
-       dates resource.Dates
-
-       parent *taxonomyNodeInfo
-
-       // Either of Kind taxonomyTerm (parent) or taxonomy
-       owner *page.PageWrapper
-}
-
-func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) {
-
-       // Select the latest dates
-       t.dates.UpdateDateAndLastmodIfAfter(p)
-}
-
-func (t *taxonomyNodeInfo) TransferValues(p *pageState) {
-       t.owner.Page = p
-       if p.Lastmod().IsZero() && p.Date().IsZero() {
-               p.m.Dates.UpdateDateAndLastmodIfAfter(t.dates)
-       }
-}
-
-// Maps either plural or plural/term to a taxonomy node.
-// TODO(bep) consolidate somehow with s.Taxonomies
-type taxonomyNodeInfos struct {
-       m      map[string]*taxonomyNodeInfo
-       getKey func(string) string
-}
-
-// map[string]*taxonomyNodeInfo
-func (t taxonomyNodeInfos) key(parts ...string) string {
-       return path.Join(parts...)
-}
-
-// GetOrAdd will get or create and add a new taxonomy node to the parent identified with plural.
-// It will panic if the parent does not exist.
-func (t taxonomyNodeInfos) GetOrAdd(plural, term string) *taxonomyNodeInfo {
-       parent := t.GetOrCreate(plural, "")
-       if parent == nil {
-               panic(fmt.Sprintf("no parent found with plural %q", plural))
-       }
-       child := t.GetOrCreate(plural, term)
-       child.parent = parent
-       return child
-}
-
-func (t taxonomyNodeInfos) GetOrCreate(plural, term string) *taxonomyNodeInfo {
-       termKey := t.getKey(term)
-       key := t.key(plural, termKey)
-
-       n, found := t.m[key]
-       if found {
-               return n
-       }
-
-       n = &taxonomyNodeInfo{
-               plural:  plural,
-               termKey: termKey,
-               term:    term,
-               owner:   &page.PageWrapper{}, // Page will be assigned later.
-       }
-
-       t.m[key] = n
-
-       return n
-}
-
-func (t taxonomyNodeInfos) Get(sections ...string) *taxonomyNodeInfo {
-       key := t.key(sections...)
-
-       n, found := t.m[key]
-       if found {
-               return n
-       }
-
-       return nil
-}
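
For reference, the taxonomyNodeInfos machinery removed above indexed its nodes by joining plural and term, so "tags" addressed the terms node and "tags/tag1" a single term. A compact sketch of that keying, mirroring the deleted key() helper:

package sketch

import "path"

// taxonomyKey joins the path segments exactly as the removed key() helper did.
func taxonomyKey(parts ...string) string {
	return path.Join(parts...)
}
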
index 2edc36d63e1a838331a76c43e68609b1917e33cc..21748d0bf71f4bcc7d5873c2aec7ce6a17ae8818 100644 (file)
@@ -168,7 +168,7 @@ permalinkeds:
        for taxonomy, count := range taxonomyTermPageCounts {
                term := s.getPage(page.KindTaxonomyTerm, taxonomy)
                require.NotNil(t, term)
-               require.Len(t, term.Pages(), count)
+               require.Len(t, term.Pages(), count, taxonomy)
 
                for _, p := range term.Pages() {
                        require.Equal(t, page.KindTaxonomy, p.Kind())
index ac511367d6e8e9f53ab49c08203024dbb9105574..d7e0d5c85c5db75bad8957f00e738b073e6b0e17 100644 (file)
@@ -698,6 +698,7 @@ type testHelper struct {
 }
 
 func (th testHelper) assertFileContent(filename string, matches ...string) {
+       th.T.Helper()
        filename = th.replaceDefaultContentLanguageValue(filename)
        content := readDestination(th.T, th.Fs, filename)
        for _, match := range matches {
index 00b449607fc5a5bab5447e3be3586cc9aa43d030..3b43b0af3f17dc214363778d34bb9e625363851f 100644 (file)
@@ -57,6 +57,13 @@ type AuthorProvider interface {
 // ChildCareProvider provides accessors to child resources.
 type ChildCareProvider interface {
        Pages() Pages
+
+       // RegularPages returns a list of pages of kind 'Page'.
+       // In Hugo 0.57 we changed the Pages method so it returns all page
+       // kinds, even sections. If you want the old behaviour, you can
+       // use RegularPages.
+       RegularPages() Pages
+
        Resources() resource.Resources
 }
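
Since Pages now returns children of every kind, RegularPages is the accessor that keeps the old, pages-only view. A hedged sketch of the relationship, assuming only Kind and KindPage from resources/page (regularOnly itself is illustrative, not part of the interface):

package sketch

import "github.com/gohugoio/hugo/resources/page"

// regularOnly keeps only pages of kind "page"; this is roughly the subset
// RegularPages exposes now that Pages also contains sections.
func regularOnly(all page.Pages) page.Pages {
	var regular page.Pages
	for _, p := range all {
		if p.Kind() == page.KindPage {
			regular = append(regular, p)
		}
	}
	return regular
}

Templates that relied on the old behaviour can switch from .Pages to .RegularPages, as the comment above suggests.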
 
index c3a4819f1f40fbebaccfe214a2e3a4f1e2eceb01..ea1a44d8f242cf305b859de5cab9488b40fa6e94 100644 (file)
@@ -284,6 +284,10 @@ func (p *nopPage) Pages() Pages {
        return nil
 }
 
+func (p *nopPage) RegularPages() Pages {
+       return nil
+}
+
 func (p *nopPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
        return nil, nil
 }
index 1a27985576d5887ca26245e9e2653323765352c1..e861c13751f7c9322334e332dbf5a85037f47eb9 100644 (file)
@@ -351,6 +351,10 @@ func (p *testPage) Pages() Pages {
        panic("not implemented")
 }
 
+func (p *testPage) RegularPages() Pages {
+       panic("not implemented")
+}
+
 func (p *testPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
        return nil, nil
 }
index 3f75bcc3cfe041c8990ec9fd9328e797eb8072fa..48ed736ce0f481f8c753b1fd101cd47c61386447 100644 (file)
@@ -42,7 +42,7 @@ func (p WeightedPages) Page() Page {
                return nil
        }
 
-       return first.owner.Page
+       return first.owner
 }
 
 // A WeightedPage is a Page with a weight.
@@ -54,15 +54,10 @@ type WeightedPage struct {
        // manual .Site.GetPage lookups. It is implemented in this roundabout way
        // because we cannot add additional state to the WeightedPages slice
        // without breaking lots of templates in the wild.
-       owner *PageWrapper
+       owner Page
 }
 
-// PageWrapper wraps a Page.
-type PageWrapper struct {
-       Page
-}
-
-func NewWeightedPage(weight int, p Page, owner *PageWrapper) WeightedPage {
+func NewWeightedPage(weight int, p Page, owner Page) WeightedPage {
        return WeightedPage{Weight: weight, Page: p, owner: owner}
 }
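
With PageWrapper gone, a weighted page's owner is now a plain Page (the owning term page) passed in up front instead of being filled in later through the wrapper. A hedged usage sketch of the new constructor; weightedFor and its arguments are illustrative:

package sketch

import "github.com/gohugoio/hugo/resources/page"

// weightedFor builds a WeightedPage the way the new signature expects: member
// is the page listed under the term, term is the owning term page.
func weightedFor(weight int, member, term page.Page) page.WeightedPage {
	return page.NewWeightedPage(weight, member, term)
}
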
 
index e2d1d3c39bd76aa0c15dfa0a1b71e9bdf12c4a53..93713f00ce1b4cea361c4d76a0b8831a2524459c 100644 (file)
@@ -19,7 +19,8 @@ package embedded
 // EmbeddedTemplates represents all embedded templates.
 var EmbeddedTemplates = [][2]string{
        {`_default/robots.txt`, `User-agent: *`},
-       {`_default/rss.xml`, `{{- $pages := .Data.Pages -}}
+       {`_default/rss.xml`, `{{- $pages := .Pages -}}
+{{- if .IsHome -}}{{- $pages = .Site.RegularPages -}}{{- end -}}
 {{- $limit := .Site.Config.Services.RSS.Limit -}}
 {{- if ge $limit 1 -}}
 {{- $pages = $pages | first $limit -}}
index 675ecd43c3a35d1fbc91e4b0cdb30d86e034d127..a3f58010e829fcbff98e9f12a8838a3e68338d1f 100644 (file)
@@ -1,4 +1,5 @@
-{{- $pages := .Data.Pages -}}
+{{- $pages := .Pages -}}
+{{- if .IsHome -}}{{- $pages = .Site.RegularPages -}}{{- end -}}
 {{- $limit := .Site.Config.Services.RSS.Limit -}}
 {{- if ge $limit 1 -}}
 {{- $pages = $pages | first $limit -}}