In the newly consolidated file cache implementation, we forgot that assets (SCSS transformations etc.) are also looked up in the theme(s), which is a problem for Netlify and the demo sites.
Fixes #5460
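For illustration, a minimal sketch of the behaviour after this change, modelled on the tests in this diff (the package name, test name and config values are hypothetical, not part of the change): caches whose `dir` resolves under `:resourceDir/_gen` are mounted on the composite resources filesystem and therefore also see files provided by themes, while the other caches keep using the plain source filesystem.

```go
// Hypothetical example, mirroring the tests in this diff; not part of the change itself.
package filecache_test

import (
	"testing"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/gohugoio/hugo/config"
	"github.com/gohugoio/hugo/helpers"
	"github.com/gohugoio/hugo/hugofs"
)

func TestThemeAwareResourceCache(t *testing.T) {
	// Example config only; the dir placeholders mirror Hugo's defaults.
	cfg, err := config.FromConfigString(`
workingDir   = "/my/project"
cacheDir     = "/cache"
resourceDir  = "resources"
contentDir   = "content"
dataDir      = "data"
i18nDir      = "i18n"
layoutDir    = "layouts"
assetDir     = "assets"
archetypeDir = "archetypes"

[caches]
[caches.images]
maxAge = "200ms"
dir = ":resourceDir/_gen"   # now mounted on the themes-aware resources filesystem
[caches.getjson]
maxAge = "10m"
dir = ":cacheDir/:project"  # unchanged: plain source filesystem
`, "toml")
	if err != nil {
		t.Fatal(err)
	}

	fs := hugofs.NewMem(cfg)

	// NewCaches replaces NewCachesFromPaths and takes a *helpers.PathSpec,
	// which exposes BaseFs.Resources.Fs for the resource-dir caches.
	p, err := helpers.NewPathSpec(fs, cfg)
	if err != nil {
		t.Fatal(err)
	}

	caches, err := filecache.NewCaches(p)
	if err != nil {
		t.Fatal(err)
	}

	if caches.Get("images") == nil {
		t.Fatal("images cache not configured")
	}
}
```

The key difference from before is that `filecache.NewCaches` takes a `*helpers.PathSpec` instead of `*paths.Paths`, which is what gives it access to `BaseFs.Resources.Fs`.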
"bytes"
"io"
"io/ioutil"
+ "os"
"path/filepath"
"strings"
"sync"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugolib/paths"
-
"github.com/BurntSushi/locker"
"github.com/spf13/afero"
)
return f[strings.ToLower(name)]
}
-// NewCachesFromPaths creates a new set of file caches from the given
+// NewCaches creates a new set of file caches from the given
// configuration.
-func NewCachesFromPaths(p *paths.Paths) (Caches, error) {
+func NewCaches(p *helpers.PathSpec) (Caches, error) {
dcfg, err := decodeConfig(p)
if err != nil {
return nil, err
}
- genDir := filepath.FromSlash("/_gen")
-
fs := p.Fs.Source
m := make(Caches)
for k, v := range dcfg {
+ var cfs afero.Fs
+
+ if v.isResourceDir {
+ cfs = p.BaseFs.Resources.Fs
+ } else {
+ cfs = fs
+ }
+
var baseDir string
- if !strings.Contains(v.Dir, genDir) {
+ if !strings.HasPrefix(v.Dir, "_gen") {
		// We do cache eviction (file removals) and since the user can set
		// their own cache directory, we really want to make sure
// we do not delete any files that do not belong to this cache.
} else {
baseDir = filepath.Join(v.Dir, k)
}
- if err = fs.MkdirAll(baseDir, 0777); err != nil {
+ if err = cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
return nil, err
}
- bfs := afero.NewBasePathFs(fs, baseDir)
+
+ bfs := afero.NewBasePathFs(cfs, baseDir)
+
m[k] = NewCache(bfs, v.MaxAge)
}
"time"
"github.com/gohugoio/hugo/helpers"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/bep/mapstructure"
"github.com/pkg/errors"
// The directory where files are stored.
Dir string
+
+	// If true, resources/_gen will get its own composite filesystem that
+	// also checks any theme.
+ isResourceDir bool
}
// GetJSONCache gets the file cache for getJSON.
return f[cacheKeyAssets]
}
-func decodeConfig(p *paths.Paths) (cachesConfig, error) {
+func decodeConfig(p *helpers.PathSpec) (cachesConfig, error) {
c := make(cachesConfig)
valid := make(map[string]bool)
// Add defaults
for i, part := range parts {
if strings.HasPrefix(part, ":") {
- resolved, err := resolveDirPlaceholder(p, part)
+ resolved, isResource, err := resolveDirPlaceholder(p, part)
if err != nil {
return c, err
}
+ if isResource {
+ v.isResourceDir = true
+ }
parts[i] = resolved
}
}
}
v.Dir = filepath.Clean(filepath.FromSlash(dir))
- if isOsFs && !filepath.IsAbs(v.Dir) {
- return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
- }
+ if !v.isResourceDir {
+ if isOsFs && !filepath.IsAbs(v.Dir) {
+ return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
+ }
- // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
- if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
- return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+ // Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
+ if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
+ return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
+ }
}
if disabled {
}
// Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
-func resolveDirPlaceholder(p *paths.Paths, placeholder string) (string, error) {
+func resolveDirPlaceholder(p *helpers.PathSpec, placeholder string) (cacheDir string, isResource bool, err error) {
switch strings.ToLower(placeholder) {
case ":resourcedir":
- return p.AbsResourcesDir, nil
+ return "", true, nil
case ":cachedir":
- return helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+ d, err := helpers.GetCacheDir(p.Fs.Source, p.Cfg)
+ return d, false, err
case ":project":
- return filepath.Base(p.WorkingDir), nil
+ return filepath.Base(p.WorkingDir), false, nil
}
- return "", errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
+	return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir, :resourceDir or :project)", placeholder)
}
"testing"
"time"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/spf13/viper"
"github.com/stretchr/testify/require"
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10m"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
ignoreCache = true
[caches]
[caches.getJSON]
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
func TestDecodeConfigDefault(t *testing.T) {
assert := require.New(t)
- cfg := viper.New()
- cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+ cfg := newTestConfig()
if runtime.GOOS == "windows" {
cfg.Set("resourceDir", "c:\\cache\\resources")
}
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
decoded, err := decodeConfig(p)
assert.Equal(4, len(decoded))
+ imgConfig := decoded[cacheKeyImages]
+ jsonConfig := decoded[cacheKeyGetJSON]
+
if runtime.GOOS == "windows" {
- assert.Equal("c:\\cache\\resources\\_gen", decoded[cacheKeyImages].Dir)
+ assert.Equal("_gen", imgConfig.Dir)
} else {
- assert.Equal("/cache/resources/_gen", decoded[cacheKeyImages].Dir)
- assert.Equal("/cache/thecache/hugoproject", decoded[cacheKeyGetJSON].Dir)
+ assert.Equal("_gen", imgConfig.Dir)
+ assert.Equal("/cache/thecache/hugoproject", jsonConfig.Dir)
}
+
+ assert.True(imgConfig.isResourceDir)
+ assert.False(jsonConfig.isResourceDir)
}
func TestDecodeConfigInvalidDir(t *testing.T) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10m"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
_, err = decodeConfig(p)
assert.Error(err)
}
+
+func newTestConfig() *viper.Viper {
+ cfg := viper.New()
+ cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
+ cfg.Set("contentDir", "content")
+ cfg.Set("dataDir", "data")
+ cfg.Set("resourceDir", "resources")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("archetypeDir", "archetypes")
+ cfg.Set("assetDir", "assets")
+
+ return cfg
+}
"time"
"github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/stretchr/testify/require"
)
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getjson]
maxAge = "200ms"
dir = "/cache/c"
-
+[caches.getcsv]
+maxAge = "200ms"
+dir = "/cache/d"
+[caches.assets]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
+[caches.images]
+maxAge = "200ms"
+dir = ":resourceDir/_gen"
`
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
- fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
- assert.NoError(err)
-
- caches, err := NewCachesFromPaths(p)
- assert.NoError(err)
- jsonCache := caches.GetJSONCache()
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- jsonCache.GetOrCreateBytes(id, func() ([]byte, error) {
- return []byte("abc"), nil
- })
- if i == 4 {
- // This will expire the first 5
- time.Sleep(201 * time.Millisecond)
+ for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
+ msg := fmt.Sprintf("cache: %s", name)
+ fs := hugofs.NewMem(cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
+ assert.NoError(err)
+ caches, err := NewCaches(p)
+ assert.NoError(err)
+ cache := caches[name]
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ cache.GetOrCreateBytes(id, func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
+ if i == 4 {
+ // This will expire the first 5
+ time.Sleep(201 * time.Millisecond)
+ }
}
- }
- count, err := caches.Prune()
- assert.NoError(err)
- assert.Equal(5, count)
-
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- v := jsonCache.getString(id)
- if i < 5 {
- assert.Equal("", v, id)
- } else {
- assert.Equal("abc", v, id)
+ count, err := caches.Prune()
+ assert.NoError(err)
+ assert.Equal(5, count, msg)
+
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i < 5 {
+ assert.Equal("", v, id)
+ } else {
+ assert.Equal("abc", v, id)
+ }
}
- }
- caches, err = NewCachesFromPaths(p)
- assert.NoError(err)
- jsonCache = caches.GetJSONCache()
- // Touch one and then prune.
- jsonCache.GetOrCreateBytes("i5", func() ([]byte, error) {
- return []byte("abc"), nil
- })
+ caches, err = NewCaches(p)
+ assert.NoError(err)
+ cache = caches[name]
+ // Touch one and then prune.
+ cache.GetOrCreateBytes("i5", func() ([]byte, error) {
+ return []byte("abc"), nil
+ })
- count, err = caches.Prune()
- assert.NoError(err)
- assert.Equal(4, count)
-
- // Now only the i5 should be left.
- for i := 0; i < 10; i++ {
- id := fmt.Sprintf("i%d", i)
- v := jsonCache.getString(id)
- if i != 5 {
- assert.Equal("", v, id)
- } else {
- assert.Equal("abc", v, id)
+ count, err = caches.Prune()
+ assert.NoError(err)
+ assert.Equal(4, count)
+
+ // Now only the i5 should be left.
+ for i := 0; i < 10; i++ {
+ id := fmt.Sprintf("i%d", i)
+ v := cache.getString(id)
+ if i != 5 {
+ assert.Equal("", v, id)
+ } else {
+ assert.Equal("abc", v, id)
+ }
}
+
}
}
"time"
"github.com/gohugoio/hugo/common/hugio"
-
"github.com/gohugoio/hugo/config"
+ "github.com/gohugoio/hugo/helpers"
+
"github.com/gohugoio/hugo/hugofs"
- "github.com/gohugoio/hugo/hugolib/paths"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
workingDir = "/my/work"
resourceDir = "resources"
cacheDir = "CACHEDIR"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getJSON]
maxAge = "10h"
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- caches, err := NewCachesFromPaths(p)
+ caches, err := NewCaches(p)
assert.NoError(err)
c := caches.Get("GetJSON")
bfs, ok = c.Fs.(*afero.BasePathFs)
assert.True(ok)
filename, _ = bfs.RealPath("key")
- assert.Equal(filepath.FromSlash("/my/work/resources/_gen/images/key"), filename)
+ assert.Equal(filepath.FromSlash("_gen/images/key"), filename)
rf := func(s string) func() (io.ReadCloser, error) {
return func() (io.ReadCloser, error) {
configStr := `
resourceDir = "myresources"
+contentDir = "content"
+dataDir = "data"
+i18nDir = "i18n"
+layoutDir = "layouts"
+assetDir = "assets"
+archetypeDir = "archetypes"
+
[caches]
[caches.getjson]
maxAge = "1s"
cfg, err := config.FromConfigString(configStr, "toml")
assert.NoError(err)
fs := hugofs.NewMem(cfg)
- p, err := paths.New(fs, cfg)
+ p, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- caches, err := NewCachesFromPaths(p)
+ caches, err := NewCaches(p)
assert.NoError(err)
const cacheName = "getjson"
return nil, err
}
- fileCaches, err := filecache.NewCachesFromPaths(ps.Paths)
+ fileCaches, err := filecache.NewCaches(ps)
if err != nil {
return nil, errors.WithMessage(err, "failed to create file caches from configuration")
}
github.com/tdewolff/test v1.0.0/go.mod h1:DiQUlutnqlEvdvhSn2LPGy4TFwRauAaYDsL+683RNX4=
github.com/wellington/go-libsass v0.0.0-20180624165032-615eaa47ef79 h1:ivqgxj/zO3UZuzX7ZnlcyX8cAbNqLl1oes4zPddAO5Q=
github.com/wellington/go-libsass v0.0.0-20180624165032-615eaa47ef79/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
+github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701 h1:9vG9vvVNVupO4Y7uwFkRgIMNe9rdaJMCINDe8vhAhLo=
github.com/wellington/go-libsass v0.9.3-0.20181113175235-c63644206701/go.mod h1:mxgxgam0N0E+NAUMHLcu20Ccfc3mVpDkyrLDayqfiTs=
github.com/yosssi/ace v0.0.5 h1:tUkIP/BLdKqrlrPwcmH0shwEEhTRHoGnc1wFIWmaBUA=
github.com/yosssi/ace v0.0.5/go.mod h1:ALfIzm2vT7t5ZE7uoIZqF3TQ7SAOyupFZnkrF5id+K0=
Layouts *SourceFilesystem
Archetypes *SourceFilesystem
Assets *SourceFilesystem
+ Resources *SourceFilesystem
// This is a unified read-only view of the project's and themes' workdir.
Work *SourceFilesystem
}
b.result.Assets = sfs
+ sfs, err = b.createFs(true, false, "resourceDir", "resources")
+ if err != nil {
+ return nil, err
+ }
+
+ b.result.Resources = sfs
+
sfs, err = b.createFs(false, true, "", "")
if err != nil {
return nil, err
checkFileCount(bfs.Data.Fs, "", assert, 9) // 7 + 2 themes
checkFileCount(bfs.Archetypes.Fs, "", assert, 10) // 8 + 2 themes
checkFileCount(bfs.Assets.Fs, "", assert, 9)
+ checkFileCount(bfs.Resources.Fs, "", assert, 10)
checkFileCount(bfs.Work.Fs, "", assert, 78)
assert.Equal([]string{filepath.FromSlash("/my/work/mydata"), filepath.FromSlash("/my/work/themes/btheme/data"), filepath.FromSlash("/my/work/themes/atheme/data")}, bfs.Data.Dirnames)
assert.Equal(filepath.Join(root, "myassets/scss"), realDirs[0])
assert.Equal(filepath.Join(themesDir, "mytheme/assets/scss"), realDirs[len(realDirs)-1])
+ checkFileCount(bfs.Resources.Fs, "", assert, 3)
+
assert.NotNil(bfs.themeFs)
fi, b, err := bfs.themeFs.(afero.Lstater).LstatIfPossible(filepath.Join("resources", "t1.txt"))
assert.NoError(err)
s, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- filecaches, err := filecache.NewCachesFromPaths(s.Paths)
+ filecaches, err := filecache.NewCaches(s)
assert.NoError(err)
spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes)
s, err := helpers.NewPathSpec(fs, cfg)
assert.NoError(err)
- filecaches, err := filecache.NewCachesFromPaths(s.Paths)
+ filecaches, err := filecache.NewCaches(s)
assert.NoError(err)
spec, err := NewSpec(s, filecaches, nil, output.DefaultFormats, media.DefaultTypes)
"testing"
"time"
- "github.com/gohugoio/hugo/hugolib/paths"
+ "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/cache/filecache"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/deps"
- "github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs"
"github.com/spf13/afero"
func newDeps(cfg config.Provider) *deps.Deps {
cfg.Set("resourceDir", "resources")
+	cfg.Set("dataDir", "data")
+ cfg.Set("i18nDir", "i18n")
+ cfg.Set("assetDir", "assets")
+ cfg.Set("layoutDir", "layouts")
+ cfg.Set("archetypeDir", "archetypes")
+
l := langs.NewLanguage("en", cfg)
l.Set("i18nDir", "i18n")
cs, err := helpers.NewContentSpec(l)
fs := hugofs.NewMem(l)
logger := loggers.NewErrorLogger()
- p, _ := paths.New(fs, cfg)
- fileCaches, err := filecache.NewCachesFromPaths(p)
+ p, err := helpers.NewPathSpec(fs, cfg)
+ if err != nil {
+ panic(err)
+ }
+
+ fileCaches, err := filecache.NewCaches(p)
if err != nil {
panic(err)
}