hugolib: Fix regressions with uglyURLs
author: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
Sat, 26 Nov 2016 14:50:32 +0000 (15:50 +0100)
committer: Bjørn Erik Pedersen <bjorn.erik.pedersen@gmail.com>
Sun, 27 Nov 2016 13:36:17 +0000 (14:36 +0100)
Fixes #2734

helpers/url.go
helpers/url_test.go
hugolib/embedded_shortcodes_test.go
hugolib/menu_test.go
hugolib/node_as_page_test.go
hugolib/page.go
hugolib/site.go
hugolib/site_render.go
target/page.go

index c7697b09251cdcf084ef6c8a4c2abf6183d25103..68079ce203f1e6719727b1fd19121989862bb7df 100644 (file)
@@ -292,12 +292,12 @@ func AddContextRoot(baseURL, relativePath string) string {
 // URLizeAndPrep applies misc sanitation to the given URL to get it in line
 // with the Hugo standard.
 func (p *PathSpec) URLizeAndPrep(in string) string {
-       return URLPrep(p.uglyURLs, p.URLize(in))
+       return p.URLPrep(p.URLize(in))
 }
 
 // URLPrep applies misc sanitation to the given URL.
-func URLPrep(ugly bool, in string) string {
-       if ugly {
+func (p *PathSpec) URLPrep(in string) string {
+       if p.uglyURLs {
                x := Uglify(SanitizeURL(in))
                return x
        }
index 4072b40e27c574b699f42f93d9a40339ea123fda..8dbec3f7c306351f3d3efb4cfcbe9dc84768396e 100644 (file)
@@ -248,7 +248,10 @@ func TestURLPrep(t *testing.T) {
                {true, "/section/name/index.html", "/section/name.html"},
        }
        for i, d := range data {
-               output := URLPrep(d.ugly, d.input)
+               viper.Set("uglyURLs", d.ugly)
+               p := NewPathSpecFromConfig(viper.GetViper())
+
+               output := p.URLPrep(d.input)
                if d.output != output {
                        t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
                }
index 0576ca02c7480c91d0d770aa8a7ce308c2ca2c8b..5ca2be5c6350e5f745ff0e34f6bc00d7cf4b33b1 100644 (file)
@@ -372,7 +372,7 @@ func TestShortcodeInstagram(t *testing.T) {
                }
 
                if this.expected != output {
-                       t.Errorf("[%d] unexpected rendering, got %d expected: %d", i, output, this.expected)
+                       t.Errorf("[%d] unexpected rendering, got %s expected: %s", i, output, this.expected)
                }
        }
 }
index 2fd2e43ab0f9198095dd30f1dad5a330ceeb82e8..af698799d9fd18d55af802063e2a5c870d999c50 100644 (file)
@@ -555,7 +555,7 @@ func TestHomeNodeMenu(t *testing.T) {
        testCommonResetState()
 
        viper.Set("canonifyURLs", true)
-       viper.Set("uglyURLs", true)
+       viper.Set("uglyURLs", false)
 
        s := setupMenuTests(t, menuPageSources)
 
index a152ba303f653e134ac49dd0a3603d12ac37d9a4..e40b2874a7980062cfac27a5be0d8829042bf70b 100644 (file)
@@ -16,6 +16,7 @@ package hugolib
 import (
        "fmt"
        "path/filepath"
+       "strings"
        "testing"
        "time"
 
@@ -32,6 +33,12 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
+       for _, ugly := range []bool{false, true} {
+               doTestNodeAsPage(t, ugly)
+       }
+}
+
+func doTestNodeAsPage(t *testing.T, ugly bool) {
        //jww.SetStdoutThreshold(jww.LevelDebug)
        jww.SetStdoutThreshold(jww.LevelFatal)
 
@@ -47,6 +54,8 @@ func TestNodesAsPage(t *testing.T) {
 
        testCommonResetState()
 
+       viper.Set("uglyURLs", ugly)
+
        writeLayoutsForNodeAsPageTests(t)
        writeNodePagesForNodeAsPageTests("", t)
 
@@ -73,7 +82,7 @@ func TestNodesAsPage(t *testing.T) {
                "GetPage: Section1 ",
        )
 
-       assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "regular1"), false, "Single Title: Page 01", "Content Page 01")
 
        h := s.owner
        nodes := h.findAllPagesByKindNotIn(KindPage)
@@ -99,24 +108,24 @@ func TestNodesAsPage(t *testing.T) {
        require.True(t, first.IsPage())
 
        // Check Home paginator
-       assertFileContent(t, filepath.Join("public", "page", "2", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "page", "2"), false,
                "Pag: Page 02")
 
        // Check Sections
-       assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
                "Section Title: Section", "Section1 <strong>Content!</strong>",
                "Date: 2009-01-04",
                "Lastmod: 2009-01-05",
        )
 
-       assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
                "Section Title: Section", "Section2 <strong>Content!</strong>",
                "Date: 2009-01-06",
                "Lastmod: 2009-01-07",
        )
 
        // Check Sections paginator
-       assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect1", "page", "2"), false,
                "Pag: Page 02")
 
        sections := h.findAllPagesByKind(KindSection)
@@ -124,13 +133,13 @@ func TestNodesAsPage(t *testing.T) {
        require.Len(t, sections, 2)
 
        // Check taxonomy lists
-       assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
                "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo <strong>Content!</strong>",
                "Date: 2009-01-08",
                "Lastmod: 2009-01-09",
        )
 
-       assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories", "web"), false,
                "Taxonomy Title: Taxonomy Web",
                "Taxonomy Web <strong>Content!</strong>",
                "Date: 2009-01-10",
@@ -138,12 +147,12 @@ func TestNodesAsPage(t *testing.T) {
        )
 
        // Check taxonomy list paginator
-       assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo", "page", "2"), false,
                "Taxonomy Title: Taxonomy Hugo",
                "Pag: Page 02")
 
        // Check taxonomy terms
-       assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
                "Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories <strong>Content!</strong>", "k/v: hugo",
                "Date: 2009-01-12",
                "Lastmod: 2009-01-13",
@@ -161,6 +170,12 @@ func TestNodesAsPage(t *testing.T) {
 }
 
 func TestNodesWithNoContentFile(t *testing.T) {
+       for _, ugly := range []bool{false, true} {
+               doTestNodesWithNoContentFile(t, ugly)
+       }
+}
+
+func doTestNodesWithNoContentFile(t *testing.T, ugly bool) {
        //jww.SetStdoutThreshold(jww.LevelDebug)
        jww.SetStdoutThreshold(jww.LevelFatal)
 
@@ -169,6 +184,7 @@ func TestNodesWithNoContentFile(t *testing.T) {
        writeLayoutsForNodeAsPageTests(t)
        writeRegularPagesForNodeAsPageTests(t)
 
+       viper.Set("uglyURLs", ugly)
        viper.Set("paginate", 1)
        viper.Set("title", "Hugo Rocks!")
        viper.Set("rssURI", "customrss.xml")
@@ -195,25 +211,25 @@ func TestNodesWithNoContentFile(t *testing.T) {
        )
 
        // Taxonomy list
-       assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories", "hugo"), false,
                "Taxonomy Title: Hugo",
                "Date: 2010-06-12",
                "Lastmod: 2010-06-13",
        )
 
        // Taxonomy terms
-       assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "categories"), false,
                "Taxonomy Terms Title: Categories",
        )
 
        // Sections
-       assertFileContent(t, filepath.Join("public", "sect1", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect1"), false,
                "Section Title: Sect1s",
                "Date: 2010-06-12",
                "Lastmod: 2010-06-13",
        )
 
-       assertFileContent(t, filepath.Join("public", "sect2", "index.html"), false,
+       assertFileContent(t, expectedFilePath(ugly, "public", "sect2"), false,
                "Section Title: Sect2s",
                "Date: 2008-07-06",
                "Lastmod: 2008-07-09",
@@ -229,9 +245,17 @@ func TestNodesWithNoContentFile(t *testing.T) {
 }
 
 func TestNodesAsPageMultilingual(t *testing.T) {
+       for _, ugly := range []bool{true, false} {
+               doTestNodesAsPageMultilingual(t, ugly)
+       }
+}
+
+func doTestNodesAsPageMultilingual(t *testing.T, ugly bool) {
 
        testCommonResetState()
 
+       viper.Set("uglyURLs", ugly)
+
        writeLayoutsForNodeAsPageTests(t)
 
        writeSource(t, "config.toml",
@@ -302,6 +326,7 @@ title = "Deutsche Hugo"
        require.Len(t, deHome.Translations(), 2, deHome.Translations()[0].Language().Lang)
        require.Equal(t, "en", deHome.Translations()[1].Language().Lang)
        require.Equal(t, "nn", deHome.Translations()[0].Language().Lang)
+       require.Equal(t, expetedPermalink(ugly, "/de/"), deHome.Permalink())
 
        enSect := sites.Sites[1].getPage("section", "sect1")
        require.NotNil(t, enSect)
@@ -310,6 +335,8 @@ title = "Deutsche Hugo"
        require.Equal(t, "de", enSect.Translations()[1].Language().Lang)
        require.Equal(t, "nn", enSect.Translations()[0].Language().Lang)
 
+       require.Equal(t, expetedPermalink(ugly, "/en/sect1/"), enSect.Permalink())
+
        assertFileContent(t, filepath.Join("public", "nn", "index.html"), true,
                "Index Title: Hugo på norsk")
        assertFileContent(t, filepath.Join("public", "en", "index.html"), true,
@@ -318,27 +345,33 @@ title = "Deutsche Hugo"
                "Index Title: Home Sweet Home!", "<strong>Content!</strong>")
 
        // Taxonomy list
-       assertFileContent(t, filepath.Join("public", "nn", "categories", "hugo", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories", "hugo"), true,
                "Taxonomy Title: Hugo")
-       assertFileContent(t, filepath.Join("public", "en", "categories", "hugo", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories", "hugo"), true,
                "Taxonomy Title: Taxonomy Hugo")
 
        // Taxonomy terms
-       assertFileContent(t, filepath.Join("public", "nn", "categories", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "nn", "categories"), true,
                "Taxonomy Terms Title: Categories")
-       assertFileContent(t, filepath.Join("public", "en", "categories", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "en", "categories"), true,
                "Taxonomy Terms Title: Taxonomy Term Categories")
 
        // Sections
-       assertFileContent(t, filepath.Join("public", "nn", "sect1", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1"), true,
                "Section Title: Sect1s")
-       assertFileContent(t, filepath.Join("public", "nn", "sect2", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect2"), true,
                "Section Title: Sect2s")
-       assertFileContent(t, filepath.Join("public", "en", "sect1", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1"), true,
                "Section Title: Section1")
-       assertFileContent(t, filepath.Join("public", "en", "sect2", "index.html"), true,
+       assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect2"), true,
                "Section Title: Section2")
 
+       // Regular pages
+       assertFileContent(t, expectedFilePath(ugly, "public", "en", "sect1", "regular1"), true,
+               "Single Title: Page 01")
+       assertFileContent(t, expectedFilePath(ugly, "public", "nn", "sect1", "regular2"), true,
+               "Single Title: Page 02")
+
        // RSS
        assertFileContent(t, filepath.Join("public", "nn", "customrss.xml"), true, "Hugo på norsk", "<rss")
        assertFileContent(t, filepath.Join("public", "nn", "sect1", "customrss.xml"), true, "Recent content in Sect1s on Hugo på norsk", "<rss")
@@ -660,3 +693,17 @@ Date: {{ .Date.Format "2006-01-02" }}
 Lastmod: {{ .Lastmod.Format "2006-01-02" }}
 `)
 }
+
+func expectedFilePath(ugly bool, path ...string) string {
+       if ugly {
+               return filepath.Join(append(path[0:len(path)-1], path[len(path)-1]+".html")...)
+       }
+       return filepath.Join(append(path, "index.html")...)
+}
+
+func expetedPermalink(ugly bool, path string) string {
+       if ugly {
+               return strings.TrimSuffix(path, "/") + ".html"
+       }
+       return path
+}
index 883d70c43628abf8a55f299735cf40e8fd9a021c..43ec4a360da40ff77bf8c50348fdb6c57273f7d4 100644 (file)
@@ -733,6 +733,7 @@ func (p *Page) permalink() (*url.URL, error) {
                // No permalink config for nodes (currently)
                pURL := strings.TrimSpace(p.Site.pathSpec.URLize(p.URLPath.URL))
                pURL = p.addLangPathPrefix(pURL)
+               pURL = p.Site.pathSpec.URLPrep(path.Join(pURL, "index."+p.Extension()))
                url := helpers.MakePermalink(baseURL, pURL)
                return url, nil
        }
@@ -755,10 +756,10 @@ func (p *Page) permalink() (*url.URL, error) {
                }
        } else {
                if len(pSlug) > 0 {
-                       permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, p.Slug+"."+p.Extension()))
+                       permalink = p.Site.pathSpec.URLPrep(path.Join(dir, p.Slug+"."+p.Extension()))
                } else {
                        t := p.Source.TranslationBaseName()
-                       permalink = helpers.URLPrep(viper.GetBool("uglyURLs"), path.Join(dir, (strings.TrimSpace(t)+"."+p.Extension())))
+                       permalink = p.Site.pathSpec.URLPrep(path.Join(dir, (strings.TrimSpace(t) + "." + p.Extension())))
                }
        }
 
index 01539bccc445844717df0997bef84d87831c6af2..87ab050b0e021a90486d237cd80152025546d0db 100644 (file)
@@ -1865,16 +1865,22 @@ func (s *Site) languageAliasTarget() target.AliasPublisher {
 
 func (s *Site) initTargetList() {
        s.targetListInit.Do(func() {
+               langDir := ""
+               if s.Language.Lang != s.Info.multilingual.DefaultLang.Lang || s.Info.defaultContentLanguageInSubdir {
+                       langDir = s.Language.Lang
+               }
                if s.targets.page == nil {
                        s.targets.page = &target.PagePub{
                                PublishDir: s.absPublishDir(),
                                UglyURLs:   viper.GetBool("uglyURLs"),
+                               LangDir:    langDir,
                        }
                }
                if s.targets.pageUgly == nil {
                        s.targets.pageUgly = &target.PagePub{
                                PublishDir: s.absPublishDir(),
                                UglyURLs:   true,
+                               LangDir:    langDir,
                        }
                }
                if s.targets.file == nil {
index 82523e88f5eb5895fdfa9fc18117474c47486611..94fccd9504e9f054d790f58fd0ccf43bea8676d1 100644 (file)
@@ -118,7 +118,7 @@ func (s *Site) renderPaginator(p *Page) error {
                        }
 
                        pageNumber := i + 1
-                       htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
+                       htmlBase := path.Join(append(p.sections, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))...)
                        htmlBase = p.addLangPathPrefix(htmlBase)
 
                        if err := s.renderAndWritePage(pagerNode.Title,
@@ -156,7 +156,8 @@ func (s *Site) renderRSS(p *Page) error {
                rssPage.Data["Pages"] = rssPage.Pages
        }
        rssURI := s.Language.GetString("rssURI")
-       rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+
+       rssPath := path.Join(append(rssPage.sections, rssURI)...)
        s.setPageURLs(rssPage, rssPath)
 
        return s.renderAndWriteXML(rssPage.Title,
index eb7bb59435d398a65c39a116538856139a790f0e..ab38ded5871d7e962f58adeed6266dabde9f7267 100644 (file)
@@ -31,6 +31,10 @@ type PagePub struct {
        UglyURLs         bool
        DefaultExtension string
        PublishDir       string
+
+       // LangDir will contain the subdir for the language, i.e. "en", "de" etc.
+       // It will be empty if the site is rendered in root.
+       LangDir string
 }
 
 func (pp *PagePub) Publish(path string, r io.Reader) (err error) {
@@ -64,6 +68,14 @@ func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
        ext := pp.extension(filepath.Ext(file))
        name := filename(file)
 
+       // TODO(bep) Having all of this path logic here seems wrong, but I guess
+       // we'll clean this up when we redo the output files.
+       // This catches the home page in a language sub path. They should never
+       // have any ugly URLs.
+       if pp.LangDir != "" && dir == helpers.FilePathSeparator && name == pp.LangDir {
+               return filepath.Join(dir, name, "index"+ext), nil
+       }
+
        if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
                return filepath.Join(dir, name+ext), nil
        }