Social SiteSocial
Sections Taxonomy
Pages *Pages
- Files []*source.File
+ Files *[]*source.File
Menus *Menus
Hugo *HugoInfo
Title string
return s.refLink(ref, page, true)
}
+// GitHub resolves a GitHub-repository-style link (e.g. "/docs/foo.md" or a
+// path relative to page) to the relative permalink of the matching page.
+// It is the template-facing wrapper around githubLink with relative=true.
+func (s *SiteInfo) GitHub(ref string, page *Page) (string, error) {
+ return s.githubLink(ref, page, true)
+}
+
+// githubLink resolves ref — a repository-style path, optionally prefixed with
+// the repository docs dir ("/docs") — to the permalink of the matching page.
+// currentPage anchors relative paths; relative selects RelPermalink over
+// Permalink. Absolute URLs (non-empty scheme) are returned unchanged.
+func (s *SiteInfo) githubLink(ref string, currentPage *Page, relative bool) (string, error) {
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+
+	refURL, err := url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		// Not a plain filepath link; treat this as not an error and use the link as-is.
+		return ref, nil
+	}
+
+	var target *Page
+	var link string
+
+	if refURL.Path != "" {
+		refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+
+		if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails for me.
+			refPath = refPath[1:]
+		} else if currentPage != nil {
+			// Relative reference: resolve against the current page's source dir.
+			refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+		}
+
+		// Try the path verbatim, then with ".md" appended, then as a section
+		// index ("<path>/index.md"). First match wins, so a verbatim hit is
+		// never clobbered by a later fallback.
+		candidates := []string{
+			refPath,
+			strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md",
+			filepath.Join(refPath, "index.md"),
+		}
+	search:
+		for _, candidate := range candidates {
+			for _, page := range []*Page(*s.Pages) {
+				if page.Source.Path() == candidate {
+					target = page
+					break search
+				}
+			}
+		}
+
+		if target == nil {
+			return "", fmt.Errorf("No page found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+		}
+
+		// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+		if relative {
+			link, err = target.RelPermalink()
+		} else {
+			link, err = target.Permalink()
+		}
+		if err != nil {
+			return "", err
+		}
+	}
+
+	// SVEN: add tests for github style relative fragments
+	if refURL.Fragment != "" {
+		link = link + "#" + refURL.Fragment
+
+		// Mirror refLink's anchor disambiguation: suffix the target's (or the
+		// current page's) unique ID unless plain ID anchors are configured.
+		if refURL.Path != "" && target != nil && !target.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + target.UniqueID()
+		} else if currentPage != nil && !currentPage.getRenderingConfig().PlainIDAnchors {
+			link = link + ":" + currentPage.UniqueID()
+		}
+	}
+
+	return link, nil
+}
+
+// GitHubFileLink resolves a GitHub-repository-style link to a non-page source
+// file (e.g. an image), returning a root-relative URL. It is the
+// template-facing wrapper around githubFileLink with relative=false.
+func (s *SiteInfo) GitHubFileLink(ref string, page *Page) (string, error) {
+ return s.githubFileLink(ref, page, false)
+}
+
+// githubFileLink resolves ref to a non-page file in the site source tree
+// (for non-pages in the site tree, e.g. images). currentPage anchors relative
+// paths; relative selects a "./"-prefixed path over a root-relative one.
+// Absolute URLs (non-empty scheme) are returned unchanged.
+func (s *SiteInfo) githubFileLink(ref string, currentPage *Page, relative bool) (string, error) {
+	// TODO can I make this a param to `hugo --use-github-links=/docs`?
+	// SVEN: add more tests - the prefix might be a real dir inside tho - add some pages that have it as a legitimate path
+	repositoryPathPrefix := "/docs"
+
+	refURL, err := url.Parse(strings.TrimPrefix(ref, repositoryPathPrefix))
+	if err != nil {
+		return "", err
+	}
+
+	if refURL.Scheme != "" {
+		// TODO: consider looking for http(s?)://github.com/user/project/prefix and replacing it - tho this may be intentional, so idk
+		// Not a plain filepath link; treat this as not an error and use the link as-is.
+		return ref, nil
+	}
+
+	if refURL.Path == "" {
+		return "", fmt.Errorf("failed to find a file to match \"%s\" on page \"%s\"", ref, currentPage.Source.Path())
+	}
+
+	refPath := filepath.Clean(filepath.FromSlash(refURL.Path))
+
+	if strings.IndexRune(refPath, os.PathSeparator) == 0 { // filepath.IsAbs fails for me.
+		refPath = refPath[1:]
+	} else if currentPage != nil {
+		// Relative reference: resolve against the current page's source dir.
+		refPath = filepath.Join(currentPage.Source.Dir(), refURL.Path)
+	}
+
+	var target *source.File
+	for _, file := range []*source.File(*s.Files) {
+		if file.Path() == refPath {
+			target = file
+			break
+		}
+	}
+
+	if target == nil {
+		return "", fmt.Errorf("No file found for \"%s\" on page \"%s\".\n", ref, currentPage.Source.Path())
+	}
+
+	link := target.Path()
+	// SVEN: look at filepath.Rel() it might help, got the rel/non-rel url's (dangerous tho)
+	// SVEN: reconsider the fact I hardcoded the `relative` bool in both github resolvers
+	if relative {
+		return "./" + filepath.ToSlash(link), nil
+	}
+	return "/" + filepath.ToSlash(link), nil
+}
+
// addToPaginationPageCount atomically adds cnt to the site-wide paginator
// page counter.
func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
atomic.AddUint64(&s.paginationPageCount, cnt)
}
canonifyURLs: viper.GetBool("CanonifyURLs"),
preserveTaxonomyNames: viper.GetBool("PreserveTaxonomyNames"),
Pages: &s.Pages,
+ Files: &s.Files,
Menus: &s.Menus,
Params: params,
Permalinks: permalinks,
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
jww.FEEDBACK.Printf("%d pages created\n", len(s.Pages))
+ jww.FEEDBACK.Printf("%d non-page files copied\n", len(s.Files))
jww.FEEDBACK.Printf("%d paginator pages created\n", s.Info.paginationPageCount)
taxonomies := viper.GetStringMapString("Taxonomies")
package hugolib
import (
- "bitbucket.org/pkg/inflect"
"bytes"
"fmt"
"html/template"
"strings"
"testing"
+ "bitbucket.org/pkg/inflect"
+
"github.com/spf13/afero"
"github.com/spf13/hugo/helpers"
"github.com/spf13/hugo/hugofs"
return nil
}
-func TestRefLinking(t *testing.T) {
- viper.Reset()
- defer viper.Reset()
-
+func setupLinkingMockSite(t *testing.T) *Site {
hugofs.DestinationFS = new(afero.MemMapFs)
sources := []source.ByteSource{
{filepath.FromSlash("index.md"), []byte("")},
{filepath.FromSlash("rootfile.md"), []byte("")},
+ {filepath.FromSlash("root-image.png"), []byte("")},
{filepath.FromSlash("level2/2-root.md"), []byte("")},
{filepath.FromSlash("level2/index.md"), []byte("")},
{filepath.FromSlash("level2/common.md"), []byte("")},
- {filepath.FromSlash("level2b/2b-root.md"), []byte("")},
- {filepath.FromSlash("level2b/index.md"), []byte("")},
- {filepath.FromSlash("level2b/common.md"), []byte("")},
+
+// {filepath.FromSlash("level2b/2b-root.md"), []byte("")},
+// {filepath.FromSlash("level2b/index.md"), []byte("")},
+// {filepath.FromSlash("level2b/common.md"), []byte("")},
+
+ {filepath.FromSlash("level2/2-image.png"), []byte("")},
+ {filepath.FromSlash("level2/common.png"), []byte("")},
{filepath.FromSlash("level2/level3/3-root.md"), []byte("")},
{filepath.FromSlash("level2/level3/index.md"), []byte("")},
{filepath.FromSlash("level2/level3/common.md"), []byte("")},
+ {filepath.FromSlash("level2/level3/3-image.png"), []byte("")},
+ {filepath.FromSlash("level2/level3/common.png"), []byte("")},
}
site := &Site{
viper.Set("PluralizeListTitles", false)
viper.Set("CanonifyURLs", false)
- // END init mock site
+ return site
+}
+
+func TestRefLinking(t *testing.T) {
+ viper.Reset()
+ defer viper.Reset()
+ site := setupLinkingMockSite(t)
currentPage := findPage(site, "level2/level3/index.md")
if currentPage == nil {
}
// TODO: and then the failure cases.
}
+
+// TestSourceRelativeLinksing verifies that githubLink resolves
+// source-relative and "/docs"-prefixed references to page permalinks from a
+// variety of current pages.
+func TestSourceRelativeLinksing(t *testing.T) {
+	viper.Reset()
+	defer viper.Reset()
+	site := setupLinkingMockSite(t)
+
+	type resultMap map[string]string
+
+	// Map of current page -> (link ref -> expected resolved URL).
+	okresults := map[string]resultMap{
+		"index.md": {
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"rootfile.md":                   "/rootfile/",
+			"index.md":                      "/",
+			"level2/2-root.md":              "/level2/2-root/",
+			"level2/index.md":               "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"/docs/level2/index.md":         "/level2/",
+			"level2/level3/3-root.md":       "/level2/level3/3-root/",
+			"level2/level3/index.md":        "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+			"/docs/level2/2-root/":          "/level2/2-root/",
+			"/docs/level2/":                 "/level2/",
+			"/docs/level2/2-root":           "/level2/2-root/",
+			"/docs/level2":                  "/level2/",
+			"/level2/2-root/":               "/level2/2-root/",
+			"/level2/":                      "/level2/",
+			"/level2/2-root":                "/level2/2-root/",
+			"/level2":                       "/level2/",
+		},
+		"rootfile.md": {
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"rootfile.md":                   "/rootfile/",
+			"index.md":                      "/",
+			"level2/2-root.md":              "/level2/2-root/",
+			"level2/index.md":               "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"/docs/level2/index.md":         "/level2/",
+			"level2/level3/3-root.md":       "/level2/level3/3-root/",
+			"level2/level3/index.md":        "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+		},
+		"level2/2-root.md": {
+			"../rootfile.md":                "/rootfile/",
+			"../index.md":                   "/",
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"2-root.md":                     "/level2/2-root/",
+			"index.md":                      "/level2/",
+			"../level2/2-root.md":           "/level2/2-root/",
+			"../level2/index.md":            "/level2/",
+			"./2-root.md":                   "/level2/2-root/",
+			"./index.md":                    "/level2/",
+			"/docs/level2/index.md":         "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"level3/3-root.md":              "/level2/level3/3-root/",
+			"level3/index.md":               "/level2/level3/",
+			"../level2/level3/index.md":     "/level2/level3/",
+			"../level2/level3/3-root.md":    "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+		},
+		"level2/index.md": {
+			"../rootfile.md":                "/rootfile/",
+			"../index.md":                   "/",
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"2-root.md":                     "/level2/2-root/",
+			"index.md":                      "/level2/",
+			"../level2/2-root.md":           "/level2/2-root/",
+			"../level2/index.md":            "/level2/",
+			"./2-root.md":                   "/level2/2-root/",
+			"./index.md":                    "/level2/",
+			"/docs/level2/index.md":         "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"level3/3-root.md":              "/level2/level3/3-root/",
+			"level3/index.md":               "/level2/level3/",
+			"../level2/level3/index.md":     "/level2/level3/",
+			"../level2/level3/3-root.md":    "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+		},
+		"level2/level3/3-root.md": {
+			"../../rootfile.md":             "/rootfile/",
+			"../../index.md":                "/",
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"../2-root.md":                  "/level2/2-root/",
+			"../index.md":                   "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"/docs/level2/index.md":         "/level2/",
+			"3-root.md":                     "/level2/level3/3-root/",
+			"index.md":                      "/level2/level3/",
+			"./3-root.md":                   "/level2/level3/3-root/",
+			"./index.md":                    "/level2/level3/",
+			// "../level2/level3/3-root.md": "/level2/level3/3-root/",
+			// "../level2/level3/index.md":  "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+		},
+		"level2/level3/index.md": {
+			"../../rootfile.md":             "/rootfile/",
+			"../../index.md":                "/",
+			"/docs/rootfile.md":             "/rootfile/",
+			"/docs/index.md":                "/",
+			"../2-root.md":                  "/level2/2-root/",
+			"../index.md":                   "/level2/",
+			"/docs/level2/2-root.md":        "/level2/2-root/",
+			"/docs/level2/index.md":         "/level2/",
+			"3-root.md":                     "/level2/level3/3-root/",
+			"index.md":                      "/level2/level3/",
+			"./3-root.md":                   "/level2/level3/3-root/",
+			"./index.md":                    "/level2/level3/",
+			// "../level2/level3/3-root.md": "/level2/level3/3-root/",
+			// "../level2/level3/index.md":  "/level2/level3/",
+			"/docs/level2/level3/3-root.md": "/level2/level3/3-root/",
+			"/docs/level2/level3/index.md":  "/level2/level3/",
+		},
+	}
+
+	for currentFile, results := range okresults {
+		currentPage := findPage(site, currentFile)
+		if currentPage == nil {
+			t.Fatalf("failed to find current page %q in site", currentFile)
+		}
+		for link, url := range results {
+			// %v (not %s) so a nil error prints as "<nil>" instead of %!s(<nil>).
+			if out, err := site.Info.githubLink(link, currentPage, true); err != nil || out != url {
+				t.Errorf("Expected %s to resolve to (%s), got (%s) - error: %v", link, url, out, err)
+			}
+		}
+	}
+	// TODO: and then the failure cases.
+	// "https://docker.com": "",
+	// site_test.go:1094: Expected https://docker.com to resolve to (), got () - error: Not a plain filepath link (https://docker.com)
+}
+
+// TestGitHubFileLinking verifies that githubFileLink resolves references to
+// non-page source files (images) to root-relative URLs.
+func TestGitHubFileLinking(t *testing.T) {
+	viper.Reset()
+	defer viper.Reset()
+	site := setupLinkingMockSite(t)
+
+	type resultMap map[string]string
+
+	// Map of current page -> (file ref -> expected resolved URL).
+	okresults := map[string]resultMap{
+		"index.md": {
+			"/root-image.png": "/root-image.png",
+			"root-image.png":  "/root-image.png",
+		},
+		"rootfile.md": {
+			"/root-image.png": "/root-image.png",
+		},
+		"level2/2-root.md": {
+			"/root-image.png": "/root-image.png",
+			"common.png":      "/level2/common.png",
+		},
+		"level2/index.md": {
+			"/root-image.png": "/root-image.png",
+			"common.png":      "/level2/common.png",
+			"./common.png":    "/level2/common.png",
+		},
+		"level2/level3/3-root.md": {
+			"/root-image.png": "/root-image.png",
+			"common.png":      "/level2/level3/common.png",
+			"../common.png":   "/level2/common.png",
+		},
+		"level2/level3/index.md": {
+			"/root-image.png": "/root-image.png",
+			"common.png":      "/level2/level3/common.png",
+			"../common.png":   "/level2/common.png",
+		},
+	}
+
+	for currentFile, results := range okresults {
+		currentPage := findPage(site, currentFile)
+		if currentPage == nil {
+			t.Fatalf("failed to find current page %q in site", currentFile)
+		}
+		for link, url := range results {
+			// %v (not %s) so a nil error prints as "<nil>" instead of %!s(<nil>).
+			if out, err := site.Info.githubFileLink(link, currentPage, false); err != nil || out != url {
+				t.Errorf("Expected %s to resolve to (%s), got (%s) - error: %v", link, url, out, err)
+			}
+		}
+	}
+	// TODO: and then the failure cases.
+	// "https://docker.com": "",
+	// site_test.go:1094: Expected https://docker.com to resolve to (), got () - error: Not a plain filepath link (https://docker.com)
+}