var hugoCmdV *cobra.Command
// Flags that are to be added to commands.
-var BuildWatch, IgnoreCache, Draft, Future, UglyURLs, CanonifyURLs, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, PluralizeListTitles, PreserveTaxonomyNames, NoTimes, ForceSync bool
+var BuildWatch, IgnoreCache, Draft, Future, UglyURLs, CanonifyURLs, Verbose, Logging, VerboseLog, DisableRSS, DisableSitemap, DisableRobotsTXT, PluralizeListTitles, PreserveTaxonomyNames, NoTimes, ForceSync bool
var Source, CacheDir, Destination, Theme, BaseURL, CfgFile, LogFile, Editor string
// Execute adds all child commands to the root command HugoCmd and sets flags appropriately.
cmd.Flags().BoolVarP(&Future, "buildFuture", "F", false, "include content with publishdate in the future")
cmd.Flags().BoolVar(&DisableRSS, "disableRSS", false, "Do not build RSS files")
cmd.Flags().BoolVar(&DisableSitemap, "disableSitemap", false, "Do not build Sitemap file")
+ cmd.Flags().BoolVar(&DisableRobotsTXT, "disableRobotsTXT", false, "Do not build robots.txt file")
cmd.Flags().StringVarP(&Source, "source", "s", "", "filesystem path to read files relative from")
cmd.Flags().StringVarP(&CacheDir, "cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/")
cmd.Flags().BoolVarP(&IgnoreCache, "ignoreCache", "", false, "Ignores the cache directory for reading but still writes to it")
viper.SetDefault("MetaDataFormat", "toml")
viper.SetDefault("DisableRSS", false)
viper.SetDefault("DisableSitemap", false)
+ viper.SetDefault("DisableRobotsTXT", false)
viper.SetDefault("ContentDir", "content")
viper.SetDefault("LayoutDir", "layouts")
viper.SetDefault("StaticDir", "static")
if cmdV.Flags().Lookup("disableSitemap").Changed {
viper.Set("DisableSitemap", DisableSitemap)
}
+ if cmdV.Flags().Lookup("disableRobotsTXT").Changed {
+ viper.Set("DisableRobotsTXT", DisableRobotsTXT)
+ }
if cmdV.Flags().Lookup("pluralizeListTitles").Changed {
viper.Set("PluralizeListTitles", PluralizeListTitles)
}
-d, --destination="": filesystem path to write files to
--disableRSS[=false]: Do not build RSS files
--disableSitemap[=false]: Do not build Sitemap file
+ --disableRobotsTXT[=false]: Do not build robots.txt file
--editor="": edit new content with this editor, if provided
--ignoreCache[=false]: Ignores the cache directory for reading but still writes to it
--log[=false]: Enable Logging
--- /dev/null
+---
+date: 2013-07-09
+menu:
+ main:
+ parent: extras
+next: /community/mailing-list
+prev: /extras/urls
+title: Custom robots.txt
+weight: 120
+---
+
+Hugo can generate a customized [robots.txt](http://www.robotstxt.org/) in the
+[same way as any other template]({{< ref "templates/go-templates.md" >}}).
+
+By default, it generates a robots.txt which allows everything. It looks exactly like this:
+
+ User-agent: *
+
+To disable it, just set the `disableRobotsTXT` option to `true` in the [command line]({{< ref "commands/hugo.md" >}}) or the [configuration file]({{< ref "overview/configuration.md" >}}).
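+
+For example, in a YAML configuration file:
+
+    disableRobotsTXT: true
+
+or directly on the command line:
+
+    hugo --disableRobotsTXT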
+
+Hugo will use the first `robots.txt` template it finds in the following list, ordered from highest to lowest priority:
+
+* /layouts/robots.txt
+* /themes/`THEME`/layouts/robots.txt
+
+An example of a robots.txt layout is:
+
+ User-agent: *
+
+ {{range .Data.Pages}}
+ Disallow: {{.RelPermalink}}{{end}}
+
+
+This template disallows all the pages of the site, creating one `Disallow` entry for each one.
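+
+For example, for a hypothetical site with only two pages whose relative permalinks are `/post/first/` and `/post/second/`, the layout above would render something like:
+
+    User-agent: *
+
+    Disallow: /post/first/
+    Disallow: /post/second/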
menu:
main:
parent: extras
-next: /community/mailing-list
+next: /extras/robots-txt
notoc: true
prev: /extras/toc
title: URLs
disableRSS: false
# Do not build Sitemap file
disableSitemap: false
+ # Do not build robots.txt file
+ disableRobotsTXT: false
# edit new content with this editor, if provided
editor: ""
footnoteAnchorPrefix: ""
--- /dev/null
+package hugolib
+
+import (
+ "bytes"
+ "testing"
+
+ "github.com/spf13/afero"
+ "github.com/spf13/hugo/helpers"
+ "github.com/spf13/hugo/hugofs"
+ "github.com/spf13/hugo/source"
+ "github.com/spf13/viper"
+)
+
+const ROBOTSTXT_TEMPLATE = `User-agent: Googlebot
+ {{ range .Data.Pages }}
+ Disallow: {{.RelPermalink}}
+ {{ end }}
+`
+
+func TestRobotsTXTOutput(t *testing.T) {
+ viper.Reset()
+ defer viper.Reset()
+
+ hugofs.DestinationFS = new(afero.MemMapFs)
+
+ viper.Set("baseurl", "http://auth/bub/")
+
+ s := &Site{
+ Source: &source.InMemorySource{ByteSource: WEIGHTED_SOURCES},
+ }
+
+ s.initializeSiteInfo()
+
+ s.prepTemplates()
+ s.addTemplate("robots.txt", ROBOTSTXT_TEMPLATE)
+
+ if err := s.CreatePages(); err != nil {
+ t.Fatalf("Unable to create pages: %s", err)
+ }
+
+ if err := s.BuildSiteMeta(); err != nil {
+ t.Fatalf("Unable to build site metadata: %s", err)
+ }
+
+ if err := s.RenderHomePage(); err != nil {
+ t.Fatalf("Unable to RenderHomePage: %s", err)
+ }
+
+ if err := s.RenderSitemap(); err != nil {
+ t.Fatalf("Unable to RenderSitemap: %s", err)
+ }
+
+ if err := s.RenderRobotsTXT(); err != nil {
+ t.Fatalf("Unable to RenderRobotsTXT :%s", err)
+ }
+
+ robotsFile, err := hugofs.DestinationFS.Open("robots.txt")
+
+ if err != nil {
+ t.Fatalf("Unable to locate: robots.txt")
+ }
+
+ robots := helpers.ReaderToBytes(robotsFile)
+ if !bytes.HasPrefix(robots, []byte("User-agent: Googlebot")) {
+ t.Errorf("Robots file should start with 'User-agentL Googlebot'. %s", robots)
+ }
+}
return
}
s.timerStep("render and write Sitemap")
+
+ if err = s.RenderRobotsTXT(); err != nil {
+ return
+ }
+ s.timerStep("render and write robots.txt")
+
return
}
return nil
}
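+// RenderRobotsTXT renders and writes the robots.txt file, unless DisableRobotsTXT is set.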
+func (s *Site) RenderRobotsTXT() error {
+ if viper.GetBool("DisableRobotsTXT") {
+ return nil
+ }
+
+ n := s.NewNode()
+ n.Data["Pages"] = s.Pages
+
+ rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
+ outBuffer := bp.GetBuffer()
+ defer bp.PutBuffer(outBuffer)
+ err := s.render("robots", n, outBuffer, s.appendThemeTemplates(rLayouts)...)
+
+ if err == nil {
+ err = s.WriteDestFile("robots.txt", outBuffer)
+ }
+
+ return err
+}
+
func (s *Site) Stats() {
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
t.Fatalf("Unable to RenderSitemap: %s", err)
}
+ if err := s.RenderRobotsTXT(); err != nil {
+ t.Fatalf("Unable to RenderRobotsTXT :%s", err)
+ }
+
sitemapFile, err := hugofs.DestinationFS.Open("sitemap.xml")
if err != nil {
</script>
<script async src='//www.google-analytics.com/analytics.js'></script>
{{ end }}`)
+
+ t.AddInternalTemplate("_default", "robots.txt", "User-agent: *")
}