robots.txt (360 B)

# www.robotstxt.org
{{- /* Crawling is allowed only for production builds that are not built
       with the NETLIFY env var set to "true" — presumably to keep
       Netlify deploy previews out of search indexes (TODO confirm). */ -}}
{{- $crawlable := and (eq hugo.Environment "production") (ne (getenv "NETLIFY") "true") -}}
{{ if $crawlable }}
# Allow crawling of all content
{{- end }}
User-agent: *
Disallow:{{ if not $crawlable }} /{{ end }}
Sitemap: {{ "/sitemap.xml" | absURL }}