# robots.txt for Harry The DevOps Guy Portfolio
# https://harrythedevopsguy.github.io/robots.txt

# Default rules for all crawlers
User-agent: *
Allow: /

# Keep build output, dependency, and Jekyll source directories out of the index
Disallow: /_site/
Disallow: /vendor/
Disallow: /node_modules/
Disallow: /_backup/
Disallow: /_plugins/
Disallow: /_data/
Disallow: /_includes/
Disallow: /_layouts/
Disallow: /_posts/

# Block config and manifest files. The "*" and "$" wildcards are pattern
# extensions honored by Googlebot and Bingbot, not part of the original
# robots.txt standard; e.g. "/*.json$" matches /manifest.json and
# /assets/data.json but not /data.json.html, since "$" anchors the end of the URL.
Disallow: /*.json$
Disallow: /*.yml$
Disallow: /*.yaml$
Disallow: /*.lock$
Disallow: /Gemfile
Disallow: /package.json
Disallow: /Makefile
Disallow: /README.md
Disallow: /tailwind.config.js
Disallow: /postcss.config.js

# Explicitly allow important content pages
Allow: /blog/
Allow: /docs/
Allow: /portfolio/
Allow: /tools/
Allow: /cv/
Allow: /courses/
Allow: /garden/
Allow: /assets/

# Crawl-delay for respectful crawling. Kept inside this group so parsers
# attach it to "User-agent: *"; Bingbot and Slurp honor it, Googlebot ignores it.
Crawl-delay: 1

# Sitemap location (a standalone directive, read independently of any group)
Sitemap: https://harrythedevopsguy.github.io/sitemap.xml

# No separate groups for Googlebot, Bingbot, or Slurp: a named group replaces
# the "*" group entirely for that crawler, so a per-bot group containing only
# "Allow: /" would let that bot ignore every Disallow above. Per-bot crawl
# delays would require repeating the full rule set, and Googlebot ignores
# Crawl-delay regardless.
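
# Verification sketch (informative comment only; crawlers ignore "#" lines,
# and this assumes curl is available): confirm the file is served from the
# site root as plain text with HTTP 200.
#   curl -I https://harrythedevopsguy.github.io/robots.txt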