# Smart BDM - Robots.txt Configuration
# Generated: 2026-02-11T07:22:14.112Z

# Allow all crawlers to access the site
User-agent: *
Allow: /

# Block access to authentication and admin areas
Disallow: /auth/
Disallow: /cms/

# Block access to temporary and cache files
# (the "*" wildcard and "$" end-anchor below are Google/Bing extensions,
# not part of the base RFC 9309 syntax; other crawlers may ignore them)
Disallow: /*.json$
Disallow: /temp/
Disallow: /_next/

# Rate limiting for crawlers to prevent server overload
# (NOTE: Googlebot ignores Crawl-delay; Bing and Yandex honor it)
Crawl-delay: 1

# Specific rules for major search engines
User-agent: Googlebot
Allow: /
Disallow: /auth/
Disallow: /cms/
Disallow: /admin/

User-agent: Bingbot
Allow: /
Disallow: /auth/
Disallow: /cms/
Disallow: /admin/

# Block aggressive SEO crawlers that may impact performance
User-agent: AhrefsBot
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: MJ12bot
Disallow: /

# Static pages sitemap
Sitemap: https://blog.smartbdm.ai/sitemap.xml

# Dynamic blog content sitemap
Sitemap: https://blog.smartbdm.ai/sitemap-dynamic.xml

# Host directive for canonical domain
# (NOTE: Yandex-only and deprecated since 2018; harmless to other crawlers)
Host: https://blog.smartbdm.ai