User-agent: *
Allow: /
Allow: /stock/
Allow: /tools/
Allow: /docs/
Allow: /articles
Allow: /pricing
Allow: /research/

# Disallow admin and system paths
Disallow: /umbraco/
Disallow: /App_Data/
Disallow: /App_Plugins/
Disallow: /bin/
Disallow: /config/
Disallow: /api/

# Disallow noindex and redirected pages
Disallow: /compare/
Disallow: /embed/
Disallow: /features/
Disallow: /internal/
Disallow: /profile
Disallow: /payment-processing
Disallow: /confirm-email
Disallow: /reset-password

# Disallow old deprecated paths
Disallow: /xmlsitemap.php
Disallow: /sitemap.php

# Default crawl-delay for other bots (kept in this single "*" group —
# splitting it into a second "User-agent: *" group risks parsers that
# honor only one group per agent dropping rules)
Crawl-delay: 1

# Canonical sitemap location (Sitemap is a standalone directive, valid anywhere)
Sitemap: https://flashalpha.com/sitemap.xml

# Googlebot — Google ignores Crawl-delay, so none is set
User-agent: Googlebot
Allow: /

# Bingbot
User-agent: bingbot
Allow: /
Crawl-delay: 1

# AI crawlers — explicitly allow indexing for LLM visibility.
# If Cloudflare's "Block AI Scrapers" feature is enabled it will prepend
# Disallow rules above this block and take precedence. Turn that feature
# OFF in Cloudflare Dashboard → Security → Bots → AI Scrapers and Crawlers.
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: OAI-SearchBot
Allow: /

User-agent: Google-Extended
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: Perplexity-User
Allow: /

User-agent: CCBot
Allow: /

User-agent: Applebot-Extended
Allow: /

User-agent: Amazonbot
Allow: /

User-agent: Meta-ExternalAgent
Allow: /

User-agent: FacebookBot
Allow: /

User-agent: Bytespider
Allow: /

# Host directive for canonical domain (non-www).
# NOTE(review): Host is non-standard (historically Yandex-only) and is
# ignored by other crawlers; harmless but not portable.
Host: https://flashalpha.com