# Riskweise — robots.txt
#
# We explicitly welcome AI-engine indexing because GEO (Generative
# Engine Optimisation) requires AI crawlers to read the content.
# This file is the right place to express that — not the place to
# block sensitive content (which shouldn't be on the public site
# in the first place).
#
# Note (RFC 9309): a crawler obeys only the most specific group that
# matches its user-agent. Directives are NOT inherited from the
# "User-agent: *" group, so rules meant for every bot (Disallow /api/,
# Crawl-delay) must be repeated inside each group below.

# Default policy — every well-behaved crawler is allowed.
# API endpoints are excluded — they're not useful to crawlers.
# Crawl-delay is advisory only (e.g. Googlebot ignores it).
User-agent: *
Allow: /
Disallow: /api/
Crawl-delay: 1

# Major search engines (explicit, for clarity).
# Multiple User-agent lines share one rule group (RFC 9309).
User-agent: Googlebot
User-agent: Bingbot
User-agent: DuckDuckBot
User-agent: Slurp
Allow: /
Disallow: /api/
Crawl-delay: 1

# AI / LLM crawlers — explicitly allowed.
# These are the bots that produce AI-generated answers on
# ChatGPT, Claude, Perplexity, Google AI Overviews, etc.
# Allowing them is a deliberate GEO strategy.
User-agent: GPTBot
User-agent: OAI-SearchBot
User-agent: ChatGPT-User
User-agent: ClaudeBot
User-agent: anthropic-ai
User-agent: Claude-Web
User-agent: PerplexityBot
User-agent: Perplexity-User
User-agent: Google-Extended
User-agent: Applebot-Extended
User-agent: cohere-ai
User-agent: Bytespider
User-agent: Meta-ExternalAgent
User-agent: FacebookBot
User-agent: YouBot
User-agent: DiffbotEngine
Allow: /
Disallow: /api/
Crawl-delay: 1

# NOTE(review): Diffbot's documented user-agent token appears to be
# "Diffbot", not "DiffbotEngine" — verify against Diffbot's crawler docs
# before relying on this group matching.

# Sitemap — generated automatically by @astrojs/sitemap on build
Sitemap: https://riskweise.com/sitemap-index.xml