# Robots.txt for operative.sh

# Allow all crawlers
User-agent: *
Allow: /

# Sitemap location
Sitemap: https://operative.sh/sitemap.xml

# Disallow any sensitive or admin areas if they exist
# Disallow: /admin/
# Disallow: /api/
# Disallow: /.env

# Crawl-delay (optional, in seconds)
# Crawl-delay: 1