# Luma AI - robots.txt

User-agent: *
Allow: /

# Private/authenticated pages
Disallow: /billing/success
Disallow: /billing
Disallow: /admin
Disallow: /library
Disallow: /video/
Disallow: /account

# Legacy paths (blog, research, etc.) are 301 redirected to homepage
# Allowing crawl so Google sees redirects and passes link equity

# Block URLs with tracking parameters (prevent duplicate content)
Disallow: /*?ref=*
Disallow: /*?trk=*
Disallow: /*?utm_*
Disallow: /*?fbclid=*
Disallow: /*?gclid=*
Disallow: /*?from=*

# Block .html versions (use clean URLs instead)
Disallow: /*.html$

# Crawl-delay (ignored by Google; honored by some other crawlers)
Crawl-delay: 1

# Sitemap
Sitemap: https://luma.ai/sitemap.xml

# Canonical host (preferred domain)
# Note: www.luma.ai should redirect to luma.ai at DNS/hosting level
# Note: "Host" is a non-standard directive (legacy Yandex); most crawlers ignore it
Host: https://luma.ai