# robots.txt for https://www.healthypharma.com/
# Optimized for SEO, AI visibility, and performance on a static website.

##############################
# 1. General Rules
##############################
User-agent: *
Allow: /

# Block sensitive or unnecessary directories (if they exist)
Disallow: /cgi-bin/
Disallow: /temp/
Disallow: /private/
Disallow: /backup/
Disallow: /old/
Disallow: /test/
Disallow: /404.html

# Block duplicate content from query strings (e.g., tracking or search)
Disallow: /*?*

##############################
# 2. Allow Static Assets
##############################
Allow: /*.css$
Allow: /*.js$
Allow: /*.jpg$
Allow: /*.jpeg$
Allow: /*.png$
Allow: /*.webp$
Allow: /*.svg$
Allow: /*.woff$
Allow: /*.woff2$

##############################
# 3. Sitemap(s)
##############################
Sitemap: https://www.healthypharma.com/sitemap.xml
Sitemap: https://www.healthypharma.com/sitemap_index.xml

##############################
# 4. Allow AI, LLM, and Search Crawlers
##############################
# NOTE(review): per RFC 9309, a crawler that matches one of the specific groups
# below ignores the "User-agent: *" group entirely, so these bots will NOT
# honor the Disallow rules in section 1. If those restrictions should apply to
# them too, duplicate the Disallow lines into each group — confirm intent.
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: CCBot
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: anthropic-ai
Allow: /

User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

User-agent: Applebot
Allow: /

User-agent: DuckDuckBot
Allow: /

User-agent: Baiduspider
Allow: /

User-agent: Yandex
Allow: /

User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

##############################
# 5. Optional: Crawl Delay for Aggressive Bots
##############################
# Crawl-delay: 2
# Uncomment if your server is under heavy load