# Nuvio Plugin Library - robots.txt
# Last Updated: 2026-04-16

# Default Rules for All Crawlers
User-agent: *
Allow: /
Allow: /index.html
Allow: /download.html
Allow: /how-to-use.html
Allow: /faq.html
Allow: /contact.html
Allow: /about.html
Allow: /privacy.html
Allow: /terms.html
Allow: /dmca.html
Allow: /sitemap.xml
Allow: /robots.txt

# Disallow admin and sensitive paths
Disallow: /admin/
Disallow: /api/
Disallow: /*.json$
Disallow: /*.backup$

# Standard crawl delay (Crawl-delay/Request-rate are non-standard;
# honored by some crawlers, ignored by Google)
Crawl-delay: 1
Request-rate: 30/60

# Google Bot - Optimized Crawling
User-agent: Googlebot
Allow: /
Crawl-delay: 0
Request-rate: 100/60

# Bing Bot - Optimized Crawling
User-agent: Bingbot
Allow: /
Crawl-delay: 1
Request-rate: 30/60

# Other Search Engines
User-agent: Slurp
Allow: /
Crawl-delay: 1

User-agent: DuckDuckBot
Allow: /
Crawl-delay: 1

User-agent: Yandex
Allow: /
Crawl-delay: 1

# Sitemap Location (must reference a sitemap file, not robots.txt itself)
Sitemap: https://nuvioplugin.com/sitemap.xml