# DreamsMain.com Robots.txt
# Block spam bots, allow legitimate crawlers

# Legitimate search engines - ALLOW
User-agent: Googlebot
Allow: /

User-agent: Bingbot
Allow: /

# Slurp is Yahoo's crawler
User-agent: Slurp
Allow: /

User-agent: DuckDuckBot
Allow: /

User-agent: facebookexternalhit
Allow: /

User-agent: Twitterbot
Allow: /

User-agent: LinkedInBot
Allow: /

# Block SEO/spam bots
User-agent: SemrushBot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: DotBot
Disallow: /

User-agent: BLEXBot
Disallow: /

User-agent: Rogerbot
Disallow: /

User-agent: SerpstatBot
Disallow: /

User-agent: SEOkicks-Robot
Disallow: /

User-agent: MegaIndex
Disallow: /

User-agent: BacklinkCrawler
Disallow: /

User-agent: Screaming Frog SEO Spider
Disallow: /

User-agent: DataForSeoBot
Disallow: /

# Block regional search crawlers (Chinese and Russian engines
# not relevant to this site's audience)
User-agent: Baiduspider
Disallow: /

User-agent: Sogou
Disallow: /

User-agent: Yisou
Disallow: /

User-agent: Easou
Disallow: /

User-agent: YandexBot
Disallow: /

User-agent: 360Spider
Disallow: /

User-agent: PetalBot
Disallow: /

# Block data harvesters
User-agent: CCBot
Disallow: /

User-agent: Exabot
Disallow: /

User-agent: Gigabot
Disallow: /

User-agent: Nutch
Disallow: /

User-agent: ia_archiver
Disallow: /

User-agent: archive.org_bot
Disallow: /

# Block vulnerability scanners
# (these tools do not honor robots.txt; block them at the
# firewall or web server as well)
User-agent: Nmap
Disallow: /

User-agent: Nikto
Disallow: /

User-agent: sqlmap
Disallow: /

User-agent: Censys
Disallow: /

User-agent: Shodan
Disallow: /

# Block automation tools
User-agent: Python-urllib
Disallow: /

User-agent: python-requests
Disallow: /

User-agent: Go-http-client
Disallow: /

User-agent: Java
Disallow: /

User-agent: curl
Disallow: /

User-agent: wget
Disallow: /

User-agent: HTTrack
Disallow: /

# Default rule for all other bots. A crawler matched by a named
# group above follows only that group and ignores these rules.
User-agent: *
# Blocked paths and file types
Disallow: /users/
Disallow: /cgi-bin/
Disallow: /ayar.php
Disallow: /*.sql$
Disallow: /*.log$
Disallow: /*.bak$
# Everything else is allowed
Allow: /
# Crawl delay: 10 seconds between requests (a non-standard
# directive; not all crawlers honor it)
Crawl-delay: 10

# Sitemap
Sitemap: https://www.dreamsmain.com/sitemap.xml
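
# Notes:
# - The "*" wildcard and "$" end-of-URL anchor used in the path rules
#   above were standardized in RFC 9309 and are supported by major
#   crawlers such as Googlebot and Bingbot, but older parsers may
#   treat those lines as literal paths and ignore them.
# - robots.txt is advisory and publicly readable: it will not stop a
#   hostile crawler, and the Disallow lines reveal the paths they
#   name. Sensitive locations such as /users/ and .sql/.log/.bak
#   files should also be protected server-side.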