# robots.txt
#
# This file is used to allow crawlers to index our site.
# It is NOT used for any other purpose, such as filtering
# impressions or clicks.

# Allow the entire site
User-agent: *
Disallow:

User-agent: bingbot
Crawl-delay: 4

User-agent: Slurp
Crawl-delay: 2

User-agent: Yandex
Crawl-delay: 2

User-agent: AhrefsBot
Crawl-delay: 5