# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
# https://www.semrush.com/bot/
# Semrush bot took out our Elasticsearch cluster with a huge crawl of search results.
User-Agent: SemrushBot
Disallow: /
# Crawling search result pages puts a heavy load on our servers
User-Agent: *
Disallow: /search