# General configuration of site allowances
User-agent: *
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

# Good bots. We need them to crawl us to appear in search results
User-agent: Mediapartners-Google # Ads bot - https://support.google.com/webmasters/answer/1061943?hl=en
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: DuckDuckBot # https://ahrefs.com/blog/robots-txt/
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: Applebot # https://support.apple.com/en-us/HT204683
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: Baiduspider # http://help.baidu.com/question?prod_en=master&class=Baiduspider
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: bingbot # https://blogs.bing.com/webmaster/2009/08/10/crawl-delay-and-the-bing-crawler-msnbot
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: msnbot # https://blogs.bing.com/webmaster/2009/08/10/crawl-delay-and-the-bing-crawler-msnbot
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: CCBot # http://commoncrawl.org/faq/
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: AhrefsBot # https://ahrefs.com/robot
Crawl-Delay: 2
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

# Meh bots. We don't particularly care if these guys crawl everything
User-agent: DomainAppender
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: BLEXBot # http://webmeup-crawler.com/
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: dotbot # https://moz.com/researchtools/ose/dotbot
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: rogerbot # https://moz.com/help/guides/moz-procedures/what-is-rogerbot
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: SemrushBot # https://www.semrush.com/bot/
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: Yandex # https://yandex.com/support/webmaster/controlling-robot/robots-txt.html
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: MJ12bot # http://mj12bot.com/ - UK search engine
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: spbot # http://openlinkprofiler.org/bot
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: Pinterest # https://help.pinterest.com/en/articles/about-pinterest-crawler-0
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: SeznamBot # http://www.botreports.com/user-agent/seznambot.shtml
Crawl-Delay: 10
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

# It's ok if these bots crawl, but slow them down so they don't hurt us
User-agent: MauiBot
Crawl-Delay: 20
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

User-agent: IstellaBot
Crawl-Delay: 20
Sitemap: https://www.yourmechanic.com/sitemapindex.xml
Disallow: /bookdiag
Disallow: /pagecache
Disallow: /landing
Disallow: /advice/results
Disallow: /api/
Disallow: /api1/
Disallow: /v1/
Disallow: /user
Disallow: /casa_user
Disallow: /track
Disallow: /book7
Disallow: /book
Disallow: /question/ask
Disallow: /scrpot.html # Honeypot to catch bad crawlers

# Don't allow these bots to crawl the site. Ensure a rule exists in the nginx locations config to block them too
User-agent: NTENTbot # http://www.ntent.com/ntentbot/
Disallow: /
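
# Note: robots.txt is only advisory, so the NTENTbot block above works only if
# the crawler honors it. A minimal sketch of the server-side rule the comment
# above asks for, kept here as a comment so this file stays valid robots.txt;
# the exact server/location context in the nginx config and the 403 response
# are assumptions, not the actual deployed rule:
#
#   if ($http_user_agent ~* "NTENTbot") {
#       return 403;  # refuse requests from this crawler regardless of robots.txt
#   }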