#
# robots.txt
#
# This file is to prevent the crawling and indexing of certain parts
# of your site by web crawlers and spiders run by sites like Yahoo!
# and Google. By telling these "robots" where not to go on your site,
# you save bandwidth and server resources.
#
# This file will be ignored unless it is at the root of your host:
# Used:    http://example.com/robots.txt
# Ignored: http://example.com/site/robots.txt
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/wc/robots.html
#
# For syntax checking, see:
# http://www.sxw.org.uk/computing/robots/check.html

User-agent: *
Crawl-delay: 10
# Directories
Disallow: /includes/
Disallow: /misc/
Disallow: /modules/
Disallow: /profiles/
Disallow: /scripts/
Disallow: /themes/
# Files
Disallow: /CHANGELOG.txt
Disallow: /cron.php
Disallow: /INSTALL.mysql.txt
Disallow: /INSTALL.pgsql.txt
Disallow: /INSTALL.sqlite.txt
Disallow: /install.php
Disallow: /INSTALL.txt
Disallow: /LICENSE.txt
Disallow: /MAINTAINERS.txt
Disallow: /update.php
Disallow: /UPGRADE.txt
Disallow: /xmlrpc.php
# Paths (clean URLs)
Disallow: /admin/
Disallow: /comment/reply/
Disallow: /filter/tips/
Disallow: /node/add/
Disallow: /search/
Disallow: /user/register/
Disallow: /user/password/
Disallow: /user/login/
Disallow: /user/logout/
# Paths (no clean URLs)
Disallow: /?q=admin/
Disallow: /?q=comment/reply/
Disallow: /?q=filter/tips/
Disallow: /?q=node/add/
Disallow: /?q=search/
Disallow: /?q=user/password/
Disallow: /?q=user/register/
Disallow: /?q=user/login/
Disallow: /?q=user/logout/

User-agent: Yandex
Disallow: /

User-agent: SEOkicks
Disallow: /

User-agent: SEOkicks-Robot
Disallow: /

User-agent: sistrix
Disallow: /

User-agent: MajesticSEO
Disallow: /

User-agent: BacklinkCrawler
Disallow: /

User-agent: xovi
Disallow: /

User-agent: XoviBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: spbot
Disallow: /

User-agent: SearchmetricsBot
Disallow: /

User-agent: search17
Disallow: /

User-agent: AhrefsBot
Disallow: /
User-agent: ia_archiver
Disallow: /

User-agent: TurnitinBot
Disallow: /

User-agent: SlySearch
Disallow: /

User-agent: findlinks
Disallow: /

User-agent: magpie-crawler
Disallow: /

User-agent: Pixray-Seeker
Disallow: /

User-agent: 008
Disallow: /

User-agent: Ezooms
Disallow: /

User-agent: lb-spider
Disallow: /

User-agent: WBSearchBot
Disallow: /

User-agent: psbot
Disallow: /

User-agent: HuaweiSymantecSpider
Disallow: /

User-agent: EC2LinkFinder
Disallow: /

User-agent: htdig
Disallow: /

User-agent: SemrushBot
Disallow: /

User-agent: discobot
Disallow: /

User-agent: linkdex.com
Disallow: /

User-agent: SeznamBot
Disallow: /

User-agent: EdisterBot
Disallow: /

User-agent: SWEBot
Disallow: /

User-agent: picmole
Disallow: /

User-agent: Yeti
Disallow: /

User-agent: Yeti-Mobile
Disallow: /

User-agent: PagePeeker
Disallow: /

User-agent: CatchBot
Disallow: /

User-agent: yacybot
Disallow: /

User-agent: netEstateNECrawler
Disallow: /

User-agent: SurveyBot
Disallow: /

User-agent: COMODOSSLChecker
Disallow: /

User-agent: Comodo-Certificates-Spider
Disallow: /

User-agent: gonzo
Disallow: /

User-agent: schrein
Disallow: /

User-agent: AfiliasWebMiningTool
Disallow: /

User-agent: suggybot
Disallow: /

User-agent: bdbrandprotect
Disallow: /

User-agent: BPImageWalker
Disallow: /

User-agent: Updownerbot
Disallow: /

User-agent: lex
Disallow: /

User-agent: ContentCrawler
Disallow: /

User-agent: DCPbot
Disallow: /

User-agent: KaloogaBot
Disallow: /

User-agent: MLBot
Disallow: /

User-agent: iCjobs
Disallow: /

User-agent: oBot
Disallow: /

User-agent: WebmasterCoffee
Disallow: /

User-agent: Qualidator
Disallow: /

User-agent: Webinator
Disallow: /

User-agent: Scooter
Disallow: /

User-agent: thunderstone
Disallow: /

User-agent: larbin
Disallow: /

User-agent: OpidooBOT
Disallow: /

User-agent: ips-agent
Disallow: /

User-agent: TinEye
Disallow: /

User-agent: UnisterBot
Disallow: /

User-agent: Unister
Disallow: /

User-agent: ReverseGet
Disallow: /

User-agent: DotBot
Disallow: /