# robots.txt for http://parscms.com:80/ and friends
#
# advertising-related bots:
User-agent: *
Disallow: /modules/flash/*

User-agent: Mediapartners-Google*
Disallow: /

# Wikipedia work bots:
User-agent: IsraBot
Disallow:

User-agent: Orthogaffe
Disallow:

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: *
Disallow: /*linkestan*
Disallow: /*prepipe*
Disallow: /*uptime*
Disallow: /Web-Design/CMS-EShop-B2B-CRM/%D8%B7%D8%B1%D8%A7%D8%AD%DB%8C-%D9%81%D8%B1%D9%88%D8%B4%DA%AF%D8%A7%D9%87-%D8%A7%DB%8C%D9%86%D8%AA%D8%B1%D9%86%D8%AA%DB%8C-%D9%88-%D8%A7%DB%8C%D8%B4%D8%A7%D9%BE-%D8%A7%D9%84%DA%A9%D8%AA%D8%B1%D9%88%D9%86%DB%8C%DA%A930428.html
Disallow: /Web-Design/eshop-Web-Portal/%D8%B7%D8%B1%D8%A7%D8%AD%DB%8C-%D9%88%D8%A8-%D8%B3%D8%A7%DB%8C%D8%AA-%D9%81%D8%B1%D9%88%D8%B4%DA%AF%D8%A7%D9%87%DB%8C-%D9%81%D8%B1%D9%88%D8%B4%DA%AF%D8%A7%D9%87-%D9%85%D8%AC%D8%A7%D8%B2%DB%8C-%D8%A7%D9%84%DA%A9%D8%AA%D8%B1%D9%88%D9%86%DB%8C%DA%A94040.html

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

#
# Sorry, wget in its recursive mode is a frequent problem.
# Please read the man page and use it properly; there is a
# --wait option you can use to set the delay between hits,
# for instance.
#
User-agent: wget
Disallow: /

#
# The 'grub' distributed client has been *very* poorly behaved.
#
User-agent: grub-client
Disallow: /

#
# Doesn't follow robots.txt anyway, but...
#
User-agent: k2spider
Disallow: /

#
# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# Don't allow the Wayback Machine to index user pages
#User-agent: ia_archiver
#Disallow: /wiki/User
#Disallow: /wiki/Benutzer

#
# Friendly, low-speed bots are welcome viewing article pages, but not
# dynamically-generated pages please.
#
# Inktomi's "Slurp" can read a minimum delay between hits; if your
# bot supports such a thing using the 'Crawl-delay' or another
# instruction, please let us know.
#
#User-agent: *
#Disallow: /w/

User-agent: Scooter
Disallow: /

User-agent: Sidewinder
Disallow: /

User-agent: Slurp
Disallow: /

User-agent: Gulliver
Disallow: /

User-agent: WebCrawler
Disallow: /

User-agent: ArchitextSpider
Disallow: /

User-agent: fast
Disallow: /

User-agent: Ask
Disallow: /

#User-agent: MSNBot
#Disallow: /

Sitemap: http://parscms.com:80/modules/sitemap.xml
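
# Illustration of the 'Crawl-delay' note above: bots that honor the
# non-standard Crawl-delay directive could be throttled with a group like
# the commented-out sketch below. The bot name and 10-second value are
# only examples, not rules this site currently enforces.
#User-agent: Slurp
#Crawl-delay: 10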