# robots.txt for http://www.wikipedia.org/ and friends
#
# Please note: There are a lot of pages on this site, and there are
# some misbehaved spiders out there that go _way_ too fast. If you're
# irresponsible, your access to the site may be blocked.
#

## added for AI bots
User-agent: Amazonbot
User-agent: GPTBot
User-agent: anthropic-ai
User-agent: Claude-Web
User-agent: Applebot-Extended
User-agent: Bytespider
User-agent: CCBot
User-agent: ChatGPT-User
User-agent: cohere-ai
User-agent: Diffbot
User-agent: FacebookBot
User-agent: GoogleOther
User-agent: Google-Extended
User-agent: ImagesiftBot
User-agent: PerplexityBot
User-agent: OmigiliBot
User-agent: Omigili
User-agent: meta-externalagent
Disallow: /

# Observed spamming large amounts of https://en.wikipedia.org/?curid=NNNNNN
# and ignoring 429 ratelimit responses; claims to respect robots:
# http://mj12bot.com/
User-agent: MJ12bot
Disallow: /

# advertising-related bots:
User-agent: Mediapartners-Google*
Disallow: /

# Wikipedia work bots:
User-agent: IsraBot
Disallow:

User-agent: Orthogaffe
Disallow:

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Microsoft.URL.Control
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Misbehaving; requests much too fast
User-agent: fast
Disallow: /

# Sorry, wget in its recursive mode is a frequent problem.
# Please read the man page and use it properly; there is a
# --wait option you can use to set the delay between hits,
# for instance.
User-agent: wget
Disallow: /

# The 'grub' distributed client has been *very* poorly behaved.
User-agent: grub-client
Disallow: /

# Doesn't follow robots.txt anyway, but...
User-agent: k2spider
Disallow: /

# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

# Per their statement, SemrushBot respects crawl-delay directives.
# We want it to stay within a reasonable overall request rate to the
# backend (20 rps); keeping in mind that the crawl-delay is applied
# per site and not globally by the bot, 5 seconds seems like a
# reasonable approximation.
User-agent: SemrushBot
Crawl-delay: 5
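# (A rough check of that arithmetic: at one request every 5 seconds,
# each wiki sees at most 0.2 req/s from the bot, so on the order of
# 100 wikis crawled in parallel stays within the 20 rps budget.)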

#
# Friendly, low-speed bots are welcome viewing article pages, but not
# dynamically-generated pages please.
#
# Inktomi's "Slurp" can read a minimum delay between hits; if your
# bot supports such a thing using the 'Crawl-delay' or another
# instruction, please let us know.
#
# There is a special exception for API mobileview to allow dynamic
# mobile web & app views to load section content.
# These views aren't HTTP-cached but use the parser cache aggressively
# and don't expose Special: pages and other uncached content.
#
# Another exception is for REST API documentation, located at
# /api/rest_v1/?doc.
#
User-agent: *
Allow: /api.php?action=mobileview&
Allow: /load.php?
Allow: /api/rest_v1/?doc
Allow: /rest.php/site/v1/sitemap
Disallow: /api/
Disallow: /trap/
Disallow: /Special:
Disallow: /Spezial:
Disallow: /Spesial:
Disallow: /Special%3A
Disallow: /Spezial%3A
Disallow: /Spesial%3A
#
# ar:
Disallow: /%D8%AE%D8%A7%D8%B5:Search
Disallow: /%D8%AE%D8%A7%D8%B5%3ASearch
# T16075
Disallow: /MediaWiki:Spam-blacklist
Disallow: /MediaWiki%3ASpam-blacklist
Disallow: /MediaWiki_talk:Spam-blacklist
Disallow: /MediaWiki_talk%3ASpam-blacklist
#
# hewiki:
Disallow: /%D7%9E%D7%99%D7%95%D7%97%D7%93:Search
Disallow: /%D7%9E%D7%99%D7%95%D7%97%D7%93%3ASearch
# T11517
Disallow: /ויקיפדיה:רשימת_מועמדים_למחיקה/
Disallow: /ויקיפדיה%3Aרשימת_מועמדים_למחיקה/
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94:%D7%A8%D7%A9%D7%99%D7%9E%D7%AA_%D7%9E%D7%95%D7%A2%D7%9E%D7%93%D7%99%D7%9D_%D7%9C%D7%9E%D7%97%D7%99%D7%A7%D7%94/
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94%3A%D7%A8%D7%A9%D7%99%D7%9E%D7%AA_%D7%9E%D7%95%D7%A2%D7%9E%D7%93%D7%99%D7%9D_%D7%9C%D7%9E%D7%97%D7%99%D7%A7%D7%94/
Disallow: /ויקיפדיה:ערכים_לא_קיימים_ומוגנים
Disallow: /ויקיפדיה%3Aערכים_לא_קיימים_ומוגנים
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94:%D7%A2%D7%A8%D7%9B%D7%99%D7%9D_%D7%9C%D7%90_%D7%A7%D7%99%D7%99%D7%9E%D7%99%D7%9D_%D7%95%D7%9E%D7%95%D7%92%D7%A0%D7%99%D7%9D
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94%3A%D7%A2%D7%A8%D7%9B%D7%99%D7%9D_%D7%9C%D7%90_%D7%A7%D7%99%D7%99%D7%9E%D7%99%D7%9D_%D7%95%D7%9E%D7%95%D7%92%D7%A0%D7%99%D7%9D
Disallow: /ויקיפדיה:דפים_לא_קיימים_ומוגנים
Disallow: /ויקיפדיה%3Aדפים_לא_קיימים_ומוגנים
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94:%D7%93%D7%A4%D7%99%D7%9D_%D7%9C%D7%90_%D7%A7%D7%99%D7%99%D7%9E%D7%99%D7%9D_%D7%95%D7%9E%D7%95%D7%92%D7%A0%D7%99%D7%9D
Disallow: /%D7%95%D7%99%D7%A7%D7%99%D7%A4%D7%93%D7%99%D7%94%3A%D7%93%D7%A4%D7%99%D7%9D_%D7%9C%D7%90_%D7%A7%D7%99%D7%99%D7%9E%D7%99%D7%9D_%D7%95%D7%9E%D7%95%D7%92%D7%A0%D7%99%D7%9D
#
# huwiki:
Disallow: /Speci%C3%A1lis:Search
Disallow: /Speci%C3%A1lis%3ASearch
#
Sitemap: https://en.wikipedia.org/w/rest.php/site/v1/sitemap/0
#
Disallow: /wiki/Category:Noindexed_pages
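
#
# A quick way for bot operators to test a URL against these rules
# (a sketch using Python's standard urllib.robotparser; the user-agent
# string "ExampleBot" is a hypothetical placeholder, not a bot listed
# above):
#
#   from urllib.robotparser import RobotFileParser
#   rp = RobotFileParser("https://en.wikipedia.org/robots.txt")
#   rp.read()  # fetch and parse the live robots.txt
#   print(rp.can_fetch("ExampleBot", "https://en.wikipedia.org/wiki/Earth"))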