# robots.txt for http://wikiberal.org/
#
#
# http://mj12bot.com/
User-agent: MJ12bot
Disallow: /

User-agent: Orthogaffe
Disallow:

# Crawlers that are kind enough to obey, but which we'd rather not have
# unless they're feeding search engines.
User-agent: UbiCrawler
Disallow: /

User-agent: DOC
Disallow: /

User-agent: Zao
Disallow: /

# Some bots are known to be trouble, particularly those designed to copy
# entire sites. Please obey robots.txt.
User-agent: sitecheck.internetseer.com
Disallow: /

User-agent: Zealbot
Disallow: /

User-agent: MSIECrawler
Disallow: /

User-agent: SiteSnagger
Disallow: /

User-agent: WebStripper
Disallow: /

User-agent: WebCopier
Disallow: /

User-agent: Fetch
Disallow: /

User-agent: Offline Explorer
Disallow: /

User-agent: Teleport
Disallow: /

User-agent: TeleportPro
Disallow: /

User-agent: WebZIP
Disallow: /

User-agent: linko
Disallow: /

User-agent: HTTrack
Disallow: /

User-agent: Xenu
Disallow: /

User-agent: larbin
Disallow: /

User-agent: libwww
Disallow: /

User-agent: ZyBORG
Disallow: /

User-agent: Download Ninja
Disallow: /

# Misbehaving: requests much too fast:
User-agent: fast
Disallow: /

#
# Sorry, wget in its recursive mode is a frequent problem.
# Please read the man page and use it properly; there is a
# --wait option you can use to set the delay between hits,
# for instance (see the sample invocation after the divider below).
#
User-agent: wget
Disallow: /

#
# The 'grub' distributed client has been *very* poorly behaved.
#
User-agent: grub-client
Disallow: /

#
# Doesn't follow robots.txt anyway, but...
#
User-agent: k2spider
Disallow: /

#
# Hits many times per second, not acceptable
# http://www.nameprotect.com/botinfo.html
User-agent: NPBot
Disallow: /

# A capture bot, downloads gazillions of pages with no public benefit
# http://www.webreaper.net/
User-agent: WebReaper
Disallow: /

User-agent: *
Disallow: /w/
Disallow: /api/
Disallow: /index/
Disallow: /trap/
Disallow: /wiki/Special:
Disallow: /wiki/Spécial:
Disallow: /wiki/Special%3A
#
#
#----------------------------------------------------------#
#
#
#
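#
# As the wget note above suggests, a recursive fetch can rate-limit
# itself. A sample invocation (a sketch only; assumes GNU wget, and the
# 2-second delay and depth limit are arbitrary example values):
#
#   wget --wait=2 --recursive --level=1 https://wikiberal.org/
#
#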
#
# robots.txt section for https://wikiberal.org/
#
# Please check every change with a syntax checker
# such as 
# Enter https://wikiberal.org/robots.txt as the URL to check.
#
# ------------------------------------------------------------------------
#
Disallow: /wiki/Sp%C3%A9cial:
Disallow: /wiki/Spécial:
Disallow: /wiki/Special:
Disallow: /wiki/Spécial:MobileDiff/
Disallow: /index.php?title=Spécial:Pages_liées/

Disallow: /index.php*
Disallow: /index.php?title=*&mobileaction=toggle_view_mobile
Disallow: /index.php?title=*&mobileaction=toggle_view_desktop
Disallow: /index.php?title=*&action=history
Disallow: /index.php?title=*&diff=*&oldid=*
Disallow: /index.php?title=*&printable=yes
Disallow: /index.php?title=*&oldid=*
Disallow: /index.php?curid=*&oldid=
Disallow: /index.php?title=*&diff=prev&oldid=
Disallow: /index.php?oldid=
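#
# Note: the '*' wildcards above are a common extension honored by major
# crawlers (e.g. Googlebot, Bingbot) rather than part of the original
# robots.txt specification. For illustration, a hypothetical URL such as
#   /index.php?title=SomePage&action=history
# is matched by the action=history rule above.
#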

Disallow: /wiki/Discussion

Disallow: /wiki/MediaWiki:Spam-blacklist
# same rule with ':' percent-encoded as %3A
Disallow: /wiki/Sp%C3%A9cial%3A
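#
# Both the raw UTF-8 spelling (/wiki/Spécial:) and the percent-encoded
# spellings are listed above because crawlers may request either form of
# the same path.
#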
#