# Robots.txt file created by Trevor Rhodes 30 May 2006
# Modified by Nick Wallingford 27 June 2011
# Modified by Brian McCullough 7 October 2020
# For domain: https://www.gramps-project.org

# All robots will spider the domain
User-agent: *
Disallow: /bugs/
Disallow: /cgi-bin/
Disallow: /download/
Disallow: /cdrom
Disallow: /apple
Disallow: /files/
Disallow: /library/
Disallow: /samples/
Disallow: /docs/
Disallow: /xml/
Disallow: /wiki/index.php?diff=
Disallow: /wiki/index.php?oldid=
Disallow: /wiki/index.php?title=Help
Disallow: /wiki/index.php?title=Image
Disallow: /wiki/index.php?title=MediaWiki
Disallow: /wiki/index.php?title=Special:
Disallow: /wiki/index.php?title=Template
Disallow: /wiki/index.php?limit=
Disallow: /wiki/skins/
Disallow: /wiki/index.php/Gramps_5.0*
Disallow: /wiki/index.php/*:Gramps_5.0*
Disallow: /wiki/index.php/Gramps_4.*
Disallow: /wiki/index.php/*:Gramps_4.*
Disallow: /wiki/index.php/Gramps_3.*
Disallow: /wiki/index.php/*:Gramps_3.*
Disallow: /wiki/index.php/Gramps_2.*
Disallow: /wiki/index.php/*:Gramps_2.*

User-agent: TurnitinBot/2.0
Disallow: /

User-agent: TurnitinBot
Disallow: /

User-agent: XoviBot
Disallow: /

User-agent: bingbot
Disallow: /

User-agent: webmeup
Disallow: /

User-agent: rogerbot
Disallow: /

User-agent: dotbot
Disallow: /

User-agent: DotBot/1.1
Disallow: /

User-agent: 008
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: metajobbot
Disallow: /

User-agent: Exabot
Disallow: /

User-agent: Ezooms
Disallow: /

User-agent: fyberspider
Disallow: /

User-agent: MojeekBot
Disallow: /

# Testing to ban all robots and crawlers
# User-agent: *
# Disallow: /