# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# https://support.google.com/webmasters/answer/156449?rd=1
User-agent: EasouSpider
Disallow: /
User-agent: *
Disallow: /*/book$
Disallow: /users/
# Restrict Yahoo's crawler (Slurp) to 1 request per second to avoid request bursts
User-agent: Slurp
Crawl-delay: 1
Sitemap: https://www.gigmit.com/sitemap.xml.gz
Sitemap: https://www.gigmit.com/help/sitemap.xml