# This file tells search engines which URLs they can and cannot crawl.
# www.robotstxt.org/
# http://code.google.com/web/controlcrawlindex/
#User-agent: *

# Allow Googlebot to crawl all pages.
User-agent: Googlebot
Allow: /

# Block all other search engine crawlers.
User-agent: *
Disallow: /