# Define access-restrictions for robots/spiders
# http://www.robotstxt.org/wc/norobots.html

# Go away
User-agent: BLP_bbot
Disallow: /

User-agent: discobot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: YandexBot
Disallow: /

User-agent: Ezooms
Disallow: /
# /Go away

# By default we allow robots to access all areas of our site
# already accessible to anonymous users
User-agent: *
Disallow: /redirects
Disallow: /takeaction
Disallow: /images
Disallow: /files
Disallow: /users
Disallow: /partners
Disallow: /archives
Disallow: /landing-pages
Disallow: /author
Disallow: /Members
Disallow: /training

# Add Googlebot-specific syntax extension to exclude forms
# that are repeated for each piece of content in the site
# the wildcard is only supported by Googlebot
# http://www.google.com/support/webmasters/bin/answer.py?answer=40367&ctx=sibling
User-Agent: Googlebot
Disallow: /*sendto_form$
Disallow: /*folder_factories$
Disallow: /redirects
Disallow: /takeaction
Disallow: /images
Disallow: /files
Disallow: /users
Disallow: /partners
Disallow: /archives
Disallow: /landing-pages
Disallow: /author
Disallow: /Members
Disallow: /training