# robots.txt for http://www.pik-potsdam.de/
#
# Define access restrictions for robots/spiders
# http://www.robotstxt.org/robotstxt.html

# By default we allow robots to access all areas of our site
# already accessible to anonymous users
User-agent: *
Disallow: /search
Disallow: /@@search
Disallow: /intranet/
Disallow: /news/public-events/archiv/alter-net/former-ss/2010/night_life
Disallow: /*download_as_pdf
Disallow: /*send_as_pdf

# Add Googlebot-specific syntax extension to exclude forms
# that are repeated for each piece of content in the site;
# the wildcard is only supported by Googlebot
# http://www.google.com/support/webmasters/bin/answer.py?answer=40367&ctx=sibling
User-agent: Googlebot
Disallow: /*sendto_form$
Disallow: /*folder_factories$

# MSN Search
User-agent: msnbot
Disallow: /intranet/
Crawl-delay: 47

# Yahoo
User-agent: Slurp
Disallow: /intranet/
Crawl-delay: 61

# Ask Jeeves
User-agent: Teoma
Disallow: /intranet/
Crawl-delay: 79

# Gigabot
User-agent: gigabot
Disallow: /intranet/
Crawl-delay: 111

# TTN
User-agent: TSM Translation-Search-Machine
Disallow: /

# PhpDig
User-agent: PhpDig/1.8.8
Disallow: /
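
# --- Informational notes (comments only, not directives) ---
# In the Googlebot extension above, "*" matches any sequence of characters
# and "$" anchors the pattern at the end of the URL, so "/*sendto_form$"
# blocks e.g. /news/item/sendto_form but not /news/item/sendto_form/view.
# A minimal sketch for checking the plain prefix rules with Python's
# standard library (urllib.robotparser implements the basic standard,
# not Google's wildcard extension, so test wildcard rules elsewhere):
#   from urllib.robotparser import RobotFileParser
#   rp = RobotFileParser("http://www.pik-potsdam.de/robots.txt")
#   rp.read()
#   rp.can_fetch("*", "http://www.pik-potsdam.de/intranet/")  # -> False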