# robots.txt for http://www.arb.ca.gov
# This 'robots.txt' file is the standard way to tell
# internet robots, worms, and spiders what files or
# directories should not be indexed
User-agent: *
# Disallow: /ceidars/	# Do not run ceidars reports
Disallow: /app/	# Do not run database apps
Disallow: /cgibin/	# Do not run local executables
Disallow: /cgi-bin/	# Do not run any executables
Disallow: /cgi-dos/	# Do not run any executables
Disallow: /cgi-win/	# Do not run any executables
Disallow: /eib/cgi-bin/	# Do not run EIB executables
Disallow: /scripts/	# Do not run MLD executables
#Disallow: /usr/	# Do not index any user directories
Disallow: /lists/	# Do not run MLD executables
Disallow: /nsexpress/	# Do not run MLD executables
Disallow: /db/search/swishe/	# added per Bill Fell 8/7/03
Disallow: /adam/	# per aqmis request 12/7/04
Disallow: /aqmis2/	# per aqmis request 12/7/04
Disallow: /testarea/	# per Dlew 1/19/2011
Disallow: /aqd/order/	# per adq bweller request 03/03/2011
Disallow: /bluebook	# per Bill Fell request 06/16/2011
Disallow: /bluebook2	# per Bill Fell request 06/16/2011
###
###