#
# robots.txt
#
# This file is to prevent the crawling and indexing of certain parts
# of your site by web crawlers and spiders run by sites like Yahoo!
# and Google. By telling these "robots" where not to go on your site,
# you save bandwidth and server resources.
#
# This file will be ignored unless it is at the root of your host:
# Used:    http://example.com/robots.txt
# Ignored: http://example.com/site/robots.txt
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/robotstxt.html

User-agent: *
# CSS, JS, Images
Allow: /css/*.css$
Allow: /css/*.css?
Allow: /scripts/*.js$
Allow: /scripts/*.js?
Allow: /media/*.gif
Allow: /media/*.jpg
Allow: /media/*.jpeg
Allow: /media/*.png
Allow: /media/*.svg
# Directories
Disallow: /config/
Disallow: /App_Data/
Disallow: /App_Plugins/
Disallow: /App_Start/
Disallow: /bin/
Disallow: /config/
Disallow: /Controller/
Disallow: /Properties/
Disallow: /ViewModels/
Disallow: /App_data/
# Files
Disallow: /README.txt
Disallow: /web.config
Disallow: /Web.Debug.config
Disallow: /Web.JSSD Other AWS PROD.config
Disallow: /Web.Release.config
Disallow: /packages.config
Disallow: /Global.asax
Disallow: /umbracoForms.lic
Disallow: /RES-Umbraco.nuspec
Disallow: /RES-Umbraco.csproj.user
Disallow: /RES-Umbraco.csproj
Disallow: /compilerconfig.json
Disallow: /compilerconfig.json.defaults
# No documents
User-agent: Googlebot
Disallow: /sites/default/*.pdf$
Disallow: /sites/default/*.doc$
Disallow: /sites/default/*.docx$
Disallow: /sites/default/*.xls$
Disallow: /sites/default/*.xlsx$

# Begin block Bad-Robots from robots.txt
User-agent: ia_archiver
Disallow: /

Sitemap: https://www.defenceyouth.gov.au/sitemap.xml