# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/core.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/companies_details.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/companies_browse.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/jobs_details.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/jobs_browse.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/salaries_details.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/salaries_browse.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/skills_details.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/skills_browse.xml.gz
Sitemap: https://hired-user-uploads.s3.amazonaws.com/sitemaps/uncrawled_botify_urls.xml.gz
User-Agent: *
Disallow: /