User-agent: Googlebot
Disallow:

User-agent: Googlebot-image
Disallow:

User-agent: Mediapartners-google
Disallow:

#
# This file contains rules to prevent the crawling and indexing of certain parts
# of this web site by the spiders of major search engines such as Google and Yahoo.
# By managing these rules you can allow or disallow access to specific folders
# and files for such spiders. It is a good way to hide private data and to save
# bandwidth.
#
# For more information about the robots.txt standard, see:
# http://www.robotstxt.org/wc/robots.html
#
# For syntax checking, see:
# http://www.sxw.org.uk/computing/robots/check.html

# The rules below apply to all other crawlers.
User-agent: *
Disallow: /admin/
Disallow: /cgi-bin/
Disallow: /ssl/
Disallow: /tmp/
Disallow: /*?*
Disallow: /*?
Disallow: /*~*
Disallow: /*~
Disallow: /ow_version.xml
Disallow: /INSTALL.txt
Disallow: /LICENSE.txt
Disallow: /README.txt
Disallow: /UPDATE.txt
Disallow: /CHANGES.txt

Sitemap: https://www.justdivorced.com/sitemap.xml