Update robots.txt.

* Generate /robots.txt dynamically.
* Include link to sitemap.
* Update list of allowed URLs.
* Disallow crawling of non-canonical subdomains.
This commit is contained in:
evazion
2020-01-28 23:45:36 -06:00
parent e715bfad8a
commit 74d77c1e23
4 changed files with 30 additions and 52 deletions

View File

@@ -0,0 +1,6 @@
# Serves /robots.txt dynamically instead of a static public/robots.txt.
class RobotsController < ApplicationController
# Only a plain-text representation is offered for this endpoint.
respond_to :text
# GET /robots.txt — no controller logic; the response body is rendered
# from the corresponding view template (by Rails convention, presumably
# app/views/robots/index.text.erb — confirm against the view path).
def index
end
end

View File

@@ -0,0 +1,23 @@
<%# robots.txt template. NOTE(review): comments use `<%# ... -%>` so they -%>
<%# emit no output — robots.txt is line-sensitive (blank lines end a group); -%>
<%# assumes ERB trim mode supports `-%>` (Rails default) — confirm. -%>
<%# root_url ends with "/", so this yields e.g. https://host/sitemap.xml. -%>
Sitemap: <%= root_url %>sitemap.xml
User-agent: *
Disallow: /
<%# Whitelist crawlable sections only on the canonical production host; -%>
<%# non-production environments and non-canonical subdomains get just the -%>
<%# blanket Disallow above. Per RFC 9309 the most-specific (longest) rule -%>
<%# wins, so these Allow entries override `Disallow: /` for their prefixes. -%>
<% if Rails.env.production? && Danbooru.config.hostname == request.host %>
Allow: /artists
Allow: /artist_commentaries
Allow: /comments
Allow: /explore
Allow: /forum_posts
Allow: /forum_topics
Allow: /notes
Allow: /pools
Allow: /posts
Allow: /sessions
Allow: /static
Allow: /tags
Allow: /uploads
Allow: /user_upgrades
Allow: /users
Allow: /wiki_pages
<% end %>