Update robots.txt.
* Generate /robots.txt dynamically.
* Include a link to the sitemap.
* Update the list of allowed URLs.
* Disallow crawling of non-canonical subdomains.
This commit is contained in:
6
app/controllers/robots_controller.rb
Normal file
6
app/controllers/robots_controller.rb
Normal file
@@ -0,0 +1,6 @@
|
||||
# Serves /robots.txt as a dynamically generated plain-text response.
#
# The body is rendered from app/views/robots/index.text.erb, which lets the
# template reference the request host and app configuration instead of
# shipping a static public/robots.txt.
class RobotsController < ApplicationController
  respond_to :text

  # GET /robots.txt — no controller-side state; the view does all the work.
  def index
  end
end
|
||||
23
app/views/robots/index.text.erb
Normal file
23
app/views/robots/index.text.erb
Normal file
@@ -0,0 +1,23 @@
|
||||
<%# Dynamic robots.txt: point crawlers at the sitemap, then deny everything
    by default. Specific sections are re-allowed only on the canonical
    production host, so non-canonical subdomains stay fully disallowed. %>
Sitemap: <%= root_url %>sitemap.xml

User-agent: *
Disallow: /

<% if Rails.env.production? && Danbooru.config.hostname == request.host %>
<%# Whitelist of crawlable top-level sections; rendered one Allow line each. %>
<% %w[artists artist_commentaries comments explore forum_posts forum_topics notes pools posts sessions static tags uploads user_upgrades users wiki_pages].each do |section| %>
Allow: /<%= section %>
<% end %>
<% end %>
|
||||
Reference in New Issue
Block a user