Update robots.txt.
* Generate /robots.txt dynamically.
* Include a link to the sitemap.
* Update the list of allowed URLs.
* Disallow crawling of non-canonical subdomains.
This commit is contained in:
@@ -232,6 +232,7 @@ Rails.application.routes.draw do
|
||||
get "reports/uploads" => "reports#uploads"
|
||||
get "reports/upload_tags" => "reports#upload_tags"
|
||||
resources :recommended_posts, only: [:index]
|
||||
resources :robots, only: [:index]
|
||||
resources :saved_searches, :except => [:show] do
|
||||
collection do
|
||||
get :labels
|
||||
|
||||
Reference in New Issue
Block a user