Browse Source

robots.txt: disallow crawling dynamically generated PDF documents (#31617).

Patch by Go MAEDA.


git-svn-id: http://svn.redmine.org/redmine/trunk@19867 e93f8b46-1217-0410-a6f0-8f06a7374b81
tags/4.2.0
Go MAEDA 3 years ago
parent
commit
09db3e7131
2 changed files with 4 additions and 0 deletions
  1. app/views/welcome/robots.text.erb: +2 / -0
  2. test/integration/welcome_test.rb: +2 / -0

app/views/welcome/robots.text.erb (+2 / -0) — View File

@@ -13,3 +13,5 @@ Disallow: <%= url_for(search_path) %>
Disallow: <%= url_for(issues_path(:sort => '')) %>
Disallow: <%= url_for(issues_path(:query_id => '')) %>
Disallow: <%= url_for(issues_path) %>?*set_filter=
Disallow: <%= url_for(issues_path(:trailing_slash => true)) %>*.pdf$
Disallow: <%= url_for(projects_path(:trailing_slash => true)) %>*.pdf$

test/integration/welcome_test.rb (+2 / -0) — View File

@@ -31,5 +31,7 @@ class WelcomeTest < Redmine::IntegrationTest
assert @response.body.match(%r{^Disallow: /projects/ecookbook/issues\r?$})
assert @response.body.match(%r{^Disallow: /issues\?sort=\r?$})
assert @response.body.match(%r{^Disallow: /issues\?\*set_filter=\r?$})
assert @response.body.match(%r{^Disallow: /issues/\*\.pdf\$\r?$})
assert @response.body.match(%r{^Disallow: /projects/\*\.pdf\$\r?$})
end
end

Loading…
Cancel
Save