Skip to content

Commit

Permalink
feat: Unpublished sites are hidden from search engines (#2570)
Browse files Browse the repository at this point in the history
  • Loading branch information
kimadactyl authored Aug 20, 2024
1 parent 82caec5 commit a753297
Show file tree
Hide file tree
Showing 7 changed files with 51 additions and 16 deletions.
7 changes: 3 additions & 4 deletions app/controllers/pages_controller.rb
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@ def find_placecal
end
end

def terms_of_use; end

def robots
# NOTE(review): this span is a diff rendering that interleaves the removed
# lines (file read + first render) with the added line; the post-change
# body is only `render plain: current_site.robots`. Do not read it as a
# single method — as written it would issue a double render.
robots = File.read(Rails.root.join("config/robots/robots.#{Rails.env}.txt"))
render plain: robots
render plain: current_site.robots
end

def terms_of_use; end
end
5 changes: 0 additions & 5 deletions app/controllers/sites_controller.rb
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,6 @@ def index
end
end

# Serve the environment-specific robots.txt verbatim as a plain-text response.
def robots
  contents = File.read(Rails.root.join("config/robots/robots.#{Rails.env}.txt"))
  render plain: contents
end

private

def set_places_to_get_computer_access
Expand Down
14 changes: 14 additions & 0 deletions app/models/site.rb
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,20 @@ def og_description
tagline && tagline.empty? ? false : tagline
end

# Builds the robots.txt body for this site.
#
# Starts from the environment-specific template under config/robots/.
# Published sites serve the template unchanged; unpublished sites get a
# blanket `Disallow: /` appended so crawlers skip the whole site.
def robots
  base = File.read(Rails.root.join("config/robots/robots.#{Rails.env}.txt"))

  return base if is_published?

  <<~TXT
    #{base}
    User-agent: *
    Disallow: /
  TXT
end

class << self
# Find the requested Site from information in the rails request object.
#
Expand Down
4 changes: 0 additions & 4 deletions config/robots/robots.development.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1 @@
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
User-agent: *
Disallow: /
4 changes: 1 addition & 3 deletions config/robots/robots.staging.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
User-agent: *
Disallow: /
Disallow: /
1 change: 1 addition & 0 deletions config/robots/robots.test.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
32 changes: 32 additions & 0 deletions test/integration/robots_integration_test.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# frozen_string_literal: true

require 'test_helper'

class RobotsIntegrationTest < ActionDispatch::IntegrationTest
  setup do
    @published_site = create(:site, is_published: true)
    @unpublished_site = create(:site, is_published: false)
  end

  test 'robots.txt has default comment if site is published' do
    get "http://#{@published_site.slug}.lvh.me:3000/robots.txt"
    assert_response 200
    assert_equal '# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file', response.body
  end

  test 'robots.txt blocks site if site is unpublished' do
    get "http://#{@unpublished_site.slug}.lvh.me:3000/robots.txt"
    assert_response 200
    assert_equal blocked_robots_body, response.body
  end

  private

  # Expected robots.txt body for an unpublished site: the default
  # documentation comment followed by a blanket crawler ban.
  def blocked_robots_body
    <<~TXT
      # See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
      User-agent: *
      Disallow: /
    TXT
  end
end

0 comments on commit a753297

Please sign in to comment.