mirror of
https://github.com/bookwyrm-social/bookwyrm.git
synced 2024-12-23 08:36:32 +00:00
This fixes #1871 (a huge amount of traffic from bots)
and closes #1873 (a stray file left behind after testing).
This commit is contained in:
parent
91f085c876
commit
f8afdfc6a9
2 changed files with 79 additions and 2 deletions
|
@ -1,5 +1,78 @@
|
|||
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
|
||||
|
||||
##### AhrefsBot #####
|
||||
# see http://ahrefs.com/robot/
|
||||
User-agent: AhrefsBot
|
||||
Crawl-Delay: 10
|
||||
#Disallow: /
|
||||
|
||||
|
||||
##### SemrushBot #####
|
||||
# see http://www.semrush.com/bot.html
|
||||
User-agent: SemrushBot
|
||||
Crawl-Delay: 10
|
||||
#Disallow: /
|
||||
|
||||
# To block SemrushBot from crawling your site for different SEO and technical issues:
|
||||
User-agent: SiteAuditBot
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot from crawling your site for Backlink Audit tool:
|
||||
User-agent: SemrushBot-BA
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot from crawling your site for On Page SEO Checker tool and similar tools:
|
||||
User-agent: SemrushBot-SI
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot from checking URLs on your site for SWA tool:
|
||||
User-agent: SemrushBot-SWA
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot from crawling your site for Content Analyzer and Post Tracking tools:
|
||||
User-agent: SemrushBot-CT
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot from crawling your site for Brand Monitoring:
|
||||
User-agent: SemrushBot-BM
|
||||
Disallow: /
|
||||
|
||||
# To block SplitSignalBot from crawling your site for SplitSignal tool:
|
||||
User-agent: SplitSignalBot
|
||||
Disallow: /
|
||||
|
||||
# To block SemrushBot-COUB from crawling your site for Content Outline Builder tool:
|
||||
User-agent: SemrushBot-COUB
|
||||
Disallow: /
|
||||
|
||||
|
||||
##### DotBot #####
|
||||
# see https://opensiteexplorer.org/dotbot
|
||||
User-agent: dotbot
|
||||
Crawl-delay: 10
|
||||
#Disallow: /
|
||||
|
||||
|
||||
##### BLEXBot #####
|
||||
# see http://webmeup-crawler.com/
|
||||
User-agent: BLEXBot
|
||||
Crawl-delay: 10
|
||||
#Disallow: /
|
||||
|
||||
|
||||
##### MJ12bot #####
|
||||
# see http://mj12bot.com/
|
||||
User-Agent: MJ12bot
|
||||
Crawl-Delay: 20
|
||||
#Disallow: /
|
||||
|
||||
|
||||
##### PetalBot #####
|
||||
# see https://webmaster.petalsearch.com/site/petalbot
|
||||
User-agent: PetalBot
|
||||
Disallow: /
|
||||
|
||||
|
||||
User-agent: *
|
||||
Disallow: /static/js/
|
||||
Disallow: /static/css/
|
||||
# NOTE(review): "Disallow: /static/css/" was listed twice; the identical directive above already covers it
|
|
@ -1,4 +1,5 @@
|
|||
""" test for app action functionality """
|
||||
import os
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
|
||||
|
@ -39,7 +40,7 @@ class FederationViews(TestCase):
|
|||
)
|
||||
|
||||
models.SiteSettings.objects.create()
|
||||
|
||||
|
||||
def test_federation_page(self):
|
||||
"""there are so many views, this just makes sure it LOADS"""
|
||||
view = views.Federation.as_view()
|
||||
|
@ -207,3 +208,6 @@ class FederationViews(TestCase):
|
|||
created = models.FederatedServer.objects.get(server_name="server.name")
|
||||
self.assertEqual(created.status, "blocked")
|
||||
self.assertEqual(created.notes, "https://explanation.url")
|
||||
|
||||
# remove file.json after test
|
||||
os.remove("file.json")
|
Loading…
Reference in a new issue