diff --git a/bookwyrm/templates/robots.txt b/bookwyrm/templates/robots.txt
new file mode 100644
index 000000000..dc7b6bcbb
--- /dev/null
+++ b/bookwyrm/templates/robots.txt
@@ -0,0 +1,5 @@
+# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
+
+User-agent: *
+Disallow: /static/js/
+Disallow: /static/css/
diff --git a/bookwyrm/urls.py b/bookwyrm/urls.py
index a2250c486..f5c13a4b4 100644
--- a/bookwyrm/urls.py
+++ b/bookwyrm/urls.py
@@ -2,7 +2,7 @@
 from django.conf.urls.static import static
 from django.contrib import admin
 from django.urls import path, re_path
-
+from django.views.generic.base import TemplateView
 from bookwyrm import settings, views
 from bookwyrm.utils import regex
 
@@ -27,6 +27,10 @@ handler404 = "bookwyrm.views.not_found_page"
 handler500 = "bookwyrm.views.server_error_page"
 urlpatterns = [
     path("admin/", admin.site.urls),
+    path(
+        "robots.txt",
+        TemplateView.as_view(template_name="robots.txt", content_type="text/plain"),
+    ),
     # federation endpoints
     re_path(r"^inbox/?$", views.Inbox.as_view()),
     re_path(r"%s/inbox/?$" % local_user_path, views.Inbox.as_view()),
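
Not part of the diff above, but a minimal regression-test sketch for the new route, assuming BookWyrm's standard Django test settings; the class and test names are illustrative and not taken from the repository:

""" hypothetical test for the robots.txt route added in this diff """
from django.test import TestCase


class RobotsTxtViewTest(TestCase):
    """the robots.txt template should be served at a fixed path"""

    def test_robots_txt(self):
        """renders as plain text and disallows the bundled static dirs"""
        result = self.client.get("/robots.txt")
        self.assertEqual(result.status_code, 200)
        # TemplateView is given content_type="text/plain" explicitly in urls.py
        self.assertTrue(result["Content-Type"].startswith("text/plain"))
        # the template body should match bookwyrm/templates/robots.txt
        self.assertIn(b"Disallow: /static/js/", result.content)
        self.assertIn(b"Disallow: /static/css/", result.content)

Serving the file through TemplateView keeps it out of the static pipeline, so the same template is rendered at /robots.txt on every instance without extra web-server configuration.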