Mirror of https://github.com/jointakahe/takahe.git
A quick robots.txt to tell bots to stop crawling tags. (#321)
parent d247baa307, commit 7eff751224
2 changed files with 10 additions and 0 deletions
nginx configuration:

@@ -36,6 +36,11 @@ http {
         proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
         proxy_http_version 1.1;
 
+        # Serve robots.txt from the non-collected dir as a special case.
+        location /robots.txt {
+            alias /takahe/static/robots.txt;
+        }
+
         # Serves static files from the collected dir
         location /static/ {
             # Files in static have cache-busting hashes in the name, thus can be cached forever
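The new location block serves a single file via alias, outside the collected static dir. As a quick sanity check (not part of the commit; the hostname below is a placeholder), the served file can be fetched with Python's standard library:

    from urllib.request import urlopen

    # Placeholder hostname for illustration only; substitute a real Takahe instance.
    URL = "https://takahe.example.com/robots.txt"

    with urlopen(URL, timeout=10) as resp:
        print(resp.status)                  # expect 200 once nginx picks up the new config
        print(resp.read().decode("utf-8"))  # expect the rules added in static/robots.txt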
static/robots.txt (new file, 5 additions):

@@ -0,0 +1,5 @@
+User-Agent: *
+
+# Don't allow any bot to crawl tags.
+Disallow: /tags/
+Disallow: /tags/*
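To illustrate what these rules mean to a well-behaved crawler, here is a small sketch using Python's standard urllib.robotparser (an illustration only, not something the commit adds). The blank line from the committed file is omitted from the inlined rules because urllib.robotparser treats a blank line as a record separator and would otherwise discard the Disallow lines that follow it:

    from urllib.robotparser import RobotFileParser

    # Rules from static/robots.txt, inlined for the check (blank line dropped, see above).
    rules = [
        "User-Agent: *",
        "# Don't allow any bot to crawl tags.",
        "Disallow: /tags/",
        "Disallow: /tags/*",
    ]

    rp = RobotFileParser()
    rp.parse(rules)

    print(rp.can_fetch("*", "/tags/takahe"))  # False: tag pages are off limits
    print(rp.can_fetch("*", "/"))             # True: everything else stays crawlable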