Mirror of https://github.com/bookwyrm-social/bookwyrm.git, synced 2024-11-22 09:31:08 +00:00
Merge pull request #1421 from bookwyrm-social/pylint-update
Pylint update
Commit: 5ad315faac
50 changed files with 289 additions and 207 deletions
.github/workflows/pylint.yml (vendored, 2 changes)

@@ -24,5 +24,5 @@ jobs:
 pip install pylint
 - name: Analysing the code with pylint
 run: |
-pylint bookwyrm/ --ignore=migrations,tests --disable=E1101,E1135,E1136,R0903,R0901,R0902,W0707,W0511,W0406,R0401,R0801,C0209
+pylint bookwyrm/ --ignore=migrations,tests --disable=E1101,E1135,E1136,R0903,R0901,R0902,W0707,W0511,W0406,R0401,R0801

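For context, C0209 is pylint's "consider-using-f-string" check; removing it from the disable list is what motivates the rest of this diff. A minimal illustration of what the check flags (variable names are made up):

    # both of these trigger C0209 (consider-using-f-string)
    greeting = "Hello, %s!" % name
    greeting = "Hello, {}!".format(name)
    # the replacement pylint suggests
    greeting = f"Hello, {name}!"
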
@@ -101,7 +101,7 @@ class ActivityObject:
 except KeyError:
 if field.default == MISSING and field.default_factory == MISSING:
 raise ActivitySerializerError(
-"Missing required field: %s" % field.name
+f"Missing required field: {field.name}"
 )
 value = field.default
 setattr(self, field.name, value)
@@ -219,8 +219,8 @@ def set_related_field(
 model_name, origin_model_name, related_field_name, related_remote_id, data
 ):
 """load reverse related fields (editions, attachments) without blocking"""
-model = apps.get_model("bookwyrm.%s" % model_name, require_ready=True)
-origin_model = apps.get_model("bookwyrm.%s" % origin_model_name, require_ready=True)
+model = apps.get_model(f"bookwyrm.{model_name}", require_ready=True)
+origin_model = apps.get_model(f"bookwyrm.{origin_model_name}", require_ready=True)

 with transaction.atomic():
 if isinstance(data, str):
@@ -234,7 +234,7 @@ def set_related_field(
 # this must exist because it's the object that triggered this function
 instance = origin_model.find_existing_by_remote_id(related_remote_id)
 if not instance:
-raise ValueError("Invalid related remote id: %s" % related_remote_id)
+raise ValueError(f"Invalid related remote id: {related_remote_id}")

 # set the origin's remote id on the activity so it will be there when
 # the model instance is created
@@ -265,7 +265,7 @@ def get_model_from_type(activity_type):
 ]
 if not model:
 raise ActivitySerializerError(
-'No model found for activity type "%s"' % activity_type
+f'No model found for activity type "{activity_type}"'
 )
 return model[0]

@@ -286,7 +286,7 @@ def resolve_remote_id(
 data = get_data(remote_id)
 except ConnectorException:
 raise ActivitySerializerError(
-"Could not connect to host for remote_id in: %s" % (remote_id)
+f"Could not connect to host for remote_id: {remote_id}"
 )
 # determine the model implicitly, if not provided
 # or if it's a model with subclasses like Status, check again

@@ -16,11 +16,12 @@ class ActivityStream(RedisStore):

 def stream_id(self, user):
 """the redis key for this user's instance of this stream"""
-return "{}-{}".format(user.id, self.key)
+return f"{user.id}-{self.key}"

 def unread_id(self, user):
 """the redis key for this user's unread count for this stream"""
-return "{}-unread".format(self.stream_id(user))
+stream_id = self.stream_id(user)
+return f"{stream_id}-unread"

 def get_rank(self, obj): # pylint: disable=no-self-use
 """statuses are sorted by date published"""

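Judging from the two helpers above, the unread counter key is just the stream key plus a "-unread" suffix; for a hypothetical user id 42 and a stream whose key is "home", the values would be:

    stream_id(user)  # -> "42-home"
    unread_id(user)  # -> "42-home-unread"
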
@@ -43,7 +43,7 @@ class AbstractMinimalConnector(ABC):
 params["min_confidence"] = min_confidence

 data = self.get_search_data(
-"%s%s" % (self.search_url, query),
+f"{self.search_url}{query}",
 params=params,
 timeout=timeout,
 )
@@ -57,7 +57,7 @@ class AbstractMinimalConnector(ABC):
 """isbn search"""
 params = {}
 data = self.get_search_data(
-"%s%s" % (self.isbn_search_url, query),
+f"{self.isbn_search_url}{query}",
 params=params,
 )
 results = []
@@ -131,7 +131,7 @@ class AbstractConnector(AbstractMinimalConnector):
 work_data = data

 if not work_data or not edition_data:
-raise ConnectorException("Unable to load book data: %s" % remote_id)
+raise ConnectorException(f"Unable to load book data: {remote_id}")

 with transaction.atomic():
 # create activitypub object
@@ -222,9 +222,7 @@ def get_data(url, params=None, timeout=10):
 """wrapper for request.get"""
 # check if the url is blocked
 if models.FederatedServer.is_blocked(url):
-raise ConnectorException(
-"Attempting to load data from blocked url: {:s}".format(url)
-)
+raise ConnectorException(f"Attempting to load data from blocked url: {url}")

 try:
 resp = requests.get(
@@ -283,6 +281,7 @@ class SearchResult:
 confidence: int = 1

 def __repr__(self):
+# pylint: disable=consider-using-f-string
 return "<SearchResult key={!r} title={!r} author={!r}>".format(
 self.key, self.title, self.author
 )

@@ -109,10 +109,10 @@ def get_or_create_connector(remote_id):
 connector_info = models.Connector.objects.create(
 identifier=identifier,
 connector_file="bookwyrm_connector",
-base_url="https://%s" % identifier,
-books_url="https://%s/book" % identifier,
-covers_url="https://%s/images/covers" % identifier,
-search_url="https://%s/search?q=" % identifier,
+base_url=f"https://{identifier}",
+books_url=f"https://{identifier}/book",
+covers_url=f"https://{identifier}/images/covers",
+search_url=f"https://{identifier}/search?q=",
 priority=2,
 )

@@ -131,7 +131,7 @@ def load_more_data(connector_id, book_id):
 def load_connector(connector_info):
 """instantiate the connector class"""
 connector = importlib.import_module(
-"bookwyrm.connectors.%s" % connector_info.connector_file
+f"bookwyrm.connectors.{connector_info.connector_file}"
 )
 return connector.Connector(connector_info.identifier)

@@ -141,4 +141,4 @@ def load_connector(connector_info):
 def create_connector(sender, instance, created, *args, **kwargs):
 """create a connector to an external bookwyrm server"""
 if instance.application_type == "bookwyrm":
-get_or_create_connector("https://{:s}".format(instance.server_name))
+get_or_create_connector(f"https://{instance.server_name}")

@@ -59,7 +59,7 @@ class Connector(AbstractConnector):

 def get_remote_id(self, value):
 """convert an id/uri into a url"""
-return "{:s}?action=by-uris&uris={:s}".format(self.books_url, value)
+return f"{self.books_url}?action=by-uris&uris={value}"

 def get_book_data(self, remote_id):
 data = get_data(remote_id)
@@ -87,11 +87,7 @@ class Connector(AbstractConnector):

 def format_search_result(self, search_result):
 images = search_result.get("image")
-cover = (
-"{:s}/img/entities/{:s}".format(self.covers_url, images[0])
-if images
-else None
-)
+cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None
 # a deeply messy translation of inventaire's scores
 confidence = float(search_result.get("_score", 0.1))
 confidence = 0.1 if confidence < 150 else 0.999
@@ -99,9 +95,7 @@ class Connector(AbstractConnector):
 title=search_result.get("label"),
 key=self.get_remote_id(search_result.get("uri")),
 author=search_result.get("description"),
-view_link="{:s}/entity/{:s}".format(
-self.base_url, search_result.get("uri")
-),
+view_link=f"{self.base_url}/entity/{search_result.get('uri')}",
 cover=cover,
 confidence=confidence,
 connector=self,
@@ -123,9 +117,7 @@ class Connector(AbstractConnector):
 title=title[0],
 key=self.get_remote_id(search_result.get("uri")),
 author=search_result.get("description"),
-view_link="{:s}/entity/{:s}".format(
-self.base_url, search_result.get("uri")
-),
+view_link=f"{self.base_url}/entity/{search_result.get('uri')}",
 cover=self.get_cover_url(search_result.get("image")),
 connector=self,
 )
@@ -135,11 +127,7 @@ class Connector(AbstractConnector):

 def load_edition_data(self, work_uri):
 """get a list of editions for a work"""
-url = (
-"{:s}?action=reverse-claims&property=wdt:P629&value={:s}&sort=true".format(
-self.books_url, work_uri
-)
-)
+url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
 return get_data(url)

 def get_edition_from_work_data(self, data):
@@ -195,7 +183,7 @@ class Connector(AbstractConnector):
 # cover may or may not be an absolute url already
 if re.match(r"^http", cover_id):
 return cover_id
-return "%s%s" % (self.covers_url, cover_id)
+return f"{self.covers_url}{cover_id}"

 def resolve_keys(self, keys):
 """cool, it's "wd:Q3156592" now what the heck does that mean"""
@@ -213,9 +201,7 @@ class Connector(AbstractConnector):
 link = links.get("enwiki")
 if not link:
 return ""
-url = "{:s}/api/data?action=wp-extract&lang=en&title={:s}".format(
-self.base_url, link
-)
+url = f"{self.base_url}/api/data?action=wp-extract&lang=en&title={link}"
 try:
 data = get_data(url)
 except ConnectorException:

@@ -71,7 +71,7 @@ class Connector(AbstractConnector):
 key = data["key"]
 except KeyError:
 raise ConnectorException("Invalid book data")
-return "%s%s" % (self.books_url, key)
+return f"{self.books_url}{key}"

 def is_work_data(self, data):
 return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))
@@ -81,7 +81,7 @@ class Connector(AbstractConnector):
 key = data["key"]
 except KeyError:
 raise ConnectorException("Invalid book data")
-url = "%s%s/editions" % (self.books_url, key)
+url = f"{self.books_url}{key}/editions"
 data = self.get_book_data(url)
 edition = pick_default_edition(data["entries"])
 if not edition:
@@ -93,7 +93,7 @@ class Connector(AbstractConnector):
 key = data["works"][0]["key"]
 except (IndexError, KeyError):
 raise ConnectorException("No work found for edition")
-url = "%s%s" % (self.books_url, key)
+url = f"{self.books_url}{key}"
 return self.get_book_data(url)

 def get_authors_from_data(self, data):
@@ -102,7 +102,7 @@ class Connector(AbstractConnector):
 author_blob = author_blob.get("author", author_blob)
 # this id is "/authors/OL1234567A"
 author_id = author_blob["key"]
-url = "%s%s" % (self.base_url, author_id)
+url = f"{self.base_url}{author_id}"
 author = self.get_or_create_author(url)
 if not author:
 continue
@@ -113,8 +113,8 @@ class Connector(AbstractConnector):
 if not cover_blob:
 return None
 cover_id = cover_blob[0]
-image_name = "%s-%s.jpg" % (cover_id, size)
-return "%s/b/id/%s" % (self.covers_url, image_name)
+image_name = f"{cover_id}-{size}.jpg"
+return f"{self.covers_url}/b/id/{image_name}"

 def parse_search_data(self, data):
 return data.get("docs")
@@ -152,7 +152,7 @@ class Connector(AbstractConnector):

 def load_edition_data(self, olkey):
 """query openlibrary for editions of a work"""
-url = "%s/works/%s/editions" % (self.books_url, olkey)
+url = f"{self.books_url}/works/{olkey}/editions"
 return self.get_book_data(url)

 def expand_book_data(self, book):

@@ -71,7 +71,7 @@ class Connector(AbstractConnector):
 def format_search_result(self, search_result):
 cover = None
 if search_result.cover:
-cover = "%s%s" % (self.covers_url, search_result.cover)
+cover = f"{self.covers_url}{search_result.cover}"

 return SearchResult(
 title=search_result.title,

@@ -11,7 +11,7 @@ def email_data():
 """fields every email needs"""
 site = models.SiteSettings.objects.get()
 if site.logo_small:
-logo_path = "/images/{}".format(site.logo_small.url)
+logo_path = f"/images/{site.logo_small.url}"
 else:
 logo_path = "/static/images/logo-small.png"

@@ -48,18 +48,12 @@ def password_reset_email(reset_code):

 def format_email(email_name, data):
 """render the email templates"""
-subject = (
-get_template("email/{}/subject.html".format(email_name)).render(data).strip()
-)
+subject = get_template(f"email/{email_name}/subject.html").render(data).strip()
 html_content = (
-get_template("email/{}/html_content.html".format(email_name))
-.render(data)
-.strip()
+get_template(f"email/{email_name}/html_content.html").render(data).strip()
 )
 text_content = (
-get_template("email/{}/text_content.html".format(email_name))
-.render(data)
-.strip()
+get_template(f"email/{email_name}/text_content.html").render(data).strip()
 )
 return (subject, html_content, text_content)

@@ -260,10 +260,7 @@ class CreateInviteForm(CustomForm):
 ]
 ),
 "use_limit": widgets.Select(
-choices=[
-(i, _("%(count)d uses" % {"count": i}))
-for i in [1, 5, 10, 25, 50, 100]
-]
+choices=[(i, _(f"{i} uses")) for i in [1, 5, 10, 25, 50, 100]]
 + [(None, _("Unlimited"))]
 ),
 }

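Evaluated, the comprehension plus the trailing literal build the select widget's choices roughly as follows (translation wrapper left out):

    [(1, "1 uses"), (5, "5 uses"), (10, "10 uses"), (25, "25 uses"),
     (50, "50 uses"), (100, "100 uses"), (None, "Unlimited")]
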
@@ -127,6 +127,7 @@ def handle_imported_book(source, user, item, include_reviews, privacy):
 # but "now" is a bad guess
 published_date_guess = item.date_read or item.date_added
 if item.review:
+# pylint: disable=consider-using-f-string
 review_title = (
 "Review of {!r} on {!r}".format(
 item.book.title,

@@ -266,7 +266,7 @@ class ObjectMixin(ActivitypubMixin):
 signed_message = signer.sign(SHA256.new(content.encode("utf8")))

 signature = activitypub.Signature(
-creator="%s#main-key" % user.remote_id,
+creator=f"{user.remote_id}#main-key",
 created=activity_object.published,
 signatureValue=b64encode(signed_message).decode("utf8"),
 )
@@ -285,16 +285,16 @@ class ObjectMixin(ActivitypubMixin):
 return activitypub.Delete(
 id=self.remote_id + "/activity",
 actor=user.remote_id,
-to=["%s/followers" % user.remote_id],
+to=[f"{user.remote_id}/followers"],
 cc=["https://www.w3.org/ns/activitystreams#Public"],
 object=self,
 ).serialize()

 def to_update_activity(self, user):
 """wrapper for Updates to an activity"""
-activity_id = "%s#update/%s" % (self.remote_id, uuid4())
+uuid = uuid4()
 return activitypub.Update(
-id=activity_id,
+id=f"{self.remote_id}#update/{uuid}",
 actor=user.remote_id,
 to=["https://www.w3.org/ns/activitystreams#Public"],
 object=self,
@@ -337,8 +337,8 @@ class OrderedCollectionPageMixin(ObjectMixin):
 paginated = Paginator(queryset, PAGE_LENGTH)
 # add computed fields specific to orderd collections
 activity["totalItems"] = paginated.count
-activity["first"] = "%s?page=1" % remote_id
-activity["last"] = "%s?page=%d" % (remote_id, paginated.num_pages)
+activity["first"] = f"{remote_id}?page=1"
+activity["last"] = f"{remote_id}?page={paginated.num_pages}"

 return serializer(**activity)

@@ -420,7 +420,7 @@ class CollectionItemMixin(ActivitypubMixin):
 """AP for shelving a book"""
 collection_field = getattr(self, self.collection_field)
 return activitypub.Add(
-id="{:s}#add".format(collection_field.remote_id),
+id=f"{collection_field.remote_id}#add",
 actor=user.remote_id,
 object=self.to_activity_dataclass(),
 target=collection_field.remote_id,
@@ -430,7 +430,7 @@ class CollectionItemMixin(ActivitypubMixin):
 """AP for un-shelving a book"""
 collection_field = getattr(self, self.collection_field)
 return activitypub.Remove(
-id="{:s}#remove".format(collection_field.remote_id),
+id=f"{collection_field.remote_id}#remove",
 actor=user.remote_id,
 object=self.to_activity_dataclass(),
 target=collection_field.remote_id,
@@ -458,7 +458,7 @@ class ActivityMixin(ActivitypubMixin):
 """undo an action"""
 user = self.user if hasattr(self, "user") else self.user_subject
 return activitypub.Undo(
-id="%s#undo" % self.remote_id,
+id=f"{self.remote_id}#undo",
 actor=user.remote_id,
 object=self,
 ).serialize()
@@ -555,11 +555,11 @@ def to_ordered_collection_page(

 prev_page = next_page = None
 if activity_page.has_next():
-next_page = "%s?page=%d" % (remote_id, activity_page.next_page_number())
+next_page = f"{remote_id}?page={activity_page.next_page_number()}"
 if activity_page.has_previous():
-prev_page = "%s?page=%d" % (remote_id, activity_page.previous_page_number())
+prev_page = f"{remote_id}?page=%d{activity_page.previous_page_number()}"
 return activitypub.OrderedCollectionPage(
-id="%s?page=%s" % (remote_id, page),
+id=f"{remote_id}?page={page}",
 partOf=remote_id,
 orderedItems=items,
 next=next_page,

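One detail worth flagging in the hunk above: the converted prev_page line keeps a literal %d inside the f-string, so the previous-page URL would contain a stray "%d", unlike next_page. A quick check with made-up values:

    remote_id = "https://example.net/user/mouse/outbox"
    f"{remote_id}?page={3}"    # next_page style  -> ".../outbox?page=3"
    f"{remote_id}?page=%d{1}"  # prev_page as written -> ".../outbox?page=%d1"
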
@@ -35,7 +35,7 @@ class Author(BookDataModel):

 def get_remote_id(self):
 """editions and works both use "book" instead of model_name"""
-return "https://%s/author/%s" % (DOMAIN, self.id)
+return f"https://{DOMAIN}/author/{self.id}"

 activity_serializer = activitypub.Author

@@ -32,11 +32,11 @@ class BookWyrmModel(models.Model):

 def get_remote_id(self):
 """generate a url that resolves to the local object"""
-base_path = "https://%s" % DOMAIN
+base_path = f"https://{DOMAIN}"
 if hasattr(self, "user"):
-base_path = "%s%s" % (base_path, self.user.local_path)
+base_path = f"{base_path}{self.user.local_path}"
 model_name = type(self).__name__.lower()
-return "%s/%s/%d" % (base_path, model_name, self.id)
+return f"{base_path}/{model_name}/{self.id}"

 class Meta:
 """this is just here to provide default fields for other models"""
@@ -46,7 +46,7 @@ class BookWyrmModel(models.Model):
 @property
 def local_path(self):
 """how to link to this object in the local app"""
-return self.get_remote_id().replace("https://%s" % DOMAIN, "")
+return self.get_remote_id().replace(f"https://{DOMAIN}", "")

 def visible_to_user(self, viewer):
 """is a user authorized to view an object?"""

@@ -164,9 +164,9 @@ class Book(BookDataModel):
 @property
 def alt_text(self):
 """image alt test"""
-text = "%s" % self.title
+text = self.title
 if self.edition_info:
-text += " (%s)" % self.edition_info
+text += f" ({self.edition_info})"
 return text

 def save(self, *args, **kwargs):
@@ -177,9 +177,10 @@ class Book(BookDataModel):

 def get_remote_id(self):
 """editions and works both use "book" instead of model_name"""
-return "https://%s/book/%d" % (DOMAIN, self.id)
+return f"https://{DOMAIN}/book/{self.id}"

 def __repr__(self):
+# pylint: disable=consider-using-f-string
 return "<{} key={!r} title={!r}>".format(
 self.__class__,
 self.openlibrary_key,
@@ -216,7 +217,7 @@ class Work(OrderedCollectionPageMixin, Book):
 """an ordered collection of editions"""
 return self.to_ordered_collection(
 self.editions.order_by("-edition_rank").all(),
-remote_id="%s/editions" % self.remote_id,
+remote_id=f"{self.remote_id}/editions",
 **kwargs,
 )

@@ -29,7 +29,4 @@ class Connector(BookWyrmModel):
 isbn_search_url = models.CharField(max_length=255, null=True, blank=True)

 def __str__(self):
-return "{} ({})".format(
-self.identifier,
-self.id,
-)
+return f"{self.identifier} ({self.id})"

@@ -56,7 +56,7 @@ class ActivitypubFieldMixin:
 activitypub_field=None,
 activitypub_wrapper=None,
 deduplication_field=False,
-**kwargs
+**kwargs,
 ):
 self.deduplication_field = deduplication_field
 if activitypub_wrapper:
@@ -308,7 +308,7 @@ class ManyToManyField(ActivitypubFieldMixin, models.ManyToManyField):

 def field_to_activity(self, value):
 if self.link_only:
-return "%s/%s" % (value.instance.remote_id, self.name)
+return f"{value.instance.remote_id}/{self.name}"
 return [i.remote_id for i in value.all()]

 def field_from_activity(self, value):
@@ -388,7 +388,7 @@ def image_serializer(value, alt):
 else:
 return None
 if not url[:4] == "http":
-url = "https://{:s}{:s}".format(DOMAIN, url)
+url = f"https://{DOMAIN}{url}"
 return activitypub.Document(url=url, name=alt)


@@ -448,7 +448,7 @@ class ImageField(ActivitypubFieldMixin, models.ImageField):

 image_content = ContentFile(response.content)
 extension = imghdr.what(None, image_content.read()) or ""
-image_name = "{:s}.{:s}".format(str(uuid4()), extension)
+image_name = f"{uuid4()}.{extension}"
 return [image_name, image_content]

 def formfield(self, **kwargs):

@@ -198,7 +198,9 @@ class ImportItem(models.Model):
 return []

 def __repr__(self):
+# pylint: disable=consider-using-f-string
 return "<{!r}Item {!r}>".format(self.data["import_source"], self.data["Title"])

 def __str__(self):
+# pylint: disable=consider-using-f-string
 return "{} by {}".format(self.data["Title"], self.data["Author"])

@@ -42,7 +42,7 @@ class List(OrderedCollectionMixin, BookWyrmModel):

 def get_remote_id(self):
 """don't want the user to be in there in this case"""
-return "https://%s/list/%d" % (DOMAIN, self.id)
+return f"https://{DOMAIN}/list/{self.id}"

 @property
 def collection_queryset(self):

@@ -53,7 +53,7 @@ class UserRelationship(BookWyrmModel):
 def get_remote_id(self):
 """use shelf identifier in remote_id"""
 base_path = self.user_subject.remote_id
-return "%s#follows/%d" % (base_path, self.id)
+return f"{base_path}#follows/{self.id}"


 class UserFollows(ActivityMixin, UserRelationship):
@@ -144,7 +144,8 @@ class UserFollowRequest(ActivitypubMixin, UserRelationship):
 """get id for sending an accept or reject of a local user"""

 base_path = self.user_object.remote_id
-return "%s#%s/%d" % (base_path, status, self.id or 0)
+status_id = self.id or 0
+return f"{base_path}#{status}/{status_id}"

 def accept(self, broadcast_only=False):
 """turn this request into the real deal"""

@@ -44,7 +44,7 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
 def get_identifier(self):
 """custom-shelf-123 for the url"""
 slug = re.sub(r"[^\w]", "", self.name).lower()
-return "{:s}-{:d}".format(slug, self.id)
+return f"{slug}-{self.id}"

 @property
 def collection_queryset(self):
@@ -55,7 +55,7 @@ class Shelf(OrderedCollectionMixin, BookWyrmModel):
 """shelf identifier instead of id"""
 base_path = self.user.remote_id
 identifier = self.identifier or self.get_identifier()
-return "%s/books/%s" % (base_path, identifier)
+return f"{base_path}/books/{identifier}"

 class Meta:
 """user/shelf unqiueness"""

@@ -87,7 +87,7 @@ class SiteInvite(models.Model):
 @property
 def link(self):
 """formats the invite link"""
-return "https://{}/invite/{}".format(DOMAIN, self.code)
+return f"https://{DOMAIN}/invite/{self.code}"


 class InviteRequest(BookWyrmModel):
@@ -127,7 +127,7 @@ class PasswordReset(models.Model):
 @property
 def link(self):
 """formats the invite link"""
-return "https://{}/password-reset/{}".format(DOMAIN, self.code)
+return f"https://{DOMAIN}/password-reset/{self.code}"


 # pylint: disable=unused-argument

@@ -179,9 +179,9 @@ class Status(OrderedCollectionPageMixin, BookWyrmModel):
 """helper function for loading AP serialized replies to a status"""
 return self.to_ordered_collection(
 self.replies(self),
-remote_id="%s/replies" % self.remote_id,
+remote_id=f"{self.remote_id}/replies",
 collection_only=True,
-**kwargs
+**kwargs,
 ).serialize()

 def to_activity_dataclass(self, pure=False): # pylint: disable=arguments-differ
@@ -226,10 +226,10 @@ class GeneratedNote(Status):
 """indicate the book in question for mastodon (or w/e) users"""
 message = self.content
 books = ", ".join(
-'<a href="%s">"%s"</a>' % (book.remote_id, book.title)
+f'<a href="{book.remote_id}">"{book.title}"</a>'
 for book in self.mention_books.all()
 )
-return "%s %s %s" % (self.user.display_name, message, books)
+return f"{self.user.display_name} {message} {books}"

 activity_serializer = activitypub.GeneratedNote
 pure_type = "Note"
@@ -277,10 +277,9 @@ class Comment(BookStatus):
 @property
 def pure_content(self):
 """indicate the book in question for mastodon (or w/e) users"""
-return '%s<p>(comment on <a href="%s">"%s"</a>)</p>' % (
-self.content,
-self.book.remote_id,
-self.book.title,
+return (
+f'{self.content}<p>(comment on <a href="{self.book.remote_id}">'
+f'"{self.book.title}"</a>)</p>'
 )

 activity_serializer = activitypub.Comment
@@ -306,11 +305,9 @@ class Quotation(BookStatus):
 """indicate the book in question for mastodon (or w/e) users"""
 quote = re.sub(r"^<p>", '<p>"', self.quote)
 quote = re.sub(r"</p>$", '"</p>', quote)
-return '%s <p>-- <a href="%s">"%s"</a></p>%s' % (
-quote,
-self.book.remote_id,
-self.book.title,
-self.content,
+return (
+f'{quote} <p>-- <a href="{self.book.remote_id}">'
+f'"{self.book.title}"</a></p>{self.content}'
 )

 activity_serializer = activitypub.Quotation

@@ -152,12 +152,13 @@ class User(OrderedCollectionPageMixin, AbstractUser):
 @property
 def following_link(self):
 """just how to find out the following info"""
-return "{:s}/following".format(self.remote_id)
+return f"{self.remote_id}/following"

 @property
 def alt_text(self):
 """alt text with username"""
-return "avatar for %s" % (self.localname or self.username)
+# pylint: disable=consider-using-f-string
+return "avatar for {:s}".format(self.localname or self.username)

 @property
 def display_name(self):
@@ -197,9 +198,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):
 def to_outbox(self, filter_type=None, **kwargs):
 """an ordered collection of statuses"""
 if filter_type:
-filter_class = apps.get_model(
-"bookwyrm.%s" % filter_type, require_ready=True
-)
+filter_class = apps.get_model(f"bookwyrm.{filter_type}", require_ready=True)
 if not issubclass(filter_class, Status):
 raise TypeError(
 "filter_status_class must be a subclass of models.Status"
@@ -223,7 +222,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):

 def to_following_activity(self, **kwargs):
 """activitypub following list"""
-remote_id = "%s/following" % self.remote_id
+remote_id = f"{self.remote_id}/following"
 return self.to_ordered_collection(
 self.following.order_by("-updated_date").all(),
 remote_id=remote_id,
@@ -266,7 +265,7 @@ class User(OrderedCollectionPageMixin, AbstractUser):
 if not self.local and not re.match(regex.FULL_USERNAME, self.username):
 # generate a username that uses the domain (webfinger format)
 actor_parts = urlparse(self.remote_id)
-self.username = "%s@%s" % (self.username, actor_parts.netloc)
+self.username = f"{self.username}@{actor_parts.netloc}"

 # this user already exists, no need to populate fields
 if not created:
@@ -320,7 +319,8 @@ class User(OrderedCollectionPageMixin, AbstractUser):
 @property
 def local_path(self):
 """this model doesn't inherit bookwyrm model, so here we are"""
-return "/user/%s" % (self.localname or self.username)
+# pylint: disable=consider-using-f-string
+return "/user/{:s}".format(self.localname or self.username)

 def create_shelves(self):
 """default shelves for a new user"""
@@ -361,7 +361,7 @@ class KeyPair(ActivitypubMixin, BookWyrmModel):

 def get_remote_id(self):
 # self.owner is set by the OneToOneField on User
-return "%s/#main-key" % self.owner.remote_id
+return f"{self.owner.remote_id}/#main-key"

 def save(self, *args, **kwargs):
 """create a key pair"""
@@ -398,7 +398,7 @@ class AnnualGoal(BookWyrmModel):

 def get_remote_id(self):
 """put the year in the path"""
-return "{:s}/goal/{:d}".format(self.user.remote_id, self.year)
+return f"{self.user.remote_id}/goal/{self.year}"

 @property
 def books(self):
@@ -454,7 +454,7 @@ def get_or_create_remote_server(domain):
 pass

 try:
-data = get_data("https://%s/.well-known/nodeinfo" % domain)
+data = get_data(f"https://{domain}/.well-known/nodeinfo")
 try:
 nodeinfo_url = data.get("links")[0].get("href")
 except (TypeError, KeyError):

@@ -220,6 +220,7 @@ def generate_default_inner_img():


 # pylint: disable=too-many-locals
+# pylint: disable=too-many-statements
 def generate_preview_image(
 texts=None, picture=None, rating=None, show_instance_layer=True
 ):
@@ -237,7 +238,8 @@ def generate_preview_image(

 # Color
 if BG_COLOR in ["use_dominant_color_light", "use_dominant_color_dark"]:
-image_bg_color = "rgb(%s, %s, %s)" % dominant_color
+red, green, blue = dominant_color
+image_bg_color = f"rgb({red}, {green}, {blue})"

 # Adjust color
 image_bg_color_rgb = [x / 255.0 for x in ImageColor.getrgb(image_bg_color)]
@@ -315,7 +317,8 @@ def save_and_cleanup(image, instance=None):
 """Save and close the file"""
 if not isinstance(instance, (models.Book, models.User, models.SiteSettings)):
 return False
-file_name = "%s-%s.jpg" % (str(instance.id), str(uuid4()))
+uuid = uuid4()
+file_name = f"{instance.id}-{uuid}.jpg"
 image_buffer = BytesIO()

 try:
@@ -412,7 +415,7 @@ def generate_user_preview_image_task(user_id):

 texts = {
 "text_one": user.display_name,
-"text_three": "@{}@{}".format(user.localname, settings.DOMAIN),
+"text_three": f"@{user.localname}@{settings.DOMAIN}",
 }

 if user.avatar:

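The unpacking in the dominant-color hunk is needed because the old "%" form spread the tuple across three placeholders automatically, which an f-string will not do. Roughly:

    dominant_color = (12, 34, 56)
    "rgb(%s, %s, %s)" % dominant_color   # "rgb(12, 34, 56)"
    f"rgb({dominant_color})"             # "rgb((12, 34, 56))", not valid CSS
    red, green, blue = dominant_color
    f"rgb({red}, {green}, {blue})"       # "rgb(12, 34, 56)"
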
@@ -48,7 +48,7 @@ class InputHtmlParser(HTMLParser): # pylint: disable=abstract-method
 return

 self.tag_stack = self.tag_stack[:-1]
-self.output.append(("tag", "</%s>" % tag))
+self.output.append(("tag", f"</{tag}>"))

 def handle_data(self, data):
 """extract the answer, if we're in an answer tag"""

@@ -23,7 +23,7 @@ EMAIL_HOST_USER = env("EMAIL_HOST_USER")
 EMAIL_HOST_PASSWORD = env("EMAIL_HOST_PASSWORD")
 EMAIL_USE_TLS = env.bool("EMAIL_USE_TLS", True)
 EMAIL_USE_SSL = env.bool("EMAIL_USE_SSL", False)
-DEFAULT_FROM_EMAIL = "admin@{:s}".format(env("DOMAIN"))
+DEFAULT_FROM_EMAIL = f"admin@{DOMAIN}"

 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -178,11 +178,8 @@ USE_L10N = True
 USE_TZ = True


-USER_AGENT = "%s (BookWyrm/%s; +https://%s/)" % (
-requests.utils.default_user_agent(),
-VERSION,
-DOMAIN,
-)
+agent = requests.utils.default_user_agent()
+USER_AGENT = f"{agent} (BookWyrm/{VERSION}; +https://{DOMAIN}/)"

 # Imagekit generated thumbnails
 ENABLE_THUMBNAIL_GENERATION = env.bool("ENABLE_THUMBNAIL_GENERATION", False)
@@ -213,11 +210,11 @@ if USE_S3:
 AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
 # S3 Static settings
 STATIC_LOCATION = "static"
-STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATIC_LOCATION)
+STATIC_URL = f"https://{AWS_S3_CUSTOM_DOMAIN}/{STATIC_LOCATION}/"
 STATICFILES_STORAGE = "bookwyrm.storage_backends.StaticStorage"
 # S3 Media settings
 MEDIA_LOCATION = "images"
-MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIA_LOCATION)
+MEDIA_URL = f"https://{AWS_S3_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/"
 MEDIA_FULL_URL = MEDIA_URL
 DEFAULT_FILE_STORAGE = "bookwyrm.storage_backends.ImagesStorage"
 # I don't know if it's used, but the site crashes without it
@@ -227,5 +224,5 @@ else:
 STATIC_URL = "/static/"
 STATIC_ROOT = os.path.join(BASE_DIR, env("STATIC_ROOT", "static"))
 MEDIA_URL = "/images/"
-MEDIA_FULL_URL = "%s://%s%s" % (PROTOCOL, DOMAIN, MEDIA_URL)
+MEDIA_FULL_URL = f"{PROTOCOL}://{DOMAIN}{MEDIA_URL}"
 MEDIA_ROOT = os.path.join(BASE_DIR, env("MEDIA_ROOT", "images"))

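With requests' default agent string, the rebuilt constant keeps the same shape as before; for example (all values illustrative):

    # assuming requests.utils.default_user_agent() == "python-requests/2.26.0",
    # VERSION == "0.1.0" and DOMAIN == "example.net"
    USER_AGENT = "python-requests/2.26.0 (BookWyrm/0.1.0; +https://example.net/)"
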
@@ -26,21 +26,21 @@ def make_signature(sender, destination, date, digest):
 """uses a private key to sign an outgoing message"""
 inbox_parts = urlparse(destination)
 signature_headers = [
-"(request-target): post %s" % inbox_parts.path,
-"host: %s" % inbox_parts.netloc,
-"date: %s" % date,
-"digest: %s" % digest,
+f"(request-target): post {inbox_parts.path}",
+f"host: {inbox_parts.netloc}",
+f"date: {date}",
+f"digest: {digest}",
 ]
 message_to_sign = "\n".join(signature_headers)
 signer = pkcs1_15.new(RSA.import_key(sender.key_pair.private_key))
 signed_message = signer.sign(SHA256.new(message_to_sign.encode("utf8")))
 signature = {
-"keyId": "%s#main-key" % sender.remote_id,
+"keyId": f"{sender.remote_id}#main-key",
 "algorithm": "rsa-sha256",
 "headers": "(request-target) host date digest",
 "signature": b64encode(signed_message).decode("utf8"),
 }
-return ",".join('%s="%s"' % (k, v) for (k, v) in signature.items())
+return ",".join(f'{k}="{v}"' for (k, v) in signature.items())


 def make_digest(data):
@@ -58,7 +58,7 @@ def verify_digest(request):
 elif algorithm == "SHA-512":
 hash_function = hashlib.sha512
 else:
-raise ValueError("Unsupported hash function: {}".format(algorithm))
+raise ValueError(f"Unsupported hash function: {algorithm}")

 expected = hash_function(request.body).digest()
 if b64decode(digest) != expected:
@@ -95,18 +95,18 @@ class Signature:
 def verify(self, public_key, request):
 """verify rsa signature"""
 if http_date_age(request.headers["date"]) > MAX_SIGNATURE_AGE:
-raise ValueError("Request too old: %s" % (request.headers["date"],))
+raise ValueError(f"Request too old: {request.headers['date']}")
 public_key = RSA.import_key(public_key)

 comparison_string = []
 for signed_header_name in self.headers.split(" "):
 if signed_header_name == "(request-target)":
-comparison_string.append("(request-target): post %s" % request.path)
+comparison_string.append(f"(request-target): post {request.path}")
 else:
 if signed_header_name == "digest":
 verify_digest(request)
 comparison_string.append(
-"%s: %s" % (signed_header_name, request.headers[signed_header_name])
+f"{signed_header_name}: {request.headers[signed_header_name]}"
 )
 comparison_string = "\n".join(comparison_string)

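For reference, the join at the end of make_signature yields an HTTP Signature header value of roughly this shape (key id and base64 payload are placeholders):

    keyId="https://example.net/user/mouse#main-key",algorithm="rsa-sha256",headers="(request-target) host date digest",signature="<base64 signature>"
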
@@ -24,8 +24,8 @@ class SuggestedUsers(RedisStore):
 def store_id(self, user): # pylint: disable=no-self-use
 """the key used to store this user's recs"""
 if isinstance(user, int):
-return "{:d}-suggestions".format(user)
-return "{:d}-suggestions".format(user.id)
+return f"{user}-suggestions"
+return f"{user.id}-suggestions"

 def get_counts_from_rank(self, rank): # pylint: disable=no-self-use
 """calculate mutuals count and shared books count from rank"""

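A small behavioural note on this one: "{:d}".format(...) insisted on an integer and raised for anything else, while the f-string form stringifies whatever it gets, so the isinstance check above is now the only guard. For example:

    "{:d}-suggestions".format("42")  # ValueError: Unknown format code 'd' for object of type 'str'
    f"{'42'}-suggestions"            # "42-suggestions", no error
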
@@ -12,7 +12,7 @@ register = template.Library()
 @register.filter(name="uuid")
 def get_uuid(identifier):
 """for avoiding clashing ids when there are many forms"""
-return "%s%s" % (identifier, uuid4())
+return f"{identifier}{uuid4()}"


 @register.filter(name="username")
@@ -50,7 +50,7 @@ def truncatepath(value, arg):
 length = int(arg)
 except ValueError: # invalid literal for int()
 return path_list[-1] # Fail silently.
-return "%s/…%s" % (path_list[0], path_list[-1][-length:])
+return f"{path_list[0]}/…{path_list[-1][-length:]}"


 @register.simple_tag(takes_context=False)
@@ -60,7 +60,7 @@ def get_book_cover_thumbnail(book, size="medium", ext="jpg"):
 if size == "":
 size = "medium"
 try:
-cover_thumbnail = getattr(book, "cover_bw_book_%s_%s" % (size, ext))
+cover_thumbnail = getattr(book, f"cover_bw_book_{size}_{ext}")
 return cover_thumbnail.url
 except OSError:
 return static("images/no_cover.jpg")

bookwyrm/tests/models/test_site.py (new file, 95 additions)

@@ -0,0 +1,95 @@
+""" testing models """
+from datetime import timedelta
+from unittest.mock import patch
+
+from django.db import IntegrityError
+from django.test import TestCase
+from django.utils import timezone
+
+from bookwyrm import models, settings
+
+
+class SiteModels(TestCase):
+    """tests for site models"""
+
+    def setUp(self):
+        """we need basic test data and mocks"""
+        with patch("bookwyrm.suggested_users.rerank_suggestions_task.delay"), patch(
+            "bookwyrm.activitystreams.populate_stream_task.delay"
+        ):
+            self.local_user = models.User.objects.create_user(
+                "mouse@local.com",
+                "mouse@mouse.com",
+                "mouseword",
+                local=True,
+                localname="mouse",
+                remote_id="https://example.com/users/mouse",
+            )
+
+    def test_site_settings_absent(self):
+        """create and load site settings"""
+        self.assertFalse(models.SiteSettings.objects.exists())
+        result = models.SiteSettings.get()
+        self.assertTrue(models.SiteSettings.objects.exists())
+        self.assertEqual(result.id, 1)
+        self.assertEqual(result.name, "BookWyrm")
+
+    def test_site_settings_present(self):
+        """load site settings"""
+        models.SiteSettings.objects.create(id=1, name="Fish Town")
+        result = models.SiteSettings.get()
+        self.assertEqual(result.id, 1)
+        self.assertEqual(result.name, "Fish Town")
+        self.assertEqual(models.SiteSettings.objects.all().count(), 1)
+
+    def test_site_invite(self):
+        """default invite"""
+        invite = models.SiteInvite.objects.create(
+            user=self.local_user,
+        )
+        self.assertTrue(invite.valid())
+
+    def test_site_invite_with_limit(self):
+        """with use limit"""
+        # valid
+        invite = models.SiteInvite.objects.create(user=self.local_user, use_limit=1)
+        self.assertTrue(invite.valid())
+
+        # invalid
+        invite = models.SiteInvite.objects.create(user=self.local_user, use_limit=0)
+        self.assertFalse(invite.valid())
+        invite = models.SiteInvite.objects.create(
+            user=self.local_user, use_limit=1, times_used=1
+        )
+        self.assertFalse(invite.valid())
+
+    def test_site_invite_with_expiry(self):
+        """with expiration date"""
+        date = timezone.now() + timedelta(days=1)
+        invite = models.SiteInvite.objects.create(user=self.local_user, expiry=date)
+        self.assertTrue(invite.valid())
+
+        date = timezone.now() - timedelta(days=1)
+        invite = models.SiteInvite.objects.create(user=self.local_user, expiry=date)
+        self.assertFalse(invite.valid())
+
+    def test_site_invite_link(self):
+        """invite link generator"""
+        invite = models.SiteInvite.objects.create(user=self.local_user, code="hello")
+        self.assertEqual(invite.link, f"https://{settings.DOMAIN}/invite/hello")
+
+    def test_invite_request(self):
+        """someone wants an invite"""
+        # normal and good
+        request = models.InviteRequest.objects.create(email="mouse.reeve@gmail.com")
+        self.assertIsNone(request.invite)
+
+        # already in use
+        with self.assertRaises(IntegrityError):
+            request = models.InviteRequest.objects.create(email="mouse@mouse.com")
+
+    def test_password_reset(self):
+        """password reset token"""
+        token = models.PasswordReset.objects.create(user=self.local_user, code="hello")
+        self.assertTrue(token.valid())
+        self.assertEqual(token.link, f"https://{settings.DOMAIN}/password-reset/hello")

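The first two tests pin down the SiteSettings.get() behaviour the rest of the app relies on: it lazily creates a single default row (name "BookWyrm") and keeps returning that same row afterwards. In sketch form, per what the assertions check:

    site = models.SiteSettings.get()  # creates the row if the table is empty
    site = models.SiteSettings.get()  # later calls return the same single row
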
@@ -20,6 +20,7 @@ from bookwyrm.preview_images import (

 # pylint: disable=unused-argument
+# pylint: disable=missing-function-docstring
 # pylint: disable=consider-using-with
 class PreviewImages(TestCase):
 """every response to a get request, html or json"""

@@ -120,3 +121,11 @@ class PreviewImages(TestCase):
 self.assertEqual(
 self.local_user.preview_image.height, settings.PREVIEW_IMG_HEIGHT
 )
+
+def test_generate_user_preview_images_task(self, *args, **kwargs):
+"""test task's external calls"""
+with patch("bookwyrm.preview_images.generate_preview_image") as generate_mock:
+generate_user_preview_image_task(self.local_user.id)
+args = generate_mock.call_args.kwargs
+self.assertEqual(args["texts"]["text_one"], "possum")
+self.assertEqual(args["texts"]["text_three"], f"@possum@{settings.DOMAIN}")

@@ -7,8 +7,8 @@ from django.views.generic.base import TemplateView
 from bookwyrm import settings, views
 from bookwyrm.utils import regex

-USER_PATH = r"^user/(?P<username>%s)" % regex.USERNAME
-LOCAL_USER_PATH = r"^user/(?P<username>%s)" % regex.LOCALNAME
+USER_PATH = rf"^user/(?P<username>{regex.USERNAME})"
+LOCAL_USER_PATH = rf"^user/(?P<username>{regex.LOCALNAME})"

 status_types = [
 "status",
@@ -19,7 +19,9 @@ status_types = [
 "boost",
 "generatednote",
 ]
-STATUS_PATH = r"%s/(%s)/(?P<status_id>\d+)" % (USER_PATH, "|".join(status_types))
+
+STATUS_TYPES_STRING = "|".join(status_types)
+STATUS_PATH = rf"{USER_PATH}/({STATUS_TYPES_STRING})/(?P<status_id>\d+)"

 BOOK_PATH = r"^book/(?P<book_id>\d+)"

@@ -33,8 +35,8 @@ urlpatterns = [
 ),
 # federation endpoints
 re_path(r"^inbox/?$", views.Inbox.as_view()),
-re_path(r"%s/inbox/?$" % LOCAL_USER_PATH, views.Inbox.as_view()),
-re_path(r"%s/outbox/?$" % LOCAL_USER_PATH, views.Outbox.as_view()),
+re_path(rf"{LOCAL_USER_PATH}/inbox/?$", views.Inbox.as_view()),
+re_path(rf"{LOCAL_USER_PATH}/outbox/?$", views.Outbox.as_view()),
 re_path(r"^\.well-known/webfinger/?$", views.webfinger),
 re_path(r"^\.well-known/nodeinfo/?$", views.nodeinfo_pointer),
 re_path(r"^\.well-known/host-meta/?$", views.host_meta),
@@ -220,12 +222,12 @@ urlpatterns = [
 name="get-started-users",
 ),
 # feeds
-re_path(r"^(?P<tab>{:s})/?$".format(STREAMS), views.Feed.as_view()),
+re_path(rf"^(?P<tab>{STREAMS})/?$", views.Feed.as_view()),
 re_path(
 r"^direct-messages/?$", views.DirectMessage.as_view(), name="direct-messages"
 ),
 re_path(
-r"^direct-messages/(?P<username>%s)?$" % regex.USERNAME,
+rf"^direct-messages/(?P<username>{regex.USERNAME})?$",
 views.DirectMessage.as_view(),
 name="direct-messages-user",
 ),
@@ -235,22 +237,22 @@ urlpatterns = [
 re_path(r"^import/?$", views.Import.as_view(), name="import"),
 re_path(r"^import/(\d+)/?$", views.ImportStatus.as_view(), name="import-status"),
 # users
-re_path(r"%s\.json$" % USER_PATH, views.User.as_view()),
-re_path(r"%s/?$" % USER_PATH, views.User.as_view(), name="user-feed"),
-re_path(r"%s/rss" % USER_PATH, views.rss_feed.RssFeed(), name="user-rss"),
+re_path(rf"{USER_PATH}\.json$", views.User.as_view()),
+re_path(rf"{USER_PATH}/?$", views.User.as_view(), name="user-feed"),
+re_path(rf"{USER_PATH}/rss/?$", views.rss_feed.RssFeed(), name="user-rss"),
 re_path(
-r"%s/followers(.json)?/?$" % USER_PATH,
+rf"{USER_PATH}/followers(.json)?/?$",
 views.Followers.as_view(),
 name="user-followers",
 ),
 re_path(
-r"%s/following(.json)?/?$" % USER_PATH,
+rf"{USER_PATH}/following(.json)?/?$",
 views.Following.as_view(),
 name="user-following",
 ),
 re_path(r"^hide-suggestions/?$", views.hide_suggestions, name="hide-suggestions"),
 # lists
-re_path(r"%s/lists/?$" % USER_PATH, views.UserLists.as_view(), name="user-lists"),
+re_path(rf"{USER_PATH}/lists/?$", views.UserLists.as_view(), name="user-lists"),
 re_path(r"^list/?$", views.Lists.as_view(), name="lists"),
 re_path(r"^list/saved/?$", views.SavedLists.as_view(), name="saved-lists"),
 re_path(r"^list/(?P<list_id>\d+)(.json)?/?$", views.List.as_view(), name="list"),
@@ -272,14 +274,14 @@ urlpatterns = [
 re_path(r"^save-list/(?P<list_id>\d+)/?$", views.save_list, name="list-save"),
 re_path(r"^unsave-list/(?P<list_id>\d+)/?$", views.unsave_list, name="list-unsave"),
 # User books
-re_path(r"%s/books/?$" % USER_PATH, views.Shelf.as_view(), name="user-shelves"),
+re_path(rf"{USER_PATH}/books/?$", views.Shelf.as_view(), name="user-shelves"),
 re_path(
-r"^%s/(helf|books)/(?P<shelf_identifier>[\w-]+)(.json)?/?$" % USER_PATH,
+rf"^{USER_PATH}/(helf|books)/(?P<shelf_identifier>[\w-]+)(.json)?/?$",
 views.Shelf.as_view(),
 name="shelf",
 ),
 re_path(
-r"^%s/(books|shelf)/(?P<shelf_identifier>[\w-]+)(.json)?/?$" % LOCAL_USER_PATH,
+rf"^{LOCAL_USER_PATH}/(books|shelf)/(?P<shelf_identifier>[\w-]+)(.json)?/?$",
 views.Shelf.as_view(),
 name="shelf",
 ),
@@ -289,7 +291,7 @@ urlpatterns = [
 re_path(r"^unshelve/?$", views.unshelve),
 # goals
 re_path(
-r"%s/goal/(?P<year>\d{4})/?$" % USER_PATH,
+rf"{USER_PATH}/goal/(?P<year>\d{4})/?$",
 views.Goal.as_view(),
 name="user-goal",
 ),
@@ -306,10 +308,10 @@ urlpatterns = [
 re_path(r"^block/(?P<user_id>\d+)/?$", views.Block.as_view()),
 re_path(r"^unblock/(?P<user_id>\d+)/?$", views.unblock),
 # statuses
-re_path(r"%s(.json)?/?$" % STATUS_PATH, views.Status.as_view(), name="status"),
-re_path(r"%s/activity/?$" % STATUS_PATH, views.Status.as_view(), name="status"),
+re_path(rf"{STATUS_PATH}(.json)?/?$", views.Status.as_view(), name="status"),
+re_path(rf"{STATUS_PATH}/activity/?$", views.Status.as_view(), name="status"),
 re_path(
-r"%s/replies(.json)?/?$" % STATUS_PATH, views.Replies.as_view(), name="replies"
+rf"{STATUS_PATH}/replies(.json)?/?$", views.Replies.as_view(), name="replies"
 ),
 re_path(
 r"^post/?$",
@@ -339,17 +341,17 @@ urlpatterns = [
 re_path(r"^boost/(?P<status_id>\d+)/?$", views.Boost.as_view()),
 re_path(r"^unboost/(?P<status_id>\d+)/?$", views.Unboost.as_view()),
 # books
-re_path(r"%s(.json)?/?$" % BOOK_PATH, views.Book.as_view(), name="book"),
+re_path(rf"{BOOK_PATH}(.json)?/?$", views.Book.as_view(), name="book"),
 re_path(
-r"%s/(?P<user_statuses>review|comment|quote)/?$" % BOOK_PATH,
+rf"{BOOK_PATH}/(?P<user_statuses>review|comment|quote)/?$",
 views.Book.as_view(),
 name="book-user-statuses",
 ),
-re_path(r"%s/edit/?$" % BOOK_PATH, views.EditBook.as_view(), name="edit-book"),
-re_path(r"%s/confirm/?$" % BOOK_PATH, views.ConfirmEditBook.as_view()),
+re_path(rf"{BOOK_PATH}/edit/?$", views.EditBook.as_view(), name="edit-book"),
+re_path(rf"{BOOK_PATH}/confirm/?$", views.ConfirmEditBook.as_view()),
 re_path(r"^create-book/?$", views.EditBook.as_view(), name="create-book"),
 re_path(r"^create-book/confirm?$", views.ConfirmEditBook.as_view()),
-re_path(r"%s/editions(.json)?/?$" % BOOK_PATH, views.Editions.as_view()),
+re_path(rf"{BOOK_PATH}/editions(.json)?/?$", views.Editions.as_view()),
 re_path(
 r"^upload-cover/(?P<book_id>\d+)/?$", views.upload_cover, name="upload-cover"
 ),

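One subtlety when folding regexes into rf-strings, relevant to patterns like the year group above: braces are format fields inside an f-string, so literal quantifier braces have to be doubled or they get interpolated. A minimal illustration with a hypothetical pattern:

    PREFIX = r"^goal/"
    rf"{PREFIX}(?P<year>\d{{4}})$"  # -> ^goal/(?P<year>\d{4})$ as intended
    rf"{PREFIX}(?P<year>\d{4})$"    # -> ^goal/(?P<year>\d4)$ because {4} was interpolated
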
@@ -3,8 +3,8 @@
 DOMAIN = r"[\w_\-\.]+\.[a-z]{2,}"
 LOCALNAME = r"@?[a-zA-Z_\-\.0-9]+"
 STRICT_LOCALNAME = r"@[a-zA-Z_\-\.0-9]+"
-USERNAME = r"%s(@%s)?" % (LOCALNAME, DOMAIN)
-STRICT_USERNAME = r"\B%s(@%s)?\b" % (STRICT_LOCALNAME, DOMAIN)
-FULL_USERNAME = r"%s@%s\b" % (LOCALNAME, DOMAIN)
+USERNAME = rf"{LOCALNAME}(@{DOMAIN})?"
+STRICT_USERNAME = rf"\B{STRICT_LOCALNAME}(@{DOMAIN})?\b"
+FULL_USERNAME = rf"{LOCALNAME}@{DOMAIN}\b"
 # should match (BookWyrm/1.0.0; or (BookWyrm/99.1.2;
 BOOKWYRM_USER_AGENT = r"\(BookWyrm/[0-9]+\.[0-9]+\.[0-9]+;"

@@ -55,4 +55,4 @@ class EditAuthor(View):
 return TemplateResponse(request, "author/edit_author.html", data)
 author = form.save()

-return redirect("/author/%s" % author.id)
+return redirect(f"/author/{author.id}")

@@ -174,7 +174,7 @@ class EditBook(View):
 # check if this is an edition of an existing work
 author_text = book.author_text if book else add_author
 data["book_matches"] = connector_manager.local_search(
-"%s %s" % (form.cleaned_data.get("title"), author_text),
+f'{form.cleaned_data.get("title")} {author_text}',
 min_confidence=0.5,
 raw=True,
 )[:5]
@@ -212,7 +212,7 @@ class EditBook(View):
 if image:
 book.cover.save(*image, save=False)
 book.save()
-return redirect("/book/%s" % book.id)
+return redirect(f"/book/{book.id}")


 @method_decorator(login_required, name="dispatch")
@@ -238,14 +238,14 @@ class ConfirmEditBook(View):

 # get or create author as needed
 for i in range(int(request.POST.get("author-match-count", 0))):
-match = request.POST.get("author_match-%d" % i)
+match = request.POST.get(f"author_match-{i}")
 if not match:
 return HttpResponseBadRequest()
 try:
 # if it's an int, it's an ID
 match = int(match)
 author = get_object_or_404(
-models.Author, id=request.POST["author_match-%d" % i]
+models.Author, id=request.POST[f"author_match-{i}"]
 )
 except ValueError:
 # otherwise it's a name
@@ -267,7 +267,7 @@ class ConfirmEditBook(View):
 for author_id in request.POST.getlist("remove_authors"):
 book.authors.remove(author_id)

-return redirect("/book/%s" % book.id)
+return redirect(f"/book/{book.id}")


 @login_required
@@ -283,7 +283,7 @@ def upload_cover(request, book_id):
 if image:
 book.cover.save(*image)

-return redirect("{:s}?cover_error=True".format(book.local_path))
+return redirect(f"{book.local_path}?cover_error=True")

 form = forms.CoverForm(request.POST, request.FILES, instance=book)
 if not form.is_valid() or not form.files.get("cover"):

@@ -79,7 +79,7 @@ def save_user_form(form):

 # set the name to a hash
 extension = form.files["avatar"].name.split(".")[-1]
-filename = "%s.%s" % (uuid4(), extension)
+filename = f"{uuid4()}.{extension}"
 user.avatar.save(filename, image, save=False)
 user.save()
 return user

@@ -96,4 +96,4 @@ def switch_edition(request):
 readthrough.book = new_edition
 readthrough.save()

-return redirect("/book/%d" % new_edition.id)
+return redirect(f"/book/{new_edition.id}")

@@ -42,7 +42,7 @@ class Feed(View):
 "tab": tab,
 "streams": STREAMS,
 "goal_form": forms.GoalForm(),
-"path": "/%s" % tab["key"],
+"path": f"/{tab['key']}",
 },
 }
 return TemplateResponse(request, "feed/feed.html", data)

@@ -86,4 +86,4 @@ def delete_follow_request(request):
 return HttpResponseBadRequest()

 follow_request.delete()
-return redirect("/user/%s" % request.user.localname)
+return redirect(f"/user/{request.user.localname}")

@@ -113,7 +113,7 @@ def handle_remote_webfinger(query):
 try:
 user = models.User.objects.get(username__iexact=query)
 except models.User.DoesNotExist:
-url = "https://%s/.well-known/webfinger?resource=acct:%s" % (domain, query)
+url = f"https://{domain}/.well-known/webfinger?resource=acct:{query}"
 try:
 data = get_data(url)
 except (ConnectorException, HTTPError):

@@ -68,7 +68,7 @@ class Import(View):

 importer.start_import(job)

-return redirect("/import/%d" % job.id)
+return redirect(f"/import/{job.id}")
 return HttpResponseBadRequest()


@@ -112,4 +112,4 @@ class ImportStatus(View):
 items,
 )
 importer.start_import(job)
-return redirect("/import/%d" % job.id)
+return redirect(f"/import/{job.id}")

@@ -71,7 +71,7 @@ def is_blocked_user_agent(request):
 user_agent = request.headers.get("User-Agent")
 if not user_agent:
 return False
-url = re.search(r"https?://{:s}/?".format(regex.DOMAIN), user_agent)
+url = re.search(rf"https?://{regex.DOMAIN}/?", user_agent)
 if not url:
 return False
 url = url.group()

@@ -324,7 +324,7 @@ def add_book(request):
 path = reverse("list", args=[book_list.id])
 params = request.GET.copy()
 params["updated"] = True
-return redirect("{:s}?{:s}".format(path, urlencode(params)))
+return redirect(f"{path}?{urlencode(params)}")


 @require_POST
@@ -398,7 +398,7 @@ def set_book_position(request, list_item_id):
 def increment_order_in_reverse(
 book_list_id: int, start: int, end: Optional[int] = None
 ):
-"""increase the order nu,ber for every item in a list"""
+"""increase the order number for every item in a list"""
 try:
 book_list = models.List.objects.get(id=book_list_id)
 except models.List.DoesNotExist:

@@ -46,7 +46,7 @@ class Login(View):
 except models.User.DoesNotExist: # maybe it's a full username?
 username = localname
 else:
-username = "%s@%s" % (localname, DOMAIN)
+username = f"{localname}@{DOMAIN}"
 password = login_form.data["password"]

 # perform authentication

@@ -38,7 +38,7 @@ class PasswordResetRequest(View):
 # create a new reset code
 code = models.PasswordReset.objects.create(user=user)
 password_reset_email(code)
-data = {"message": _("A password reset link sent to %s" % email)}
+data = {"message": _(f"A password reset link sent to {email}")}
 return TemplateResponse(request, "password_reset_request.html", data)

@@ -68,7 +68,7 @@ class Register(View):
 return TemplateResponse(request, "invite.html", data)
 return TemplateResponse(request, "login.html", data)

-username = "%s@%s" % (localname, DOMAIN)
+username = f"{localname}@{DOMAIN}"
 user = models.User.objects.create_user(
 username,
 email,

@@ -63,7 +63,7 @@ class Search(View):
 data["results"] = paginated
 data["remote"] = search_remote

-return TemplateResponse(request, "search/{:s}.html".format(search_type), data)
+return TemplateResponse(request, f"search/{search_type}.html", data)


 def book_search(query, _, min_confidence, search_remote=False):

@@ -36,7 +36,7 @@ class CreateStatus(View):
 status_type = status_type[0].upper() + status_type[1:]

 try:
-form = getattr(forms, "%sForm" % status_type)(request.POST)
+form = getattr(forms, f"{status_type}Form")(request.POST)
 except AttributeError:
 return HttpResponseBadRequest()
 if not form.is_valid():
@@ -58,8 +58,8 @@ class CreateStatus(View):

 # turn the mention into a link
 content = re.sub(
-r"%s([^@]|$)" % mention_text,
-r'<a href="%s">%s</a>\g<1>' % (mention_user.remote_id, mention_text),
+rf"{mention_text}([^@]|$)",
+rf'<a href="{mention_user.remote_id}">{mention_text}</a>\g<1>',
 content,
 )
 # add reply parent to mentions
@@ -182,7 +182,7 @@ def format_links(content):
 if url.fragment != "":
 link += "#" + url.fragment

-formatted_content += '<a href="%s">%s</a>' % (potential_link, link)
+formatted_content += f'<a href="{potential_link}">{link}</a>'
 except (ValidationError, UnicodeError):
 formatted_content += potential_link

@@ -26,7 +26,7 @@ def webfinger(request):

 return JsonResponse(
 {
-"subject": "acct:%s" % (user.username),
+"subject": f"acct:{user.username}",
 "links": [
 {
 "rel": "self",
@@ -46,7 +46,7 @@ def nodeinfo_pointer(_):
 "links": [
 {
 "rel": "http://nodeinfo.diaspora.software/ns/schema/2.0",
-"href": "https://%s/nodeinfo/2.0" % DOMAIN,
+"href": f"https://{DOMAIN}/nodeinfo/2.0",
 }
 ]
 }