2020-03-07 06:56:44 +00:00
|
|
|
''' base model with default fields '''
|
2020-09-17 20:02:52 +00:00
|
|
|
from base64 import b64encode
|
2020-12-12 21:39:55 +00:00
|
|
|
from functools import reduce
|
|
|
|
import operator
|
2020-09-17 20:02:52 +00:00
|
|
|
from uuid import uuid4
|
|
|
|
|
|
|
|
from Crypto.PublicKey import RSA
|
|
|
|
from Crypto.Signature import pkcs1_15
|
|
|
|
from Crypto.Hash import SHA256
|
2020-12-01 03:01:43 +00:00
|
|
|
from django.core.paginator import Paginator
|
2020-02-18 00:50:44 +00:00
|
|
|
from django.db import models
|
2020-12-12 21:39:55 +00:00
|
|
|
from django.db.models import Q
|
2020-05-13 01:56:28 +00:00
|
|
|
from django.dispatch import receiver
|
2020-02-18 00:50:44 +00:00
|
|
|
|
2020-09-21 15:10:37 +00:00
|
|
|
from bookwyrm import activitypub
|
2020-12-01 03:01:43 +00:00
|
|
|
from bookwyrm.settings import DOMAIN, PAGE_LENGTH
|
2020-12-13 20:02:26 +00:00
|
|
|
from .fields import ImageField, ManyToManyField, RemoteIdField
|
2020-02-18 00:50:44 +00:00
|
|
|
|
2020-10-30 18:21:02 +00:00
|
|
|
|
2020-09-21 15:16:34 +00:00
|
|
|
class BookWyrmModel(models.Model):
    ''' shared fields for all bookwyrm models '''
    # timestamps maintained automatically by django on insert/update
    created_date = models.DateTimeField(auto_now_add=True)
    updated_date = models.DateTimeField(auto_now=True)
    # canonical activitypub id; null until populated by the post_save signal
    remote_id = RemoteIdField(null=True, activitypub_field='id')

    def get_remote_id(self):
        ''' generate a url that resolves to the local object '''
        base_path = 'https://%s' % DOMAIN
        # objects that belong to a user nest under that user's remote id
        if hasattr(self, 'user'):
            base_path = self.user.remote_id
        model_name = type(self).__name__.lower()
        # requires self.id, so this only works after the row is saved
        return '%s/%s/%d' % (base_path, model_name, self.id)

    class Meta:
        ''' this is just here to provide default fields for other models '''
        abstract = True

    @property
    def local_path(self):
        ''' how to link to this object in the local app '''
        # strip the scheme+domain prefix to get an app-relative path
        return self.get_remote_id().replace('https://%s' % DOMAIN, '')
|
|
|
|
|
2020-05-13 01:56:28 +00:00
|
|
|
|
2020-05-14 01:23:54 +00:00
|
|
|
@receiver(models.signals.post_save)
#pylint: disable=unused-argument
def execute_after_save(sender, instance, created, *args, **kwargs):
    ''' set the remote_id after save (when the id is available) '''
    # only brand-new rows of models that know how to build a remote id,
    # and only when the remote id hasn't been assigned already
    if created and hasattr(instance, 'get_remote_id') and \
            not instance.remote_id:
        instance.remote_id = instance.get_remote_id()
        instance.save()
|
2020-09-17 20:02:52 +00:00
|
|
|
|
|
|
|
|
2021-01-11 22:05:08 +00:00
|
|
|
def unfurl_related_field(related_field, sort_field=None):
    ''' serialize a reverse lookup (like a public key's owner or a
    Status's attachments) into its activitypub representation '''
    # a related manager/queryset: serialize every member, ordered
    if hasattr(related_field, 'all'):
        ordered = related_field.order_by(sort_field).all()
        return [unfurl_related_field(item) for item in ordered]
    # a single related object that wants to be embedded in full
    if related_field.reverse_unfurl:
        return related_field.field_to_activity()
    # otherwise just reference it by remote id
    return related_field.remote_id
|
|
|
|
|
|
|
|
|
2020-09-17 20:02:52 +00:00
|
|
|
class ActivitypubMixin:
    ''' add this mixin for models that are AP serializable '''
    # overridden by subclasses with an activitypub serializer class; the
    # placeholder lambda keeps attribute access safe on the bare mixin
    activity_serializer = lambda: {}
    # when True, this model is embedded in full by its parent's serializer
    # instead of referenced by remote_id (see unfurl_related_field)
    reverse_unfurl = False

    def __init__(self, *args, **kwargs):
        ''' collect some info on model fields '''
        # bucket the model's serializable fields by how they convert to
        # activitypub json
        self.image_fields = []
        self.many_to_many_fields = []
        self.simple_fields = [] # "simple": anything that isn't an image or m2m
        for field in self._meta.get_fields():
            # only custom fields that know how to serialize themselves count
            if not hasattr(field, 'field_to_activity'):
                continue

            if isinstance(field, ImageField):
                self.image_fields.append(field)
            elif isinstance(field, ManyToManyField):
                self.many_to_many_fields.append(field)
            else:
                self.simple_fields.append(field)

        # every serializable field in one list, consumed by to_activity
        self.activity_fields = self.image_fields + \
                self.many_to_many_fields + self.simple_fields

        # reverse-relation declarations subclasses may provide; default to
        # empty lists when absent so lookups never need hasattr elsewhere
        self.deserialize_reverse_fields = self.deserialize_reverse_fields \
                if hasattr(self, 'deserialize_reverse_fields') else []
        self.serialize_reverse_fields = self.serialize_reverse_fields \
                if hasattr(self, 'serialize_reverse_fields') else []

        super().__init__(*args, **kwargs)

    @classmethod
    def find_existing_by_remote_id(cls, remote_id):
        ''' look up a remote id in the db '''
        return cls.find_existing({'id': remote_id})

    @classmethod
    def find_existing(cls, data):
        ''' compare data to fields that can be used for deduplication.
        This always includes remote_id, but can also be unique identifiers
        like an isbn for an edition '''
        # build one {field_name: value} filter per deduplication field that
        # actually appears in the incoming activitypub data
        filters = []
        for field in cls._meta.get_fields():
            if not hasattr(field, 'deduplication_field') or \
                    not field.deduplication_field:
                continue

            value = data.get(field.get_activitypub_field())
            # falsy values (None, '') can't identify a match
            if not value:
                continue
            filters.append({field.name: value})

        if hasattr(cls, 'origin_id') and 'id' in data:
            # kinda janky, but this handles special case for books
            filters.append({'origin_id': data['id']})

        if not filters:
            # if there are no deduplication fields, it will match the first
            # item no matter what. this shouldn't happen but just in case.
            return None

        objects = cls.objects
        # manager with select_subclasses (django-model-utils style) returns
        # concrete subclass instances rather than the base model
        if hasattr(objects, 'select_subclasses'):
            objects = objects.select_subclasses()

        # an OR operation on all the match fields
        match = objects.filter(
            reduce(
                operator.or_, (Q(**f) for f in filters)
            )
        )
        # there OUGHT to be only one match
        return match.first()

    def to_activity(self):
        ''' convert from a model to an activity '''
        activity = {}
        # each field writes its own key(s) into the activity dict
        for field in self.activity_fields:
            field.set_activity_from_field(activity, self)

        if hasattr(self, 'serialize_reverse_fields'):
            # for example, editions of a work
            for model_field_name, activity_field_name, sort_field in \
                    self.serialize_reverse_fields:
                related_field = getattr(self, model_field_name)
                activity[activity_field_name] = \
                    unfurl_related_field(related_field, sort_field)

        # fall back to the computed remote id when no field supplied one
        if not activity.get('id'):
            activity['id'] = self.get_remote_id()
        return self.activity_serializer(**activity).serialize()

    def to_create_activity(self, user, **kwargs):
        ''' returns the object wrapped in a Create activity '''
        activity_object = self.to_activity(**kwargs)

        # sign the serialized content with the user's private RSA key so
        # receiving servers can verify authorship
        signer = pkcs1_15.new(RSA.import_key(user.key_pair.private_key))
        # NOTE(review): assumes the serialized object carries 'content' and
        # 'published' keys -- true for statuses; confirm for other callers
        content = activity_object['content']
        signed_message = signer.sign(SHA256.new(content.encode('utf8')))
        create_id = self.remote_id + '/activity'

        signature = activitypub.Signature(
            creator='%s#main-key' % user.remote_id,
            created=activity_object['published'],
            signatureValue=b64encode(signed_message).decode('utf8')
        )

        return activitypub.Create(
            id=create_id,
            actor=user.remote_id,
            to=activity_object['to'],
            cc=activity_object['cc'],
            object=activity_object,
            signature=signature,
        ).serialize()

    def to_delete_activity(self, user):
        ''' notice of deletion '''
        return activitypub.Delete(
            id=self.remote_id + '/activity',
            actor=user.remote_id,
            # addressed to the actor's followers plus the public collection
            to=['%s/followers' % user.remote_id],
            cc=['https://www.w3.org/ns/activitystreams#Public'],
            object=self.to_activity(),
        ).serialize()

    def to_update_activity(self, user):
        ''' wrapper for Updates to an activity '''
        # uuid suffix makes each update's activity id unique
        activity_id = '%s#update/%s' % (self.remote_id, uuid4())
        return activitypub.Update(
            id=activity_id,
            actor=user.remote_id,
            to=['https://www.w3.org/ns/activitystreams#Public'],
            object=self.to_activity()
        ).serialize()

    def to_undo_activity(self, user):
        ''' undo an action '''
        return activitypub.Undo(
            id='%s#undo' % self.remote_id,
            actor=user.remote_id,
            object=self.to_activity()
        ).serialize()
|
2020-09-17 20:02:52 +00:00
|
|
|
|
|
|
|
|
|
|
|
class OrderedCollectionPageMixin(ActivitypubMixin):
    ''' pagination helpers, so models can serialize as ordered collections
    without having to override ActivitypubMixin's to_activity (ie, outbox) '''

    @property
    def collection_remote_id(self):
        ''' override when the collection lives at a special url, ie outbox '''
        return self.remote_id

    def to_ordered_collection(self, queryset, \
            remote_id=None, page=False, **kwargs):
        ''' serialize a queryset as an activitypub OrderedCollection '''
        # an unordered queryset would paginate nondeterministically
        if not queryset.ordered:
            raise RuntimeError('queryset must be ordered')

        collection_id = remote_id or self.remote_id

        # the caller asked for one page rather than the collection wrapper
        if page:
            return to_ordered_collection_page(
                queryset, collection_id, **kwargs)

        display_name = getattr(self, 'name', None)
        owner = self.user.remote_id if hasattr(self, 'user') else ''

        pages = Paginator(queryset, PAGE_LENGTH)
        return activitypub.OrderedCollection(
            id=collection_id,
            totalItems=pages.count,
            name=display_name,
            owner=owner,
            first='%s?page=1' % collection_id,
            last='%s?page=%d' % (collection_id, pages.num_pages)
        ).serialize()
|
|
|
|
|
|
|
|
|
2020-12-30 21:14:16 +00:00
|
|
|
# pylint: disable=unused-argument
def to_ordered_collection_page(
        queryset, remote_id, id_only=False, page=1, **kwargs):
    ''' serialize one page of a queryset as an OrderedCollectionPage '''
    current = Paginator(queryset, PAGE_LENGTH).page(page)

    # items are either bare remote ids or fully serialized activities
    if id_only:
        items = [obj.remote_id for obj in current.object_list]
    else:
        items = [obj.to_activity() for obj in current.object_list]

    next_page = prev_page = None
    if current.has_next():
        next_page = '%s?page=%d' % (remote_id, current.next_page_number())
    if current.has_previous():
        prev_page = '%s?page=%d' % \
            (remote_id, current.previous_page_number())

    return activitypub.OrderedCollectionPage(
        id='%s?page=%s' % (remote_id, page),
        partOf=remote_id,
        orderedItems=items,
        next=next_page,
        prev=prev_page
    ).serialize()
|
|
|
|
|
|
|
|
|
2020-09-17 20:02:52 +00:00
|
|
|
class OrderedCollectionMixin(OrderedCollectionPageMixin):
    ''' make an activitypub model serializable as an ordered collection '''
    activity_serializer = activitypub.OrderedCollection

    @property
    def collection_queryset(self):
        ''' subclasses declare which model queryset the collection holds '''
        raise NotImplementedError('Model must define collection_queryset')

    def to_activity(self, **kwargs):
        ''' serialize the configured queryset as an ordered collection '''
        return self.to_ordered_collection(self.collection_queryset, **kwargs)
|