lib.itmens/journal/models/common.py


import re
import uuid
from abc import abstractmethod
from datetime import datetime
from functools import cached_property
from typing import TYPE_CHECKING, Any, Self

import django_rq

# from deepmerge import always_merger
from django.conf import settings
from django.core.exceptions import PermissionDenied, RequestAborted
from django.core.signing import b62_decode, b62_encode
from django.db import models
from django.db.models import CharField, Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from loguru import logger
from polymorphic.models import PolymorphicModel
from user_messages import api as messages

from catalog.common.models import Item, ItemCategory
from catalog.models import item_categories, item_content_types
from takahe.utils import Takahe
from users.middlewares import activate_language_for_user
from users.models import APIdentity, User

from .index import JournalIndex
from .mixins import UserOwnedObjectMixin

if TYPE_CHECKING:
    from takahe.models import Post

    from .like import Like


class VisibilityType(models.IntegerChoices):
Public = 0, _("Public") # type:ignore[reportCallIssue]
Follower_Only = 1, _("Followers Only") # type:ignore[reportCallIssue]
Private = 2, _("Mentioned Only") # type:ignore[reportCallIssue]
def q_owned_piece_visible_to_user(viewing_user: User, owner: APIdentity):
if not viewing_user or not viewing_user.is_authenticated:
if owner.anonymous_viewable:
return Q(owner=owner, visibility=0)
else:
return Q(pk__in=[])
viewer = viewing_user.identity
if viewer == owner:
return Q(owner=owner)
# elif viewer.is_blocked_by(owner):
# return Q(pk__in=[])
elif viewer.is_following(owner):
return Q(owner=owner, visibility__in=[0, 1])
else:
return Q(owner=owner, visibility=0)
def max_visiblity_to_user(viewing_user: User, owner: APIdentity):
if not viewing_user or not viewing_user.is_authenticated:
return 0
viewer = viewing_user.identity
if viewer == owner:
return 2
elif viewer.is_following(owner):
return 1
else:
return 0
def q_piece_visible_to_user(viewing_user: User):
if not viewing_user or not viewing_user.is_authenticated:
return Q(visibility=0, owner__anonymous_viewable=True)
viewer = viewing_user.identity
return (
Q(visibility=0)
| Q(owner_id__in=viewer.following, visibility=1)
| Q(owner_id=viewer.pk)
) & ~Q(owner_id__in=viewer.ignoring)
def q_piece_in_home_feed_of_user(viewing_user: User):
viewer = viewing_user.identity
return Q(owner_id__in=viewer.following, visibility__lt=2) | Q(owner_id=viewer.pk)
def q_item_in_category(item_category: ItemCategory):
classes = item_categories()[item_category]
# q = Q(item__instance_of=classes[0])
# for cls in classes[1:]:
# q = q | Q(instance_of=cls)
# return q
ct = item_content_types()
contenttype_ids = [ct[cls] for cls in classes]
return Q(item__polymorphic_ctype__in=contenttype_ids)
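# Illustrative usage of the visibility/category helpers above (not part of this
# module's API; the queryset and category names below are hypothetical):
#     visible = SomePieceSubclass.objects.filter(
#         q_piece_visible_to_user(request.user) & q_item_in_category(ItemCategory.Book)
#     )
# The helpers only build Q expressions; callers apply them to a Piece subclass
# queryset that has `owner`, `visibility` and (for the category filter) an `item` field.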
class Piece(PolymorphicModel, UserOwnedObjectMixin):
if TYPE_CHECKING:
likes: models.QuerySet["Like"]
metadata: models.JSONField[Any, Any]
url_path = "p" # subclass must specify this
uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
local = models.BooleanField(default=True)
posts = models.ManyToManyField(
"takahe.Post", related_name="pieces", through="PiecePost"
)
previous_visibility: int | None = None
post_when_save: bool = False
crosspost_when_save: bool = False
index_when_save: bool = False
@property
def classname(self) -> str:
return self.__class__.__name__.lower()
@classmethod
def from_db(cls, db, field_names, values):
instance = super().from_db(db, field_names, values)
instance.previous_visibility = instance.visibility
return instance
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
if self.local and self.post_when_save:
visibility_changed = self.previous_visibility != self.visibility
self.previous_visibility = self.visibility
self.sync_to_timeline(1 if visibility_changed else 0)
if self.crosspost_when_save:
self.sync_to_social_accounts(0)
if self.index_when_save:
self.update_index()
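    # Illustrative (hypothetical) caller pattern for the flags consumed by save():
    #     piece.post_when_save = True       # create/update the fediverse post (sync_to_timeline)
    #     piece.crosspost_when_save = True  # queue crossposts to linked social accounts
    #     piece.index_when_save = True      # refresh the journal search index
    #     piece.save()
    # Each flag defaults to False, so a plain save() has no timeline/crosspost/index side effects.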
def delete(self, *args, **kwargs):
if self.local:
self.delete_from_timeline()
self.delete_crossposts()
self.delete_index()
return super().delete(*args, **kwargs)
@property
def uuid(self):
return b62_encode(self.uid.int)
@property
def url(self):
return f"/{self.url_path}/{self.uuid}"
@property
def absolute_url(self):
return settings.SITE_INFO["site_url"] + self.url
@property
def api_url(self):
return f"/api/{self.url}" if self.url_path else None
@property
def like_count(self):
return (
Takahe.get_post_stats(self.latest_post.pk).get("likes", 0)
if self.latest_post
else 0
)
def is_liked_by(self, identity):
return self.latest_post and Takahe.post_liked_by(
self.latest_post.pk, identity.pk
)
@property
def reply_count(self):
return (
Takahe.get_post_stats(self.latest_post.pk).get("replies", 0)
if self.latest_post
else 0
)
def get_replies(self, viewing_identity):
return Takahe.get_replies_for_posts(
self.all_post_ids, viewing_identity.pk if viewing_identity else None
)
@classmethod
def get_by_url(cls, url_or_b62):
b62 = url_or_b62.strip().split("/")[-1]
if len(b62) not in [21, 22]:
r = re.search(r"[A-Za-z0-9]{21,22}", url_or_b62)
if r:
b62 = r[0]
try:
obj = cls.objects.get(uid=uuid.UUID(int=b62_decode(b62)))
except Exception:
obj = None
return obj
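    # Example with a made-up token: a full URL and a bare base62 id resolve to the
    # same piece, since `uid` is a UUID base62-encoded into a 21-22 character string:
    #     Piece.get_by_url("https://example.org/p/7Nd5bqxLk3yVZvCJQmXhPt")
    #     Piece.get_by_url("7Nd5bqxLk3yVZvCJQmXhPt")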
@classmethod
def get_by_url_and_owner(cls, url_or_b62, owner_id):
b62 = url_or_b62.strip().split("/")[-1]
if len(b62) not in [21, 22]:
r = re.search(r"[A-Za-z0-9]{21,22}", url_or_b62)
if r:
b62 = r[0]
try:
obj = cls.objects.get(uid=uuid.UUID(int=b62_decode(b62)), owner_id=owner_id)
except Exception:
obj = None
return obj
@classmethod
def get_by_post_id(cls, post_id: int):
pp = PiecePost.objects.filter(post_id=post_id).first()
return pp.piece if pp else None
def link_post_id(self, post_id: int):
PiecePost.objects.get_or_create(piece=self, post_id=post_id)
try:
del self.latest_post_id
del self.latest_post
except AttributeError:
pass
def clear_post_ids(self):
PiecePost.objects.filter(piece=self).delete()
@cached_property
def latest_post_id(self):
        # post ids are ordered by their creation time
pp = PiecePost.objects.filter(piece=self).order_by("-post_id").first()
return pp.post_id if pp else None
@cached_property
def latest_post(self) -> "Post | None":
pk = self.latest_post_id
return Takahe.get_post(pk) if pk else None
@cached_property
def all_post_ids(self):
post_ids = list(
PiecePost.objects.filter(piece=self).values_list("post_id", flat=True)
)
return post_ids
@property
def ap_object(self):
raise NotImplementedError("subclass must implement this")
@classmethod
@abstractmethod
def params_from_ap_object(
cls, post: "Post", obj: dict[str, Any], piece: Self | None
) -> dict[str, Any]:
return {}
@abstractmethod
def to_post_params(self) -> dict[str, Any]:
        """
        Returns a dict of parameters used to create a post.
        """
return {}
@abstractmethod
def to_crosspost_params(self) -> dict[str, Any]:
        """
        Returns a dict of parameters used to create a post on each platform.

        "content" - required; may contain the placeholders ##obj## / ##obj_link_if_plain## / ##rating##
        ...
        """
return {}
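    # A minimal sketch of what a subclass might return from to_crosspost_params()
    # (the content string is hypothetical; keys other than "content" vary by subclass):
    #     {"content": "reviewed ##obj##: highly recommended ##rating##"}
    # The ##obj## / ##obj_link_if_plain## / ##rating## placeholders are presumably
    # substituted by each platform's posting helper before the crosspost is sent.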
@classmethod
def update_by_ap_object(
cls,
owner: APIdentity,
item: Item,
obj,
post: "Post",
crosspost: bool | None = False,
) -> Self | None:
"""
Create or update a content piece with related AP message
"""
p = cls.get_by_post_id(post.id)
if p and p.owner.pk != post.author_id:
logger.warning(f"Owner mismatch: {p.owner.pk} != {post.author_id}")
return
local = post.local
visibility = Takahe.visibility_t2n(post.visibility)
d = cls.params_from_ap_object(post, obj, p)
if p:
# update existing piece
edited = post.edited if local else datetime.fromisoformat(obj["updated"])
if p.edited_time >= edited:
# incoming ap object is older than what we have, no update needed
return p
d["edited_time"] = edited
for k, v in d.items():
setattr(p, k, v)
if crosspost is not None:
p.crosspost_when_save = crosspost
p.save(update_fields=d.keys())
else:
# no previously linked piece, create a new one and link to post
d.update(
{
"item": item,
"owner": owner,
"local": post.local,
"visibility": visibility,
"remote_id": None if local else obj["id"],
}
)
if local:
d["created_time"] = post.published
d["edited_time"] = post.edited or post.published
else:
d["created_time"] = datetime.fromisoformat(obj["published"])
d["edited_time"] = datetime.fromisoformat(obj["updated"])
p = cls(**d)
if crosspost is not None:
p.crosspost_when_save = crosspost
p.save()
p.link_post_id(post.id)
        # subclasses may need additional code to update type_data in the local post
return p
@classmethod
def _delete_crossposts(cls, user_pk, metadata: dict):
user = User.objects.get(pk=user_pk)
toot_id = metadata.get("mastodon_id")
if toot_id and user.mastodon:
user.mastodon.delete_post(toot_id)
post_id = metadata.get("bluesky_id")
        if post_id and user.bluesky:
user.bluesky.delete_post(post_id)
def delete_crossposts(self):
if hasattr(self, "metadata") and self.metadata:
django_rq.get_queue("mastodon").enqueue(
self._delete_crossposts, self.owner.user_id, self.metadata
)
def get_crosspost_params(self):
d = {
"visibility": self.visibility,
"update_ids": self.metadata.copy() if hasattr(self, "metadata") else {},
}
d.update(self.to_crosspost_params())
return d
def sync_to_social_accounts(self, update_mode: int = 0):
"""update_mode: 0 update if exists otherwise create; 1: delete if exists and create; 2: only create"""
django_rq.get_queue("mastodon").enqueue(
self._sync_to_social_accounts, update_mode
)
def _sync_to_social_accounts(self, update_mode: int):
def params_for_platform(params, platform):
p = params.copy()
for k in ["update_id", "reply_to_id"]:
ks = k + "s"
if ks in p:
d = p.pop(ks)
v = d.get(platform + "_id")
if v:
p[k] = v
return p
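        # Illustrative mapping done by params_for_platform (hypothetical values):
        #     params_for_platform(
        #         {"content": "...", "update_ids": {"mastodon_id": "1", "bluesky_id": "2"}},
        #         "mastodon",
        #     )
        # returns {"content": "...", "update_id": "1"}; ids for other platforms are
        # dropped along with the popped "update_ids"/"reply_to_ids" dicts.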
activate_language_for_user(self.owner.user)
metadata = self.metadata.copy()
        # backward compatibility with the previous way of storing the mastodon id
legacy_mastodon_url = self.metadata.pop("shared_link", None)
if legacy_mastodon_url and not self.metadata.get("mastodon_id"):
self.metadata["mastodon_id"] = legacy_mastodon_url.split("/")[-1]
self.metadata["mastodon_url"] = legacy_mastodon_url
params = self.get_crosspost_params()
self.sync_to_mastodon(params_for_platform(params, "mastodon"), update_mode)
self.sync_to_threads(params_for_platform(params, "threads"), update_mode)
self.sync_to_bluesky(params_for_platform(params, "bluesky"), update_mode)
if self.metadata != metadata:
self.save(update_fields=["metadata"])
def sync_to_bluesky(self, params, update_mode):
# skip non-public post as Bluesky does not support it
# update_mode 0 will act like 1 as bsky.app does not support edit
bluesky = self.owner.user.bluesky
if params["visibility"] != 0 or not bluesky:
return False
if update_mode in [0, 1]:
post_id = self.metadata.get("bluesky_id")
if post_id:
try:
bluesky.delete_post(post_id)
except Exception as e:
logger.warning(f"Delete {bluesky} post {post_id} error {e}")
r = bluesky.post(**params)
if r:
self.metadata.update({"bluesky_" + k: v for k, v in r.items()})
return True
def sync_to_threads(self, params, update_mode):
# skip non-public post as Threads does not support it
# update_mode will be ignored as update/delete are not supported either
threads = self.owner.user.threads
if params["visibility"] != 0 or not threads:
return False
try:
r = threads.post(**params)
except RequestAborted:
logger.warning(f"{self} post to {threads} failed")
messages.error(threads.user, _("A recent post was not posted to Threads."))
return False
if r:
self.metadata.update({"threads_" + k: v for k, v in r.items()})
return True
def sync_to_mastodon(self, params, update_mode):
mastodon = self.owner.user.mastodon
if not mastodon:
return False
if self.owner.user.preference.mastodon_repost_mode == 1:
if update_mode == 1:
toot_id = self.metadata.pop("mastodon_id", None)
if toot_id:
self.metadata.pop("mastodon_url", None)
mastodon.delete_post(toot_id)
elif update_mode == 2:
params.pop("update_id", None)
return self.crosspost_to_mastodon(params)
elif self.latest_post:
mastodon.boost(self.latest_post.url)
else:
logger.warning("No post found for piece")
return True
def crosspost_to_mastodon(self, params):
mastodon = self.owner.user.mastodon
if not mastodon:
return False
try:
r = mastodon.post(**params)
except PermissionDenied:
messages.error(
mastodon.user,
_("A recent post was not posted to Mastodon, please re-authorize."),
meta={"url": mastodon.get_reauthorize_url()},
)
return False
except RequestAborted:
logger.warning(f"{self} post to {mastodon} failed")
messages.error(
mastodon.user, _("A recent post was not posted to Mastodon.")
)
return False
self.metadata.update({"mastodon_" + k: v for k, v in r.items()})
return True
def get_ap_data(self):
return {
"object": {
"tag": (
[self.item.ap_object_ref] # type:ignore
if hasattr(self, "item")
else []
),
"relatedWith": [self.ap_object],
}
}
def delete_from_timeline(self):
Takahe.delete_posts(self.all_post_ids)
def sync_to_timeline(self, update_mode: int = 0):
"""update_mode: 0 update if exists otherwise create; 1: delete if exists and create; 2: only create"""
user = self.owner.user
v = Takahe.visibility_n2t(self.visibility, user.preference.post_public_mode)
existing_post = self.latest_post
if existing_post:
if (
existing_post.state in ["deleted", "deleted_fanned_out"]
or update_mode == 2
):
existing_post = None
elif update_mode == 1:
Takahe.delete_posts([existing_post.pk])
existing_post = None
params = {
"author_pk": self.owner.pk,
"visibility": v,
"post_pk": existing_post.pk if existing_post else None,
"post_time": self.created_time, # type:ignore subclass must have this
"edit_time": self.edited_time, # type:ignore subclass must have this
"data": self.get_ap_data(),
}
params.update(self.to_post_params())
post = Takahe.post(**params)
if post and post != existing_post:
self.link_post_id(post.pk)
return post
def update_index(self):
index = JournalIndex.instance()
doc = index.piece_to_doc(self)
if doc:
try:
index.delete_by_piece([self.pk])
index.replace_docs([doc])
except Exception as e:
logger.error(f"Indexing {self} error {e}")
def delete_index(self):
index = JournalIndex.instance()
index.delete_by_piece([self.pk])
def to_indexable_doc(self) -> dict[str, Any]:
raise NotImplementedError(
f"{self.__class__} should override this to make itself searchable"
)
class PiecePost(models.Model):
post_id: int
piece = models.ForeignKey(Piece, on_delete=models.CASCADE)
post = models.ForeignKey(
"takahe.Post", db_constraint=False, db_index=True, on_delete=models.CASCADE
)
class Meta:
constraints = [
models.UniqueConstraint(fields=["piece", "post"], name="unique_piece_post"),
]
class PieceInteraction(models.Model):
target = models.ForeignKey(
Piece, on_delete=models.CASCADE, related_name="interactions"
)
target_type = models.CharField(max_length=50)
interaction_type = models.CharField(max_length=50)
identity = models.ForeignKey(
APIdentity, on_delete=models.CASCADE, related_name="interactions"
)
created_time = models.DateTimeField(default=timezone.now)
class Meta:
constraints = [
models.UniqueConstraint(
fields=["identity", "interaction_type", "target"],
name="unique_interaction",
),
]
indexes = [
models.Index(fields=["identity", "interaction_type", "created_time"]),
models.Index(fields=["target", "interaction_type", "created_time"]),
]
class Content(Piece):
owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT)
visibility = models.PositiveSmallIntegerField(
choices=VisibilityType.choices, default=0, null=False
) # type:ignore
created_time = models.DateTimeField(default=timezone.now)
edited_time = models.DateTimeField(auto_now=True)
metadata = models.JSONField(default=dict)
item = models.ForeignKey(Item, on_delete=models.PROTECT)
remote_id = models.CharField(max_length=200, null=True, default=None)
def __str__(self):
return f"{self.__class__.__name__}:{self.uuid}@{self.item}"
@property
def display_title(self) -> str:
raise NotImplementedError("subclass should override this")
@property
def brief_description(self) -> str:
raise NotImplementedError("subclass should override this")
class Meta:
abstract = True
class Debris(Content):
class_name = CharField(max_length=50)
@classmethod
def create_from_piece(cls, c: Content):
return cls.objects.create(
class_name=c.__class__.__name__,
owner=c.owner,
visibility=c.visibility,
created_time=c.created_time,
metadata=c.ap_object,
item=c.item,
remote_id=c.remote_id if hasattr(c, "remote_id") else None,
)
def to_indexable_doc(self) -> dict[str, Any]:
return {}