from time import sleep
from typing import Any

from django.conf import settings
from loguru import logger

from catalog.common import *
from journal.models import (
    Comment,
    Note,
    Piece,
    PieceInteraction,
    Rating,
    Review,
    ShelfMember,
)
from journal.models.index import JournalIndex
from users.middlewares import activate_language_for_user
from users.models.apidentity import APIdentity

from .models import Identity, Post, TimelineEvent
from .utils import Takahe

# ActivityPub object "type" values accepted as catalog item links.
# Episode types are listed here but currently rejected in
# _get_or_create_item (see its TODO about parent-item matching).
_supported_ap_catalog_item_types = [
    "Edition",
    "Movie",
    "TVShow",
    "TVSeason",
    "TVEpisode",
    "PodcastEpisode",
    "Album",
    "Game",
    "Podcast",
    "Performance",
    "PerformanceProduction",
]
|
|
|
|
|
|
|
|
# Maps ActivityPub "relatedWith" object types to the journal model class
# whose update_by_ap_object() will ingest them.
_supported_ap_journal_types = {
    "Status": ShelfMember,
    "Rating": Rating,
    "Comment": Comment,
    "Review": Review,
    "Note": Note,
}
|
|
|
|
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
def _parse_items(objects) -> list[dict[str, Any]]:
    """Filter ActivityPub tag objects down to supported catalog item links.

    `objects` may be a single dict, a list of dicts, or falsy; returns the
    dicts whose "type" is one of the supported catalog item types.
    """
    logger.debug(f"Parsing item links from {objects}")
    if not objects:
        return []
    objs = objects if isinstance(objects, list) else [objects]
    # use .get(): remote AP payloads are untrusted and may lack a "type" key,
    # which would otherwise raise KeyError and abort processing the post
    return [obj for obj in objs if obj.get("type") in _supported_ap_catalog_item_types]
|
|
|
|
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
def _parse_piece_objects(objects) -> list[dict[str, Any]]:
    """Filter ActivityPub "relatedWith" objects to supported journal piece types.

    `objects` may be a single dict, a list of dicts, or falsy; returns the
    dicts whose "type" maps to a known journal model, logging a warning for
    each unrecognized type.
    """
    logger.debug(f"Parsing pieces from {objects}")
    if not objects:
        return []
    objs = objects if isinstance(objects, list) else [objects]
    pieces = []
    for obj in objs:
        # use .get(): remote AP payloads are untrusted and may omit "type"
        typ = obj.get("type")
        if typ in _supported_ap_journal_types:
            pieces.append(obj)
        else:
            logger.warning(f"Unknown link type {typ}")
    return pieces
|
2023-07-20 21:59:49 -04:00
|
|
|
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
def _get_or_create_item(item_obj) -> Item | None:
    """Resolve an ActivityPub item link into a local catalog Item.

    URLs on this instance are looked up directly; remote URLs are resolved
    through SiteManager and fetched. Returns None when no item can be
    matched or fetched.
    """
    logger.debug(f"Fetching item by ap from {item_obj}")
    kind, link = item_obj["type"], item_obj["href"]
    if link.startswith(settings.SITE_INFO["site_url"]):
        # link points at this instance: no remote fetch needed
        found = Item.get_by_url(link, True)
        if not found:
            logger.warning(f"Item not found for {link}")
        return found
    if kind in ("TVEpisode", "PodcastEpisode"):
        # TODO support episode item
        # match and fetch parent item first
        logger.debug(f"{kind}:{link} not supported yet")
        return None
    site = SiteManager.get_site_by_url(link)
    if not site:
        logger.warning(f"Site not found for {link}")
        return None
    site.get_resource_ready()
    fetched = site.get_item()
    if not fetched:
        logger.warning(f"Item not fetched for {link}")
    return fetched
|
|
|
|
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
def post_created(pk, post_data):
    """Takahe hook: a post was just created on this instance."""
    return _post_fetched(pk, local=True, post_data=post_data)
|
2023-07-20 21:59:49 -04:00
|
|
|
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
def post_edited(pk, post_data):
    """Takahe hook: a local post was edited."""
    return _post_fetched(pk, local=True, post_data=post_data, create=False)
|
2024-06-13 20:44:15 -04:00
|
|
|
|
|
|
|
|
|
|
|
def post_fetched(pk, post_data):
    """Takahe hook: a remote post was fetched for the first time."""
    return _post_fetched(pk, local=False, post_data=post_data, create=True)
|
|
|
|
|
|
|
|
|
|
|
|
def _post_fetched(pk, local, post_data, create: bool | None = None):
    """Sync a Takahe Post (by pk) into journal item links and pieces.

    local: True when the post originates on this instance.
    create: True for a freshly fetched post, False for an edit; post_created
    leaves it as None.
    """
    post: Post = Post.objects.get(pk=pk)
    owner = Takahe.get_or_create_remote_apidentity(post.author)
    if local:
        activate_language_for_user(owner.user)
        reply_to = post.in_reply_to_post()
        items = []
        pieces = []
        if post_data and "raw_content" in post_data:
            # Local post, extract info for Note if possible
            if (
                # replying to one's own post that already carries journal
                # data ("relatedWith"): take item links from the parent's tags
                reply_to
                and reply_to.author_id == post.author_id
                and reply_to.type_data
                and "object" in reply_to.type_data
                and "relatedWith" in reply_to.type_data["object"]
            ):
                items = _parse_items(reply_to.type_data["object"].get("tag", []))
            elif (
                # not a fresh creation and the post itself carries journal
                # data: reuse its own tags
                not create
                and post.type_data
                and "object" in post.type_data
                and "relatedWith" in post.type_data["object"]
            ):
                items = _parse_items(post.type_data["object"].get("tag", []))
            pieces = [{"type": "Note", "content": post_data["raw_content"]}]
        if not items or not pieces:
            # Local post has no related items or usable pieces, update index and move on
            JournalIndex.instance().replace_posts([post])
            return
    else:
        if not post.type_data and not post_data:
            logger.warning(f"Remote post {post} has no type_data")
            return
        # prefer the freshly delivered payload over stored type_data
        ap_objects = post_data or post.type_data.get("object", {})
        items = _parse_items(ap_objects.get("tag"))
        pieces = _parse_piece_objects(ap_objects.get("relatedWith"))
    if len(items) == 0:
        logger.warning(f"Post {post} has no items")
        return
    elif len(items) > 1:
        # exactly one catalog item per post is expected; ambiguous posts are skipped
        logger.warning(f"Post {post} has more than one item")
        return
    logger.info(f"Post {post} has items {items} and pieces {pieces}")
    item = _get_or_create_item(items[0])
    if not item:
        logger.warning(f"Post {post} has no local item matched or created")
        return
    for p in pieces:
        cls = _supported_ap_journal_types.get(p["type"])
        if not cls:
            logger.warning(f"Unknown link type {p['type']}")
            continue
        # create or update the journal piece of this type from the AP object
        cls.update_by_ap_object(owner, item, p, post)
|
2023-07-20 21:59:49 -04:00
|
|
|
|
|
|
|
|
2024-12-30 15:34:18 -05:00
|
|
|
def post_deleted(pk, local, post_data):
    """Takahe hook: a post was deleted; remove journal pieces it backed."""
    for piece in Piece.objects.filter(posts__id=pk):
        if piece.local and type(piece) is not Note:
            # no delete other than Note, for backward compatibility, should reconsider later
            return
        # delete piece if the deleted post is the most recent one for the piece
        if piece.latest_post_id != pk:
            logger.debug(f"Matched piece {piece} has newer posts, not deleting")
        else:
            logger.debug(f"Deleting piece {piece}")
            piece.delete()
|
2023-07-20 21:59:49 -04:00
|
|
|
|
|
|
|
|
2023-12-27 09:23:41 -05:00
|
|
|
def post_interacted(interaction_pk, interaction, post_pk, identity_pk):
    """Takahe hook: record a like/boost/pin on a post against its journal piece."""
    if interaction not in ("like", "boost", "pin"):
        return
    piece = Piece.objects.filter(posts__id=post_pk).first()
    if not piece:
        return
    identity = APIdentity.objects.filter(pk=identity_pk).first()
    if not identity:
        logger.warning(f"Identity {identity_pk} not found for interaction")
        return
    if interaction == "boost" and piece.local:
        mastodon = piece.owner.user.mastodon
        if mastodon and mastodon.handle == identity.full_handle:
            # ignore boost by oneself
            TimelineEvent.objects.filter(
                identity_id=piece.owner_id,
                type="boosted",
                subject_post_id=post_pk,
                subject_identity_id=identity_pk,
            ).delete()
            return
    PieceInteraction.objects.get_or_create(
        target=piece,
        identity_id=identity_pk,
        interaction_type=interaction,
        defaults={"target_type": piece.__class__.__name__},
    )
|
2023-12-27 09:23:41 -05:00
|
|
|
|
|
|
|
|
|
|
|
def post_uninteracted(interaction_pk, interaction, post_pk, identity_pk):
    """Takahe hook: a like/boost/pin was undone; drop the recorded interaction."""
    if interaction not in ("like", "boost", "pin"):
        return
    piece = Piece.objects.filter(posts__id=post_pk).first()
    if not piece:
        return
    if not APIdentity.objects.filter(pk=identity_pk).exists():
        logger.warning(f"Identity {identity_pk} not found for interaction")
        return
    PieceInteraction.objects.filter(
        target=piece,
        identity_id=identity_pk,
        interaction_type=interaction,
    ).delete()
|
2023-12-27 09:23:41 -05:00
|
|
|
|
|
|
|
|
2023-11-18 11:13:26 -05:00
|
|
|
def identity_fetched(pk):
    """Takahe hook: a remote identity was fetched; mirror it as an APIdentity."""
    identity = None
    for attempt in range(2):
        try:
            identity = Identity.objects.get(pk=pk)
            break
        except Identity.DoesNotExist:
            # row may not be visible yet; wait once and retry before giving up
            if attempt == 0:
                sleep(2)
    if identity is None:
        logger.warning(f"Fetched identity {pk} not found")
        return
    if identity.username and identity.domain:
        apid = Takahe.get_or_create_remote_apidentity(identity)
        if apid:
            logger.debug(f"Identity {identity} synced")
        else:
            logger.warning(f"Identity {identity} not synced")
    else:
        logger.warning(f"Identity {identity} has no username or domain")
|