from datetime import datetime
from time import sleep

from loguru import logger

from catalog.common import *
from journal.models import Comment, Piece, Rating, Review, ShelfMember
from users.models import User as NeoUser

from .models import Follow, Identity, Post
from .utils import Takahe


_supported_ap_catalog_item_types = [
    "Edition",
    "Movie",
    "TVShow",
    "TVSeason",
    "TVEpisode",
    "Album",
    "Game",
    "Podcast",
    "PodcastEpisode",
    "Performance",
    "PerformanceProduction",
]


_supported_ap_journal_types = {
    "Status": ShelfMember,
    "Rating": Rating,
    "Comment": Comment,
    "Review": Review,
}


def _parse_items(objects):
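    """Filter an AP object's link list down to supported catalog item types."""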
    logger.debug(f"Parsing item links from {objects}")
    if not objects:
        return []
    # the field may hold a single object or a list of objects
    objs = objects if isinstance(objects, list) else [objects]
    items = [obj for obj in objs if obj["type"] in _supported_ap_catalog_item_types]
    return items


def _parse_piece_objects(objects):
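    """Filter an AP object's related objects down to supported journal piece types."""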
    logger.debug(f"Parsing pieces from {objects}")
    if not objects:
        return []
    objs = objects if isinstance(objects, list) else [objects]
    pieces = []
    for obj in objs:
        if obj["type"] in _supported_ap_journal_types:
            pieces.append(obj)
        else:
            logger.warning(f'Unknown link type {obj["type"]}')
    return pieces


def _get_or_create_item(item_obj):
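    """Resolve an AP item link to a local catalog item, fetching the remote
    resource through its matching site when needed; return None on failure."""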
    logger.debug(f"Fetching item by ap from {item_obj}")
    typ = item_obj["type"]
    url = item_obj["href"]
    if typ in ["TVEpisode", "PodcastEpisode"]:
        # TODO support episode item
        # match and fetch parent item first
        logger.debug(f"{typ}:{url} not supported yet")
        return None
    site = SiteManager.get_site_by_url(url)
    if not site:
        logger.warning(f"Site not found for {url}")
        return None
    site.get_resource_ready()
    item = site.get_item()
    if not item:
        logger.warning(f"Item not fetched for {url}")
    return item


def _get_visibility(post_visibility):
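    """Map a Takahe post visibility value to a journal piece visibility.

    Assumes Takahe's scheme where 2 is followers-only and 3 is
    mentioned-only; these map to the journal's followers-only (1) and
    self-only (2), and everything else maps to public (0).
    """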
    match post_visibility:
        case 2:
            return 1
        case 3:
            return 2
        case _:
            return 0


def post_fetched(pk, obj):
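    """Handle a post fetched from a remote server.

    Resolves the single catalog item the post links to, then updates each
    supported journal piece attached to the post.
    """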
    post = Post.objects.get(pk=pk)
    owner = Takahe.get_or_create_remote_apidentity(post.author)
    if not post.type_data:
        logger.warning(f"Post {post} has no type_data")
        return
    ap_object = post.type_data.get("object", {})
    items = _parse_items(ap_object.get("tag"))
    pieces = _parse_piece_objects(ap_object.get("relatedWith"))
    logger.info(f"Post {post} has items {items} and pieces {pieces}")
    if len(items) == 0:
        logger.warning(f"Post {post} has no remote items")
        return
    elif len(items) > 1:
        logger.warning(f"Post {post} has more than one remote item")
        return
    item = _get_or_create_item(items[0])
    if not item:
        logger.warning(f"Post {post} has no local item matched or created")
        return
    for p in pieces:
        cls = _supported_ap_journal_types.get(p["type"])
        if not cls:
            logger.warning(f'Unknown link type {p["type"]}')
            continue
        cls.update_by_ap_object(owner, item, p, pk, _get_visibility(post.visibility))


def post_deleted(pk, obj):
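    """Prune remote journal pieces backed by the deleted post."""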
    for piece in Piece.objects.filter(posts__id=pk, local=False):
        # delete piece if the deleted post is the most recent one for the piece
        if piece.latest_post_id == pk:
            logger.debug(f"Deleting remote piece {piece}")
            piece.delete()
        else:
            logger.debug(f"Matched remote piece {piece} has newer posts, not deleting")


def identity_fetched(pk):
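    """Sync an identity fetched from a remote server to its local APIdentity."""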
    try:
        identity = Identity.objects.get(pk=pk)
    except Identity.DoesNotExist:
        # the identity row may not be visible yet; wait briefly and retry once
        sleep(2)
        try:
            identity = Identity.objects.get(pk=pk)
        except Identity.DoesNotExist:
            logger.warning(f"Fetched identity {pk} not found")
            return
    if identity.username and identity.domain:
        apid = Takahe.get_or_create_remote_apidentity(identity)
        if apid:
            logger.debug(f"Identity {identity} synced")
        else:
            logger.warning(f"Identity {identity} not synced")
    else:
        logger.warning(f"Identity {identity} has no username or domain")