2023-01-10 11:22:42 -05:00
|
|
|
import re
|
2022-12-07 19:09:05 -05:00
|
|
|
import uuid
|
2023-08-10 11:27:31 -04:00
|
|
|
from functools import cached_property
|
2024-05-27 15:44:12 -04:00
|
|
|
from typing import TYPE_CHECKING, Any, Iterable, Self, Type, cast
|
2023-08-10 11:27:31 -04:00
|
|
|
|
|
|
|
from auditlog.context import disable_auditlog
|
|
|
|
from auditlog.models import AuditlogHistoryField, LogEntry
|
2022-12-23 00:08:42 -05:00
|
|
|
from django.conf import settings
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.contrib.contenttypes.models import ContentType
|
|
|
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
2024-05-29 10:48:45 -04:00
|
|
|
from django.core.signing import b62_decode, b62_encode
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.db import connection, models
|
2024-05-27 15:44:12 -04:00
|
|
|
from django.db.models import QuerySet, Value
|
|
|
|
from django.template.defaultfilters import default
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.utils import timezone
|
2024-07-13 00:16:47 -04:00
|
|
|
from django.utils.translation import get_language
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.utils.translation import gettext_lazy as _
|
2024-05-25 23:38:11 -04:00
|
|
|
from loguru import logger
|
2023-07-20 21:59:49 -04:00
|
|
|
from ninja import Field, Schema
|
2023-08-10 11:27:31 -04:00
|
|
|
from polymorphic.models import PolymorphicModel
|
|
|
|
|
|
|
|
from catalog.common import jsondata
|
2024-07-14 10:24:26 -04:00
|
|
|
from common.models import (
|
|
|
|
LANGUAGE_CHOICES,
|
|
|
|
LOCALE_CHOICES,
|
|
|
|
PREFERRED_LOCALES,
|
|
|
|
SCRIPT_CHOICES,
|
|
|
|
)
|
2024-07-13 00:16:47 -04:00
|
|
|
from common.models.lang import get_current_locales
|
|
|
|
from common.models.misc import uniq
|
2023-08-10 11:27:31 -04:00
|
|
|
|
|
|
|
from .utils import DEFAULT_ITEM_COVER, item_cover_path, resource_cover_path
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2023-08-11 01:43:19 -04:00
|
|
|
if TYPE_CHECKING:
|
2024-05-27 15:44:12 -04:00
|
|
|
from journal.models import Collection
|
2023-08-11 16:14:44 -04:00
|
|
|
from users.models import User
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
from .sites import ResourceContent
|
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2022-12-15 17:29:35 -05:00
|
|
|
class SiteName(models.TextChoices):
    """External sites that items can be scraped from or linked to."""

    Unknown = "unknown", _("Unknown")  # type:ignore[reportCallIssue]
    Douban = "douban", _("Douban")  # type:ignore[reportCallIssue]
    Goodreads = "goodreads", _("Goodreads")  # type:ignore[reportCallIssue]
    GoogleBooks = "googlebooks", _("Google Books")  # type:ignore[reportCallIssue]
    BooksTW = "bookstw", _("BooksTW")  # type:ignore[reportCallIssue]
    IMDB = "imdb", _("IMDb")  # type:ignore[reportCallIssue]
    TMDB = "tmdb", _("TMDB")  # type:ignore[reportCallIssue]
    Bandcamp = "bandcamp", _("Bandcamp")  # type:ignore[reportCallIssue]
    Spotify = "spotify", _("Spotify")  # type:ignore[reportCallIssue]
    IGDB = "igdb", _("IGDB")  # type:ignore[reportCallIssue]
    Steam = "steam", _("Steam")  # type:ignore[reportCallIssue]
    Bangumi = "bangumi", _("Bangumi")  # type:ignore[reportCallIssue]
    BGG = "bgg", _("BGG")  # type:ignore[reportCallIssue]
    # ApplePodcast = "apple_podcast", _("Apple Podcast")  # type:ignore[reportCallIssue]
    RSS = "rss", _("RSS")  # type:ignore[reportCallIssue]
    Discogs = "discogs", _("Discogs")  # type:ignore[reportCallIssue]
    AppleMusic = "apple_music", _("Apple Music")  # type:ignore[reportCallIssue]
    Fediverse = "fedi", _("Fediverse")  # type:ignore[reportCallIssue]
|
2022-12-15 17:29:35 -05:00
|
|
|
|
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
class IdType(models.TextChoices):
    """Kinds of external identifiers an item or resource may carry.

    Values are either universal identifiers (ISBN, ISRC, ...) or
    site-specific ids keyed to a SiteName.
    """

    WikiData = "wikidata", _("WikiData")  # type:ignore[reportCallIssue]
    ISBN10 = "isbn10", _("ISBN10")  # type:ignore[reportCallIssue]
    ISBN = "isbn", _("ISBN")  # ISBN 13 # type:ignore[reportCallIssue]
    ASIN = "asin", _("ASIN")  # type:ignore[reportCallIssue]
    ISSN = "issn", _("ISSN")  # type:ignore[reportCallIssue]
    CUBN = "cubn", _("CUBN")  # type:ignore[reportCallIssue]
    ISRC = "isrc", _("ISRC")  # only for songs # type:ignore[reportCallIssue]
    GTIN = (
        "gtin",
        _("GTIN UPC EAN"),
    )  # GTIN-13, ISBN is separate # type:ignore[reportCallIssue]
    RSS = "rss", _("RSS Feed URL")  # type:ignore[reportCallIssue]
    IMDB = "imdb", _("IMDb")  # type:ignore[reportCallIssue]
    TMDB_TV = "tmdb_tv", _("TMDB TV Serie")  # type:ignore[reportCallIssue]
    TMDB_TVSeason = "tmdb_tvseason", _("TMDB TV Season")  # type:ignore[reportCallIssue]
    TMDB_TVEpisode = "tmdb_tvepisode", _(
        "TMDB TV Episode"
    )  # type:ignore[reportCallIssue]
    TMDB_Movie = "tmdb_movie", _("TMDB Movie")  # type:ignore[reportCallIssue]
    Goodreads = "goodreads", _("Goodreads")  # type:ignore[reportCallIssue]
    Goodreads_Work = "goodreads_work", _(
        "Goodreads Work"
    )  # type:ignore[reportCallIssue]
    GoogleBooks = "googlebooks", _("Google Books")  # type:ignore[reportCallIssue]
    DoubanBook = "doubanbook", _("Douban Book")  # type:ignore[reportCallIssue]
    DoubanBook_Work = "doubanbook_work", _(
        "Douban Book Work"
    )  # type:ignore[reportCallIssue]
    DoubanMovie = "doubanmovie", _("Douban Movie")  # type:ignore[reportCallIssue]
    DoubanMusic = "doubanmusic", _("Douban Music")  # type:ignore[reportCallIssue]
    DoubanGame = "doubangame", _("Douban Game")  # type:ignore[reportCallIssue]
    DoubanDrama = "doubandrama", _("Douban Drama")  # type:ignore[reportCallIssue]
    DoubanDramaVersion = "doubandrama_version", _(
        "Douban Drama Version"
    )  # type:ignore[reportCallIssue]
    BooksTW = "bookstw", _("BooksTW Book")  # type:ignore[reportCallIssue]
    Bandcamp = "bandcamp", _("Bandcamp")  # type:ignore[reportCallIssue]
    Spotify_Album = "spotify_album", _("Spotify Album")  # type:ignore[reportCallIssue]
    Spotify_Show = "spotify_show", _("Spotify Podcast")  # type:ignore[reportCallIssue]
    Discogs_Release = "discogs_release", _(
        "Discogs Release"
    )  # type:ignore[reportCallIssue]
    Discogs_Master = "discogs_master", _(
        "Discogs Master"
    )  # type:ignore[reportCallIssue]
    MusicBrainz = "musicbrainz", _("MusicBrainz ID")  # type:ignore[reportCallIssue]
    # DoubanBook_Author = "doubanbook_author", _("Douban Book Author")  # type:ignore[reportCallIssue]
    # DoubanCelebrity = "doubanmovie_celebrity", _("Douban Movie Celebrity")  # type:ignore[reportCallIssue]
    # Goodreads_Author = "goodreads_author", _("Goodreads Author")  # type:ignore[reportCallIssue]
    # Spotify_Artist = "spotify_artist", _("Spotify Artist")  # type:ignore[reportCallIssue]
    # TMDB_Person = "tmdb_person", _("TMDB Person")  # type:ignore[reportCallIssue]
    IGDB = "igdb", _("IGDB Game")  # type:ignore[reportCallIssue]
    BGG = "bgg", _("BGG Boardgame")  # type:ignore[reportCallIssue]
    Steam = "steam", _("Steam Game")  # type:ignore[reportCallIssue]
    Bangumi = "bangumi", _("Bangumi")  # type:ignore[reportCallIssue]
    ApplePodcast = "apple_podcast", _("Apple Podcast")  # type:ignore[reportCallIssue]
    AppleMusic = "apple_music", _("Apple Music")  # type:ignore[reportCallIssue]
    Fediverse = "fedi", _("Fediverse")  # type:ignore[reportCallIssue]
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
2023-05-14 10:31:04 -04:00
|
|
|
# Lookup id types preferred as an item's primary id, in priority order;
# consumed first-to-last by get_best_lookup_id() and update_lookup_ids().
IdealIdTypes = [
    IdType.ISBN,
    IdType.CUBN,
    IdType.ASIN,
    IdType.GTIN,
    IdType.ISRC,
    IdType.MusicBrainz,
    IdType.RSS,
    IdType.IMDB,
]
|
|
|
|
|
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
class ItemType(models.TextChoices):
    """Concrete item types, including child types (seasons, episodes, ...)."""

    Book = "book", _("Book")  # type:ignore[reportCallIssue]
    TVShow = "tvshow", _("TV Serie")  # type:ignore[reportCallIssue]
    TVSeason = "tvseason", _("TV Season")  # type:ignore[reportCallIssue]
    TVEpisode = "tvepisode", _("TV Episode")  # type:ignore[reportCallIssue]
    Movie = "movie", _("Movie")  # type:ignore[reportCallIssue]
    Album = "music", _("Album")  # type:ignore[reportCallIssue]
    Game = "game", _("Game")  # type:ignore[reportCallIssue]
    Podcast = "podcast", _("Podcast Program")  # type:ignore[reportCallIssue]
    PodcastEpisode = "podcastepisode", _(
        "Podcast Episode"
    )  # type:ignore[reportCallIssue]
    Performance = "performance", _("Performance")  # type:ignore[reportCallIssue]
    PerformanceProduction = "production", _(
        "Production"
    )  # type:ignore[reportCallIssue]
    FanFic = "fanfic", _("Fanfic")  # type:ignore[reportCallIssue]
    Exhibition = "exhibition", _("Exhibition")  # type:ignore[reportCallIssue]
    Collection = "collection", _("Collection")  # type:ignore[reportCallIssue]
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
2022-12-11 23:20:28 +00:00
|
|
|
class ItemCategory(models.TextChoices):
    """Top-level item categories (coarser than ItemType: no child types)."""

    Book = "book", _("Book")  # type:ignore[reportCallIssue]
    Movie = "movie", _("Movie")  # type:ignore[reportCallIssue]
    TV = "tv", _("TV")  # type:ignore[reportCallIssue]
    Music = "music", _("Music")  # type:ignore[reportCallIssue]
    Game = "game", _("Game")  # type:ignore[reportCallIssue]
    Podcast = "podcast", _("Podcast")  # type:ignore[reportCallIssue]
    Performance = "performance", _("Performance")  # type:ignore[reportCallIssue]
    FanFic = "fanfic", _("FanFic")  # type:ignore[reportCallIssue]
    Exhibition = "exhibition", _("Exhibition")  # type:ignore[reportCallIssue]
    Collection = "collection", _("Collection")  # type:ignore[reportCallIssue]
|
2022-12-11 23:20:28 +00:00
|
|
|
|
|
|
|
|
2023-06-02 21:54:48 -04:00
|
|
|
class AvailableItemCategory(models.TextChoices):
    """Subset of ItemCategory currently enabled (excludes FanFic/Exhibition/Collection)."""

    Book = "book", _("Book")  # type:ignore[reportCallIssue]
    Movie = "movie", _("Movie")  # type:ignore[reportCallIssue]
    TV = "tv", _("TV")  # type:ignore[reportCallIssue]
    Music = "music", _("Music")  # type:ignore[reportCallIssue]
    Game = "game", _("Game")  # type:ignore[reportCallIssue]
    Podcast = "podcast", _("Podcast")  # type:ignore[reportCallIssue]
    Performance = "performance", _("Performance")  # type:ignore[reportCallIssue]
2023-06-02 21:54:48 -04:00
|
|
|
|
|
|
|
|
2023-01-29 20:05:30 -05:00
|
|
|
# class SubItemType(models.TextChoices):
|
2024-05-29 09:49:13 -04:00
|
|
|
# Season = "season", _("season") # type:ignore[reportCallIssue]
|
|
|
|
# Episode = "episode", _("episode") # type:ignore[reportCallIssue]
|
|
|
|
# Version = "production", _("production") # type:ignore[reportCallIssue]
|
2022-12-29 14:30:31 -05:00
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
# class CreditType(models.TextChoices):
|
2024-05-26 22:57:49 -04:00
|
|
|
# Author = 'author', _('author')
|
|
|
|
# Translater = 'translater', _('translater')
|
|
|
|
# Producer = 'producer', _('producer')
|
|
|
|
# Director = 'director', _('director')
|
|
|
|
# Actor = 'actor', _('actor')
|
|
|
|
# Playwright = 'playwright', _('playwright')
|
|
|
|
# VoiceActor = 'voiceactor', _('voiceactor')
|
|
|
|
# Host = 'host', _('host')
|
|
|
|
# Developer = 'developer', _('developer')
|
|
|
|
# Publisher = 'publisher', _('publisher')
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
|
|
|
class PrimaryLookupIdDescriptor(object):  # TODO make it mixin of Field
    """Descriptor exposing an Item's primary lookup id as a typed attribute.

    Reading yields primary_lookup_id_value only when the owner's primary
    lookup type matches this descriptor's id_type; writing sets or clears
    both the type and value fields together.
    """

    def __init__(self, id_type: IdType):
        self.id_type = id_type

    def __get__(
        self, instance: "Item | None", cls: type[Any] | None = None
    ) -> str | Self | None:
        # accessed on the class itself -> return the descriptor object
        if instance is None:
            return self
        matches = instance.primary_lookup_id_type == self.id_type
        return instance.primary_lookup_id_value if matches else None

    def __set__(self, instance: "Item", id_value: str | None):
        # a falsy value clears both fields; otherwise claim the primary slot
        instance.primary_lookup_id_type = self.id_type if id_value else None
        instance.primary_lookup_id_value = id_value if id_value else None
|
|
|
|
|
|
|
|
|
|
|
|
class LookupIdDescriptor(object):  # TODO make it mixin of Field
    """Descriptor proxying one id type through the owner's lookup-id API.

    NOTE(review): relies on instance.get_lookup_id / set_lookup_id, which are
    not visible (only commented out) in this module — confirm still in use.
    """

    def __init__(self, id_type: IdType):
        self.id_type = id_type

    def __get__(self, instance, cls=None):
        # accessed via the class -> return the descriptor itself
        if instance is None:
            return self
        return instance.get_lookup_id(self.id_type)

    def __set__(self, instance, value):
        instance.set_lookup_id(self.id_type, value)
|
|
|
|
|
|
|
|
|
|
|
|
# class ItemId(models.Model):
|
|
|
|
# item = models.ForeignKey('Item', models.CASCADE)
|
2024-05-26 22:57:49 -04:00
|
|
|
# id_type = models.CharField(_("Id Type"), blank=False, choices=IdType.choices, max_length=50)
|
|
|
|
# id_value = models.CharField(_("ID Value"), blank=False, max_length=1000)
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
|
|
|
# class ItemCredit(models.Model):
|
|
|
|
# item = models.ForeignKey('Item', models.CASCADE)
|
2024-05-26 22:57:49 -04:00
|
|
|
# credit_type = models.CharField(_("Credit Type"), choices=CreditType.choices, blank=False, max_length=50)
|
|
|
|
# name = models.CharField(_("Name"), blank=False, max_length=1000)
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
|
|
|
# def check_source_id(sid):
|
|
|
|
# if not sid:
|
|
|
|
# return True
|
|
|
|
# s = sid.split(':')
|
|
|
|
# if len(s) < 2:
|
|
|
|
# return False
|
|
|
|
# return sid[0] in IdType.values()
|
|
|
|
|
|
|
|
|
2023-02-15 15:45:57 -05:00
|
|
|
class ExternalResourceSchema(Schema):
    """API shape for a link to an external site's page for an item."""

    url: str
|
|
|
|
|
|
|
|
|
|
|
|
class BaseSchema(Schema):
    """Common fields of API representations of catalog objects.

    `id` and `type` are aliased from the ActivityPub-facing properties.
    """

    id: str = Field(alias="absolute_url")
    type: str = Field(alias="ap_object_type")
    uuid: str
    url: str
    api_url: str
    category: ItemCategory
    parent_uuid: str | None
    display_title: str
    external_resources: list[ExternalResourceSchema] | None
|
|
|
|
|
|
|
|
|
2024-07-13 00:16:47 -04:00
|
|
|
class LocalizedTitleSchema(Schema):
    """A piece of text tagged with its language/locale code."""

    lang: str
    text: str
|
|
|
|
|
|
|
|
|
2023-02-15 15:45:57 -05:00
|
|
|
class ItemInSchema(Schema):
    """Item fields shared by embedded and standalone API representations."""

    title: str = Field(alias="display_title")
    description: str = Field(default=None, alias="display_description")
    localized_title: list[LocalizedTitleSchema] = []
    localized_description: list[LocalizedTitleSchema] = []
    cover_image_url: str | None
    rating: float | None
    rating_count: int | None
    # brief is deprecated; kept as an alias of display_description for old clients
    brief: str = Field(deprecated=True, alias="display_description")
|
2023-02-15 15:45:57 -05:00
|
|
|
|
|
|
|
|
2024-05-26 22:57:49 -04:00
|
|
|
class ItemSchema(BaseSchema, ItemInSchema):
    """Full standalone API representation of an item."""

    pass
|
|
|
|
|
|
|
|
|
2024-07-13 00:16:47 -04:00
|
|
|
def get_locale_choices_for_jsonform(choices):
    """Convert Django-style (value, label) choices to jsonform option dicts."""
    options = []
    for value, label in choices:
        options.append({"title": label, "value": value})
    return options
|
|
|
|
|
|
|
|
|
|
|
|
# Choice lists pre-converted to the dict format the jsonform schemas expect.
LOCALE_CHOICES_JSONFORM = get_locale_choices_for_jsonform(LOCALE_CHOICES)
LANGUAGE_CHOICES_JSONFORM = get_locale_choices_for_jsonform(LANGUAGE_CHOICES)
|
|
|
|
|
|
|
|
# jsonform schema for localized_title: a non-empty list of
# {"lang": ..., "text": ...} entries.
LOCALIZED_TITLE_SCHEMA = {
    "type": "list",
    "items": {
        "type": "dict",
        "keys": {
            "lang": {
                "type": "string",
                "title": _("locale"),
                "choices": LOCALE_CHOICES_JSONFORM,
            },
            "text": {"type": "string", "title": _("text content")},
        },
        # fix: the text key is named "text"; "s" does not exist in "keys"
        "required": ["lang", "text"],
    },
    "minItems": 1,
    "uniqueItems": True,
}
|
|
|
|
|
|
|
|
# jsonform schema for localized_description: like LOCALIZED_TITLE_SCHEMA but
# the list may be empty and text is edited in a textarea widget.
LOCALIZED_DESCRIPTION_SCHEMA = {
    "type": "list",
    "items": {
        "type": "dict",
        "keys": {
            "lang": {
                "type": "string",
                "title": _("locale"),
                "choices": LOCALE_CHOICES_JSONFORM,
            },
            "text": {
                "type": "string",
                "title": _("text content"),
                "widget": "textarea",
            },
        },
        # fix: the text key is named "text"; "s" does not exist in "keys"
        "required": ["lang", "text"],
    },
    "uniqueItems": True,
}
|
|
|
|
|
|
|
|
|
|
|
|
def LanguageListField():
    """Build the JSON array field used to store an item's language codes."""
    return jsondata.ArrayField(
        verbose_name=_("language"),
        base_field=models.CharField(blank=True, default="", max_length=100),
        null=True,
        blank=True,
        default=list,
        # schema={
        #     "type": "list",
        #     "items": {"type": "string", "choices": LANGUAGE_CHOICES_JSONFORM},
        # },
    )
|
|
|
|
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
class Item(PolymorphicModel):
    """Base model for all catalog entries (books, movies, music, games, ...)."""

    if TYPE_CHECKING:
        external_resources: QuerySet["ExternalResource"]
        collections: QuerySet["Collection"]
        merged_from_items: QuerySet["Item"]
        merged_to_item_id: int
    category: ItemCategory  # subclass must specify this
    url_path = "item"  # subclass must specify this
    child_class = None  # subclass may specify this to allow link to parent item
    parent_class = None  # subclass may specify this to allow create child item
    # stable internal identifier; exposed externally as base62 via the `uuid` property
    uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
    title = models.CharField(_("title"), max_length=1000, default="")
    brief = models.TextField(_("description"), blank=True, default="")
    primary_lookup_id_type = models.CharField(
        _("Primary ID Type"), blank=False, null=True, max_length=50
    )
    primary_lookup_id_value = models.CharField(
        _("Primary ID Value"),
        blank=False,
        null=True,
        max_length=1000,
        help_text="automatically detected, usually no change necessary, left empty if unsure",
    )
    metadata = models.JSONField(_("metadata"), blank=True, null=True, default=dict)
    cover = models.ImageField(
        _("cover"), upload_to=item_cover_path, default=DEFAULT_ITEM_COVER, blank=True
    )
    created_time = models.DateTimeField(auto_now_add=True)
    edited_time = models.DateTimeField(auto_now=True)
    # soft-delete flag; see delete(soft=True)
    is_deleted = models.BooleanField(default=False, db_index=True)
    # set when this item has been merged into another; see merge_to()
    merged_to_item = models.ForeignKey(
        "Item",
        null=True,
        on_delete=models.SET_NULL,
        default=None,
        related_name="merged_from_items",
    )

    # list of {"lang": ..., "text": ...} dicts; see LOCALIZED_TITLE_SCHEMA
    localized_title = jsondata.JSONField(
        verbose_name=_("title"),
        null=False,
        blank=True,
        default=list,
        schema=LOCALIZED_TITLE_SCHEMA,
    )

    localized_description = jsondata.JSONField(
        verbose_name=_("description"),
        null=False,
        blank=True,
        default=list,
        schema=LOCALIZED_DESCRIPTION_SCHEMA,
    )

    class Meta:
        # NOTE(review): Meta.index_together is deprecated since Django 4.2
        # (removed in 5.1) — consider migrating to Meta.indexes.
        index_together = [
            [
                "primary_lookup_id_type",
                "primary_lookup_id_value",
            ]
        ]
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def delete(
|
|
|
|
self,
|
|
|
|
using: Any = None,
|
|
|
|
keep_parents: bool = False,
|
|
|
|
soft: bool = True,
|
|
|
|
*args: tuple[Any, ...],
|
|
|
|
**kwargs: dict[str, Any],
|
|
|
|
) -> tuple[int, dict[str, int]]:
|
|
|
|
if soft:
|
|
|
|
self.clear()
|
|
|
|
self.is_deleted = True
|
|
|
|
self.save(using=using)
|
|
|
|
return 0, {}
|
|
|
|
else:
|
|
|
|
return super().delete(
|
|
|
|
using=using, keep_parents=keep_parents, *args, **kwargs
|
|
|
|
)
|
|
|
|
|
2023-06-18 23:13:30 -04:00
|
|
|
    @cached_property
    def history(self):
        """All audit log entries for this item, across all polymorphic subtypes."""
        # can't use AuditlogHistoryField bc it will only return history with current content type
        # item_content_types() presumably maps item classes to ContentType ids
        # (defined elsewhere in this module) — verify if touched.
        return LogEntry.objects.filter(
            object_id=self.pk, content_type_id__in=list(item_content_types().values())
        )
|
|
|
|
|
2023-08-11 16:14:44 -04:00
|
|
|
    @cached_property
    def last_editor(self) -> "User | None":
        """User who made the most recent audit log entry, or None if no history."""
        last_edit = self.history.order_by("-timestamp").first()
        return last_edit.actor if last_edit else None
|
|
|
|
|
2022-12-13 06:44:29 +00:00
|
|
|
    def clear(self):
        """Detach parent link, primary lookup id and external resources.

        Used as cleanup before soft-deleting; does not save `self`.
        """
        self.set_parent_item(None)
        self.primary_lookup_id_value = None
        self.primary_lookup_id_type = None
        # release external resources so they can be re-matched later
        for res in self.external_resources.all():
            res.item = None
            res.save()
|
2022-12-11 23:20:28 +00:00
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
def __str__(self):
|
2024-05-14 10:54:49 -04:00
|
|
|
return f"{self.__class__.__name__}|{self.pk}|{self.uuid} {self.primary_lookup_id_type}:{self.primary_lookup_id_value if self.primary_lookup_id_value else ''} ({self.title})"
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2023-01-05 03:06:13 -05:00
|
|
|
    @classmethod
    def lookup_id_type_choices(cls):
        """Return the IdType choices applicable to this item class."""
        return IdType.choices
|
|
|
|
|
|
|
|
@classmethod
|
2024-05-26 22:57:49 -04:00
|
|
|
def lookup_id_cleanup(
|
|
|
|
cls, lookup_id_type: str | IdType, lookup_id_value: str
|
|
|
|
) -> tuple[str | IdType, str] | tuple[None, None]:
|
2023-01-05 03:06:13 -05:00
|
|
|
if not lookup_id_type or not lookup_id_value or not lookup_id_value.strip():
|
|
|
|
return None, None
|
|
|
|
return lookup_id_type, lookup_id_value.strip()
|
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
@classmethod
|
2024-05-27 15:44:12 -04:00
|
|
|
def get_best_lookup_id(cls, lookup_ids: dict[str, str]) -> tuple[str, str]:
|
2022-12-29 14:30:31 -05:00
|
|
|
"""get best available lookup id, ideally commonly used"""
|
2023-05-14 10:31:04 -04:00
|
|
|
for t in IdealIdTypes:
|
2022-12-07 19:09:05 -05:00
|
|
|
if lookup_ids.get(t):
|
|
|
|
return t, lookup_ids[t]
|
|
|
|
return list(lookup_ids.items())[0]
|
|
|
|
|
2023-06-05 02:04:52 -04:00
|
|
|
    @property
    def parent_item(self):
        # base items have no parent; subclasses with parent_class override this
        return None

    @property
    def child_items(self) -> "QuerySet[Item]":
        # base items have no children; subclasses with child_class override this
        return Item.objects.none()

    @property
    def child_item_ids(self) -> list[int]:
        """Primary keys of this item's child items."""
        return list(self.child_items.values_list("id", flat=True))

    def set_parent_item(self, value: "Item | None"):
        # no-op by default; subclasses that support a parent override this
        # raise ValueError("cannot set parent item")
        pass

    @property
    def parent_uuid(self) -> str | None:
        """Base62 uuid of the parent item, or None when there is no parent."""
        return self.parent_item.uuid if self.parent_item else None

    @property
    def sibling_items(self) -> "QuerySet[Item]":
        # empty by default; subclasses with siblings (e.g. seasons) override this
        return Item.objects.none()

    @property
    def title_deco(self) -> str:
        # decoration text appended to the title in UI; empty by default
        return ""

    @property
    def sibling_item_ids(self) -> list[int]:
        """Primary keys of this item's sibling items."""
        return list(self.sibling_items.values_list("id", flat=True))
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
    @classmethod
    def get_ap_object_type(cls) -> str:
        """ActivityPub object type for this class; defaults to the class name."""
        return cls.__name__

    @property
    def ap_object_type(self) -> str:
        # instance-level alias of get_ap_object_type for schema aliasing
        return self.get_ap_object_type()
|
|
|
|
|
|
|
|
@property
|
2024-05-26 22:57:49 -04:00
|
|
|
def ap_object_ref(self) -> dict[str, Any]:
|
2023-07-20 21:59:49 -04:00
|
|
|
o = {
|
|
|
|
"type": self.get_ap_object_type(),
|
2023-11-17 22:46:31 -05:00
|
|
|
"href": self.absolute_url,
|
2023-07-20 21:59:49 -04:00
|
|
|
"name": self.title,
|
|
|
|
}
|
|
|
|
if self.has_cover():
|
|
|
|
o["image"] = self.cover_image_url
|
|
|
|
return o
|
|
|
|
|
2024-05-26 22:57:49 -04:00
|
|
|
    def log_action(self, changes: dict[str, Any]):
        """Record an UPDATE audit log entry with the given change dict."""
        LogEntry.objects.log_create(  # type: ignore
            self, action=LogEntry.Action.UPDATE, changes=changes
        )
|
|
|
|
|
2024-05-26 22:57:49 -04:00
|
|
|
    def merge_to(self, to_item: "Item | None"):
        """Mark this item as merged into to_item and move external resources over.

        Passing None un-merges. Raises ValueError for self-merge, merging into
        an already-merged item, or merging across different concrete models.
        """
        if to_item is None:
            # un-merge: only touch the db if currently merged
            if self.merged_to_item is not None:
                self.merged_to_item = None
                self.save()
            return
        if to_item.pk == self.pk:
            raise ValueError("cannot merge to self")
        if to_item.merged_to_item is not None:
            raise ValueError("cannot merge to item which is merged to another item")
        if not isinstance(to_item, self.__class__):
            raise ValueError("cannot merge to item in a different model")
        # audit before mutating so the old target is captured
        self.log_action({"!merged": [str(self.merged_to_item), str(to_item)]})
        self.merged_to_item = to_item
        self.save()
        # re-point external resources at the merge target
        for res in self.external_resources.all():
            res.item = to_item
            res.save()
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
    def recast_to(self, model: "type[Any]") -> "Item":
        """Convert this item to another Item subclass in place (same pk/uid).

        Creates the target subclass row pointing at the existing item_ptr,
        swaps the polymorphic content type, then removes the old subclass row
        with raw SQL. Returns the freshly loaded recast object.
        """
        logger.warning(f"recast item {self} to {model}")
        if isinstance(self, model):
            # already the target type — nothing to do
            return self
        if not issubclass(model, Item):
            raise ValueError("invalid model to recast to")
        ct = ContentType.objects.get_for_model(model)
        old_ct = self.polymorphic_ctype
        if not old_ct:
            raise ValueError("cannot recast item without polymorphic_ctype")
        tbl = self.__class__._meta.db_table
        with disable_auditlog():
            # disable audit as serialization won't work here
            obj = model(item_ptr_id=self.pk, polymorphic_ctype=ct)
            obj.save_base(raw=True)
            obj.save(update_fields=["polymorphic_ctype"])
            with connection.cursor() as cursor:
                cursor.execute(f"DELETE FROM {tbl} WHERE item_ptr_id = %s", [self.pk])
        obj = model.objects.get(pk=obj.pk)
        obj.log_action({"!recast": [old_ct.model, ct.model]})
        return obj
|
2022-12-11 23:20:28 +00:00
|
|
|
|
2022-12-14 21:12:37 -05:00
|
|
|
    @property
    def uuid(self):
        """Public identifier: uid encoded as base62, zero-padded to 22 chars."""
        return b62_encode(self.uid.int).zfill(22)
|
2022-12-14 21:12:37 -05:00
|
|
|
|
2022-12-11 23:20:28 +00:00
|
|
|
    @property
    def url(self):
        """Site-relative URL of this item's page."""
        return f"/{self.url_path}/{self.uuid}"

    @property
    def absolute_url(self):
        """Fully-qualified URL of this item's page."""
        return f"{settings.SITE_INFO['site_url']}{self.url}"

    @property
    def api_url(self):
        """Site-relative URL of this item's API endpoint."""
        return f"/api{self.url}"
|
2022-12-11 23:20:28 +00:00
|
|
|
|
2022-12-15 17:29:35 -05:00
|
|
|
    @property
    def class_name(self) -> str:
        """Lower-cased name of the concrete item class."""
        return self.__class__.__name__.lower()
|
|
|
|
|
2024-07-13 00:16:47 -04:00
|
|
|
def get_localized_title(self) -> str | None:
|
|
|
|
if self.localized_title:
|
2024-07-14 13:36:52 -04:00
|
|
|
locales = get_current_locales() + PREFERRED_LOCALES
|
2024-07-13 00:16:47 -04:00
|
|
|
for loc in locales:
|
|
|
|
v = next(
|
|
|
|
filter(lambda t: t["lang"] == loc, self.localized_title), {}
|
|
|
|
).get("text")
|
|
|
|
if v:
|
|
|
|
return v
|
|
|
|
|
|
|
|
def get_localized_description(self) -> str | None:
|
|
|
|
if self.localized_description:
|
2024-07-14 10:24:26 -04:00
|
|
|
locales = get_current_locales() + PREFERRED_LOCALES
|
2024-07-13 00:16:47 -04:00
|
|
|
for loc in locales:
|
|
|
|
v = next(
|
|
|
|
filter(lambda t: t["lang"] == loc, self.localized_description), {}
|
|
|
|
).get("text")
|
|
|
|
if v:
|
|
|
|
return v
|
|
|
|
|
2024-07-14 00:19:41 -04:00
|
|
|
@cached_property
|
2024-05-26 22:57:49 -04:00
|
|
|
def display_title(self) -> str:
|
2024-07-13 18:14:40 -04:00
|
|
|
# return title in current locale if possible, otherwise any title
|
|
|
|
return (self.get_localized_title() or self.title) or (
|
|
|
|
self.localized_title[0]["text"] if self.localized_title else ""
|
|
|
|
)
|
2023-06-05 02:04:52 -04:00
|
|
|
|
2024-07-14 00:19:41 -04:00
|
|
|
@cached_property
|
|
|
|
def additional_title(self) -> list[str]:
|
|
|
|
title = self.display_title
|
|
|
|
return [t["text"] for t in self.localized_title if t["text"] != title]
|
|
|
|
|
|
|
|
@cached_property
|
2024-07-13 00:16:47 -04:00
|
|
|
def display_description(self) -> str:
|
|
|
|
return (
|
|
|
|
self.get_localized_description()
|
|
|
|
or self.brief
|
|
|
|
or (
|
|
|
|
self.localized_description[0]["text"]
|
|
|
|
if self.localized_description
|
|
|
|
else ""
|
|
|
|
)
|
|
|
|
)
|
|
|
|
|
|
|
|
    @property
    def brief_description(self):
        """display_description truncated to 155 characters (summary/meta length)."""
        return self.display_description[:155]
|
2024-07-05 16:26:26 -04:00
|
|
|
|
2022-12-11 23:20:28 +00:00
|
|
|
    @classmethod
    def get_by_url(cls, url_or_b62: str, resolve_merge=False) -> "Self | None":
        """Find an item by its URL or base62 uuid; returns None when not found.

        With resolve_merge=True, follows merged_to_item chains up to 5 hops;
        a longer chain is treated as a merge loop and yields None.
        """
        b62 = url_or_b62.strip().split("/")[-1]
        if len(b62) not in [21, 22]:
            # last path segment doesn't look like a b62 uuid —
            # scan the whole input for a b62-looking token instead
            r = re.search(r"[A-Za-z0-9]{21,22}", url_or_b62)
            if r:
                b62 = r[0]
        try:
            item = cls.objects.get(uid=uuid.UUID(int=b62_decode(b62)))
            if resolve_merge:
                resolve_cnt = 5
                while item.merged_to_item and resolve_cnt > 0:
                    item = item.merged_to_item
                    resolve_cnt -= 1
                if resolve_cnt == 0:
                    logger.error(f"resolve merge loop for {item}")
                    item = None
        except Exception:
            # deliberately broad: bad b62, bad uuid or missing row all mean "not found"
            item = None
        return item
|
2022-12-11 23:20:28 +00:00
|
|
|
|
2024-06-17 15:10:34 -04:00
|
|
|
    @classmethod
    def get_by_remote_url(cls, url: str) -> "Self | None":
        """Resolve a (possibly federated) URL to a local item, or None.

        Local site URLs are resolved directly (following merges); any other
        URL is matched against known ExternalResource entries.
        """
        # strip the federation path marker used in cross-instance links
        url_ = url.replace("/~neodb~/", "/")
        if url_.startswith(settings.SITE_INFO["site_url"]):
            return cls.get_by_url(url_, True)
        er = ExternalResource.objects.filter(url=url_).first()
        return er.item if er else None
|
|
|
|
|
2022-12-13 06:44:29 +00:00
|
|
|
# def get_lookup_id(self, id_type: str) -> str:
|
|
|
|
# prefix = id_type.strip().lower() + ':'
|
|
|
|
# return next((x[len(prefix):] for x in self.lookup_ids if x.startswith(prefix)), None)
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def update_lookup_ids(self, lookup_ids: list[tuple[str, str]]):
|
2023-07-01 14:54:45 -04:00
|
|
|
for t, v in lookup_ids:
|
|
|
|
if t in IdealIdTypes and self.primary_lookup_id_type not in IdealIdTypes:
|
|
|
|
self.primary_lookup_id_type = t
|
|
|
|
self.primary_lookup_id_value = v
|
|
|
|
return
|
|
|
|
if t == self.primary_lookup_id_type:
|
|
|
|
self.primary_lookup_id_value = v
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2022-12-29 14:30:31 -05:00
|
|
|
    # Metadata keys copied verbatim from an ExternalResource into the Item.
    METADATA_COPY_LIST = [
        # "title",
        # "brief",
        "localized_title",
        "localized_description",
    ]  # list of metadata keys to copy from resource to item
    # Keys whose values are merged (deduplicated union) rather than
    # overwritten when both the item and the resource already have data.
    METADATA_MERGE_LIST = [
        "localized_title",
        "localized_description",
    ]
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
@classmethod
|
2024-05-27 15:44:12 -04:00
|
|
|
def copy_metadata(cls, metadata: dict[str, Any]) -> dict[str, Any]:
|
2022-12-29 14:30:31 -05:00
|
|
|
return dict(
|
|
|
|
(k, v)
|
|
|
|
for k, v in metadata.items()
|
|
|
|
if k in cls.METADATA_COPY_LIST and v is not None
|
|
|
|
)
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def has_cover(self) -> bool:
|
|
|
|
return bool(self.cover) and self.cover != DEFAULT_ITEM_COVER
|
2022-12-14 21:12:37 -05:00
|
|
|
|
2023-02-10 16:39:58 -05:00
|
|
|
@property
|
2024-05-27 15:44:12 -04:00
|
|
|
def cover_image_url(self) -> str | None:
|
2023-02-10 16:39:58 -05:00
|
|
|
return (
|
2024-05-27 15:44:12 -04:00
|
|
|
f"{settings.SITE_INFO['site_url']}{self.cover.url}" # type:ignore
|
2023-02-10 16:39:58 -05:00
|
|
|
if self.cover and self.cover != DEFAULT_ITEM_COVER
|
|
|
|
else None
|
|
|
|
)
|
|
|
|
|
2024-07-13 00:16:47 -04:00
|
|
|
def merge_data_from_external_resource(
|
|
|
|
self, p: "ExternalResource", ignore_existing_content: bool = False
|
|
|
|
):
|
|
|
|
for k in self.METADATA_COPY_LIST:
|
|
|
|
v = p.metadata.get(k)
|
|
|
|
if v:
|
|
|
|
if not getattr(self, k) or ignore_existing_content:
|
|
|
|
setattr(self, k, v)
|
|
|
|
elif k in self.METADATA_MERGE_LIST:
|
|
|
|
setattr(self, k, uniq((v or []) + getattr(self, k, [])))
|
|
|
|
if p.cover and (not self.has_cover() or ignore_existing_content):
|
|
|
|
self.cover = p.cover
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def merge_data_from_external_resources(self, ignore_existing_content: bool = False):
|
2022-12-07 19:09:05 -05:00
|
|
|
"""Subclass may override this"""
|
|
|
|
lookup_ids = []
|
2022-12-08 16:08:59 +00:00
|
|
|
for p in self.external_resources.all():
|
2022-12-07 19:09:05 -05:00
|
|
|
lookup_ids.append((p.id_type, p.id_value))
|
|
|
|
lookup_ids += p.other_lookup_ids.items()
|
2024-07-13 00:16:47 -04:00
|
|
|
self.merge_data_from_external_resource(p, ignore_existing_content)
|
2023-07-01 14:54:45 -04:00
|
|
|
self.update_lookup_ids(list(set(lookup_ids)))
|
2022-12-07 19:09:05 -05:00
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def update_linked_items_from_external_resource(self, resource: "ExternalResource"):
|
2022-12-07 19:09:05 -05:00
|
|
|
"""Subclass should override this"""
|
|
|
|
pass
|
|
|
|
|
2023-01-08 22:10:48 -05:00
|
|
|
    def skip_index(self):
        """Whether this item should be excluded from search indexing.

        Base implementation never skips; subclasses may override.
        """
        return False
|
|
|
|
|
2023-05-22 19:14:31 -04:00
|
|
|
@property
|
|
|
|
def editable(self):
|
|
|
|
return not self.is_deleted and self.merged_to_item is None
|
|
|
|
|
2023-08-10 11:27:31 -04:00
|
|
|
@property
|
|
|
|
    def rating(self):
        """Rating value for this item, computed by the journal app."""
        # Imported lazily to avoid a circular import between catalog and journal.
        from journal.models import Rating

        return Rating.get_rating_for_item(self)
|
|
|
|
|
|
|
|
@property
|
|
|
|
    def rating_count(self):
        """Number of ratings for this item, computed by the journal app."""
        # Imported lazily to avoid a circular import between catalog and journal.
        from journal.models import Rating

        return Rating.get_rating_count_for_item(self)
|
|
|
|
|
|
|
|
@property
|
|
|
|
    def rating_dist(self):
        """Rating distribution for this item, computed by the journal app."""
        # Imported lazily to avoid a circular import between catalog and journal.
        from journal.models import Rating

        return Rating.get_rating_distribution_for_item(self)
|
|
|
|
|
|
|
|
@property
|
|
|
|
    def tags(self):
        """Indexable tags attached to this item, provided by the journal app."""
        # Imported lazily to avoid a circular import between catalog and journal.
        from journal.models import TagManager

        return TagManager.indexable_tags_for_item(self)
|
|
|
|
|
|
|
|
    def journal_exists(self):
        """Whether any journal entry (mark/review/etc.) exists for this item."""
        # Imported lazily to avoid a circular import between catalog and journal.
        from journal.models import journal_exists_for_item

        return journal_exists_for_item(self)
|
|
|
|
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
class ItemLookupId(models.Model):
    """Maps an external (id_type, id_value) pair / source url to a catalog Item."""

    # Nullable with SET_NULL so lookup rows survive deletion of their item.
    item = models.ForeignKey(
        Item, null=True, on_delete=models.SET_NULL, related_name="lookup_ids"
    )
    id_type = models.CharField(
        _("source site"), blank=True, choices=IdType.choices, max_length=50
    )
    id_value = models.CharField(_("ID on source site"), blank=True, max_length=1000)
    raw_url = models.CharField(
        _("source url"), blank=True, max_length=1000, unique=True
    )

    class Meta:
        # At most one row per (id_type, id_value) pair.
        unique_together = [["id_type", "id_value"]]
|
2022-12-07 19:09:05 -05:00
|
|
|
|
|
|
|
|
2022-12-08 16:08:59 +00:00
|
|
|
class ExternalResource(models.Model):
    """A scraped record from an external site, optionally linked to an Item.

    Stores the source identifiers, raw metadata, lookup ids and cover
    fetched from one external page; an Item may be backed by several of
    these resources.
    """

    if TYPE_CHECKING:
        required_resources: list[dict[str, str]]
        related_resources: list[dict[str, str]]
        prematched_resources: list[dict[str, str]]
    # Nullable with SET_NULL so resources survive deletion of their item.
    item = models.ForeignKey(
        Item, null=True, on_delete=models.SET_NULL, related_name="external_resources"
    )
    id_type = models.CharField(
        _("IdType of the source site"),
        blank=False,
        choices=IdType.choices,
        max_length=50,
    )
    id_value = models.CharField(
        _("Primary Id on the source site"), blank=False, max_length=1000
    )
    url = models.CharField(
        _("url to the resource"), blank=False, max_length=1000, unique=True
    )
    cover = models.ImageField(
        upload_to=resource_cover_path, default=DEFAULT_ITEM_COVER, blank=True
    )
    # Extra (id_type, id_value) pairs found on the source page.
    other_lookup_ids = models.JSONField(default=dict)
    # Raw scraped metadata; keys mirror Item fields (see Item.METADATA_COPY_LIST).
    metadata = models.JSONField(default=dict)
    scraped_time = models.DateTimeField(null=True)
    created_time = models.DateTimeField(auto_now_add=True)
    edited_time = models.DateTimeField(auto_now=True)

    required_resources = jsondata.ArrayField(
        models.CharField(), null=False, blank=False, default=list
    )  # type: ignore
    """ links required to generate Item from this resource, e.g. parent TVShow of TVSeason """

    related_resources = jsondata.ArrayField(
        models.CharField(), null=False, blank=False, default=list
    )  # type: ignore
    """links related to this resource which may be fetched later, e.g. sub TVSeason of TVShow"""

    prematched_resources = jsondata.ArrayField(
        models.CharField(), null=False, blank=False, default=list
    )  # type: ignore
    """links to help match an existing Item from this resource"""

    class Meta:
        # At most one resource per (id_type, id_value) pair.
        unique_together = [["id_type", "id_value"]]

    def __str__(self):
        return f"{self.pk}:{self.id_type}:{self.id_value or ''} ({self.url})"

    def unlink_from_item(self):
        """Detach this resource from its item, logging the unlink on the item."""
        if not self.item:
            return
        self.item.log_action({"!unlink": [str(self), None]})
        self.item = None
        self.save()

    def get_site(self):
        """Return the site class handling this resource's id_type, or None."""
        # Imported lazily: .sites imports models from this module.
        from .sites import SiteManager

        return SiteManager.get_site_cls_by_id_type(self.id_type)

    @property
    def site_name(self) -> SiteName:
        """SiteName enum for this resource's source site; Unknown on failure."""
        try:
            site = self.get_site()
            return site.SITE_NAME if site else SiteName.Unknown
        except Exception:
            logger.warning(f"Unknown site for {self}")
            return SiteName.Unknown

    @property
    def site_label(self) -> str:
        """Human-readable label of the source site.

        For fediverse resources, resolves the instance's node name from the
        domain embedded in id_value (falling back to the domain itself).
        """
        if self.id_type == IdType.Fediverse:
            from takahe.utils import Takahe

            # id_value is expected to be a url like scheme://domain/...;
            # extract the domain part.
            domain = self.id_value.split("://")[1].split("/")[0]
            n = Takahe.get_node_name_for_domain(domain)
            return n or domain
        return self.site_name.label

    def update_content(self, resource_content: "ResourceContent"):
        """Store freshly scraped content (metadata, lookup ids, cover) and save."""
        self.other_lookup_ids = resource_content.lookup_ids
        self.metadata = resource_content.metadata
        # (sic: "extention" matches the ResourceContent attribute name)
        if resource_content.cover_image and resource_content.cover_image_extention:
            self.cover = SimpleUploadedFile(
                "temp." + resource_content.cover_image_extention,
                resource_content.cover_image,
            )
        else:
            self.cover = resource_content.metadata.get("cover_image_path")
        self.scraped_time = timezone.now()
        self.save()

    @property
    def ready(self):
        """True once the resource has been scraped (metadata and timestamp set)."""
        return bool(self.metadata and self.scraped_time)

    def get_all_lookup_ids(self) -> dict[str, str]:
        """All lookup ids (primary plus others) with empty values dropped."""
        d = self.other_lookup_ids.copy()
        d[self.id_type] = self.id_value
        d = {k: v for k, v in d.items() if bool(v)}
        return d

    def get_lookup_ids(
        self, default_model: type[Item] | None = None
    ) -> list[tuple[str, str]]:
        """Lookup ids as (type, value) pairs, best id (per item model) first."""
        lookup_ids = self.get_all_lookup_ids()
        model = self.get_item_model(default_model)
        bt, bv = model.get_best_lookup_id(lookup_ids)
        ids = [(t, v) for t, v in lookup_ids.items() if t and v and t != bt]
        if bt and bv:
            ids = [(bt, bv)] + ids
        return ids

    def get_item_model(self, default_model: type[Item] | None) -> type[Item]:
        """Item subclass this resource should produce.

        Uses metadata["preferred_model"] (resolved via ContentType) when
        present, otherwise *default_model*. Raises ValueError when the
        preferred model cannot be resolved or no default is given.
        """
        model = self.metadata.get("preferred_model")
        if model:
            m = ContentType.objects.filter(
                app_label="catalog", model=model.lower()
            ).first()
            if m:
                mc: type[Item] | None = m.model_class()  # type: ignore
                if not mc:
                    raise ValueError(
                        f"preferred model {model} does not exist in ContentType"
                    )
                return mc
            else:
                raise ValueError(f"preferred model {model} does not exist")
        if not default_model:
            raise ValueError("no default preferred model specified")
        return default_model
|
2023-06-19 12:57:45 -04:00
|
|
|
|
|
|
|
|
|
|
|
# Module-level memoization cache for item_content_types(); None until first call.
_CONTENT_TYPE_LIST = None
|
|
|
|
|
|
|
|
|
2024-05-27 15:44:12 -04:00
|
|
|
def item_content_types() -> dict[type[Item], int]:
    """Map each Item subclass to its ContentType id, memoized at module level."""
    global _CONTENT_TYPE_LIST
    if _CONTENT_TYPE_LIST is None:
        _CONTENT_TYPE_LIST = {
            sub: ContentType.objects.get(
                app_label="catalog", model=sub.__name__.lower()
            ).id
            for sub in Item.__subclasses__()
        }
    return _CONTENT_TYPE_LIST
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level memoization cache for item_categories(); None until first call.
_CATEGORY_LIST = None
|
|
|
|
|
|
|
|
|
2024-05-26 22:57:49 -04:00
|
|
|
def item_categories() -> dict[ItemCategory, list[type[Item]]]:
    """Group Item subclasses by their ``category`` attribute, memoized at module level.

    Subclasses without a category attribute are grouped under None, matching
    the original behavior.
    """
    global _CATEGORY_LIST
    if _CATEGORY_LIST is None:
        _CATEGORY_LIST = {}
        for sub in Item.__subclasses__():
            category = getattr(sub, "category", None)
            _CATEGORY_LIST.setdefault(category, []).append(sub)
    return _CATEGORY_LIST
|