diff --git a/.github/workflows/django.yml b/.github/workflows/django.yml index 1e7a948e..fe7611dc 100644 --- a/.github/workflows/django.yml +++ b/.github/workflows/django.yml @@ -1,4 +1,4 @@ -name: all tests +name: tests on: push: @@ -6,8 +6,7 @@ on: branches: [ "main" ] jobs: - build: - + django: runs-on: ubuntu-latest services: redis: @@ -15,20 +14,25 @@ jobs: ports: - 6379:6379 db: - image: postgres:12.13-alpine + image: postgres env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: admin123 - POSTGRES_DB: test + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + POSTGRES_DB: test_neodb ports: - 5432:5432 - options: --mount type=tmpfs,destination=/var/lib/postgresql/data --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - + db2: + image: postgres + env: + POSTGRES_USER: testuser + POSTGRES_PASSWORD: testpass + POSTGRES_DB: test_neodb_takahe + ports: + - 15432:5432 strategy: max-parallel: 4 matrix: - python-version: ['3.10', '3.11'] - + python-version: ['3.11'] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} diff --git a/boofilsic/settings.py b/boofilsic/settings.py index 4a2d965d..6e23f630 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -1,6 +1,11 @@ import os +# import django_stubs_ext + +# django_stubs_ext.monkeypatch() + NEODB_VERSION = "0.8" +DATABASE_ROUTERS = ["takahe.db_routes.TakaheRouter"] PROJECT_ROOT = os.path.abspath(os.path.dirname(__name__)) @@ -65,6 +70,7 @@ INSTALLED_APPS += [ "journal.apps.JournalConfig", "social.apps.SocialConfig", "developer.apps.DeveloperConfig", + "takahe.apps.TakaheConfig", "legacy.apps.LegacyConfig", ] @@ -110,6 +116,8 @@ TEMPLATES = [ WSGI_APPLICATION = "boofilsic.wsgi.application" +SESSION_COOKIE_NAME = "neodbsid" + CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache", @@ -131,7 +139,25 @@ DATABASES = { "client_encoding": "UTF8", # 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_DEFAULT, }, - } + "TEST": { + "DEPENDENCIES": ["takahe"], + }, + }, + "takahe": { + "ENGINE": "django.db.backends.postgresql", + "NAME": os.environ.get("TAKAHE_DB_NAME", "test_neodb_takahe"), + "USER": os.environ.get("TAKAHE_DB_USER", "testuser"), + "PASSWORD": os.environ.get("TAKAHE_DB_PASSWORD", "testpass"), + "HOST": os.environ.get("TAKAHE_DB_HOST", "127.0.0.1"), + "PORT": os.environ.get("TAKAHE_DB_PORT", 15432), + "OPTIONS": { + "client_encoding": "UTF8", + # 'isolation_level': psycopg2.extensions.ISOLATION_LEVEL_DEFAULT, + }, + "TEST": { + "DEPENDENCIES": [], + }, + }, } # Customized auth backend, glue OAuth2 and Django User model together @@ -189,6 +215,8 @@ AUTH_USER_MODEL = "users.User" SILENCED_SYSTEM_CHECKS = [ "admin.E404", # Required by django-user-messages + "models.W035", # Required by takahe: identical table name in different database + "fields.W344", # Required by takahe: identical table name in different database ] MEDIA_URL = "/media/" @@ -358,6 +386,7 @@ SEARCH_BACKEND = None if os.environ.get("NEODB_TYPESENSE_ENABLE", ""): SEARCH_BACKEND = "TYPESENSE" +TYPESENSE_INDEX_NAME = "catalog" TYPESENSE_CONNECTION = { "api_key": os.environ.get("NEODB_TYPESENSE_KEY", "insecure"), "nodes": [ @@ -371,6 +400,7 @@ TYPESENSE_CONNECTION = { } +DOWNLOADER_CACHE_TIMEOUT = 300 DOWNLOADER_RETRIES = 3 DOWNLOADER_SAVEDIR = None DISABLE_MODEL_SIGNAL = False # disable index and social feeds during importing/etc diff --git a/catalog/book/models.py b/catalog/book/models.py index 5be14f2d..d451b9bf 100644 --- a/catalog/book/models.py +++ 
b/catalog/book/models.py @@ -166,7 +166,7 @@ class Edition(Item): """add Work from resource.metadata['work'] if not yet""" links = resource.required_resources + resource.related_resources for w in links: - if w["model"] == "Work": + if w.get("model") == "Work": work = Work.objects.filter( primary_lookup_id_type=w["id_type"], primary_lookup_id_value=w["id_value"], diff --git a/catalog/common/__init__.py b/catalog/common/__init__.py index 51f055af..fe04dde8 100644 --- a/catalog/common/__init__.py +++ b/catalog/common/__init__.py @@ -24,6 +24,7 @@ __all__ = ( "use_local_response", "RetryDownloader", "BasicDownloader", + "CachedDownloader", "ProxiedDownloader", "BasicImageDownloader", "ProxiedImageDownloader", diff --git a/catalog/common/downloaders.py b/catalog/common/downloaders.py index a579a1e8..31d0507d 100644 --- a/catalog/common/downloaders.py +++ b/catalog/common/downloaders.py @@ -10,6 +10,7 @@ from urllib.parse import quote import filetype import requests from django.conf import settings +from django.core.cache import cache from lxml import html from PIL import Image from requests import Response @@ -153,7 +154,6 @@ class BasicDownloader: def _download(self, url) -> Tuple[DownloaderResponse | MockResponse, int]: try: if not _mock_mode: - # TODO cache = get/set from redis resp = requests.get( url, headers=self.headers, timeout=self.get_timeout() ) @@ -256,6 +256,19 @@ class RetryDownloader(BasicDownloader): raise DownloadError(self, "max out of retries") +class CachedDownloader(BasicDownloader): + def download(self): + cache_key = "dl:" + self.url + resp = cache.get(cache_key) + if resp: + self.response_type = RESPONSE_OK + else: + resp = super().download() + if self.response_type == RESPONSE_OK: + cache.set(cache_key, resp, timeout=settings.DOWNLOADER_CACHE_TIMEOUT) + return resp + + class ImageDownloaderMixin: def __init__(self, url, referer=None): self.extention = None diff --git a/catalog/common/models.py b/catalog/common/models.py index e0818efa..cea95d55 100644 --- a/catalog/common/models.py +++ b/catalog/common/models.py @@ -13,7 +13,7 @@ from django.db import connection, models from django.utils import timezone from django.utils.baseconv import base62 from django.utils.translation import gettext_lazy as _ -from ninja import Schema +from ninja import Field, Schema from polymorphic.models import PolymorphicModel from catalog.common import jsondata @@ -46,6 +46,7 @@ class SiteName(models.TextChoices): RSS = "rss", _("RSS") Discogs = "discogs", _("Discogs") AppleMusic = "apple_music", _("苹果音乐") + Fediverse = "fedi", _("联邦实例") class IdType(models.TextChoices): @@ -90,6 +91,7 @@ class IdType(models.TextChoices): Bangumi = "bangumi", _("Bangumi") ApplePodcast = "apple_podcast", _("苹果播客") AppleMusic = "apple_music", _("苹果音乐") + Fediverse = "fedi", _("联邦实例") IdealIdTypes = [ @@ -225,6 +227,8 @@ class ExternalResourceSchema(Schema): class BaseSchema(Schema): + id: str = Field(alias="absolute_url") + type: str = Field(alias="ap_object_type") uuid: str url: str api_url: str @@ -250,7 +254,7 @@ class Item(SoftDeleteMixin, PolymorphicModel): url_path = "item" # subclass must specify this type = None # subclass must specify this parent_class = None # subclass may specify this to allow create child item - category: ItemCategory | None = None # subclass must specify this + category: ItemCategory # subclass must specify this demonstrative: "_StrOrPromise | None" = None # subclass must specify this uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True) title = 
models.CharField(_("标题"), max_length=1000, default="") @@ -345,6 +349,25 @@ class Item(SoftDeleteMixin, PolymorphicModel): def parent_uuid(self): return self.parent_item.uuid if self.parent_item else None + @classmethod + def get_ap_object_type(cls): + return cls.__name__ + + @property + def ap_object_type(self): + return self.get_ap_object_type() + + @property + def ap_object_ref(self): + o = { + "type": self.get_ap_object_type(), + "url": self.absolute_url, + "name": self.title, + } + if self.has_cover(): + o["image"] = self.cover_image_url + return o + def log_action(self, changes): LogEntry.objects.log_create( self, action=LogEntry.Action.UPDATE, changes=changes @@ -561,10 +584,13 @@ class ExternalResource(models.Model): edited_time = models.DateTimeField(auto_now=True) required_resources = jsondata.ArrayField( models.CharField(), null=False, blank=False, default=list - ) + ) # links required to generate Item from this resource, e.g. parent TVShow of TVSeason related_resources = jsondata.ArrayField( models.CharField(), null=False, blank=False, default=list - ) + ) # links related to this resource which may be fetched later, e.g. sub TVSeason of TVShow + prematched_resources = jsondata.ArrayField( + models.CharField(), null=False, blank=False, default=list + ) # links to help match an existing Item from this resource class Meta: unique_together = [["id_type", "id_value"]] @@ -585,13 +611,24 @@ class ExternalResource(models.Model): return SiteManager.get_site_cls_by_id_type(self.id_type) @property - def site_name(self): + def site_name(self) -> SiteName: try: - return self.get_site().SITE_NAME + site = self.get_site() + return site.SITE_NAME if site else SiteName.Unknown except: _logger.warning(f"Unknown site for {self}") return SiteName.Unknown + @property + def site_label(self): + if self.id_type == IdType.Fediverse: + from takahe.utils import Takahe + + domain = self.id_value.split("://")[1].split("/")[0] + n = Takahe.get_node_name_for_domain(domain) + return n or domain + return self.site_name.label + def update_content(self, resource_content): self.other_lookup_ids = resource_content.lookup_ids self.metadata = resource_content.metadata @@ -615,7 +652,16 @@ class ExternalResource(models.Model): d = {k: v for k, v in d.items() if bool(v)} return d - def get_preferred_model(self) -> type[Item] | None: + def get_lookup_ids(self, default_model): + lookup_ids = self.get_all_lookup_ids() + model = self.get_item_model(default_model) + bt, bv = model.get_best_lookup_id(lookup_ids) + ids = [(t, v) for t, v in lookup_ids.items() if t and v and t != bt] + if bt and bv: + ids = [(bt, bv)] + ids + return ids + + def get_item_model(self, default_model: type[Item]) -> type[Item]: model = self.metadata.get("preferred_model") if model: m = ContentType.objects.filter( @@ -625,7 +671,7 @@ class ExternalResource(models.Model): return cast(Item, m).model_class() else: raise ValueError(f"preferred model {model} does not exist") - return None + return default_model _CONTENT_TYPE_LIST = None diff --git a/catalog/common/sites.py b/catalog/common/sites.py index d6b6f11e..1777864b 100644 --- a/catalog/common/sites.py +++ b/catalog/common/sites.py @@ -39,7 +39,7 @@ class AbstractSite: Abstract class to represent a site """ - SITE_NAME: SiteName | None = None + SITE_NAME: SiteName ID_TYPE: IdType | None = None WIKI_PROPERTY_ID: str | None = "P0undefined0" DEFAULT_MODEL: Type[Item] | None = None @@ -104,18 +104,29 @@ class AbstractSite: return content.xpath(query)[0].strip() @classmethod - def 
get_model_for_resource(cls, resource): - model = resource.get_preferred_model() - return model or cls.DEFAULT_MODEL + def match_existing_item_for_resource( + cls, resource: ExternalResource + ) -> Item | None: + """ + try match an existing Item for a given ExternalResource - @classmethod - def match_existing_item_for_resource(cls, resource) -> Item | None: - model = cls.get_model_for_resource(resource) + order of matching: + 1. look for other ExternalResource by url in prematched_resources, if found, return the item + 2. look for Item by primary_lookup_id_type and primary_lookup_id_value + + """ + for resource_link in resource.prematched_resources: # type: ignore + url = resource_link.get("url") + if url: + matched_resource = ExternalResource.objects.filter(url=url).first() + if matched_resource and matched_resource.item: + return matched_resource.item + model = resource.get_item_model(cls.DEFAULT_MODEL) if not model: return None - t, v = model.get_best_lookup_id(resource.get_all_lookup_ids()) - matched = None - if t is not None: + ids = resource.get_lookup_ids(cls.DEFAULT_MODEL) + for t, v in ids: + matched = None matched = model.objects.filter( primary_lookup_id_type=t, primary_lookup_id_value=v, @@ -143,14 +154,15 @@ class AbstractSite: matched.primary_lookup_id_type = t matched.primary_lookup_id_value = v matched.save() - return matched + if matched: + return matched @classmethod def match_or_create_item_for_resource(cls, resource): previous_item = resource.item resource.item = cls.match_existing_item_for_resource(resource) or previous_item if resource.item is None: - model = cls.get_model_for_resource(resource) + model = resource.get_item_model(cls.DEFAULT_MODEL) if not model: return None t, v = model.get_best_lookup_id(resource.get_all_lookup_ids()) @@ -243,7 +255,7 @@ class AbstractSite: ) else: _logger.error(f"unable to get site for {linked_url}") - if p.related_resources: + if p.related_resources or p.prematched_resources: django_rq.get_queue("crawl").enqueue(crawl_related_resources_task, p.pk) if p.item: p.item.update_linked_items_from_external_resource(p) @@ -318,7 +330,7 @@ def crawl_related_resources_task(resource_pk): if not resource: _logger.warn(f"crawl resource not found {resource_pk}") return - links = resource.related_resources + links = (resource.related_resources or []) + (resource.prematched_resources or []) # type: ignore for w in links: # type: ignore try: item = None diff --git a/catalog/common/utils.py b/catalog/common/utils.py index 0882af5d..08023c09 100644 --- a/catalog/common/utils.py +++ b/catalog/common/utils.py @@ -36,4 +36,4 @@ def piece_cover_path(item, filename): + "." 
+ filename.split(".")[-1] ) - return f"user/{item.owner_id}/{fn}" + return f"user/{item.owner_id or '_'}/{fn}" diff --git a/catalog/management/commands/cat.py b/catalog/management/commands/cat.py index 6fddbc65..f2c13cd1 100644 --- a/catalog/management/commands/cat.py +++ b/catalog/management/commands/cat.py @@ -31,10 +31,17 @@ class Command(BaseCommand): self.stdout.write(f"Fetching from {site}") if options["save"]: resource = site.get_resource_ready(ignore_existing_content=options["force"]) - pprint.pp(resource.metadata) - pprint.pp(site.get_item()) - pprint.pp(site.get_item().cover) - pprint.pp(site.get_item().metadata) + if resource: + pprint.pp(resource.metadata) + else: + self.stdout.write(self.style.ERROR(f"Unable to get resource for {url}")) + item = site.get_item() + if item: + pprint.pp(item.cover) + pprint.pp(item.metadata) + pprint.pp(item.absolute_url) + else: + self.stdout.write(self.style.ERROR(f"Unable to get item for {url}")) else: resource = site.scrape() pprint.pp(resource.metadata) diff --git a/catalog/management/commands/crawl.py b/catalog/management/commands/crawl.py index cacc368f..241fb0a8 100644 --- a/catalog/management/commands/crawl.py +++ b/catalog/management/commands/crawl.py @@ -29,16 +29,19 @@ class Command(BaseCommand): logger.info(f"Navigating {url}") content = ProxiedDownloader(url).download().html() urls = content.xpath("//a/@href") - for _u in urls: + for _u in urls: # type:ignore u = urljoin(url, _u) if u not in history and u not in queue: if len([p for p in item_patterns if re.match(p, u)]) > 0: site = SiteManager.get_site_by_url(u) - u = site.url - if u not in history: - history.append(u) - logger.info(f"Fetching {u}") - site.get_resource_ready() + if site: + u = site.url + if u not in history: + history.append(u) + logger.info(f"Fetching {u}") + site.get_resource_ready() + else: + logger.warning(f"unable to parse {u}") elif pattern and u.find(pattern) >= 0: queue.append(u) logger.info("Crawl finished.") diff --git a/catalog/management/commands/discover.py b/catalog/management/commands/discover.py index 32e316ba..d259189a 100644 --- a/catalog/management/commands/discover.py +++ b/catalog/management/commands/discover.py @@ -7,7 +7,7 @@ from django.utils import timezone from loguru import logger from catalog.models import * -from journal.models import Comment, ShelfMember, query_item_category +from journal.models import Comment, ShelfMember, q_item_in_category MAX_ITEMS_PER_PERIOD = 12 MIN_MARKS = 2 @@ -28,7 +28,7 @@ class Command(BaseCommand): def get_popular_marked_item_ids(self, category, days, exisiting_ids): item_ids = [ m["item_id"] - for m in ShelfMember.objects.filter(query_item_category(category)) + for m in ShelfMember.objects.filter(q_item_in_category(category)) .filter(created_time__gt=timezone.now() - timedelta(days=days)) .exclude(item_id__in=exisiting_ids) .values("item_id") @@ -40,7 +40,7 @@ class Command(BaseCommand): def get_popular_commented_podcast_ids(self, days, exisiting_ids): return list( - Comment.objects.filter(query_item_category(ItemCategory.Podcast)) + Comment.objects.filter(q_item_in_category(ItemCategory.Podcast)) .filter(created_time__gt=timezone.now() - timedelta(days=days)) .annotate(p=F("item__podcastepisode__program")) .filter(p__isnull=False) diff --git a/catalog/management/commands/index.py b/catalog/management/commands/index.py index d7916761..1a07cb59 100644 --- a/catalog/management/commands/index.py +++ b/catalog/management/commands/index.py @@ -1,6 +1,7 @@ import pprint from datetime import timedelta from time 
import sleep +from typing import TYPE_CHECKING from django.conf import settings from django.core.management.base import BaseCommand @@ -8,7 +9,8 @@ from django.core.paginator import Paginator from django.utils import timezone from tqdm import tqdm -from catalog.models import * +from catalog.models import Item +from catalog.search.typesense import Indexer BATCH_SIZE = 1000 diff --git a/catalog/migrations/0011_alter_externalresource_id_type_and_more.py b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py new file mode 100644 index 00000000..3659a6d5 --- /dev/null +++ b/catalog/migrations/0011_alter_externalresource_id_type_and_more.py @@ -0,0 +1,117 @@ +# Generated by Django 4.2.3 on 2023-08-06 02:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("catalog", "0011_remove_item_last_editor"), + ] + + operations = [ + migrations.AlterField( + model_name="externalresource", + name="id_type", + field=models.CharField( + choices=[ + ("wikidata", "维基数据"), + ("isbn10", "ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "统一书号"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN码"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB剧集"), + ("tmdb_tvseason", "TMDB剧集"), + ("tmdb_tvepisode", "TMDB剧集"), + ("tmdb_movie", "TMDB电影"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads著作"), + ("googlebooks", "谷歌图书"), + ("doubanbook", "豆瓣读书"), + ("doubanbook_work", "豆瓣读书著作"), + ("doubanmovie", "豆瓣电影"), + ("doubanmusic", "豆瓣音乐"), + ("doubangame", "豆瓣游戏"), + ("doubandrama", "豆瓣舞台剧"), + ("doubandrama_version", "豆瓣舞台剧版本"), + ("bookstw", "博客来图书"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify专辑"), + ("spotify_show", "Spotify播客"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("doubanbook_author", "豆瓣读书作者"), + ("doubanmovie_celebrity", "豆瓣电影影人"), + ("goodreads_author", "Goodreads作者"), + ("spotify_artist", "Spotify艺术家"), + ("tmdb_person", "TMDB影人"), + ("igdb", "IGDB游戏"), + ("steam", "Steam游戏"), + ("bangumi", "Bangumi"), + ("apple_podcast", "苹果播客"), + ("apple_music", "苹果音乐"), + ("fedi", "联邦实例"), + ], + max_length=50, + verbose_name="IdType of the source site", + ), + ), + migrations.AlterField( + model_name="itemlookupid", + name="id_type", + field=models.CharField( + blank=True, + choices=[ + ("wikidata", "维基数据"), + ("isbn10", "ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "统一书号"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN码"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB剧集"), + ("tmdb_tvseason", "TMDB剧集"), + ("tmdb_tvepisode", "TMDB剧集"), + ("tmdb_movie", "TMDB电影"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads著作"), + ("googlebooks", "谷歌图书"), + ("doubanbook", "豆瓣读书"), + ("doubanbook_work", "豆瓣读书著作"), + ("doubanmovie", "豆瓣电影"), + ("doubanmusic", "豆瓣音乐"), + ("doubangame", "豆瓣游戏"), + ("doubandrama", "豆瓣舞台剧"), + ("doubandrama_version", "豆瓣舞台剧版本"), + ("bookstw", "博客来图书"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify专辑"), + ("spotify_show", "Spotify播客"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("doubanbook_author", "豆瓣读书作者"), + ("doubanmovie_celebrity", "豆瓣电影影人"), + ("goodreads_author", "Goodreads作者"), + ("spotify_artist", "Spotify艺术家"), + ("tmdb_person", "TMDB影人"), + ("igdb", "IGDB游戏"), + ("steam", "Steam游戏"), + ("bangumi", "Bangumi"), + 
("apple_podcast", "苹果播客"), + ("apple_music", "苹果音乐"), + ("fedi", "联邦实例"), + ], + max_length=50, + verbose_name="源网站", + ), + ), + ] diff --git a/catalog/search/external.py b/catalog/search/external.py index 51da0806..338eb471 100644 --- a/catalog/search/external.py +++ b/catalog/search/external.py @@ -23,7 +23,8 @@ class SearchResultItem: "all": [ { "url": source_url, - "site_name": {"label": source_site, "value": source_site}, + "site_name": source_site, + "site_label": source_site, } ] } diff --git a/catalog/search/typesense.py b/catalog/search/typesense.py index 61b3e32c..57816a12 100644 --- a/catalog/search/typesense.py +++ b/catalog/search/typesense.py @@ -14,7 +14,6 @@ from typesense.exceptions import ObjectNotFound from catalog.models import Item -INDEX_NAME = "catalog" SEARCHABLE_ATTRIBUTES = [ "title", "orig_title", @@ -125,7 +124,7 @@ class Indexer: def instance(cls) -> Collection: if cls._instance is None: cls._instance = typesense.Client(settings.TYPESENSE_CONNECTION).collections[ - INDEX_NAME + settings.TYPESENSE_INDEX_NAME ] return cls._instance # type: ignore @@ -178,7 +177,7 @@ class Indexer: {"name": ".*", "optional": True, "locale": "zh", "type": "auto"}, ] return { - "name": INDEX_NAME, + "name": settings.TYPESENSE_INDEX_NAME, "fields": fields, # "default_sorting_field": "rating_count", } diff --git a/catalog/search/views.py b/catalog/search/views.py index d3ab3780..4af3c393 100644 --- a/catalog/search/views.py +++ b/catalog/search/views.py @@ -130,9 +130,14 @@ def search(request): ) if keywords.find("://") > 0: + host = keywords.split("://")[1].split("/")[0] + if host == settings.SITE_INFO["site_domain"]: + return redirect(keywords) site = SiteManager.get_site_by_url(keywords) if site: return fetch(request, keywords, False, site) + if request.GET.get("r"): + return redirect(keywords) items, num_pages, _, dup_items = query_index(keywords, categories, tag, p) return render( diff --git a/catalog/sites/__init__.py b/catalog/sites/__init__.py index 6fb4f868..7518ebfb 100644 --- a/catalog/sites/__init__.py +++ b/catalog/sites/__init__.py @@ -9,13 +9,14 @@ from .douban_drama import DoubanDrama from .douban_game import DoubanGame from .douban_movie import DoubanMovie from .douban_music import DoubanMusic +from .fedi import FediverseInstance from .goodreads import Goodreads from .google_books import GoogleBooks from .igdb import IGDB from .imdb import IMDB - -# from .apple_podcast import ApplePodcast from .rss import RSS from .spotify import Spotify from .steam import Steam from .tmdb import TMDB_Movie + +# from .apple_podcast import ApplePodcast diff --git a/catalog/sites/fedi.py b/catalog/sites/fedi.py new file mode 100644 index 00000000..958b33ec --- /dev/null +++ b/catalog/sites/fedi.py @@ -0,0 +1,101 @@ +import re + +from django.core.validators import URLValidator +from loguru import logger + +from catalog.common import * +from catalog.models import * + + +@SiteManager.register +class FediverseInstance(AbstractSite): + SITE_NAME = SiteName.Fediverse + ID_TYPE = IdType.Fediverse + URL_PATTERNS = [] + WIKI_PROPERTY_ID = "" + DEFAULT_MODEL = None + id_type_mapping = { + "isbn": IdType.ISBN, + "imdb": IdType.IMDB, + "barcode": IdType.GTIN, + } + supported_types = { + "Book": Edition, + "Movie": Movie, + "TVShow": TVShow, + "TVSeason": TVSeason, + "TVEpisode": TVEpisode, + "Album": Album, + "Game": Game, + "Podcast": Podcast, + "Performance": Performance, + "PerformanceProduction": PerformanceProduction, + } + request_header = {"User-Agent": "NeoDB/0.5", "Accept": 
"application/activity+json"} + + @classmethod + def id_to_url(cls, id_value): + return id_value + + @classmethod + def url_to_id(cls, url: str): + u = url.split("://", 1)[1].split("/", 1) + return "https://" + u[0].lower() + "/" + u[1] + + @classmethod + def validate_url_fallback(cls, url): + val = URLValidator() + try: + val(url) + if ( + url.split("://", 1)[1].split("/", 1)[0].lower() + == settings.SITE_INFO["site_domain"] + ): + # disallow local instance URLs + return False + return cls.get_json_from_url(url) is not None + except Exception: + return False + + @classmethod + def get_json_from_url(cls, url): + j = CachedDownloader(url, headers=cls.request_header).download().json() + if j.get("type") not in cls.supported_types.keys(): + raise ValueError("Not a supported format or type") + if j.get("id") != url: + logger.warning(f"ID mismatch: {j.get('id')} != {url}") + return j + + def scrape(self): + data = self.get_json_from_url(self.url) + img_url = data.get("cover_image_url") + raw_img, img_ext = ( + BasicImageDownloader.download_image(img_url, None, headers={}) + if img_url + else (None, None) + ) + ids = {} + data["preferred_model"] = data.get("type") + data["prematched_resources"] = [] + for ext in data.get("external_resources", []): + site = SiteManager.get_site_by_url(ext.get("url")) + if site and site.ID_TYPE != self.ID_TYPE: + ids[site.ID_TYPE] = site.id_value + data["prematched_resources"].append( + { + "model": data["preferred_model"], + "id_type": site.ID_TYPE, + "id_value": site.id_value, + "url": site.url, + } + ) + # for k, v in self.id_type_mapping.items(): + # if data.get(k): + # ids[v] = data.get(k) + d = ResourceContent( + metadata=data, + cover_image=raw_img, + cover_image_extention=img_ext, + lookup_ids=ids, + ) + return d diff --git a/catalog/sites/rss.py b/catalog/sites/rss.py index 7089a511..11dba3c3 100644 --- a/catalog/sites/rss.py +++ b/catalog/sites/rss.py @@ -33,7 +33,8 @@ class RSS(AbstractSite): def parse_feed_from_url(url): if not url: return None - feed = cache.get(url) + cache_key = f"rss:{url}" + feed = cache.get(cache_key) if feed: return feed if get_mock_mode(): @@ -50,7 +51,7 @@ class RSS(AbstractSite): feed, open(settings.DOWNLOADER_SAVEDIR + "/" + get_mock_file(url), "wb"), ) - cache.set(url, feed, timeout=300) + cache.set(cache_key, feed, timeout=settings.DOWNLOADER_CACHE_TIMEOUT) return feed @classmethod diff --git a/catalog/templates/_item_card.html b/catalog/templates/_item_card.html index ade3588d..2e24c059 100644 --- a/catalog/templates/_item_card.html +++ b/catalog/templates/_item_card.html @@ -7,7 +7,7 @@ {% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_card_metadata_base.html b/catalog/templates/_item_card_metadata_base.html index 3cad9768..4915fdfd 100644 --- a/catalog/templates/_item_card_metadata_base.html +++ b/catalog/templates/_item_card_metadata_base.html @@ -15,7 +15,7 @@ {% if not hide_category %}[{{ item.category.label }}]{% endif %} {% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %} diff --git a/catalog/templates/_item_comments.html b/catalog/templates/_item_comments.html index 61fd2b3d..2a620b58 100644 --- a/catalog/templates/_item_comments.html +++ b/catalog/templates/_item_comments.html @@ -53,7 +53,7 @@ + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} 
class="disabled" {% endif %}> diff --git a/catalog/templates/_item_comments_by_episode.html b/catalog/templates/_item_comments_by_episode.html index 47bfd7c8..0487894e 100644 --- a/catalog/templates/_item_comments_by_episode.html +++ b/catalog/templates/_item_comments_by_episode.html @@ -58,7 +58,7 @@ + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_reviews.html b/catalog/templates/_item_reviews.html index c18590ce..5908c93d 100644 --- a/catalog/templates/_item_reviews.html +++ b/catalog/templates/_item_reviews.html @@ -18,7 +18,7 @@ + {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> diff --git a/catalog/templates/_item_user_pieces.html b/catalog/templates/_item_user_pieces.html index 04994860..3e90664a 100644 --- a/catalog/templates/_item_user_pieces.html +++ b/catalog/templates/_item_user_pieces.html @@ -66,7 +66,7 @@ + {% if mark.comment.shared_link %} href="{{ mark.comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ mark.comment.created_time|date }} {% endcomment %} @@ -89,7 +89,7 @@ + {% if comment.shared_link %} href="{{ comment.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% comment %} {{ comment.created_time|date }} {% endcomment %} @@ -127,7 +127,7 @@ + {% if mark.review.shared_link %} href="{{ mark.review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.review.created_time|date }} diff --git a/catalog/templates/_sidebar_edit.html b/catalog/templates/_sidebar_edit.html index 1bd7a3f2..1a84f0bf 100644 --- a/catalog/templates/_sidebar_edit.html +++ b/catalog/templates/_sidebar_edit.html @@ -52,7 +52,7 @@ {% for res in item.external_resources.all %}
- {% trans '源网站' %}: {{ res.site_name.label }} + {% trans '源网站' %}: {{ res.site_label }}
{% for res in item.external_resources.all %} - {{ res.site_name.label }} + {{ res.site_label }} {% endfor %}
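The template hunks above switch from `res.site_name.label` to the new `res.site_label` property added in catalog/common/models.py, which for Fediverse resources derives a label from the remote instance's domain. A minimal illustration of the behaviour these templates rely on, assuming an `ExternalResource` whose `id_value` is a full ActivityPub URL (the objects below are constructed ad hoc and are not part of the patch):

```python
from catalog.common.models import ExternalResource, IdType

# A resource imported from another federated instance: the label comes from
# Takahe.get_node_name_for_domain(), falling back to the bare domain.
fedi_res = ExternalResource(
    id_type=IdType.Fediverse,
    id_value="https://example.social/movie/7X8YzvoqnpNd5Pr1CLYLLM",
)
fedi_res.site_label  # e.g. "Example Social", or "example.social" if no node name is known

# A resource from a registered site keeps the previous behaviour: the SiteName label.
imdb_res = ExternalResource(id_type=IdType.IMDB, id_value="tt0111161")
imdb_res.site_label  # same as imdb_res.site_name.label
```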
diff --git a/catalog/templates/item_mark_list.html b/catalog/templates/item_mark_list.html index e4da00f3..ed0c3505 100644 --- a/catalog/templates/item_mark_list.html +++ b/catalog/templates/item_mark_list.html @@ -43,7 +43,7 @@ + {% if mark.shared_link %} href="{{ mark.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.created_time|date }} diff --git a/catalog/templates/item_review_list.html b/catalog/templates/item_review_list.html index aa4d3c2b..90e3339e 100644 --- a/catalog/templates/item_review_list.html +++ b/catalog/templates/item_review_list.html @@ -31,7 +31,7 @@ + {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% liked_piece review as liked %} diff --git a/catalog/urls.py b/catalog/urls.py index 94125c23..3e41ccf6 100644 --- a/catalog/urls.py +++ b/catalog/urls.py @@ -129,8 +129,9 @@ urlpatterns = [ mark_list, name="mark_list", ), - path("search/", search, name="search"), - path("search/external/", external_search, name="external_search"), + path("search", search, name="search"), + path("search/", search, name="search_legacy"), + path("search/external", external_search, name="external_search"), path("fetch_refresh/", fetch_refresh, name="fetch_refresh"), path("refetch", refetch, name="refetch"), path("unlink", unlink, name="unlink"), diff --git a/catalog/views.py b/catalog/views.py index 11ac0b3d..a9635c28 100644 --- a/catalog/views.py +++ b/catalog/views.py @@ -19,9 +19,9 @@ from journal.models import ( ShelfMember, ShelfType, ShelfTypeNames, - query_following, - query_item_category, - query_visible, + q_item_in_category, + q_piece_in_home_feed_of_user, + q_piece_visible_to_user, ) from .forms import * @@ -74,6 +74,8 @@ def retrieve(request, item_path, item_uuid): item_url = f"/{item_path}/{item_uuid}" if item.url != item_url: return redirect(item.url) + if request.headers.get("Accept", "").endswith("json"): + return redirect(item.api_url) skipcheck = request.GET.get("skipcheck", False) and request.user.is_authenticated if not skipcheck and item.merged_to_item: return redirect(item.merged_to_item.url) @@ -91,16 +93,16 @@ def retrieve(request, item_path, item_uuid): child_item_comments = [] shelf_types = [(n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category] if request.user.is_authenticated: - visible = query_visible(request.user) - mark = Mark(request.user, item) + visible = q_piece_visible_to_user(request.user) + mark = Mark(request.user.identity, item) child_item_comments = Comment.objects.filter( - owner=request.user, item__in=item.child_items.all() + owner=request.user.identity, item__in=item.child_items.all() ) review = mark.review - my_collections = item.collections.all().filter(owner=request.user) + my_collections = item.collections.all().filter(owner=request.user.identity) collection_list = ( item.collections.all() - .exclude(owner=request.user) + .exclude(owner=request.user.identity) .filter(visible) .annotate(like_counts=Count("likes")) .order_by("-like_counts") @@ -145,9 +147,9 @@ def mark_list(request, item_path, item_uuid, following_only=False): raise Http404() queryset = ShelfMember.objects.filter(item=item).order_by("-created_time") if following_only: - queryset = queryset.filter(query_following(request.user)) + queryset = queryset.filter(q_piece_in_home_feed_of_user(request.user)) else: - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, 
NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) marks = paginator.get_page(page_number) @@ -169,7 +171,7 @@ def review_list(request, item_path, item_uuid): if not item: raise Http404() queryset = Review.objects.filter(item=item).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) paginator = Paginator(queryset, NUM_REVIEWS_ON_LIST_PAGE) page_number = request.GET.get("page", default=1) reviews = paginator.get_page(page_number) @@ -192,7 +194,7 @@ def comments(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -218,7 +220,7 @@ def comments_by_episode(request, item_path, item_uuid): else: ids = item.child_item_ids queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) @@ -240,7 +242,7 @@ def reviews(request, item_path, item_uuid): raise Http404() ids = item.child_item_ids + [item.id] queryset = Review.objects.filter(item_id__in=ids).order_by("-created_time") - queryset = queryset.filter(query_visible(request.user)) + queryset = queryset.filter(q_piece_visible_to_user(request.user)) before_time = request.GET.get("last") if before_time: queryset = queryset.filter(created_time__lte=before_time) diff --git a/common/static/scss/_sitelabel.scss b/common/static/scss/_sitelabel.scss index de8c0b2e..021d1493 100644 --- a/common/static/scss/_sitelabel.scss +++ b/common/static/scss/_sitelabel.scss @@ -71,6 +71,12 @@ font-weight: lighter; } + .fedi { + background: var(--pico-primary); + color: white; + font-weight: lighter; + } + .tmdb { background: linear-gradient(90deg, #91CCA3, #1FB4E2); color: white; diff --git a/common/templates/_sidebar.html b/common/templates/_sidebar.html index 54fb321e..1783ceaa 100644 --- a/common/templates/_sidebar.html +++ b/common/templates/_sidebar.html @@ -51,7 +51,7 @@ target="_blank" rel="noopener" onclick="window.open(this.href); return false;"> - @{{ user.handler }} + {{ user.handler }} diff --git a/common/templatetags/mastodon.py b/common/templatetags/mastodon.py index 8d31bf40..c06cb1b0 100644 --- a/common/templatetags/mastodon.py +++ b/common/templatetags/mastodon.py @@ -3,6 +3,8 @@ from django.conf import settings from django.template.defaultfilters import stringfilter from django.utils.translation import gettext_lazy as _ +from users.models import APIdentity, User + register = template.Library() @@ -13,9 +15,10 @@ def mastodon(domain): @register.simple_tag(takes_context=True) -def current_user_relationship(context, user): +def current_user_relationship(context, user: "User"): current_user = context["request"].user r = { + "requesting": False, "following": False, "unfollowable": False, "muting": False, @@ -24,21 +27,23 @@ def current_user_relationship(context, user): "status": "", } if current_user and current_user.is_authenticated and current_user != user: - if current_user.is_blocking(user) or user.is_blocking(current_user): + 
current_identity = context["request"].user.identity + target_identity = user.identity + if current_identity.is_blocking( + target_identity + ) or current_identity.is_blocked_by(target_identity): r["rejecting"] = True else: - r["muting"] = current_user.is_muting(user) - if user in current_user.local_muting.all(): - r["unmutable"] = current_user - if current_user.is_following(user): - r["following"] = True - if user in current_user.local_following.all(): - r["unfollowable"] = True - if current_user.is_followed_by(user): + r["muting"] = current_identity.is_muting(target_identity) + r["unmutable"] = r["muting"] + r["following"] = current_identity.is_following(target_identity) + r["unfollowable"] = r["following"] + if r["following"]: + if current_identity.is_followed_by(target_identity): r["status"] = _("互相关注") else: r["status"] = _("已关注") else: - if current_user.is_followed_by(user): + if current_identity.is_followed_by(target_identity): r["status"] = _("被ta关注") return r diff --git a/common/urls.py b/common/urls.py index 679dc795..e8bc0a15 100644 --- a/common/urls.py +++ b/common/urls.py @@ -1,4 +1,4 @@ -from django.urls import path +from django.urls import path, re_path from .views import * @@ -7,4 +7,5 @@ urlpatterns = [ path("", home), path("home/", home, name="home"), path("me/", me, name="me"), + re_path("^~neodb~(?P.+)", ap_redirect), ] diff --git a/common/utils.py b/common/utils.py index fe43222b..80408737 100644 --- a/common/utils.py +++ b/common/utils.py @@ -1,9 +1,22 @@ import uuid +from typing import TYPE_CHECKING -from django.http import Http404 +from django.http import Http404, HttpRequest from django.utils import timezone from django.utils.baseconv import base62 +if TYPE_CHECKING: + from users.models import APIdentity, User + + +class AuthedHttpRequest(HttpRequest): + """ + A subclass of HttpRequest for type-checking only + """ + + user: "User" + target_identity: "APIdentity" + class PageLinksGenerator: # TODO inherit django paginator diff --git a/common/views.py b/common/views.py index 3b26f24b..ce10d644 100644 --- a/common/views.py +++ b/common/views.py @@ -6,7 +6,7 @@ from django.urls import reverse @login_required def me(request): - return redirect(request.user.url) + return redirect(request.user.identity.url) def home(request): @@ -22,6 +22,10 @@ def home(request): return redirect(reverse("catalog:discover")) +def ap_redirect(request, uri): + return redirect(uri) + + def error_400(request, exception=None): return render( request, diff --git a/doc/install.md b/doc/install.md index 87d8bc59..336b6677 100644 --- a/doc/install.md +++ b/doc/install.md @@ -33,8 +33,8 @@ Install PostgreSQL, Redis and Python (3.10 or above) if not yet ### 1.1 Database Setup database ``` -CREATE DATABASE neodb ENCODING 'UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE template0; CREATE ROLE neodb with LOGIN ENCRYPTED PASSWORD 'abadface'; +CREATE DATABASE neodb ENCODING 'UTF8' LC_COLLATE='en_US.UTF-8' LC_CTYPE='en_US.UTF-8' TEMPLATE template0; GRANT ALL ON DATABASE neodb TO neodb; ``` diff --git a/journal/api.py b/journal/api.py index 6435d345..e23cc870 100644 --- a/journal/api.py +++ b/journal/api.py @@ -10,8 +10,9 @@ from oauth2_provider.decorators import protected_resource from catalog.common.models import * from common.api import * +from mastodon.api import share_review -from .models import * +from .models import Mark, Review, ShelfType, TagManager, q_item_in_category class MarkSchema(Schema): @@ -84,9 +85,9 @@ def mark_item(request, item_uuid: str, mark: MarkInSchema): item = 
Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - m = Mark(request.user, item) + m = Mark(request.user.identity, item) try: - TagManager.tag_item_by_user(item, request.user, mark.tags, mark.visibility) + TagManager.tag_item(item, request.user, mark.tags, mark.visibility) m.update( mark.shelf_type, mark.comment_text, @@ -114,7 +115,7 @@ def delete_mark(request, item_uuid: str): m = Mark(request.user, item) m.delete() # skip tag deletion for now to be consistent with web behavior - # TagManager.tag_item_by_user(item, request.user, [], 0) + # TagManager.tag_item(item, request.user, [], 0) return 200, {"message": "OK"} @@ -144,9 +145,9 @@ def list_reviews(request, category: AvailableItemCategory | None = None): `category` is optional, reviews for all categories will be returned if not specified. """ - queryset = Review.objects.filter(owner=request.user) + queryset = Review.objects.filter(owner=request.user.identity) if category: - queryset = queryset.filter(query_item_category(category)) + queryset = queryset.filter(q_item_in_category(category)) return queryset.prefetch_related("item") @@ -161,7 +162,7 @@ def get_review_by_item(request, item_uuid: str): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - review = Review.objects.filter(owner=request.user, item=item).first() + review = Review.objects.filter(owner=request.user.identity, item=item).first() if not review: return 404, {"message": "Review not found"} return review @@ -182,15 +183,17 @@ def review_item(request, item_uuid: str, review: ReviewInSchema): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - Review.review_item_by_user( + Review.update_item_review( item, request.user, review.title, review.body, review.visibility, created_time=review.created_time, - share_to_mastodon=review.post_to_fediverse, ) + if review.post_to_fediverse and request.user.mastodon_username: + share_review(review) + return 200, {"message": "OK"} @@ -205,7 +208,7 @@ def delete_review(request, item_uuid: str): item = Item.get_by_url(item_uuid) if not item: return 404, {"message": "Item not found"} - Review.review_item_by_user(item, request.user, None, None) + Review.update_item_review(item, request.user, None, None) return 200, {"message": "OK"} diff --git a/journal/exporters/doufen.py b/journal/exporters/doufen.py index f37311a9..d859a611 100644 --- a/journal/exporters/doufen.py +++ b/journal/exporters/doufen.py @@ -47,9 +47,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Movie) | query_item_category( - ItemCategory.TV - ) + q = q_item_in_category(ItemCategory.Movie) | q_item_in_category(ItemCategory.TV) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -95,7 +93,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Music) + q = q_item_in_category(ItemCategory.Music) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -135,7 +133,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Book) + q = q_item_in_category(ItemCategory.Book) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -177,7 +175,7 
@@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Game) + q = q_item_in_category(ItemCategory.Game) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -219,7 +217,7 @@ def export_marks_task(user): ]: ws = wb.create_sheet(title=label) shelf = user.shelf_manager.get_shelf(status) - q = query_item_category(ItemCategory.Podcast) + q = q_item_in_category(ItemCategory.Podcast) marks = shelf.members.all().filter(q).order_by("created_time") ws.append(heading) for mm in marks: @@ -267,7 +265,7 @@ def export_marks_task(user): (ItemCategory.Podcast, "播客评论"), ]: ws = wb.create_sheet(title=label) - q = query_item_category(category) + q = q_item_in_category(category) reviews = Review.objects.filter(owner=user).filter(q).order_by("created_time") ws.append(review_heading) for review in reviews: diff --git a/journal/importers/douban.py b/journal/importers/douban.py index ee649ff8..6232d860 100644 --- a/journal/importers/douban.py +++ b/journal/importers/douban.py @@ -261,7 +261,7 @@ class DoubanImporter: ) print("+", end="", flush=True) if tags: - TagManager.tag_item_by_user(item, self.user, tags) + TagManager.tag_item(item, self.user, tags) return 1 def import_review_sheet(self, worksheet, sheet_name): diff --git a/journal/management/commands/journal.py b/journal/management/commands/journal.py index 3fadaf91..fd8645a0 100644 --- a/journal/management/commands/journal.py +++ b/journal/management/commands/journal.py @@ -1,9 +1,10 @@ -import pprint - from django.core.management.base import BaseCommand +from catalog.models import Item from journal.importers.douban import DoubanImporter from journal.models import * +from journal.models.common import Content +from journal.models.itemlist import ListMember from users.models import User diff --git a/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py b/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py new file mode 100644 index 00000000..c6f07aba --- /dev/null +++ b/journal/migrations/0014_alter_piece_options_piece_local_piece_post_id_and_more.py @@ -0,0 +1,50 @@ +# Generated by Django 4.2.3 on 2023-08-06 02:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ( + "journal", + "0014_remove_reply_piece_ptr_remove_reply_reply_to_content_and_more", + ), + ] + + operations = [ + migrations.AlterModelOptions( + name="piece", + options={}, + ), + migrations.AddField( + model_name="piece", + name="local", + field=models.BooleanField(default=True), + ), + migrations.AddField( + model_name="piece", + name="post_id", + field=models.BigIntegerField(default=None, null=True), + ), + migrations.AddField( + model_name="comment", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="rating", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddField( + model_name="review", + name="remote_id", + field=models.CharField(default=None, max_length=200, null=True), + ), + migrations.AddIndex( + model_name="piece", + index=models.Index( + fields=["post_id"], name="journal_pie_post_id_6a74ff_idx" + ), + ), + ] diff --git a/journal/migrations/0015_alter_collection_featured_by_users_and_more.py b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py new file mode 100644 index 
00000000..ad0a7d72 --- /dev/null +++ b/journal/migrations/0015_alter_collection_featured_by_users_and_more.py @@ -0,0 +1,111 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:26 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("users", "0012_apidentity"), + ("journal", "0014_alter_piece_options_piece_local_piece_post_id_and_more"), + ] + + operations = [ + migrations.RemoveField( + model_name="collection", + name="featured_by_users", + ), + migrations.AddField( + model_name="collection", + name="featured_by", + field=models.ManyToManyField( + related_name="featured_collections", + through="journal.FeaturedCollection", + to="users.apidentity", + ), + ), + migrations.AlterField( + model_name="collection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="collectionmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="comment", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="featuredcollection", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="like", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="rating", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="review", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelf", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelflogentry", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="shelfmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tag", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + migrations.AlterField( + model_name="tagmember", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, to="users.apidentity" + ), + ), + ] diff --git a/journal/models/__init__.py b/journal/models/__init__.py index d1609f0b..3e9ab152 100644 --- a/journal/models/__init__.py +++ b/journal/models/__init__.py @@ -4,11 +4,11 @@ from .common import ( Piece, UserOwnedObjectMixin, VisibilityType, - max_visiblity_to, - q_visible_to, - query_following, - query_item_category, - query_visible, + max_visiblity_to_user, + q_item_in_category, + q_owned_piece_visible_to_user, + q_piece_in_home_feed_of_user, + q_piece_visible_to_user, ) from .like import Like from .mark import Mark diff --git a/journal/models/collection.py b/journal/models/collection.py index 430ea9c5..f204bc56 100644 --- a/journal/models/collection.py +++ b/journal/models/collection.py @@ -1,14 
+1,14 @@
 import re
 from functools import cached_property
 
-from django.db import connection, models
+from django.db import models
 from django.utils.translation import gettext_lazy as _
 
 from catalog.collection.models import Collection as CatalogCollection
 from catalog.common import jsondata
 from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path
 from catalog.models import Item
-from users.models import User
+from users.models import APIdentity
 
 from .common import Piece
 from .itemlist import List, ListMember
@@ -42,8 +42,8 @@ class Collection(List):
     collaborative = models.PositiveSmallIntegerField(
         default=0
     )  # 0: Editable by owner only / 1: Editable by bi-direction followers
-    featured_by_users = models.ManyToManyField(
-        to=User, related_name="featured_collections", through="FeaturedCollection"
+    featured_by = models.ManyToManyField(
+        to=APIdentity, related_name="featured_collections", through="FeaturedCollection"
     )
 
     @property
@@ -56,25 +56,25 @@ class Collection(List):
         html = render_md(self.brief)
         return _RE_HTML_TAG.sub(" ", html)
 
-    def featured_by_user_since(self, user):
-        f = FeaturedCollection.objects.filter(target=self, owner=user).first()
+    def featured_since(self, owner: APIdentity):
+        f = FeaturedCollection.objects.filter(target=self, owner=owner).first()
         return f.created_time if f else None
 
-    def get_stats_for_user(self, user):
+    def get_stats(self, owner: APIdentity):
         items = list(self.members.all().values_list("item_id", flat=True))
         stats = {"total": len(items)}
-        for st, shelf in user.shelf_manager.shelf_list.items():
+        for st, shelf in owner.shelf_manager.shelf_list.items():
             stats[st] = shelf.members.all().filter(item_id__in=items).count()
         stats["percentage"] = (
             round(stats["complete"] * 100 / stats["total"]) if stats["total"] else 0
         )
         return stats
 
-    def get_progress_for_user(self, user):
+    def get_progress(self, owner: APIdentity):
         items = list(self.members.all().values_list("item_id", flat=True))
         if len(items) == 0:
             return 0
-        shelf = user.shelf_manager.shelf_list["complete"]
+        shelf = owner.shelf_manager.shelf_list["complete"]
         return round(
             shelf.members.all().filter(item_id__in=items).count() * 100 / len(items)
         )
@@ -94,7 +94,7 @@ class Collection(List):
 
 
 class FeaturedCollection(Piece):
-    owner = models.ForeignKey(User, on_delete=models.CASCADE)
+    owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE)
     target = models.ForeignKey(Collection, on_delete=models.CASCADE)
     created_time = models.DateTimeField(auto_now_add=True)
     edited_time = models.DateTimeField(auto_now=True)
@@ -108,4 +108,4 @@ class FeaturedCollection(Piece):
 
     @cached_property
     def progress(self):
-        return self.target.get_progress_for_user(self.owner)
+        return self.target.get_progress(self.owner)
diff --git a/journal/models/comment.py b/journal/models/comment.py
index 05c59e1d..dc566dee 100644
--- a/journal/models/comment.py
+++ b/journal/models/comment.py
@@ -1,10 +1,11 @@
+from datetime import datetime
 from functools import cached_property
 
 from django.db import models
 from django.utils import timezone
 
 from catalog.models import Item
-from users.models import User
+from users.models import APIdentity
 
 from .common import Content
 from .rating import Rating
@@ -14,13 +15,44 @@ from .renderers import render_text
 class Comment(Content):
     text = models.TextField(blank=False, null=False)
 
+    @property
+    def ap_object(self):
+        return {
+            "id": self.absolute_url,
+            "type": "Comment",
+            "content": self.text,
+            "published": self.created_time.isoformat(),
+            "updated": self.edited_time.isoformat(),
+            "attributedTo": self.owner.actor_uri,
+            "relatedWith": self.item.absolute_url,
+            "url": self.absolute_url,
+        }
+
+    @classmethod
+    def update_by_ap_object(cls, owner, item, obj, post_id, visibility):
+        content = obj.get("content", "").strip() if obj else ""
+        if not content:
+            cls.objects.filter(owner=owner, item=item).delete()
+            return
+        d = {
+            "text": content,
+            "local": False,
+            "remote_id": obj["id"],
+            "post_id": post_id,
+            "visibility": visibility,
+            "created_time": datetime.fromisoformat(obj["published"]),
+            "edited_time": datetime.fromisoformat(obj["updated"]),
+        }
+        p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d)
+        return p
+
     @property
     def html(self):
         return render_text(self.text)
 
     @cached_property
     def rating_grade(self):
-        return Rating.get_item_rating_by_user(self.item, self.owner)
+        return Rating.get_item_rating(self.item, self.owner)
 
     @cached_property
     def mark(self):
@@ -38,17 +70,17 @@
         return self.item.url
 
     @staticmethod
-    def comment_item_by_user(
-        item: Item, user: User, text: str | None, visibility=0, created_time=None
+    def comment_item(
+        item: Item, owner: APIdentity, text: str | None, visibility=0, created_time=None
     ):
-        comment = Comment.objects.filter(owner=user, item=item).first()
+        comment = Comment.objects.filter(owner=owner, item=item).first()
         if not text:
             if comment is not None:
                 comment.delete()
                 comment = None
         elif comment is None:
             comment = Comment.objects.create(
-                owner=user,
+                owner=owner,
                 item=item,
                 text=text,
                 visibility=visibility,
diff --git a/journal/models/common.py b/journal/models/common.py
index a8861e86..4b5b35ff 100644
--- a/journal/models/common.py
+++ b/journal/models/common.py
@@ -1,30 +1,20 @@
 import re
 import uuid
-from functools import cached_property
 
-import django.dispatch
 from django.conf import settings
-from django.contrib.contenttypes.models import ContentType
-from django.core.exceptions import PermissionDenied
-from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator
 from django.db import connection, models
 from django.db.models import Avg, Count, Q
 from django.utils import timezone
 from django.utils.baseconv import base62
 from django.utils.translation import gettext_lazy as _
-from markdownx.models import MarkdownxField
 from polymorphic.models import PolymorphicModel
 
-from catalog.collection.models import Collection as CatalogCollection
-from catalog.common import jsondata
-from catalog.common.models import Item, ItemCategory
-from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path
+from catalog.common.models import AvailableItemCategory, Item, ItemCategory
 from catalog.models import *
-from mastodon.api import share_review
-from users.models import User
+from takahe.utils import Takahe
+from users.models import APIdentity, User
 
 from .mixins import UserOwnedObjectMixin
-from .renderers import render_md, render_text
 
 
 _logger = logging.getLogger(__name__)
@@ -35,46 +25,57 @@ class VisibilityType(models.IntegerChoices):
     Private = 2, _("仅自己")
 
 
-def q_visible_to(viewer, owner):
+def q_owned_piece_visible_to_user(viewing_user: User, owner: APIdentity):
+    if (
+        not viewing_user
+        or not viewing_user.is_authenticated
+        or not viewing_user.identity
+    ):
+        return Q(visibility=0)
+    viewer = viewing_user.identity
     if viewer == owner:
         return Q()
     # elif viewer.is_blocked_by(owner):
     #     return Q(pk__in=[])
-    elif viewer.is_authenticated and viewer.is_following(owner):
-        return Q(visibility__in=[0, 1])
+    elif viewer.is_following(owner):
+        return Q(owner=owner, visibility__in=[0, 1])
     else:
-        return Q(visibility=0)
+        return Q(owner=owner, visibility=0)
 
 
-def max_visiblity_to(viewer, owner):
+def max_visiblity_to_user(viewing_user: User, owner: APIdentity):
+    if (
+        not viewing_user
+        or not viewing_user.is_authenticated
+        or not viewing_user.identity
+    ):
+        return 0
+    viewer = viewing_user.identity
     if viewer == owner:
         return 2
-    # elif viewer.is_blocked_by(owner):
-    #     return Q(pk__in=[])
-    elif viewer.is_authenticated and viewer.is_following(owner):
+    elif viewer.is_following(owner):
         return 1
     else:
         return 0
 
 
-def query_visible(user):
+def q_piece_visible_to_user(user: User):
+    if not user or not user.is_authenticated or not user.identity:
+        return Q(visibility=0)
     return (
-        (
-            Q(visibility=0)
-            | Q(owner_id__in=user.following, visibility=1)
-            | Q(owner_id=user.id)
-        )
-        & ~Q(owner_id__in=user.ignoring)
-        if user.is_authenticated
-        else Q(visibility=0)
+        Q(visibility=0)
+        | Q(owner_id__in=user.identity.following, visibility=1)
+        | Q(owner_id=user.identity.pk)
+    ) & ~Q(owner_id__in=user.identity.ignoring)
+
+
+def q_piece_in_home_feed_of_user(user: User):
+    return Q(owner_id__in=user.identity.following, visibility__lt=2) | Q(
+        owner_id=user.identity.pk
     )
 
 
-def query_following(user):
-    return Q(owner_id__in=user.following, visibility__lt=2) | Q(owner_id=user.id)
-
-
-def query_item_category(item_category):
+def q_item_in_category(item_category: ItemCategory | AvailableItemCategory):
     classes = item_categories()[item_category]
     # q = Q(item__instance_of=classes[0])
     # for cls in classes[1:]:
@@ -92,7 +93,7 @@
 
 
 # class ImportSession(models.Model):
-#     owner = models.ForeignKey(User, on_delete=models.CASCADE)
+#     owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE)
 #     status = models.PositiveSmallIntegerField(default=ImportStatus.QUEUED)
 #     importer = models.CharField(max_length=50)
 #     file = models.CharField()
@@ -115,6 +116,13 @@ def query_item_category(item_category):
 class Piece(PolymorphicModel, UserOwnedObjectMixin):
     url_path = "p"  # subclass must specify this
     uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
+    local = models.BooleanField(default=True)
+    post_id = models.BigIntegerField(null=True, default=None)
+
+    class Meta:
+        indexes = [
+            models.Index(fields=["post_id"]),
+        ]
 
     @property
     def uuid(self):
@@ -132,9 +140,18 @@
     def api_url(self):
         return f"/api/{self.url}" if self.url_path else None
 
+    @property
+    def shared_link(self):
+        return Takahe.get_post_url(self.post_id) if self.post_id else None
+
     @property
     def like_count(self):
-        return self.likes.all().count()
+        return (
+            Takahe.get_post_stats(self.post_id).get("likes", 0) if self.post_id else 0
+        )
+
+    def is_liked_by(self, user):
+        return self.post_id and Takahe.post_liked_by(self.post_id, user)
 
     @classmethod
     def get_by_url(cls, url_or_b62):
@@ -149,9 +166,17 @@
             obj = None
         return obj
 
+    @classmethod
+    def update_by_ap_object(cls, owner, item, obj, post_id, visibility):
+        raise NotImplementedError
+
+    @property
+    def ap_object(self):
+        raise NotImplementedError
+
 
 class Content(Piece):
-    owner = models.ForeignKey(User, on_delete=models.PROTECT)
+    owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT)
     visibility = models.PositiveSmallIntegerField(
         default=0
     )  # 0: Public / 1: Follower only / 2: Self only
@@ -161,6 +186,7 @@ class Content(Piece):
     )  # auto_now=True FIXME revert this after migration
     metadata = 
models.JSONField(default=dict) item = models.ForeignKey(Item, on_delete=models.PROTECT) + remote_id = models.CharField(max_length=200, null=True, default=None) def __str__(self): return f"{self.uuid}@{self.item}" diff --git a/journal/models/itemlist.py b/journal/models/itemlist.py index a5b5b543..f70a0497 100644 --- a/journal/models/itemlist.py +++ b/journal/models/itemlist.py @@ -5,7 +5,7 @@ from django.db import models from django.utils import timezone from catalog.models import Item, ItemCategory -from users.models import User +from users.models import APIdentity from .common import Piece @@ -15,24 +15,21 @@ list_remove = django.dispatch.Signal() class List(Piece): """ - List (abstract class) + List (abstract model) """ - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only - created_time = models.DateTimeField( - default=timezone.now - ) # auto_now_add=True FIXME revert this after migration - edited_time = models.DateTimeField( - default=timezone.now - ) # auto_now=True FIXME revert this after migration + created_time = models.DateTimeField(default=timezone.now) + edited_time = models.DateTimeField(default=timezone.now) metadata = models.JSONField(default=dict) class Meta: abstract = True + MEMBER_CLASS: Piece # MEMBER_CLASS = None # subclass must override this # subclass must add this: # items = models.ManyToManyField(Item, through='ListMember') @@ -146,14 +143,12 @@ class ListMember(Piece): parent = models.ForeignKey('List', related_name='members', on_delete=models.CASCADE) """ - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only created_time = models.DateTimeField(default=timezone.now) - edited_time = models.DateTimeField( - default=timezone.now - ) # auto_now=True FIXME revert this after migration + edited_time = models.DateTimeField(default=timezone.now) metadata = models.JSONField(default=dict) item = models.ForeignKey(Item, on_delete=models.PROTECT) position = models.PositiveIntegerField() diff --git a/journal/models/like.py b/journal/models/like.py index e0150915..9a06e433 100644 --- a/journal/models/like.py +++ b/journal/models/like.py @@ -3,13 +3,13 @@ from django.db import connection, models from django.utils import timezone from django.utils.translation import gettext_lazy as _ -from users.models import User +from users.models import APIdentity from .common import Piece -class Like(Piece): - owner = models.ForeignKey(User, on_delete=models.PROTECT) +class Like(Piece): # TODO remove + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField( default=0 ) # 0: Public / 1: Follower only / 2: Self only @@ -18,25 +18,27 @@ class Like(Piece): target = models.ForeignKey(Piece, on_delete=models.CASCADE, related_name="likes") @staticmethod - def user_liked_piece(user, piece): - return Like.objects.filter(owner=user, target=piece).exists() + def user_liked_piece(owner, piece): + return Like.objects.filter(owner=owner.identity, target=piece).exists() @staticmethod - def user_like_piece(user, piece): + def user_like_piece(owner, piece): if not piece: return - like = Like.objects.filter(owner=user, target=piece).first() + like = Like.objects.filter(owner=owner.identity, 
target=piece).first() if not like: - like = Like.objects.create(owner=user, target=piece) + like = Like.objects.create(owner=owner.identity, target=piece) return like @staticmethod - def user_unlike_piece(user, piece): + def user_unlike_piece(owner, piece): if not piece: return - Like.objects.filter(owner=user, target=piece).delete() + Like.objects.filter(owner=owner.identity, target=piece).delete() @staticmethod - def user_likes_by_class(user, cls): + def user_likes_by_class(owner, cls): ctype_id = ContentType.objects.get_for_model(cls) - return Like.objects.filter(owner=user, target__polymorphic_ctype=ctype_id) + return Like.objects.filter( + owner=owner.identity, target__polymorphic_ctype=ctype_id + ) diff --git a/journal/models/mark.py b/journal/models/mark.py index ac5bbbf1..1961366c 100644 --- a/journal/models/mark.py +++ b/journal/models/mark.py @@ -12,6 +12,7 @@ from django.db.models import Avg, Count, Q from django.utils import timezone from django.utils.baseconv import base62 from django.utils.translation import gettext_lazy as _ +from loguru import logger from markdownx.models import MarkdownxField from polymorphic.models import PolymorphicModel @@ -20,16 +21,14 @@ from catalog.common import jsondata from catalog.common.models import Item, ItemCategory from catalog.common.utils import DEFAULT_ITEM_COVER, piece_cover_path from catalog.models import * -from mastodon.api import share_review -from users.models import User +from takahe.utils import Takahe +from users.models import APIdentity from .comment import Comment from .rating import Rating from .review import Review from .shelf import Shelf, ShelfLogEntry, ShelfManager, ShelfMember, ShelfType -_logger = logging.getLogger(__name__) - class Mark: """ @@ -38,8 +37,8 @@ class Mark: it mimics previous mark behaviour. """ - def __init__(self, user, item): - self.owner = user + def __init__(self, owner: APIdentity, item: Item): + self.owner = owner self.item = item @cached_property @@ -60,7 +59,7 @@ class Mark: @property def action_label(self) -> str: - if self.shelfmember: + if self.shelfmember and self.shelf_type: return ShelfManager.get_action_label(self.shelf_type, self.item.category) if self.comment: return ShelfManager.get_action_label( @@ -72,7 +71,7 @@ class Mark: def shelf_label(self) -> str | None: return ( ShelfManager.get_label(self.shelf_type, self.item.category) - if self.shelfmember + if self.shelf_type else None ) @@ -86,19 +85,23 @@ class Mark: @property def visibility(self) -> int: - return ( - self.shelfmember.visibility - if self.shelfmember - else self.owner.preference.default_visibility - ) + if self.shelfmember: + return self.shelfmember.visibility + else: + logger.warning(f"no shelfmember for mark {self.owner}, {self.item}") + return 2 @cached_property def tags(self) -> list[str]: return self.owner.tag_manager.get_item_tags(self.item) + @cached_property + def rating(self): + return Rating.objects.filter(owner=self.owner, item=self.item).first() + @cached_property def rating_grade(self) -> int | None: - return Rating.get_item_rating_by_user(self.item, self.owner) + return Rating.get_item_rating(self.item, self.owner) @cached_property def comment(self) -> Comment | None: @@ -118,29 +121,24 @@ class Mark: def update( self, - shelf_type: ShelfType | None, - comment_text: str | None, - rating_grade: int | None, - visibility: int, + shelf_type, + comment_text, + rating_grade, + visibility, metadata=None, created_time=None, share_to_mastodon=False, - silence=False, ): - # silence=False means update is logged. 
- share = ( - share_to_mastodon - and self.owner.mastodon_username - and shelf_type is not None - and ( - shelf_type != self.shelf_type - or comment_text != self.comment_text - or rating_grade != self.rating_grade - ) + post_to_feed = shelf_type is not None and ( + shelf_type != self.shelf_type + or comment_text != self.comment_text + or rating_grade != self.rating_grade ) + if shelf_type is None: + Takahe.delete_mark(self) if created_time and created_time >= timezone.now(): created_time = None - share_as_new_post = shelf_type != self.shelf_type + post_as_new = shelf_type != self.shelf_type original_visibility = self.visibility if shelf_type != self.shelf_type or visibility != original_visibility: self.shelfmember = self.owner.shelf_manager.move_item( @@ -148,9 +146,8 @@ class Mark: shelf_type, visibility=visibility, metadata=metadata, - silence=silence, ) - if not silence and self.shelfmember and created_time: + if self.shelfmember and created_time: # if it's an update(not delete) and created_time is specified, # update the timestamp of the shelfmember and log log = ShelfLogEntry.objects.filter( @@ -172,7 +169,7 @@ class Mark: timestamp=created_time, ) if comment_text != self.comment_text or visibility != original_visibility: - self.comment = Comment.comment_item_by_user( + self.comment = Comment.comment_item( self.item, self.owner, comment_text, @@ -180,35 +177,15 @@ class Mark: self.shelfmember.created_time if self.shelfmember else None, ) if rating_grade != self.rating_grade or visibility != original_visibility: - Rating.rate_item_by_user(self.item, self.owner, rating_grade, visibility) + Rating.update_item_rating(self.item, self.owner, rating_grade, visibility) self.rating_grade = rating_grade - if share: - # this is a bit hacky but let's keep it until move to implement ActivityPub, - # by then, we'll just change this to boost - from mastodon.api import share_mark - self.shared_link = ( - self.shelfmember.metadata.get("shared_link") - if self.shelfmember.metadata and not share_as_new_post - else None - ) - self.save = lambda **args: None - result, code = share_mark(self) - if not result: - if code == 401: - raise PermissionDenied() - else: - raise ValueError(code) - if self.shelfmember.metadata.get("shared_link") != self.shared_link: - self.shelfmember.metadata["shared_link"] = self.shared_link - self.shelfmember.save() - elif share_as_new_post and self.shelfmember: - self.shelfmember.metadata["shared_link"] = None - self.shelfmember.save() + if post_to_feed: + Takahe.post_mark(self, post_as_new) - def delete(self, silence=False): + def delete(self): # self.logs.delete() # When deleting a mark, all logs of the mark are deleted first. 
- self.update(None, None, None, 0, silence=silence) + self.update(None, None, None, 0) def delete_log(self, log_id): ShelfLogEntry.objects.filter( diff --git a/journal/models/mixins.py b/journal/models/mixins.py index 69d597d2..ac3f836b 100644 --- a/journal/models/mixins.py +++ b/journal/models/mixins.py @@ -1,4 +1,6 @@ -from typing import TYPE_CHECKING, Type +from typing import TYPE_CHECKING + +from users.models import APIdentity, User if TYPE_CHECKING: from .common import Piece @@ -9,18 +11,24 @@ class UserOwnedObjectMixin: UserOwnedObjectMixin Models must add these: - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) visibility = models.PositiveSmallIntegerField(default=0) """ - def is_visible_to(self: "Piece", viewer): # type: ignore + owner: APIdentity + visibility: int + + def is_visible_to(self: "Piece | Self", viewing_user: User) -> bool: # type: ignore owner = self.owner - if owner == viewer: - return True - if not owner.is_active: + if not owner or not owner.is_active: return False - if not viewer.is_authenticated: + if owner.user == viewing_user: + return True + if not viewing_user.is_authenticated: return self.visibility == 0 + viewer = viewing_user.identity # type: ignore[assignment] + if not viewer: + return False if self.visibility == 2: return False if viewer.is_blocking(owner) or owner.is_blocking(viewer): @@ -30,27 +38,9 @@ class UserOwnedObjectMixin: else: return True - def is_editable_by(self: "Piece", viewer): # type: ignore - return viewer.is_authenticated and ( - viewer.is_staff or viewer.is_superuser or viewer == self.owner + def is_editable_by(self: "Piece", viewing_user: User): # type: ignore + return viewing_user.is_authenticated and ( + viewing_user.is_staff + or viewing_user.is_superuser + or viewing_user == self.owner.user ) - - @classmethod - def get_available(cls: "Type[Piece]", entity, request_user, following_only=False): # type: ignore - # e.g. 
SongMark.get_available(song, request.user) - query_kwargs = {entity.__class__.__name__.lower(): entity} - all_entities = cls.objects.filter(**query_kwargs).order_by( - "-created_time" - ) # get all marks for song - visible_entities = list( - filter( - lambda _entity: _entity.is_visible_to(request_user) - and ( - _entity.owner.mastodon_acct in request_user.mastodon_following - if following_only - else True - ), - all_entities, - ) - ) - return visible_entities diff --git a/journal/models/rating.py b/journal/models/rating.py index 255e049b..b9034ab2 100644 --- a/journal/models/rating.py +++ b/journal/models/rating.py @@ -1,10 +1,12 @@ +from datetime import datetime + from django.core.validators import MaxValueValidator, MinValueValidator, RegexValidator from django.db import connection, models from django.db.models import Avg, Count, Q from django.utils.translation import gettext_lazy as _ from catalog.models import Item, ItemCategory -from users.models import User +from users.models import APIdentity from .common import Content @@ -20,6 +22,51 @@ class Rating(Content): default=0, validators=[MaxValueValidator(10), MinValueValidator(1)], null=True ) + @property + def ap_object(self): + return { + "id": self.absolute_url, + "type": "Rating", + "best": 10, + "worst": 1, + "value": self.grade, + "published": self.created_time.isoformat(), + "updated": self.edited_time.isoformat(), + "attributedTo": self.owner.actor_uri, + "relatedWith": self.item.absolute_url, + "url": self.absolute_url, + } + + @classmethod + def update_by_ap_object(cls, owner, item, obj, post_id, visibility): + value = obj.get("value", 0) if obj else 0 + if not value: + cls.objects.filter(owner=owner, item=item).delete() + return + best = obj.get("best", 5) + worst = obj.get("worst", 1) + if best <= worst: + return + if value < worst: + value = worst + if value > best: + value = best + if best != 10 or worst != 1: + value = round(9 * (value - worst) / (best - worst)) + 1 + else: + value = round(value) + d = { + "grade": value, + "local": False, + "remote_id": obj["id"], + "post_id": post_id, + "visibility": visibility, + "created_time": datetime.fromisoformat(obj["published"]), + "edited_time": datetime.fromisoformat(obj["updated"]), + } + p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d) + return p + @staticmethod def get_rating_for_item(item: Item) -> float | None: stat = Rating.objects.filter(grade__isnull=False) @@ -65,19 +112,19 @@ class Rating(Content): return r @staticmethod - def rate_item_by_user( - item: Item, user: User, rating_grade: int | None, visibility: int = 0 + def update_item_rating( + item: Item, owner: APIdentity, rating_grade: int | None, visibility: int = 0 ): if rating_grade and (rating_grade < 1 or rating_grade > 10): raise ValueError(f"Invalid rating grade: {rating_grade}") - rating = Rating.objects.filter(owner=user, item=item).first() + rating = Rating.objects.filter(owner=owner, item=item).first() if not rating_grade: if rating: rating.delete() rating = None elif rating is None: rating = Rating.objects.create( - owner=user, item=item, grade=rating_grade, visibility=visibility + owner=owner, item=item, grade=rating_grade, visibility=visibility ) elif rating.grade != rating_grade or rating.visibility != visibility: rating.visibility = visibility @@ -86,6 +133,6 @@ class Rating(Content): return rating @staticmethod - def get_item_rating_by_user(item: Item, user: User) -> int | None: - rating = Rating.objects.filter(owner=user, item=item).first() + def get_item_rating(item: Item, 
owner: APIdentity) -> int | None: + rating = Rating.objects.filter(owner=owner, item=item).first() return (rating.grade or None) if rating else None diff --git a/journal/models/renderers.py b/journal/models/renderers.py index ef6c2f5a..5e261031 100644 --- a/journal/models/renderers.py +++ b/journal/models/renderers.py @@ -19,7 +19,7 @@ _mistune_plugins = [ _markdown = mistune.create_markdown(plugins=_mistune_plugins) -def convert_leading_space_in_md(body) -> str: +def convert_leading_space_in_md(body: str) -> str: body = re.sub(r"^\s+$", "", body, flags=re.MULTILINE) body = re.sub( r"^(\u2003*)( +)", @@ -30,11 +30,11 @@ def convert_leading_space_in_md(body) -> str: return body -def render_md(s) -> str: +def render_md(s: str) -> str: return cast(str, _markdown(s)) -def _spolier(s): +def _spolier(s: str) -> str: l = s.split(">!", 1) if len(l) == 1: return escape(s) @@ -48,5 +48,5 @@ def _spolier(s): ) -def render_text(s): +def render_text(s: str) -> str: return _spolier(s) diff --git a/journal/models/review.py b/journal/models/review.py index 31424c27..c7129f42 100644 --- a/journal/models/review.py +++ b/journal/models/review.py @@ -7,8 +7,7 @@ from django.utils.translation import gettext_lazy as _ from markdownx.models import MarkdownxField from catalog.models import Item -from mastodon.api import share_review -from users.models import User +from users.models import APIdentity from .common import Content from .rating import Rating @@ -44,21 +43,20 @@ class Review(Content): @cached_property def rating_grade(self): - return Rating.get_item_rating_by_user(self.item, self.owner) + return Rating.get_item_rating(self.item, self.owner) @classmethod - def review_item_by_user( + def update_item_review( cls, item: Item, - user: User, + owner: APIdentity, title: str | None, body: str | None, visibility=0, created_time=None, - share_to_mastodon=False, ): if title is None: - review = Review.objects.filter(owner=user, item=item).first() + review = Review.objects.filter(owner=owner, item=item).first() if review is not None: review.delete() return None @@ -71,9 +69,7 @@ class Review(Content): defaults["created_time"] = ( created_time if created_time < timezone.now() else timezone.now() ) - review, created = cls.objects.update_or_create( - item=item, owner=user, defaults=defaults + review, _ = cls.objects.update_or_create( + item=item, owner=owner, defaults=defaults ) - if share_to_mastodon and user.mastodon_username: - share_review(review) return review diff --git a/journal/models/shelf.py b/journal/models/shelf.py index 91290c0d..d7b28852 100644 --- a/journal/models/shelf.py +++ b/journal/models/shelf.py @@ -1,14 +1,17 @@ +from datetime import datetime from functools import cached_property from typing import TYPE_CHECKING from django.db import connection, models from django.utils import timezone from django.utils.translation import gettext_lazy as _ +from loguru import logger from catalog.models import Item, ItemCategory -from users.models import User +from takahe.models import Identity +from users.models import APIdentity -from .common import query_item_category +from .common import q_item_in_category from .itemlist import List, ListMember if TYPE_CHECKING: @@ -60,6 +63,43 @@ class ShelfMember(ListMember): models.Index(fields=["parent_id", "visibility", "created_time"]), ] + @property + def ap_object(self): + return { + "id": self.absolute_url, + "type": "Status", + "status": self.parent.shelf_type, + "published": self.created_time.isoformat(), + "updated": self.edited_time.isoformat(), + "attributedTo": 
self.owner.actor_uri, + "relatedWith": self.item.absolute_url, + "url": self.absolute_url, + } + + @classmethod + def update_by_ap_object( + cls, owner: APIdentity, item: Identity, obj: dict, post_id: int, visibility: int + ): + if not obj: + cls.objects.filter(owner=owner, item=item).delete() + return + shelf = owner.shelf_manager.get_shelf(obj["status"]) + if not shelf: + logger.warning(f"unable to locate shelf for {owner}, {obj}") + return + d = { + "parent": shelf, + "position": 0, + "local": False, + # "remote_id": obj["id"], + "post_id": post_id, + "visibility": visibility, + "created_time": datetime.fromisoformat(obj["published"]), + "edited_time": datetime.fromisoformat(obj["updated"]), + } + p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d) + return p + @cached_property def mark(self) -> "Mark": from .mark import Mark @@ -108,7 +148,7 @@ class Shelf(List): class ShelfLogEntry(models.Model): - owner = models.ForeignKey(User, on_delete=models.PROTECT) + owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT) shelf_type = models.CharField(choices=ShelfType.choices, max_length=100, null=True) item = models.ForeignKey(Item, on_delete=models.PROTECT) timestamp = models.DateTimeField() # this may later be changed by user @@ -135,8 +175,8 @@ class ShelfManager: ShelfLogEntry can later be modified if user wish to change history """ - def __init__(self, user): - self.owner = user + def __init__(self, owner): + self.owner = owner qs = Shelf.objects.filter(owner=self.owner) self.shelf_list = {v.shelf_type: v for v in qs} if len(self.shelf_list) == 0: @@ -146,13 +186,18 @@ class ShelfManager: for qt in ShelfType: self.shelf_list[qt] = Shelf.objects.create(owner=self.owner, shelf_type=qt) - def locate_item(self, item) -> ShelfMember | None: + def locate_item(self, item: Item) -> ShelfMember | None: return ShelfMember.objects.filter(item=item, owner=self.owner).first() - def move_item(self, item, shelf_type, visibility=0, metadata=None, silence=False): + def move_item( + self, + item: Item, + shelf_type: ShelfType, + visibility: int = 0, + metadata: dict | None = None, + ): # shelf_type=None means remove from current shelf # metadata=None means no change - # silence=False means move_item is logged. 
if not item: raise ValueError("empty item") new_shelfmember = None @@ -185,7 +230,7 @@ class ShelfManager: elif visibility != last_visibility: # change visibility last_shelfmember.visibility = visibility last_shelfmember.save() - if changed and not silence: + if changed: if metadata is None: metadata = last_metadata or {} log_time = ( @@ -205,18 +250,20 @@ class ShelfManager: def get_log(self): return ShelfLogEntry.objects.filter(owner=self.owner).order_by("timestamp") - def get_log_for_item(self, item): + def get_log_for_item(self, item: Item): return ShelfLogEntry.objects.filter(owner=self.owner, item=item).order_by( "timestamp" ) - def get_shelf(self, shelf_type): + def get_shelf(self, shelf_type: ShelfType): return self.shelf_list[shelf_type] - def get_latest_members(self, shelf_type, item_category=None): + def get_latest_members( + self, shelf_type: ShelfType, item_category: ItemCategory | None = None + ): qs = self.shelf_list[shelf_type].members.all().order_by("-created_time") if item_category: - return qs.filter(query_item_category(item_category)) + return qs.filter(q_item_in_category(item_category)) else: return qs @@ -229,14 +276,16 @@ class ShelfManager: # return shelf.members.all().order_by @classmethod - def get_action_label(cls, shelf_type, item_category) -> str: + def get_action_label( + cls, shelf_type: ShelfType, item_category: ItemCategory + ) -> str: sts = [ n[2] for n in ShelfTypeNames if n[0] == item_category and n[1] == shelf_type ] return sts[0] if sts else str(shelf_type) @classmethod - def get_label(cls, shelf_type, item_category): + def get_label(cls, shelf_type: ShelfType, item_category: ItemCategory): ic = ItemCategory(item_category).label st = cls.get_action_label(shelf_type, item_category) return ( @@ -246,10 +295,10 @@ class ShelfManager: ) @staticmethod - def get_manager_for_user(user): - return ShelfManager(user) + def get_manager_for_user(owner: APIdentity): + return ShelfManager(owner) - def get_calendar_data(self, max_visiblity): + def get_calendar_data(self, max_visiblity: int): shelf_id = self.get_shelf(ShelfType.COMPLETE).pk timezone_offset = timezone.localtime(timezone.now()).strftime("%z") timezone_offset = timezone_offset[: len(timezone_offset) - 2] diff --git a/journal/models/tag.py b/journal/models/tag.py index 3b550bcf..28d43adc 100644 --- a/journal/models/tag.py +++ b/journal/models/tag.py @@ -8,7 +8,7 @@ from django.utils.translation import gettext_lazy as _ from catalog.collection.models import Collection as CatalogCollection from catalog.models import Item -from users.models import User +from users.models import APIdentity from .itemlist import List, ListMember @@ -66,9 +66,9 @@ class TagManager: return tag_titles @staticmethod - def all_tags_for_user(user, public_only=False): + def all_tags_by_owner(owner, public_only=False): tags = ( - user.tag_set.all() + owner.tag_set.all() .values("title") .annotate(frequency=Count("members__id")) .order_by("-frequency") @@ -78,46 +78,44 @@ class TagManager: return list(map(lambda t: t["title"], tags)) @staticmethod - def tag_item_by_user(item, user, tag_titles, default_visibility=0): + def tag_item( + item: Item, + owner: APIdentity, + tag_titles: list[str], + default_visibility: int = 0, + ): titles = set([Tag.cleanup_title(tag_title) for tag_title in tag_titles]) current_titles = set( - [m.parent.title for m in TagMember.objects.filter(owner=user, item=item)] + [m.parent.title for m in TagMember.objects.filter(owner=owner, item=item)] ) for title in titles - current_titles: - tag = 
Tag.objects.filter(owner=user, title=title).first() + tag = Tag.objects.filter(owner=owner, title=title).first() if not tag: tag = Tag.objects.create( - owner=user, title=title, visibility=default_visibility + owner=owner, title=title, visibility=default_visibility ) tag.append_item(item, visibility=default_visibility) for title in current_titles - titles: - tag = Tag.objects.filter(owner=user, title=title).first() + tag = Tag.objects.filter(owner=owner, title=title).first() if tag: tag.remove_item(item) @staticmethod - def get_item_tags_by_user(item, user): - current_titles = [ - m.parent.title for m in TagMember.objects.filter(owner=user, item=item) - ] - return current_titles + def get_manager_for_user(owner): + return TagManager(owner) - @staticmethod - def get_manager_for_user(user): - return TagManager(user) - - def __init__(self, user): - self.owner = user + def __init__(self, owner): + self.owner = owner @property def all_tags(self): - return TagManager.all_tags_for_user(self.owner) + return TagManager.all_tags_by_owner(self.owner) @property def public_tags(self): - return TagManager.all_tags_for_user(self.owner, public_only=True) + return TagManager.all_tags_by_owner(self.owner, public_only=True) - def get_item_tags(self, item): + def get_item_tags(self, item: Item): return sorted( [ m["parent__title"] diff --git a/journal/models/utils.py b/journal/models/utils.py index eba94481..0ab8b4b0 100644 --- a/journal/models/utils.py +++ b/journal/models/utils.py @@ -2,7 +2,7 @@ from django.utils.translation import gettext_lazy as _ from loguru import logger from catalog.models import Item -from users.models import User +from users.models import APIdentity from .collection import Collection, CollectionMember, FeaturedCollection from .comment import Comment @@ -10,27 +10,28 @@ from .common import Content from .itemlist import ListMember from .rating import Rating from .review import Review -from .shelf import Shelf, ShelfLogEntry, ShelfManager, ShelfMember -from .tag import Tag, TagManager, TagMember +from .shelf import ShelfLogEntry, ShelfMember +from .tag import Tag, TagMember -def reset_journal_visibility_for_user(user: User, visibility: int): - ShelfMember.objects.filter(owner=user).update(visibility=visibility) - Comment.objects.filter(owner=user).update(visibility=visibility) - Rating.objects.filter(owner=user).update(visibility=visibility) - Review.objects.filter(owner=user).update(visibility=visibility) +def reset_journal_visibility_for_user(owner: APIdentity, visibility: int): + ShelfMember.objects.filter(owner=owner).update(visibility=visibility) + Comment.objects.filter(owner=owner).update(visibility=visibility) + Rating.objects.filter(owner=owner).update(visibility=visibility) + Review.objects.filter(owner=owner).update(visibility=visibility) -def remove_data_by_user(user: User): - ShelfMember.objects.filter(owner=user).delete() - Comment.objects.filter(owner=user).delete() - Rating.objects.filter(owner=user).delete() - Review.objects.filter(owner=user).delete() - TagMember.objects.filter(owner=user).delete() - Tag.objects.filter(owner=user).delete() - CollectionMember.objects.filter(owner=user).delete() - Collection.objects.filter(owner=user).delete() - FeaturedCollection.objects.filter(owner=user).delete() +def remove_data_by_user(owner: APIdentity): + ShelfMember.objects.filter(owner=owner).delete() + ShelfLogEntry.objects.filter(owner=owner).delete() + Comment.objects.filter(owner=owner).delete() + Rating.objects.filter(owner=owner).delete() + 
Review.objects.filter(owner=owner).delete() + TagMember.objects.filter(owner=owner).delete() + Tag.objects.filter(owner=owner).delete() + CollectionMember.objects.filter(owner=owner).delete() + Collection.objects.filter(owner=owner).delete() + FeaturedCollection.objects.filter(owner=owner).delete() def update_journal_for_merged_item( diff --git a/journal/templates/_list_item.html b/journal/templates/_list_item.html index feb98f83..344b171d 100644 --- a/journal/templates/_list_item.html +++ b/journal/templates/_list_item.html @@ -55,7 +55,7 @@ + {% if mark.shared_link %} href="{{ mark.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.created_time|date }} @@ -88,7 +88,7 @@ + {% if mark.review.shared_link %} href="{{ mark.review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ mark.review.created_time|date }} diff --git a/journal/templates/profile.html b/journal/templates/profile.html index 660ffca0..4f08bd79 100644 --- a/journal/templates/profile.html +++ b/journal/templates/profile.html @@ -15,14 +15,14 @@ {% else %} {{ site_name }} - {{ user.display_name }} {% endif %} - + {% if user.preference.no_anonymous_view %}{% endif %} {% include "common_libs.html" with jquery=0 v2=1 %} diff --git a/journal/templates/review.html b/journal/templates/review.html index 3c892ee4..bfdc36a9 100644 --- a/journal/templates/review.html +++ b/journal/templates/review.html @@ -41,7 +41,7 @@ + {% if review.shared_link %} href="{{ review.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {% if request.user == review.owner %}{% endif %} diff --git a/journal/templates/user_collection_list.html b/journal/templates/user_collection_list.html index e8071350..ed8ed9e9 100644 --- a/journal/templates/user_collection_list.html +++ b/journal/templates/user_collection_list.html @@ -37,7 +37,7 @@ + {% if collection.shared_link %} href="{{ collection.shared_link }}" title="打开联邦宇宙分享链接" {% else %} class="disabled" {% endif %}> {{ collection.created_time|date }} diff --git a/journal/templatetags/collection.py b/journal/templatetags/collection.py index b6a577b6..b8f4b0b3 100644 --- a/journal/templatetags/collection.py +++ b/journal/templatetags/collection.py @@ -1,32 +1,34 @@ from django import template from django.template.defaultfilters import stringfilter -from journal.models import Collection, Like +from journal.models import Collection +from journal.models.mixins import UserOwnedObjectMixin +from users.models.user import User register = template.Library() @register.simple_tag(takes_context=True) -def user_visibility_of(context, piece): +def user_visibility_of(context, piece: UserOwnedObjectMixin): user = context["request"].user return piece.is_visible_to(user) @register.simple_tag() -def user_progress_of(collection, user): +def user_progress_of(collection: Collection, user: User): return ( - collection.get_progress_for_user(user) if user and user.is_authenticated else 0 + collection.get_progress(user.identity) if user and user.is_authenticated else 0 ) @register.simple_tag() -def user_stats_of(collection, user): - return collection.get_stats_for_user(user) if user and user.is_authenticated else {} +def user_stats_of(collection: Collection, user: User): + return collection.get_stats(user.identity) if user and user.is_authenticated else {} @register.filter(is_safe=True) @stringfilter -def prural_items(category): +def prural_items(category: str): # TODO support i18n here # return _(f"items of {category}") if category == "book": diff --git 
a/journal/templatetags/user_actions.py b/journal/templatetags/user_actions.py index 6f9eecb7..d1a68b5d 100644 --- a/journal/templatetags/user_actions.py +++ b/journal/templatetags/user_actions.py @@ -2,6 +2,7 @@ from django import template from django.urls import reverse from journal.models import Collection, Like +from takahe.utils import Takahe register = template.Library() @@ -22,10 +23,9 @@ def wish_item_action(context, item): def like_piece_action(context, piece): user = context["request"].user action = {} - if user and user.is_authenticated: + if user and user.is_authenticated and piece and piece.post_id: action = { - "taken": piece.owner == user - or Like.objects.filter(target=piece, owner=user).first() is not None, + "taken": Takahe.post_liked_by(piece.post_id, user), "url": reverse("journal:like", args=[piece.uuid]), } return action @@ -34,4 +34,9 @@ def like_piece_action(context, piece): @register.simple_tag(takes_context=True) def liked_piece(context, piece): user = context["request"].user - return user and user.is_authenticated and Like.user_liked_piece(user, piece) + return ( + user + and user.is_authenticated + and piece.post_id + and Takahe.get_user_interaction(piece.post_id, user, "like") + ) diff --git a/journal/tests.py b/journal/tests.py index d422bfbb..0e87ae1e 100644 --- a/journal/tests.py +++ b/journal/tests.py @@ -9,15 +9,16 @@ from .models import * class CollectionTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") - self.user = User.register(email="a@b.com") - pass + self.user = User.register(email="a@b.com", username="user") def test_collection(self): - collection = Collection.objects.create(title="test", owner=self.user) - collection = Collection.objects.filter(title="test", owner=self.user).first() + Collection.objects.create(title="test", owner=self.user.identity) + collection = Collection.objects.get(title="test", owner=self.user.identity) self.assertEqual(collection.catalog_item.title, "test") member1 = collection.append_item(self.book1) member1.note = "my notes" @@ -38,13 +39,15 @@ class CollectionTest(TestCase): class ShelfTest(TestCase): + databases = "__all__" + def setUp(self): pass def test_shelf(self): - user = User.register(mastodon_site="site", mastodon_username="name") - shelf_manager = ShelfManager(user=user) - self.assertEqual(user.shelf_set.all().count(), 3) + user = User.register(email="a@b.com", username="user") + shelf_manager = user.identity.shelf_manager + self.assertEqual(len(shelf_manager.shelf_list.items()), 3) book1 = Edition.objects.create(title="Hyperion") book2 = Edition.objects.create(title="Andymion") q1 = shelf_manager.get_shelf(ShelfType.WISHLIST) @@ -64,90 +67,86 @@ class ShelfTest(TestCase): self.assertEqual(q2.members.all().count(), 1) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 2) - self.assertEqual(log.last().metadata, {}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 1}) time.sleep(0.001) self.assertEqual(q1.members.all().count(), 1) self.assertEqual(q2.members.all().count(), 1) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 3) - self.assertEqual(log.last().metadata, {"progress": 1}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 1}) shelf_manager.move_item(book1, ShelfType.PROGRESS, 
metadata={"progress": 1}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 3) - self.assertEqual(log.last().metadata, {"progress": 1}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 1}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 10}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 4) - self.assertEqual(log.last().metadata, {"progress": 10}) + + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 10}) shelf_manager.move_item(book1, ShelfType.PROGRESS) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 4) - self.assertEqual(log.last().metadata, {"progress": 10}) + last_log = log.last() + self.assertEqual(last_log.metadata if last_log else 42, {"progress": 10}) shelf_manager.move_item(book1, ShelfType.PROGRESS, metadata={"progress": 90}) time.sleep(0.001) log = shelf_manager.get_log_for_item(book1) self.assertEqual(log.count(), 5) - self.assertEqual(Mark(user, book1).visibility, 0) + self.assertEqual(Mark(user.identity, book1).visibility, 0) shelf_manager.move_item( book1, ShelfType.PROGRESS, metadata={"progress": 90}, visibility=1 ) time.sleep(0.001) - self.assertEqual(Mark(user, book1).visibility, 1) + self.assertEqual(Mark(user.identity, book1).visibility, 1) self.assertEqual(shelf_manager.get_log_for_item(book1).count(), 5) - # test silence mark mode -> no log - shelf_manager.move_item(book1, ShelfType.WISHLIST, silence=True) - self.assertEqual(log.count(), 5) - shelf_manager.move_item(book1, ShelfType.PROGRESS, silence=True) - self.assertEqual(log.count(), 5) - # test delete one log - first_log = log.first() - Mark(user, book1).delete_log(first_log.id) - self.assertEqual(log.count(), 4) - # # test delete mark -> leave one log: 移除标记 - # Mark(user, book1).delete() - # self.assertEqual(log.count(), 1) - # # test delete all logs - # shelf_manager.move_item(book1, ShelfType.PROGRESS) - # self.assertEqual(log.count(), 2) - # Mark(user, book1).delete(silence=True) - # self.assertEqual(log.count(), 0) + # test delete mark -> one more log + Mark(user.identity, book1).delete() + self.assertEqual(log.count(), 6) class TagTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") - self.movie1 = Edition.objects.create(title="Hyperion, The Movie") - self.user1 = User.register(mastodon_site="site", mastodon_username="name") - self.user2 = User.register(mastodon_site="site2", mastodon_username="name2") - self.user3 = User.register(mastodon_site="site2", mastodon_username="name3") + self.movie1 = Edition.objects.create(title="Fight Club") + self.user1 = User.register(email="a@b.com", username="user") + self.user2 = User.register(email="x@b.com", username="user2") + self.user3 = User.register(email="y@b.com", username="user3") pass def test_user_tag(self): t1 = "tag 1" t2 = "tag 2" t3 = "tag 3" - TagManager.tag_item_by_user(self.book1, self.user2, [t1, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t1, t3]) self.assertEqual(self.book1.tags, [t1, t3]) - TagManager.tag_item_by_user(self.book1, self.user2, [t2, t3]) + TagManager.tag_item(self.book1, self.user2.identity, [t2, t3]) self.assertEqual(self.book1.tags, [t2, t3]) class MarkTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") - 
self.user1 = User.register(mastodon_site="site", mastodon_username="name") + self.user1 = User.register(email="a@b.com", username="user") pref = self.user1.preference pref.default_visibility = 2 pref.save() def test_mark(self): - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, None) self.assertEqual(mark.shelf_label, None) self.assertEqual(mark.comment_text, None) @@ -157,7 +156,7 @@ class MarkTest(TestCase): self.assertEqual(mark.tags, []) mark.update(ShelfType.WISHLIST, "a gentle comment", 9, 1) - mark = Mark(self.user1, self.book1) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.shelf_type, ShelfType.WISHLIST) self.assertEqual(mark.shelf_label, "想读的书") self.assertEqual(mark.comment_text, "a gentle comment") @@ -166,10 +165,17 @@ class MarkTest(TestCase): self.assertEqual(mark.review, None) self.assertEqual(mark.tags, []) - review = Review.review_item_by_user(self.book1, self.user1, "Critic", "Review") - mark = Mark(self.user1, self.book1) + def test_review(self): + review = Review.update_item_review( + self.book1, self.user1.identity, "Critic", "Review" + ) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.review, review) + Review.update_item_review(self.book1, self.user1.identity, None, None) + mark = Mark(self.user1.identity, self.book1) + self.assertIsNone(mark.review) - TagManager.tag_item_by_user(self.book1, self.user1, [" Sci-Fi ", " fic "]) - mark = Mark(self.user1, self.book1) + def test_tag(self): + TagManager.tag_item(self.book1, self.user1.identity, [" Sci-Fi ", " fic "]) + mark = Mark(self.user1.identity, self.book1) self.assertEqual(mark.tags, ["Sci-Fi", "fic"]) diff --git a/journal/views/collection.py b/journal/views/collection.py index 6519498e..b89d834f 100644 --- a/journal/views/collection.py +++ b/journal/views/collection.py @@ -1,28 +1,28 @@ from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied -from django.http import Http404, HttpResponse, HttpResponseRedirect +from django.http import Http404, HttpRequest, HttpResponse, HttpResponseRedirect from django.shortcuts import get_object_or_404, redirect, render from django.urls import reverse from django.utils import timezone from django.utils.translation import gettext_lazy as _ -from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from journal.models.renderers import convert_leading_space_in_md +from catalog.models import Item +from common.utils import AuthedHttpRequest, get_uuid_or_404 from mastodon.api import share_collection from users.models import User +from users.models.apidentity import APIdentity from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_relogin +from .common import render_relogin, target_identity_required @login_required -def add_to_collection(request, item_uuid): +def add_to_collection(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if request.method == "GET": - collections = Collection.objects.filter(owner=request.user) + collections = Collection.objects.filter(owner=request.user.identity) return render( request, "add_to_collection.html", @@ -35,14 +35,14 @@ def add_to_collection(request, item_uuid): cid = int(request.POST.get("collection_id", default=0)) if not cid: cid = Collection.objects.create( - owner=request.user, 
title=f"{request.user.display_name}的收藏单" + owner=request.user.identity, title=f"{request.user.display_name}的收藏单" ).id - collection = Collection.objects.get(owner=request.user, id=cid) + collection = Collection.objects.get(owner=request.user.identity, id=cid) collection.append_item(item, note=request.POST.get("note")) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) -def collection_retrieve(request, collection_uuid): +def collection_retrieve(request: AuthedHttpRequest, collection_uuid): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() @@ -53,19 +53,19 @@ def collection_retrieve(request, collection_uuid): else False ) featured_since = ( - collection.featured_by_user_since(request.user) + collection.featured_since(request.user.identity) if request.user.is_authenticated else None ) available_as_featured = ( request.user.is_authenticated - and (following or request.user == collection.owner) + and (following or request.user.identity == collection.owner) and not featured_since and collection.members.all().exists() ) stats = {} if featured_since: - stats = collection.get_stats_for_user(request.user) + stats = collection.get_stats(request.user.identity) stats["wishlist_deg"] = ( round(stats["wishlist"] / stats["total"] * 360) if stats["total"] else 0 ) @@ -90,33 +90,35 @@ def collection_retrieve(request, collection_uuid): @login_required -def collection_add_featured(request, collection_uuid): +def collection_add_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() - FeaturedCollection.objects.update_or_create(owner=request.user, target=collection) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + FeaturedCollection.objects.update_or_create( + owner=request.user.identity, target=collection + ) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_remove_featured(request, collection_uuid): +def collection_remove_featured(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() fc = FeaturedCollection.objects.filter( - owner=request.user, target=collection + owner=request.user.identity, target=collection ).first() if fc: fc.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) @login_required -def collection_share(request, collection_uuid): +def collection_share(request: AuthedHttpRequest, collection_uuid): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -130,14 +132,16 @@ def collection_share(request, collection_uuid): visibility = int(request.POST.get("visibility", default=0)) comment = request.POST.get("comment") if share_collection(collection, comment, request.user, visibility): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: return render_relogin(request) else: raise BadRequest() -def collection_retrieve_items(request, 
collection_uuid, edit=False, msg=None): +def collection_retrieve_items( + request: AuthedHttpRequest, collection_uuid, edit=False, msg=None +): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_visible_to(request.user): raise PermissionDenied() @@ -155,7 +159,7 @@ def collection_retrieve_items(request, collection_uuid, edit=False, msg=None): @login_required -def collection_append_item(request, collection_uuid): +def collection_append_item(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -175,7 +179,7 @@ def collection_append_item(request, collection_uuid): @login_required -def collection_remove_item(request, collection_uuid, item_uuid): +def collection_remove_item(request: AuthedHttpRequest, collection_uuid, item_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -187,7 +191,9 @@ def collection_remove_item(request, collection_uuid, item_uuid): @login_required -def collection_move_item(request, direction, collection_uuid, item_uuid): +def collection_move_item( + request: AuthedHttpRequest, direction, collection_uuid, item_uuid +): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -202,7 +208,7 @@ def collection_move_item(request, direction, collection_uuid, item_uuid): @login_required -def collection_update_member_order(request, collection_uuid): +def collection_update_member_order(request: AuthedHttpRequest, collection_uuid): if request.method != "POST": raise BadRequest() collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) @@ -217,7 +223,7 @@ def collection_update_member_order(request, collection_uuid): @login_required -def collection_update_item_note(request, collection_uuid, item_uuid): +def collection_update_item_note(request: AuthedHttpRequest, collection_uuid, item_uuid): collection = get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if not collection.is_editable_by(request.user): raise PermissionDenied() @@ -241,7 +247,7 @@ def collection_update_item_note(request, collection_uuid, item_uuid): @login_required -def collection_edit(request, collection_uuid=None): +def collection_edit(request: AuthedHttpRequest, collection_uuid=None): collection = ( get_object_or_404(Collection, uid=get_uuid_or_404(collection_uuid)) if collection_uuid @@ -259,7 +265,7 @@ def collection_edit(request, collection_uuid=None): { "form": form, "collection": collection, - "user": collection.owner if collection else request.user, + "user": collection.owner.user if collection else request.user, }, ) elif request.method == "POST": @@ -270,7 +276,7 @@ def collection_edit(request, collection_uuid=None): ) if form.is_valid(): if not collection: - form.instance.owner = request.user + form.instance.owner = request.user.identity form.instance.edited_time = timezone.now() form.save() return redirect( @@ -283,47 +289,34 @@ def collection_edit(request, collection_uuid=None): @login_required -def user_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = Collection.objects.filter(owner=user) - if user != 
request.user: - if request.user.is_following(user): - collections = collections.filter(visibility__in=[0, 1]) - else: - collections = collections.filter(visibility=0) +@target_identity_required +def user_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(owner=target).filter( + q_owned_piece_visible_to_user(request.user, target) + ) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, "collections": collections, }, ) @login_required -def user_liked_collection_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - collections = Collection.objects.filter(likes__owner=user) - if user != request.user: - collections = collections.filter(query_visible(request.user)) +@target_identity_required +def user_liked_collection_list(request: AuthedHttpRequest, user_name): + target = request.target_identity + collections = Collection.objects.filter(likes__owner=target) + if target.user != request.user: + collections = collections.filter(q_piece_visible_to_user(request.user)) return render( request, "user_collection_list.html", { - "user": user, + "user": target.user, "collections": collections, "liked": True, }, diff --git a/journal/views/common.py b/journal/views/common.py index cb36aa36..38e4178d 100644 --- a/journal/views/common.py +++ b/journal/views/common.py @@ -1,3 +1,5 @@ +import functools + from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied from django.core.paginator import Paginator @@ -6,8 +8,8 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 -from users.models import User +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 +from users.models import APIdentity from users.views import render_user_blocked, render_user_not_found from ..forms import * @@ -16,6 +18,25 @@ from ..models import * PAGE_SIZE = 10 +def target_identity_required(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + request = kwargs["request"] + handler = kwargs["user_name"] + try: + target = APIdentity.get_by_handler(handler) + except: + return render_user_not_found(request) + if not target.is_visible_to_user(request.user): + return render_user_blocked(request) + request.target_identity = target + # request.identity = ( + # request.user.identity if request.user.is_authenticated else None + # ) + + return wrapper + + def render_relogin(request): return render( request, @@ -41,42 +62,45 @@ def render_list_not_found(request): ) +@login_required +@target_identity_required def render_list( - request, user_name, type, shelf_type=None, item_category=None, tag_title=None + request: AuthedHttpRequest, + user_name, + type, + shelf_type=None, + item_category=None, + tag_title=None, ): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) + target = request.target_identity + viewer = request.user.identity tag = None if type == "mark": - queryset = 
user.shelf_manager.get_latest_members(shelf_type, item_category) + queryset = target.user.shelf_manager.get_latest_members( + shelf_type, item_category + ) elif type == "tagmember": - tag = Tag.objects.filter(owner=user, title=tag_title).first() + tag = Tag.objects.filter(owner=target, title=tag_title).first() if not tag: return render_list_not_found(request) - if tag.visibility != 0 and user != request.user: + if tag.visibility != 0 and target != viewer: return render_list_not_found(request) queryset = TagMember.objects.filter(parent=tag) - elif type == "review": - queryset = Review.objects.filter(owner=user) - queryset = queryset.filter(query_item_category(item_category)) + elif type == "review" and item_category: + queryset = Review.objects.filter(q_item_in_category(item_category)) else: raise BadRequest() - queryset = queryset.filter(q_visible_to(request.user, user)).order_by( - "-created_time" - ) + queryset = queryset.filter( + q_owned_piece_visible_to_user(request.user, target) + ).order_by("-created_time") paginator = Paginator(queryset, PAGE_SIZE) - page_number = request.GET.get("page", default=1) + page_number = int(request.GET.get("page", default=1)) members = paginator.get_page(page_number) pagination = PageLinksGenerator(PAGE_SIZE, page_number, paginator.num_pages) return render( request, f"user_{type}_list.html", - {"user": user, "members": members, "tag": tag, "pagination": pagination}, + {"user": target.user, "members": members, "tag": tag, "pagination": pagination}, ) diff --git a/journal/views/mark.py b/journal/views/mark.py index b121e89d..1e24269c 100644 --- a/journal/views/mark.py +++ b/journal/views/mark.py @@ -12,17 +12,18 @@ from django.utils.dateparse import parse_datetime from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from mastodon.api import ( get_spoiler_text, get_status_id_by_url, get_visibility, post_toot, ) +from takahe.utils import Takahe from ..forms import * from ..models import * -from .common import render_list, render_relogin +from .common import render_list, render_relogin, target_identity_required _logger = logging.getLogger(__name__) PAGE_SIZE = 10 @@ -31,28 +32,29 @@ _checkmark = "✔️".encode("utf-8") @login_required -def wish(request, item_uuid): +def wish(request: AuthedHttpRequest, item_uuid): if request.method != "POST": raise BadRequest() item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item: raise Http404() - request.user.shelf_manager.move_item(item, ShelfType.WISHLIST) + request.user.identity.shelf_manager.move_item(item, ShelfType.WISHLIST) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) return HttpResponse(_checkmark) @login_required -def like(request, piece_uuid): +def like(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - Like.user_like_piece(request.user, piece) + if piece.post_id: + Takahe.like_post(piece.post_id, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -68,15 +70,16 @@ def like(request, 
piece_uuid): @login_required -def unlike(request, piece_uuid): +def unlike(request: AuthedHttpRequest, piece_uuid): if request.method != "POST": raise BadRequest() piece = get_object_or_404(Piece, uid=get_uuid_or_404(piece_uuid)) if not piece: raise Http404() - Like.user_unlike_piece(request.user, piece) + if piece.post_id: + Takahe.unlike_post(piece.post_id, request.user.identity.pk) if request.GET.get("back"): - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) elif request.GET.get("stats"): return render( request, @@ -92,11 +95,11 @@ def unlike(request, piece_uuid): @login_required -def mark(request, item_uuid): +def mark(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "GET": - tags = TagManager.get_item_tags_by_user(item, request.user) + tags = request.user.identity.tag_manager.get_item_tags(item) shelf_types = [ (n[1], n[2]) for n in iter(ShelfTypeNames) if n[0] == item.category ] @@ -115,15 +118,8 @@ def mark(request, item_uuid): ) elif request.method == "POST": if request.POST.get("delete", default=False): - silence = request.POST.get("silence", False) - mark.delete(silence=silence) - if ( - silence - ): # this means the mark is deleted from mark_history, thus redirect to item page - return redirect( - reverse("catalog:retrieve", args=[item.url_path, item.uuid]) - ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + mark.delete() + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) else: visibility = int(request.POST.get("visibility", default=0)) rating_grade = request.POST.get("rating_grade", default=0) @@ -143,7 +139,7 @@ def mark(request, item_uuid): ) if mark_date and mark_date >= timezone.now(): mark_date = None - TagManager.tag_item_by_user(item, request.user, tags, visibility) + TagManager.tag_item(item, request.user.identity, tags, visibility) try: mark.update( status, @@ -167,7 +163,7 @@ def mark(request, item_uuid): "secondary_msg": err, }, ) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() @@ -202,12 +198,12 @@ def share_comment(user, item, text, visibility, shared_link=None, position=None) @login_required -def mark_log(request, item_uuid, log_id): +def mark_log(request: AuthedHttpRequest, item_uuid, log_id): """ Delete log of one item by log id. 
""" item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - mark = Mark(request.user, item) + mark = Mark(request.user.identity, item) if request.method == "POST": if request.GET.get("delete", default=False): if log_id: @@ -219,7 +215,7 @@ def mark_log(request, item_uuid, log_id): @login_required -def comment(request, item_uuid): +def comment(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) if not item.class_name in ["podcastepisode", "tvepisode"]: raise BadRequest("不支持评论此类型的条目") @@ -246,7 +242,7 @@ def comment(request, item_uuid): if not comment: raise Http404() comment.delete() - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) visibility = int(request.POST.get("visibility", default=0)) text = request.POST.get("text") position = None @@ -302,12 +298,11 @@ def comment(request, item_uuid): # ) if post_error: return render_relogin(request) - return HttpResponseRedirect(request.META.get("HTTP_REFERER")) + return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/")) raise BadRequest() -@login_required -def user_mark_list(request, user_name, shelf_type, item_category): +def user_mark_list(request: AuthedHttpRequest, user_name, shelf_type, item_category): return render_list( request, user_name, "mark", shelf_type=shelf_type, item_category=item_category ) diff --git a/journal/views/profile.py b/journal/views/profile.py index 04876050..7f6f6dfc 100644 --- a/journal/views/profile.py +++ b/journal/views/profile.py @@ -6,30 +6,32 @@ from django.utils.translation import gettext_lazy as _ from user_messages import api as msg from catalog.models import * -from users.models import User +from common.utils import AuthedHttpRequest +from users.models import APIdentity, User from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required -def profile(request, user_name): +@target_identity_required +def profile(request: AuthedHttpRequest, user_name): if request.method != "GET": raise BadRequest() - user = User.get(user_name, case_sensitive=True) - if user is None or not user.is_active: - return render_user_not_found(request) - if user.mastodon_acct != user_name and user.username != user_name: - return redirect(user.url) - if not request.user.is_authenticated and user.preference.no_anonymous_view: - return render(request, "users/home_anonymous.html", {"user": user}) - if user != request.user and ( - user.is_blocked_by(request.user) or user.is_blocking(request.user) + target = request.target_identity + # if user.mastodon_acct != user_name and user.username != user_name: + # return redirect(user.url) + if not request.user.is_authenticated and target.preference.no_anonymous_view: + return render(request, "users/home_anonymous.html", {"user": target.user}) + me = target.user == request.user + if not me and ( + target.is_blocked_by(request.user.identity) + or target.is_blocking(request.user.identity) ): return render_user_blocked(request) - qv = q_visible_to(request.user, user) + qv = q_owned_piece_visible_to_user(request.user, target) shelf_list = {} visbile_categories = [ ItemCategory.Book, @@ -43,9 +45,9 @@ def profile(request, user_name): for category in visbile_categories: shelf_list[category] = {} for shelf_type in ShelfType: - label = user.shelf_manager.get_label(shelf_type, category) + label = 
target.shelf_manager.get_label(shelf_type, category) if label is not None: - members = user.shelf_manager.get_latest_members( + members = target.shelf_manager.get_latest_members( shelf_type, category ).filter(qv) shelf_list[category][shelf_type] = { @@ -53,35 +55,32 @@ def profile(request, user_name): "count": members.count(), "members": members[:10].prefetch_related("item"), } - reviews = ( - Review.objects.filter(owner=user) - .filter(qv) - .filter(query_item_category(category)) - .order_by("-created_time") + reviews = Review.objects.filter(q_item_in_category(category)).order_by( + "-created_time" ) shelf_list[category]["reviewed"] = { "title": "评论过的" + category.label, "count": reviews.count(), "members": reviews[:10].prefetch_related("item"), } - collections = ( - Collection.objects.filter(owner=user).filter(qv).order_by("-created_time") - ) + collections = Collection.objects.filter(qv).order_by("-created_time") liked_collections = ( - Like.user_likes_by_class(user, Collection) + Like.user_likes_by_class(target, Collection) .order_by("-edited_time") .values_list("target_id", flat=True) ) - if user != request.user: - liked_collections = liked_collections.filter(query_visible(request.user)) - top_tags = user.tag_manager.public_tags[:10] + if not me: + liked_collections = liked_collections.filter( + q_piece_visible_to_user(request.user) + ) + top_tags = target.tag_manager.public_tags[:10] else: - top_tags = user.tag_manager.all_tags[:10] + top_tags = target.tag_manager.all_tags[:10] return render( request, "profile.html", { - "user": user, + "user": target.user, "top_tags": top_tags, "shelf_list": shelf_list, "collections": collections[:10], @@ -91,7 +90,7 @@ def profile(request, user_name): for i in liked_collections.order_by("-edited_time")[:10] ], "liked_collections_count": liked_collections.count(), - "layout": user.preference.profile_layout, + "layout": target.preference.profile_layout, }, ) @@ -102,7 +101,7 @@ def user_calendar_data(request, user_name): user = User.get(user_name) if user is None or not request.user.is_authenticated: return HttpResponse("") - max_visiblity = max_visiblity_to(request.user, user) + max_visiblity = max_visiblity_to_user(request.user, user.identity) calendar_data = user.shelf_manager.get_calendar_data(max_visiblity) return render( request, diff --git a/journal/views/review.py b/journal/views/review.py index 52904779..adce1029 100644 --- a/journal/views/review.py +++ b/journal/views/review.py @@ -12,9 +12,11 @@ from django.utils.dateparse import parse_datetime from django.utils.translation import gettext_lazy as _ from catalog.models import * -from common.utils import PageLinksGenerator, get_uuid_or_404 +from common.utils import AuthedHttpRequest, PageLinksGenerator, get_uuid_or_404 from journal.models.renderers import convert_leading_space_in_md, render_md +from mastodon.api import share_review from users.models import User +from users.models.apidentity import APIdentity from ..forms import * from ..models import * @@ -32,7 +34,7 @@ def review_retrieve(request, review_uuid): @login_required -def review_edit(request, item_uuid, review_uuid=None): +def review_edit(request: AuthedHttpRequest, item_uuid, review_uuid=None): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) review = ( get_object_or_404(Review, uid=get_uuid_or_404(review_uuid)) @@ -65,24 +67,28 @@ def review_edit(request, item_uuid, review_uuid=None): if form.is_valid(): mark_date = None if request.POST.get("mark_anotherday"): - dt = parse_datetime(request.POST.get("mark_date") + " 
20:00:00") + dt = parse_datetime(request.POST.get("mark_date", "") + " 20:00:00") mark_date = ( dt.replace(tzinfo=timezone.get_current_timezone()) if dt else None ) body = form.instance.body if request.POST.get("leading_space"): body = convert_leading_space_in_md(body) - review = Review.review_item_by_user( + review = Review.update_item_review( item, - request.user, + request.user.identity, form.cleaned_data["title"], body, form.cleaned_data["visibility"], mark_date, - form.cleaned_data["share_to_mastodon"], ) if not review: raise BadRequest() + if ( + form.cleaned_data["share_to_mastodon"] + and request.user.mastodon_username + ): + share_review(review) return redirect(reverse("journal:review_retrieve", args=[review.uuid])) else: raise BadRequest() @@ -90,7 +96,6 @@ def review_edit(request, item_uuid, review_uuid=None): raise BadRequest() -@login_required def user_review_list(request, user_name, item_category): return render_list(request, user_name, "review", item_category=item_category) @@ -100,16 +105,16 @@ MAX_ITEM_PER_TYPE = 10 class ReviewFeed(Feed): def get_object(self, request, id): - return User.get(id) + return APIdentity.get_by_handler(id) - def title(self, user): - return "%s的评论" % user.display_name if user else "无效链接" + def title(self, owner): + return "%s的评论" % owner.display_name if owner else "无效链接" - def link(self, user): - return user.url if user else settings.SITE_INFO["site_url"] + def link(self, owner): + return owner.url if owner else settings.SITE_INFO["site_url"] - def description(self, user): - return "%s的评论合集 - NeoDB" % user.display_name if user else "无效链接" + def description(self, owner): + return "%s的评论合集 - NeoDB" % owner.display_name if owner else "无效链接" def items(self, user): if user is None or user.preference.no_anonymous_view: diff --git a/journal/views/tag.py b/journal/views/tag.py index b2847349..c9f1239f 100644 --- a/journal/views/tag.py +++ b/journal/views/tag.py @@ -13,29 +13,24 @@ from users.views import render_user_blocked, render_user_not_found from ..forms import * from ..models import * -from .common import render_list +from .common import render_list, target_identity_required PAGE_SIZE = 10 @login_required +@target_identity_required def user_tag_list(request, user_name): - user = User.get(user_name) - if user is None: - return render_user_not_found(request) - if user != request.user and ( - request.user.is_blocked_by(user) or request.user.is_blocking(user) - ): - return render_user_blocked(request) - tags = Tag.objects.filter(owner=user) - if user != request.user: + target = request.target + tags = Tag.objects.filter(owner=target) + if target.user != request.user: tags = tags.filter(visibility=0) tags = tags.values("title").annotate(total=Count("members")).order_by("-total") return render( request, "user_tag_list.html", { - "user": user, + "user": target.user, "tags": tags, }, ) @@ -47,7 +42,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.GET.get("tag", ""), replace=False) if not tag_title: raise Http404() - tag = Tag.objects.filter(owner=request.user, title=tag_title).first() + tag = Tag.objects.filter(owner=request.user.identity, title=tag_title).first() if not tag: raise Http404() return render(request, "tag_edit.html", {"tag": tag}) @@ -55,7 +50,7 @@ def user_tag_edit(request): tag_title = Tag.cleanup_title(request.POST.get("title", ""), replace=False) tag_id = request.POST.get("id") tag = ( - Tag.objects.filter(owner=request.user, id=tag_id).first() + Tag.objects.filter(owner=request.user.identity, id=tag_id).first() if 
tag_id else None ) @@ -70,7 +65,9 @@ def user_tag_edit(request): ) elif ( tag_title != tag.title - and Tag.objects.filter(owner=request.user, title=tag_title).exists() + and Tag.objects.filter( + owner=request.user.identity, title=tag_title + ).exists() ): msg.error(request.user, _("标签已存在")) return HttpResponseRedirect(request.META.get("HTTP_REFERER")) @@ -88,6 +85,5 @@ def user_tag_edit(request): raise BadRequest() -@login_required def user_tag_member_list(request, user_name, tag_title): return render_list(request, user_name, "tagmember", tag_title=tag_title) diff --git a/mastodon/api.py b/mastodon/api.py index 0a43e048..0f5f1d69 100644 --- a/mastodon/api.py +++ b/mastodon/api.py @@ -1,5 +1,5 @@ import functools -import logging +import html import random import re import string @@ -193,7 +193,7 @@ def detect_server_info(login_domain): try: response = get(url, headers={"User-Agent": USER_AGENT}) except Exception as e: - logger.error(f"Error connecting {login_domain} {e}") + logger.error(f"Error connecting {login_domain}: {e}") raise Exception(f"无法连接 {login_domain}") if response.status_code != 200: logger.error(f"Error connecting {login_domain}: {response.status_code}") @@ -363,7 +363,7 @@ def get_visibility(visibility, user): def share_mark(mark): from catalog.common import ItemCategory - user = mark.owner + user = mark.owner.user if mark.visibility == 2: visibility = TootVisibilityEnum.DIRECT elif mark.visibility == 1: @@ -466,10 +466,10 @@ def share_collection(collection, comment, user, visibility_no): ) user_str = ( "我" - if user == collection.owner + if user == collection.owner.user else ( - " @" + collection.owner.mastodon_acct + " " - if collection.owner.mastodon_acct + " @" + collection.owner.user.mastodon_acct + " " + if collection.owner.user.mastodon_acct else " " + collection.owner.username + " " ) ) diff --git a/pyproject.toml b/pyproject.toml index b242d65f..e48493aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.pyright] -exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/sites/douban_*" ] +exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*" ] [tool.djlint] ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031" diff --git a/requirements-dev.txt b/requirements-dev.txt index b146c739..513e98fe 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,5 +4,6 @@ django-debug-toolbar django-stubs djlint~=1.32.1 isort~=5.12.0 +lxml-stubs pre-commit pyright==1.1.322 diff --git a/requirements.txt b/requirements.txt index 98d75b85..87127e33 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,8 @@ +cachetools dateparser discord.py django~=4.2.4 django-anymail -django-auditlog django-auditlog @ git+https://github.com/jazzband/django-auditlog.git@45591463e8192b4ac0095e259cc4dcea0ac2fd6c django-bleach django-compressor @@ -25,6 +25,7 @@ easy-thumbnails filetype fontawesomefree gunicorn +httpx igdb-api-v4 libsass listparser @@ -41,3 +42,4 @@ rq>=1.12.0 setproctitle tqdm typesense +urlman diff --git a/social/migrations/0007_alter_localactivity_owner.py b/social/migrations/0007_alter_localactivity_owner.py new file mode 100644 index 00000000..f7e3176b --- /dev/null +++ b/social/migrations/0007_alter_localactivity_owner.py @@ -0,0 +1,22 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:26 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies 
= [ + ("users", "0012_apidentity"), + ("social", "0006_alter_localactivity_template"), + ] + + operations = [ + migrations.AlterField( + model_name="localactivity", + name="owner", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to="users.apidentity" + ), + ), + ] diff --git a/social/models.py b/social/models.py index f0e4190e..c89197e8 100644 --- a/social/models.py +++ b/social/models.py @@ -27,7 +27,7 @@ from journal.models import ( ShelfMember, UserOwnedObjectMixin, ) -from users.models import User +from users.models import APIdentity _logger = logging.getLogger(__name__) @@ -42,10 +42,8 @@ class ActivityTemplate(models.TextChoices): class LocalActivity(models.Model, UserOwnedObjectMixin): - owner = models.ForeignKey(User, on_delete=models.CASCADE) - visibility = models.PositiveSmallIntegerField( - default=0 - ) # 0: Public / 1: Follower only / 2: Self only + owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE) # type: ignore + visibility = models.PositiveSmallIntegerField(default=0) # type: ignore template = models.CharField( blank=False, choices=ActivityTemplate.choices, max_length=50 ) @@ -62,11 +60,11 @@ class LocalActivity(models.Model, UserOwnedObjectMixin): class ActivityManager: - def __init__(self, user): - self.owner = user + def __init__(self, owner: APIdentity): + self.owner = owner def get_timeline(self, before_time=None): - following = [x for x in self.owner.following if x not in self.owner.ignoring] + following = [x for x in self.owner.following if x not in self.owner.muting] q = Q(owner_id__in=following, visibility__lt=2) | Q(owner=self.owner) if before_time: q = q & Q(created_time__lt=before_time) @@ -205,5 +203,5 @@ class CommentChildItemProcessor(DefaultActivityProcessor): super().updated() -def reset_social_visibility_for_user(user: User, visibility: int): - LocalActivity.objects.filter(owner=user).update(visibility=visibility) +def reset_social_visibility_for_user(owner: APIdentity, visibility: int): + LocalActivity.objects.filter(owner=owner).update(visibility=visibility) diff --git a/social/templates/activity/comment_child_item.html b/social/templates/activity/comment_child_item.html index a1d92c68..c048b3d2 100644 --- a/social/templates/activity/comment_child_item.html +++ b/social/templates/activity/comment_child_item.html @@ -53,7 +53,7 @@ {% endif %} - +
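With LocalActivity.owner now pointing at APIdentity, ActivityManager is constructed from an identity and its get_timeline() excludes muted identities instead of relying on the old Mastodon follower/following lists. A minimal sketch of cursor-style paging over that timeline, assuming get_timeline() returns newest activities first (as the existing feed view implies); the helper name below is illustrative and not part of this diff:

from social.models import ActivityManager

PAGE_SIZE = 10

def next_feed_page(identity, last_seen_time=None):
    # get_timeline() adds a created_time__lt filter when before_time is given,
    # so passing the previous page's oldest created_time yields the next page.
    activities = list(
        ActivityManager(identity).get_timeline(before_time=last_seen_time)[:PAGE_SIZE]
    )
    cursor = activities[-1].created_time if activities else None
    return activities, cursor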
diff --git a/social/templates/activity/mark_item.html b/social/templates/activity/mark_item.html index 083ffd2b..8a52f79a 100644 --- a/social/templates/activity/mark_item.html +++ b/social/templates/activity/mark_item.html @@ -40,7 +40,7 @@ {% endif %} - +
diff --git a/social/templates/activity/review_item.html b/social/templates/activity/review_item.html index 277d0a83..3092cd71 100644 --- a/social/templates/activity/review_item.html +++ b/social/templates/activity/review_item.html @@ -33,7 +33,7 @@ {% endif %} - +
diff --git a/social/tests.py b/social/tests.py index 3d6093f2..b881977e 100644 --- a/social/tests.py +++ b/social/tests.py @@ -2,65 +2,86 @@ from django.test import TestCase from catalog.models import * from journal.models import * +from takahe.utils import Takahe from users.models import User from .models import * class SocialTest(TestCase): + databases = "__all__" + def setUp(self): self.book1 = Edition.objects.create(title="Hyperion") self.book2 = Edition.objects.create(title="Andymion") self.movie = Edition.objects.create(title="Fight Club") - self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice") - self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob") + self.alice = User.register( + username="Alice", mastodon_site="MySpace", mastodon_username="Alice" + ) + self.bob = User.register( + username="Bob", mastodon_site="KKCity", mastodon_username="Bob" + ) def test_timeline(self): + alice_feed = self.alice.identity.activity_manager + bob_feed = self.bob.identity.activity_manager + # alice see 0 activity in timeline in the beginning - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) + self.assertEqual(len(alice_feed.get_timeline()), 0) # 1 activity after adding first book to shelf - self.alice.shelf_manager.move_item(self.book1, ShelfType.WISHLIST, visibility=1) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 1) + self.alice.identity.shelf_manager.move_item( + self.book1, ShelfType.WISHLIST, visibility=1 + ) + self.assertEqual(len(alice_feed.get_timeline()), 1) # 2 activities after adding second book to shelf - self.alice.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book2, ShelfType.WISHLIST) + self.assertEqual(len(alice_feed.get_timeline()), 2) # 2 activities after change first mark - self.alice.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + self.alice.identity.shelf_manager.move_item(self.book1, ShelfType.PROGRESS) + self.assertEqual(len(alice_feed.get_timeline()), 2) # bob see 0 activity in timeline in the beginning - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 0) + self.assertEqual(len(bob_feed.get_timeline()), 0) # bob follows alice, see 2 activities - self.bob.mastodon_following = ["Alice@MySpace"] - self.alice.mastodon_follower = ["Bob@KKCity"] - self.bob.merge_relationships() - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # bob mute, then unmute alice + self.bob.identity.mute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + self.bob.identity.unmute(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) # alice:3 bob:2 after alice adding second book to shelf as private - self.alice.shelf_manager.move_item(self.movie, ShelfType.WISHLIST, visibility=2) - timeline = self.alice.activity_manager.get_timeline() - self.assertEqual(len(timeline), 3) - timeline2 = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline2), 2) + self.alice.identity.shelf_manager.move_item( + 
self.movie, ShelfType.WISHLIST, visibility=2 + ) + self.assertEqual(len(alice_feed.get_timeline()), 3) + self.assertEqual(len(bob_feed.get_timeline()), 2) - # remote unfollow - self.bob.mastodon_following = [] - self.alice.mastodon_follower = [] - self.bob.merge_relationships() - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 0) + # alice mute bob + self.alice.identity.mute(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) - # local follow - self.bob.follow(self.alice) - timeline = self.bob.activity_manager.get_timeline() - self.assertEqual(len(timeline), 2) + # bob unfollow alice + self.bob.identity.unfollow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) + + # bob follow alice + self.bob.identity.follow(self.alice.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 2) + + # alice block bob + self.alice.identity.block(self.bob.identity) + Takahe._force_state_cycle() + self.assertEqual(len(bob_feed.get_timeline()), 0) diff --git a/social/views.py b/social/views.py index 9a68b775..757cb2f4 100644 --- a/social/views.py +++ b/social/views.py @@ -1,7 +1,6 @@ import logging -from django.conf import settings -from django.contrib.auth.decorators import login_required, permission_required +from django.contrib.auth.decorators import login_required from django.core.exceptions import BadRequest from django.shortcuts import render from django.utils.translation import gettext_lazy as _ @@ -65,7 +64,7 @@ def data(request): request, "feed_data.html", { - "activities": ActivityManager(request.user).get_timeline( + "activities": ActivityManager(request.user.identity).get_timeline( before_time=request.GET.get("last") )[:PAGE_SIZE], }, diff --git a/takahe/__init__.py b/takahe/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/admin.py b/takahe/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/takahe/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
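The reworked social tests above show what any test touching takahe-backed relations needs after this split: declare databases = "__all__" so Django permits queries against both the default and takahe databases, and call Takahe._force_state_cycle() after follow/mute/block changes so pending state transitions are applied before asserting on timelines. A minimal, illustrative skeleton (the class and test names below are hypothetical, the calls are taken from the diff):

from django.test import TestCase

from takahe.utils import Takahe
from users.models import User

class TakaheBackedTest(TestCase):
    databases = "__all__"  # identities, follows and posts live in the separate "takahe" database

    def test_follow_then_empty_timeline(self):
        alice = User.register(
            username="Alice", mastodon_site="MySpace", mastodon_username="Alice"
        )
        bob = User.register(
            username="Bob", mastodon_site="KKCity", mastodon_username="Bob"
        )
        bob.identity.follow(alice.identity)
        Takahe._force_state_cycle()  # flush pending follow state before asserting
        # alice has posted nothing yet, so bob's timeline is still empty
        self.assertEqual(len(bob.identity.activity_manager.get_timeline()), 0)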
diff --git a/takahe/ap_handlers.py b/takahe/ap_handlers.py new file mode 100644 index 00000000..b4d0ee6a --- /dev/null +++ b/takahe/ap_handlers.py @@ -0,0 +1,123 @@ +from datetime import datetime + +from loguru import logger + +from catalog.common import * +from journal.models import Comment, Piece, Rating, Review, ShelfMember +from users.models import User as NeoUser + +from .models import Follow, Identity, Post +from .utils import Takahe + +_supported_ap_catalog_item_types = [ + "Edition", + "Movie", + "TVShow", + "TVSeason", + "TVEpisode", + "Album", + "Game", + "Podcast", + "Performance", + "PerformanceProduction", +] + +_supported_ap_journal_types = { + "Status": ShelfMember, + "Rating": Rating, + "Comment": Comment, + "Review": Review, +} + + +def _parse_links(objects): + logger.debug(f"Parsing links from {objects}") + items = [] + pieces = [] + for obj in objects: + if obj["type"] in _supported_ap_catalog_item_types: + items.append(obj["url"]) + elif obj["type"] in _supported_ap_journal_types.keys(): + pieces.append(obj) + else: + logger.warning(f'Unknown link type {obj["type"]}') + return items, pieces + + +def _get_or_create_item_by_ap_url(url): + logger.debug(f"Fetching item by ap from {url}") + site = SiteManager.get_site_by_url(url) + if not site: + return None + site.get_resource_ready() + item = site.get_item() + return item + + +def _get_visibility(post_visibility): + match post_visibility: + case 2: + return 1 + case 3: + return 2 + case _: + return 0 + + +def _update_or_create_post(pk, obj): + post = Post.objects.get(pk=pk) + owner = Takahe.get_or_create_apidentity(post.author) + if not post.type_data: + logger.warning(f"Post {post} has no type_data") + return + items, pieces = _parse_links(post.type_data["object"]["relatedWith"]) + logger.info(f"Post {post} has items {items} and pieces {pieces}") + if len(items) == 0: + logger.warning(f"Post {post} has no remote items") + return + elif len(items) > 1: + logger.warning(f"Post {post} has more than one remote item") + return + remote_url = items[0] + item = _get_or_create_item_by_ap_url(remote_url) + if not item: + logger.warning(f"Post {post} has no local item") + return + for p in pieces: + cls = _supported_ap_journal_types[p["type"]] + cls.update_by_ap_object(owner, item, p, pk, _get_visibility(post.visibility)) + + +def post_created(pk, obj): + _update_or_create_post(pk, obj) + + +def post_updated(pk, obj): + _update_or_create_post(pk, obj) + + +def post_deleted(pk, obj): + Piece.objects.filter(post_id=pk, local=False).delete() + + +def user_follow_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_following(u) + logger.info(f"User {u} following updated") + + +def user_mute_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + # Takahe.update_user_muting(u) + logger.info(f"User {u} muting updated") + + +def user_block_updated(source_identity_pk, target_identity_pk): + u = Takahe.get_local_user_by_identity(source_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") + u = Takahe.get_local_user_by_identity(target_identity_pk) + if u: + # Takahe.update_user_rejecting(u) + logger.info(f"User {u} rejecting updated") diff --git a/takahe/apps.py b/takahe/apps.py new file mode 100644 index 00000000..7d39fe99 --- /dev/null +++ b/takahe/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class TakaheConfig(AppConfig): + 
default_auto_field = "django.db.models.BigAutoField" + name = "takahe" diff --git a/takahe/db_routes.py b/takahe/db_routes.py new file mode 100644 index 00000000..09015105 --- /dev/null +++ b/takahe/db_routes.py @@ -0,0 +1,27 @@ +from django.conf import settings + +_is_testing = "testserver" in settings.ALLOWED_HOSTS + + +class TakaheRouter: + def db_for_read(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + + def db_for_write(self, model, **hints): + if model._meta.app_label == "takahe": + return "takahe" + return None + + def allow_relation(self, obj1, obj2, **hints): + # skip this check but please make sure + # not create relations between takahe models and other apps + if obj1._meta.app_label == "takahe" or obj2._meta.app_label == "takahe": + return obj1._meta.app_label == obj2._meta.app_label + return None + + def allow_migrate(self, db, app_label, model_name=None, **hints): + if app_label == "takahe" or db == "takahe": + return _is_testing and app_label == db + return None diff --git a/takahe/html.py b/takahe/html.py new file mode 100644 index 00000000..c598be2f --- /dev/null +++ b/takahe/html.py @@ -0,0 +1,379 @@ +import html +import re +from html.parser import HTMLParser + +from django.utils.safestring import mark_safe + + +class FediverseHtmlParser(HTMLParser): + """ + A custom HTML parser that only allows a certain tag subset and behaviour: + - br, p tags are passed through + - a tags are passed through if they're not hashtags or mentions + - Another set of tags are converted to p + + It also linkifies URLs, mentions, hashtags, and imagifies emoji. + """ + + REWRITE_TO_P = [ + "p", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "blockquote", + "pre", + "ul", + "ol", + ] + + REWRITE_TO_BR = [ + "br", + "li", + ] + + MENTION_REGEX = re.compile( + r"(^|[^\w\d\-_/])@([\w\d\-_]+(?:@[\w\d\-_\.]+[\w\d\-_]+)?)" + ) + + HASHTAG_REGEX = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + EMOJI_REGEX = re.compile(r"\B:([a-zA-Z0-9(_)-]+):\B") + + URL_REGEX = re.compile( + r"""(\(* # Match any opening parentheses. + \b(?"]*)?) 
+ # /path/zz (excluding "unsafe" chars from RFC 1738, + # except for # and ~, which happen in practice) + """, + re.IGNORECASE | re.VERBOSE | re.UNICODE, + ) + + def __init__( + self, + html: str, + uri_domain: str | None = None, + mentions: list | None = None, + find_mentions: bool = False, + find_hashtags: bool = False, + find_emojis: bool = False, + emoji_domain=None, + ): + super().__init__() + self.uri_domain = uri_domain + self.emoji_domain = emoji_domain + self.find_mentions = find_mentions + self.find_hashtags = find_hashtags + self.find_emojis = find_emojis + self.calculate_mentions(mentions) + self._data_buffer = "" + self.html_output = "" + self.text_output = "" + self.emojis: set[str] = set() + self.mentions: set[str] = set() + self.hashtags: set[str] = set() + self._pending_a: dict | None = None + self._fresh_p = False + self.feed(html.replace("\n", "")) + self.flush_data() + + def calculate_mentions(self, mentions: list | None): + """ + Prepares a set of content that we expect to see mentions look like + (this imp) + """ + self.mention_matches: dict[str, str] = {} + self.mention_aliases: dict[str, str] = {} + for mention in mentions or []: + if self.uri_domain: + url = mention.absolute_profile_uri() + else: + url = str(mention.urls.view) + if mention.username: + username = mention.username.lower() + domain = mention.domain_id.lower() + self.mention_matches[f"{username}"] = url + self.mention_matches[f"{username}@{domain}"] = url + self.mention_matches[mention.absolute_profile_uri()] = url + + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "
<p>
" + elif tag in self.REWRITE_TO_BR: + self.flush_data() + if not self._fresh_p: + self.html_output += "
" + self.text_output += "\n" + elif tag == "a": + self.flush_data() + self._pending_a = {"attrs": dict(attrs), "content": ""} + self._fresh_p = tag in self.REWRITE_TO_P + + def handle_endtag(self, tag: str) -> None: + self._fresh_p = False + if tag in self.REWRITE_TO_P: + self.flush_data() + self.html_output += "
</p>
" + self.text_output += "\n\n" + elif tag == "a": + if self._pending_a: + href = self._pending_a["attrs"].get("href") + content = self._pending_a["content"].strip() + has_ellipsis = "ellipsis" in self._pending_a["attrs"].get("class", "") + # Is it a mention? + if content.lower().lstrip("@") in self.mention_matches: + self.html_output += self.create_mention(content, href) + self.text_output += content + # Is it a hashtag? + elif self.HASHTAG_REGEX.match(content): + self.html_output += self.create_hashtag(content) + self.text_output += content + elif content: + # Shorten the link if we need to + self.html_output += self.create_link( + href, + content, + has_ellipsis=has_ellipsis, + ) + self.text_output += href + self._pending_a = None + + def handle_data(self, data: str) -> None: + self._fresh_p = False + if self._pending_a: + self._pending_a["content"] += data + else: + self._data_buffer += data + + def flush_data(self) -> None: + """ + We collect data segments until we encounter a tag we care about, + so we can treat #hashtag as #hashtag + """ + self.text_output += self._data_buffer + self.html_output += self.linkify(self._data_buffer) + self._data_buffer = "" + + def create_link(self, href, content, has_ellipsis=False): + """ + Generates a link, doing optional shortening. + + All return values from this function should be HTML-safe. + """ + looks_like_link = bool(self.URL_REGEX.match(content)) + if looks_like_link: + protocol, content = content.split("://", 1) + else: + protocol = "" + if (looks_like_link and len(content) > 30) or has_ellipsis: + return f'{html.escape(content[:30])}' + elif looks_like_link: + return f'{html.escape(content)}' + else: + return f'{html.escape(content)}' + + def create_mention(self, handle, href: str | None = None) -> str: + """ + Generates a mention link. Handle should have a leading @. + + All return values from this function should be HTML-safe + """ + handle = handle.lstrip("@") + if "@" in handle: + short_handle = handle.split("@", 1)[0] + else: + short_handle = handle + handle_hash = handle.lower() + short_hash = short_handle.lower() + self.mentions.add(handle_hash) + url = self.mention_matches.get(handle_hash) + # If we have a captured link out, use that as the actual resolver + if href and href in self.mention_matches: + url = self.mention_matches[href] + if url: + if short_hash not in self.mention_aliases: + self.mention_aliases[short_hash] = handle_hash + elif self.mention_aliases.get(short_hash) != handle_hash: + short_handle = handle + return f'@{html.escape(short_handle)}' + else: + return "@" + html.escape(handle) + + def create_hashtag(self, hashtag) -> str: + """ + Generates a hashtag link. Hashtag does not need to start with # + + All return values from this function should be HTML-safe + """ + hashtag = hashtag.lstrip("#") + self.hashtags.add(hashtag.lower()) + if self.uri_domain: + return f'' + else: + return f'' + + def create_emoji(self, shortcode) -> str: + """ + Generates an emoji tag + + All return values from this function should be HTML-safe + """ + from .models import Emoji + + emoji = Emoji.get_by_domain(shortcode, self.emoji_domain) + if emoji and emoji.is_usable: + self.emojis.add(shortcode) + return emoji.as_html() + return f":{shortcode}:" + + def linkify(self, data): + """ + Linkifies some content that is plaintext. + + Handles URLs first, then mentions. Note that this takes great care to + keep track of what is HTML and what needs to be escaped. 
+ """ + # Split the string by the URL regex so we know what to escape and what + # not to escape. + bits = self.URL_REGEX.split(data) + result = "" + # Even indices are data we should pass though, odd indices are links + for i, bit in enumerate(bits): + # A link! + if i % 2 == 1: + result += self.create_link(bit, bit) + # Not a link + elif self.mention_matches or self.find_mentions: + result += self.linkify_mentions(bit) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_mentions(self, data): + """ + Linkifies mentions + """ + bits = self.MENTION_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Mention content + if i % 3 == 2: + result += self.create_mention(bit) + # Not part of a mention (0) or mention preamble (1) + elif self.find_hashtags: + result += self.linkify_hashtags(bit) + elif self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + return result + + def linkify_hashtags(self, data): + """ + Linkifies hashtags + """ + bits = self.HASHTAG_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of a hashtag + if i % 2 == 0: + if self.find_emojis: + result += self.linkify_emoji(bit) + else: + result += html.escape(bit) + # Hashtag content + else: + result += self.create_hashtag(bit) + return result + + def linkify_emoji(self, data): + """ + Linkifies emoji + """ + bits = self.EMOJI_REGEX.split(data) + result = "" + for i, bit in enumerate(bits): + # Not part of an emoji + if i % 2 == 0: + result += html.escape(bit) + # Emoji content + else: + result += self.create_emoji(bit) + return result + + @property + def html(self): + return self.html_output.strip() + + @property + def plain_text(self): + return self.text_output.strip() + + +class ContentRenderer: + """ + Renders HTML for posts, identity fields, and more. + + The `local` parameter affects whether links are absolute (False) or relative (True) + """ + + def __init__(self, local: bool): + self.local = local + + def render_post(self, html: str, post) -> str: + """ + Given post HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + mentions=post.mentions.all(), + uri_domain=(None if self.local else post.author.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=post.author.domain, + ) + return mark_safe(parser.html) + + def render_identity_summary(self, html: str, identity) -> str: + """ + Given identity summary HTML, normalises it and renders it for presentation. + """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=True, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + return mark_safe(parser.html) + + def render_identity_data(self, html: str, identity, strip: bool = False) -> str: + """ + Given name/basic value HTML, normalises it and renders it for presentation. 
+ """ + if not html: + return "" + parser = FediverseHtmlParser( + html, + uri_domain=(None if self.local else identity.domain.uri_domain), + find_hashtags=False, + find_emojis=self.local, + emoji_domain=identity.domain, + ) + if strip: + return mark_safe(parser.html) + else: + return mark_safe(parser.html) diff --git a/takahe/management/commands/takahe.py b/takahe/management/commands/takahe.py new file mode 100644 index 00000000..fc555a28 --- /dev/null +++ b/takahe/management/commands/takahe.py @@ -0,0 +1,42 @@ +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db.models import Count, F +from loguru import logger +from tqdm import tqdm + +from catalog.common import * +from catalog.common.models import * +from catalog.models import * +from journal.models import Tag, update_journal_for_merged_item +from takahe.utils import * +from users.models import User as NeoUser + + +class Command(BaseCommand): + def add_arguments(self, parser): + parser.add_argument( + "--verbose", + action="store_true", + ) + parser.add_argument( + "--sync", + action="store_true", + ) + + def sync(self): + logger.info(f"Syncing domain...") + Takahe.get_domain() + logger.info(f"Syncing users...") + for u in tqdm(NeoUser.objects.filter(is_active=True, username__isnull=False)): + Takahe.init_identity_for_local_user(u) + # Takahe.update_user_following(u) + # Takahe.update_user_muting(u) + # Takahe.update_user_rejecting(u) + + def handle(self, *args, **options): + self.verbose = options["verbose"] + + if options["sync"]: + self.sync() + + self.stdout.write(self.style.SUCCESS(f"Done.")) diff --git a/takahe/migrations/0001_initial.py b/takahe/migrations/0001_initial.py new file mode 100644 index 00000000..66193237 --- /dev/null +++ b/takahe/migrations/0001_initial.py @@ -0,0 +1,489 @@ +# Generated by Django 4.2.4 on 2023-08-12 16:48 + +import django.db.models.deletion +import django.utils.timezone +from django.db import migrations, models + +import takahe.models + + +class Migration(migrations.Migration): + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="Domain", + fields=[ + ( + "domain", + models.CharField(max_length=250, primary_key=True, serialize=False), + ), + ( + "service_domain", + models.CharField( + blank=True, + db_index=True, + max_length=250, + null=True, + unique=True, + ), + ), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("nodeinfo", models.JSONField(blank=True, null=True)), + ("local", models.BooleanField()), + ("blocked", models.BooleanField(default=False)), + ("public", models.BooleanField(default=False)), + ("default", models.BooleanField(default=False)), + ("notes", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "users_domain", + }, + ), + migrations.CreateModel( + name="Emoji", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("shortcode", models.SlugField(max_length=100)), + ("local", models.BooleanField(default=True)), + ("public", models.BooleanField(null=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ("mimetype", models.CharField(max_length=200)), + ("file", models.ImageField(blank=True, null=True, upload_to="")), + ("remote_url", 
models.CharField(blank=True, max_length=500, null=True)), + ("category", models.CharField(blank=True, max_length=100, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="takahe.domain", + ), + ), + ], + options={ + "db_table": "activities_emoji", + }, + ), + migrations.CreateModel( + name="Hashtag", + fields=[ + ( + "hashtag", + models.SlugField(max_length=100, primary_key=True, serialize=False), + ), + ( + "name_override", + models.CharField(blank=True, max_length=100, null=True), + ), + ("public", models.BooleanField(null=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("stats_updated", models.DateTimeField(blank=True, null=True)), + ("aliases", models.JSONField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ], + options={ + "db_table": "activities_hashtag", + }, + ), + migrations.CreateModel( + name="Identity", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_identity, + primary_key=True, + serialize=False, + ), + ), + ("actor_uri", models.CharField(max_length=500, unique=True)), + ("state", models.CharField(default="outdated", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField(db_index=True)), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ("name", models.CharField(blank=True, max_length=500, null=True)), + ("summary", models.TextField(blank=True, null=True)), + ( + "manually_approves_followers", + models.BooleanField(blank=True, null=True), + ), + ("discoverable", models.BooleanField(default=True)), + ( + "profile_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("inbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "shared_inbox_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("outbox_uri", models.CharField(blank=True, max_length=500, null=True)), + ("icon_uri", models.CharField(blank=True, max_length=500, null=True)), + ("image_uri", models.CharField(blank=True, max_length=500, null=True)), + ( + "followers_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "following_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ( + "featured_collection_uri", + models.CharField(blank=True, max_length=500, null=True), + ), + ("actor_type", models.CharField(default="person", max_length=100)), + ("metadata", models.JSONField(blank=True, null=True)), + ("pinned", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ( + "restriction", + models.IntegerField( + choices=[(0, "None"), (1, "Limited"), (2, "Blocked")], + db_index=True, + default=0, + ), + ), + ("admin_notes", models.TextField(blank=True, null=True)), + ("private_key", models.TextField(blank=True, null=True)), + ("public_key", models.TextField(blank=True, null=True)), + ("public_key_id", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("fetched", models.DateTimeField(blank=True, null=True)), + ("deleted", models.DateTimeField(blank=True, 
null=True)), + ( + "domain", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.PROTECT, + related_name="identities", + to="takahe.domain", + ), + ), + ], + options={ + "verbose_name_plural": "identities", + "db_table": "users_identity", + }, + ), + migrations.CreateModel( + name="Post", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post, + primary_key=True, + serialize=False, + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("local", models.BooleanField()), + ( + "object_uri", + models.CharField( + blank=True, max_length=2048, null=True, unique=True + ), + ), + ( + "visibility", + models.IntegerField( + choices=[ + (0, "Public"), + (4, "Local Only"), + (1, "Unlisted"), + (2, "Followers"), + (3, "Mentioned"), + ], + default=0, + ), + ), + ("content", models.TextField()), + ( + "type", + models.CharField( + choices=[ + ("Article", "Article"), + ("Audio", "Audio"), + ("Event", "Event"), + ("Image", "Image"), + ("Note", "Note"), + ("Page", "Page"), + ("Question", "Question"), + ("Video", "Video"), + ], + default="Note", + max_length=20, + ), + ), + ("type_data", models.JSONField(blank=True, null=True)), + ("sensitive", models.BooleanField(default=False)), + ("summary", models.TextField(blank=True, null=True)), + ("url", models.CharField(blank=True, max_length=2048, null=True)), + ( + "in_reply_to", + models.CharField( + blank=True, db_index=True, max_length=500, null=True + ), + ), + ("hashtags", models.JSONField(blank=True, null=True)), + ("stats", models.JSONField(blank=True, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("edited", models.DateTimeField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "author", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="posts", + to="takahe.identity", + ), + ), + ( + "emojis", + models.ManyToManyField( + blank=True, related_name="posts_using_emoji", to="takahe.emoji" + ), + ), + ( + "mentions", + models.ManyToManyField( + blank=True, + related_name="posts_mentioning", + to="takahe.identity", + ), + ), + ( + "to", + models.ManyToManyField( + blank=True, related_name="posts_to", to="takahe.identity" + ), + ), + ], + options={ + "db_table": "activities_post", + }, + ), + migrations.CreateModel( + name="User", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("password", models.CharField(max_length=128, verbose_name="password")), + ( + "last_login", + models.DateTimeField( + blank=True, null=True, verbose_name="last login" + ), + ), + ("email", models.EmailField(max_length=254, unique=True)), + ("admin", models.BooleanField(default=False)), + ("moderator", models.BooleanField(default=False)), + ("banned", models.BooleanField(default=False)), + ("deleted", models.BooleanField(default=False)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ("last_seen", models.DateTimeField(auto_now_add=True)), + ], + options={ + "db_table": "users_user", + }, + ), + migrations.CreateModel( + name="PostInteraction", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_post_interaction, + primary_key=True, + serialize=False, + ), + ), + ("state", 
models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ( + "object_uri", + models.CharField( + blank=True, max_length=500, null=True, unique=True + ), + ), + ( + "type", + models.CharField( + choices=[ + ("like", "Like"), + ("boost", "Boost"), + ("vote", "Vote"), + ("pin", "Pin"), + ], + max_length=100, + ), + ), + ("value", models.CharField(blank=True, max_length=50, null=True)), + ("published", models.DateTimeField(default=django.utils.timezone.now)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "identity", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.identity", + ), + ), + ( + "post", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="interactions", + to="takahe.post", + ), + ), + ], + options={ + "db_table": "activities_postinteraction", + }, + ), + migrations.AddField( + model_name="identity", + name="users", + field=models.ManyToManyField( + blank=True, related_name="identities", to="takahe.user" + ), + ), + migrations.AddField( + model_name="domain", + name="users", + field=models.ManyToManyField( + blank=True, related_name="domains", to="takahe.user" + ), + ), + migrations.CreateModel( + name="Block", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("state", models.CharField(default="new", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("mute", models.BooleanField()), + ("include_notifications", models.BooleanField(default=False)), + ("expires", models.DateTimeField(blank=True, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_blocks", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_blocks", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_block", + }, + ), + migrations.AlterUniqueTogether( + name="identity", + unique_together={("username", "domain")}, + ), + migrations.CreateModel( + name="Follow", + fields=[ + ( + "id", + models.BigIntegerField( + default=takahe.models.Snowflake.generate_follow, + primary_key=True, + serialize=False, + ), + ), + ( + "boosts", + models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ), + ), + ("uri", models.CharField(blank=True, max_length=500, null=True)), + ("note", models.TextField(blank=True, null=True)), + ("state", models.CharField(default="unrequested", max_length=100)), + ("state_changed", models.DateTimeField(auto_now_add=True)), + ("created", models.DateTimeField(auto_now_add=True)), + ("updated", models.DateTimeField(auto_now=True)), + ( + "source", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="outbound_follows", + to="takahe.identity", + ), + ), + ( + "target", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="inbound_follows", + to="takahe.identity", + ), + ), + ], + options={ + "db_table": "users_follow", + "unique_together": {("source", "target")}, + }, + ), + ] diff --git 
a/takahe/migrations/__init__.py b/takahe/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/takahe/models.py b/takahe/models.py new file mode 100644 index 00000000..c9879c12 --- /dev/null +++ b/takahe/models.py @@ -0,0 +1,1395 @@ +import datetime +import re +import secrets +import ssl +import time +from datetime import date +from functools import cached_property, partial +from typing import TYPE_CHECKING, Literal, Optional +from urllib.parse import urlparse + +import httpx +import urlman +from cachetools import TTLCache, cached +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding, rsa +from django.conf import settings +from django.contrib.auth.models import AbstractBaseUser, BaseUserManager +from django.db import models, transaction +from django.template.defaultfilters import linebreaks_filter +from django.utils import timezone +from django.utils.safestring import mark_safe +from loguru import logger +from lxml import etree + +from .html import FediverseHtmlParser +from .uris import * + +if TYPE_CHECKING: + from django.db.models.manager import RelatedManager + + +class Snowflake: + """ + Snowflake ID generator and parser. + """ + + # Epoch is 2022/1/1 at midnight, as these are used for _created_ times in our + # own database, not original publish times (which would need an earlier one) + EPOCH = 1641020400 + + TYPE_POST = 0b000 + TYPE_POST_INTERACTION = 0b001 + TYPE_IDENTITY = 0b010 + TYPE_REPORT = 0b011 + TYPE_FOLLOW = 0b100 + + @classmethod + def generate(cls, type_id: int) -> int: + """ + Generates a snowflake-style ID for the given "type". They are designed + to fit inside 63 bits (a signed bigint) + + ID layout is: + * 41 bits of millisecond-level timestamp (enough for EPOCH + 69 years) + * 19 bits of random data (1% chance of clash at 10000 per millisecond) + * 3 bits of type information + + We use random data rather than a sequence ID to try and avoid pushing + this job onto the DB - we may do that in future. If a clash does + occur, the insert will fail and Stator will retry the work for anything + that's coming in remotely, leaving us to just handle that scenario for + our own posts, likes, etc. 
+ """ + # Get the current time in milliseconds + now: int = int((time.time() - cls.EPOCH) * 1000) + # Generate random data + rand_seq: int = secrets.randbits(19) + # Compose them together + return (now << 22) | (rand_seq << 3) | type_id + + @classmethod + def get_type(cls, snowflake: int) -> int: + """ + Returns the type of a given snowflake ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return snowflake & 0b111 + + @classmethod + def get_time(cls, snowflake: int) -> float: + """ + Returns the generation time (in UNIX timestamp seconds) of the ID + """ + if snowflake < (1 << 22): + raise ValueError("Not a valid Snowflake ID") + return ((snowflake >> 22) / 1000) + cls.EPOCH + + # Handy pre-baked methods for django model defaults + @classmethod + def generate_post(cls) -> int: + return cls.generate(cls.TYPE_POST) + + @classmethod + def generate_post_interaction(cls) -> int: + return cls.generate(cls.TYPE_POST_INTERACTION) + + @classmethod + def generate_identity(cls) -> int: + return cls.generate(cls.TYPE_IDENTITY) + + @classmethod + def generate_report(cls) -> int: + return cls.generate(cls.TYPE_REPORT) + + @classmethod + def generate_follow(cls) -> int: + return cls.generate(cls.TYPE_FOLLOW) + + +class RsaKeys: + @classmethod + def generate_keypair(cls) -> tuple[str, str]: + """ + Generates a new RSA keypair + """ + private_key = rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + ) + private_key_serialized = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ).decode("ascii") + public_key_serialized = ( + private_key.public_key() + .public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + .decode("ascii") + ) + return private_key_serialized, public_key_serialized + + +class User(AbstractBaseUser): + identities: "RelatedManager[Identity]" + + class Meta: + # managed = False + db_table = "users_user" + + email = models.EmailField(unique=True) + admin = models.BooleanField(default=False) + moderator = models.BooleanField(default=False) + banned = models.BooleanField(default=False) + deleted = models.BooleanField(default=False) + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + last_seen = models.DateTimeField(auto_now_add=True) + USERNAME_FIELD = "email" + EMAIL_FIELD = "email" + REQUIRED_FIELDS: list[str] = [] + + @property + def is_active(self): + return not (self.deleted or self.banned) + + @property + def is_superuser(self): + return self.admin + + @property + def is_staff(self): + return self.admin + + def has_module_perms(self, module): + return self.admin + + def has_perm(self, perm): + return self.admin + + # @cached_property + # def config_user(self) -> Config.UserOptions: + # return Config.load_user(self) + + +class Domain(models.Model): + """ + Represents a domain that a user can have an account on. + + For protocol reasons, if we want to allow custom usernames + per domain, each "display" domain (the one in the handle) must either let + us serve on it directly, or have a "service" domain that maps + to it uniquely that we can serve on that. + + That way, someone coming in with just an Actor URI as their + entrypoint can still try to webfinger preferredUsername@actorDomain + and we can return an appropriate response. + + It's possible to just have one domain do both jobs, of course. 
+ This model also represents _other_ servers' domains, which we treat as + display domains for now, until we start doing better probing. + """ + + domain = models.CharField(max_length=250, primary_key=True) + service_domain = models.CharField( + max_length=250, + null=True, + blank=True, + db_index=True, + unique=True, + ) + + # state = StateField(DomainStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # nodeinfo 2.0 detail about the remote server + nodeinfo = models.JSONField(null=True, blank=True) + + # If we own this domain + local = models.BooleanField() + + # If we have blocked this domain from interacting with us + blocked = models.BooleanField(default=False) + + # Domains can be joinable by any user of the instance (as the default one + # should) + public = models.BooleanField(default=False) + + # If this is the default domain (shown as the default entry for new users) + default = models.BooleanField(default=False) + + # Domains can also be linked to one or more users for their private use + # This should be display domains ONLY + users = models.ManyToManyField("takahe.User", related_name="domains", blank=True) + + # Free-form notes field for admins + notes = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class urls(urlman.Urls): + root = "/admin/domains/" + create = "/admin/domains/create/" + edit = "/admin/domains/{self.domain}/" + delete = "{edit}delete/" + root_federation = "/admin/federation/" + edit_federation = "/admin/federation/{self.domain}/" + + class Meta: + # managed = False + db_table = "users_domain" + indexes: list = [] + + @classmethod + def get_remote_domain(cls, domain: str) -> "Domain": + return cls.objects.get_or_create(domain=domain.lower(), local=False)[0] + + @classmethod + def get_domain(cls, domain: str) -> Optional["Domain"]: + try: + return cls.objects.get( + models.Q(domain=domain.lower()) + | models.Q(service_domain=domain.lower()) + ) + except cls.DoesNotExist: + return None + + @property + def uri_domain(self) -> str: + if self.service_domain: + return self.service_domain + return self.domain + + @classmethod + def available_for_user(cls, user): + """ + Returns domains that are available for the user to put an identity on + """ + return cls.objects.filter( + models.Q(public=True) | models.Q(users__id=user.id), + local=True, + ).order_by("-default", "domain") + + def __str__(self): + return self.domain + + +class Identity(models.Model): + """ + Represents both local and remote Fediverse identities (actors) + """ + + domain_id: int + + class Restriction(models.IntegerChoices): + none = 0 + limited = 1 + blocked = 2 + + ACTOR_TYPES = ["person", "service", "application", "group", "organization"] + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_identity) + + # The Actor URI is essentially also a PK - we keep the default numeric + # one around as well for making nice URLs etc. 
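The Domain helpers above are typically driven like this; a hedged sketch that only makes sense in a configured NeoDB shell, with an invented hostname:

```python
# "example.social" is an invented remote hostname.
d = Domain.get_domain("Example.Social")             # matches domain or service_domain, lowercased
if d is None:
    d = Domain.get_remote_domain("example.social")  # get_or_create with local=False
print(d.uri_domain)  # service_domain when set, otherwise the display domain
```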
+ actor_uri = models.CharField(max_length=500, unique=True) + + # state = StateField(IdentityStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + local = models.BooleanField(db_index=True) + users = models.ManyToManyField( + "takahe.User", + related_name="identities", + blank=True, + ) + + username = models.CharField(max_length=500, blank=True, null=True) + # Must be a display domain if present + domain = models.ForeignKey( + Domain, + blank=True, + null=True, + on_delete=models.PROTECT, + related_name="identities", + ) + + name = models.CharField(max_length=500, blank=True, null=True) + summary = models.TextField(blank=True, null=True) + manually_approves_followers = models.BooleanField(blank=True, null=True) + discoverable = models.BooleanField(default=True) + + profile_uri = models.CharField(max_length=500, blank=True, null=True) + inbox_uri = models.CharField(max_length=500, blank=True, null=True) + shared_inbox_uri = models.CharField(max_length=500, blank=True, null=True) + outbox_uri = models.CharField(max_length=500, blank=True, null=True) + icon_uri = models.CharField(max_length=500, blank=True, null=True) + image_uri = models.CharField(max_length=500, blank=True, null=True) + followers_uri = models.CharField(max_length=500, blank=True, null=True) + following_uri = models.CharField(max_length=500, blank=True, null=True) + featured_collection_uri = models.CharField(max_length=500, blank=True, null=True) + actor_type = models.CharField(max_length=100, default="person") + + # icon = models.ImageField( + # upload_to=partial(upload_namer, "profile_images"), blank=True, null=True + # ) + # image = models.ImageField( + # upload_to=partial(upload_namer, "background_images"), blank=True, null=True + # ) + + # Should be a list of {"name":..., "value":...} dicts + metadata = models.JSONField(blank=True, null=True) + + # Should be a list of object URIs (we don't want a full M2M here) + pinned = models.JSONField(blank=True, null=True) + + # Admin-only moderation fields + sensitive = models.BooleanField(default=False) + restriction = models.IntegerField( + choices=Restriction.choices, default=Restriction.none, db_index=True + ) + admin_notes = models.TextField(null=True, blank=True) + + private_key = models.TextField(null=True, blank=True) + public_key = models.TextField(null=True, blank=True) + public_key_id = models.TextField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + fetched = models.DateTimeField(null=True, blank=True) + deleted = models.DateTimeField(null=True, blank=True) + + # objects = IdentityManager() + + ### Model attributes ### + + class Meta: + # managed = False + db_table = "users_identity" + verbose_name_plural = "identities" + unique_together = [("username", "domain")] + indexes: list = [] # We need this so Stator can add its own + + class urls(urlman.Urls): + view = "/@{self.username}@{self.domain_id}/" + replies = "{view}replies/" + settings = "{view}settings/" + action = "{view}action/" + followers = "{view}followers/" + following = "{view}following/" + search = "{view}search/" + activate = "{view}activate/" + admin = "/admin/identities/" + admin_edit = "{admin}{self.pk}/" + djadmin_edit = "/djadmin/users/identity/{self.id}/change/" + + def get_scheme(self, url): + return "https" + + def get_hostname(self, url): + return self.instance.domain.uri_domain + + def __str__(self): + if self.username and self.domain: + 
return self.handle + return self.actor_uri + + def absolute_profile_uri(self): + """ + Returns a profile URI that is always absolute, for sending out to + other servers. + """ + if self.local: + return f"https://{self.domain.uri_domain}/@{self.username}/" + else: + return self.profile_uri + + @property + def handle(self): + if self.username is None: + return "(unknown user)" + if self.domain_id: + return f"{self.username}@{self.domain_id}" + return f"{self.username}@(unknown server)" + + @property + def user_pk(self): + user = self.users.first() + return user.pk if user else None + + @classmethod + def fetch_webfinger_url(cls, domain: str) -> str: + """ + Given a domain (hostname), returns the correct webfinger URL to use + based on probing host-meta. + """ + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + f"https://{domain}/.well-known/host-meta", + follow_redirects=True, + headers={"Accept": "application/xml"}, + ) + + # In the case of anything other than a success, we'll still try + # hitting the webfinger URL on the domain we were given to handle + # incorrectly setup servers. + if response.status_code == 200 and response.content.strip(): + tree = etree.fromstring(response.content) + template = tree.xpath( + "string(.//*[local-name() = 'Link' and @rel='lrdd' and (not(@type) or @type='application/jrd+json')]/@template)" + ) + if template: + return template # type: ignore + except (httpx.RequestError, etree.ParseError): + pass + + return f"https://{domain}/.well-known/webfinger?resource={{uri}}" + + @classmethod + def fetch_webfinger(cls, handle: str) -> tuple[str | None, str | None]: + """ + Given a username@domain handle, returns a tuple of + (actor uri, canonical handle) or None, None if it does not resolve. + """ + domain = handle.split("@")[1].lower() + try: + webfinger_url = cls.fetch_webfinger_url(domain) + except ssl.SSLCertVerificationError: + return None, None + + # Go make a Webfinger request + with httpx.Client( + timeout=settings.SETUP.REMOTE_TIMEOUT, + headers={"User-Agent": settings.TAKAHE_USER_AGENT}, + ) as client: + try: + response = client.get( + webfinger_url.format(uri=f"acct:{handle}"), + follow_redirects=True, + headers={"Accept": "application/json"}, + ) + response.raise_for_status() + except (httpx.HTTPError, ssl.SSLCertVerificationError) as ex: + response = getattr(ex, "response", None) + if ( + response + and response.status_code < 500 + and response.status_code not in [400, 401, 403, 404, 406, 410] + ): + raise ValueError( + f"Client error fetching webfinger: {response.status_code}", + response.content, + ) + return None, None + + try: + data = response.json() + except ValueError: + # Some servers return these with a 200 status code! + if b"not found" in response.content.lower(): + return None, None + raise ValueError( + "JSON parse error fetching webfinger", + response.content, + ) + try: + if data["subject"].startswith("acct:"): + data["subject"] = data["subject"][5:] + for link in data["links"]: + if ( + link.get("type") == "application/activity+json" + and link.get("rel") == "self" + ): + return link["href"], data["subject"] + except KeyError: + # Server returning wrong payload structure + pass + return None, None + + @classmethod + def by_username_and_domain( + cls, + username: str, + domain: str | Domain, + fetch: bool = False, + local: bool = False, + ): + """ + Get an Identity by username and domain. 
+ + When fetch is True, a failed lookup will do a webfinger lookup to attempt to do + a lookup by actor_uri, creating an Identity record if one does not exist. When + local is True, lookups will be restricted to local domains. + + If domain is a Domain, domain.local is used instead of passsed local. + + """ + if username.startswith("@"): + raise ValueError("Username must not start with @") + + domain_instance = None + + if isinstance(domain, Domain): + domain_instance = domain + local = domain.local + domain = domain.domain + else: + domain = domain.lower() + try: + if local: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + local=True, + ) + else: + return cls.objects.get( + username__iexact=username, + domain_id=domain, + ) + except cls.DoesNotExist: + if fetch and not local: + actor_uri, handle = cls.fetch_webfinger(f"{username}@{domain}") + if handle is None: + return None + # See if this actually does match an existing actor + try: + return cls.objects.get(actor_uri=actor_uri) + except cls.DoesNotExist: + pass + # OK, make one + username, domain = handle.split("@") + if not domain_instance: + domain_instance = Domain.get_remote_domain(domain) + return cls.objects.create( + actor_uri=actor_uri, + username=username, + domain_id=domain_instance, + local=False, + ) + return None + + def generate_keypair(self): + if not self.local: + raise ValueError("Cannot generate keypair for remote user") + self.private_key, self.public_key = RsaKeys.generate_keypair() + self.public_key_id = self.actor_uri + "#main-key" + self.save() + + +class Follow(models.Model): + """ + When one user (the source) follows other (the target) + """ + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_follow) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_follows", + ) + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_follows", + ) + + boosts = models.BooleanField( + default=True, help_text="Also follow boosts from this user" + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + note = models.TextField(blank=True, null=True) + + # state = StateField(FollowStates) + state = models.CharField(max_length=100, default="unrequested") + state_changed = models.DateTimeField(auto_now_add=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_follow" + unique_together = [("source", "target")] + indexes: list = [] # We need this so Stator can add its own + + def __str__(self): + return f"#{self.id}: {self.source} → {self.target}" + + +class Post(models.Model): + """ + A post (status, toot) that is either local or remote. 
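Before the Post model continues below, a hedged sketch of how the webfinger-based lookup above is usually invoked; the handle is invented and this assumes a configured NeoDB environment:

```python
# Resolve (or create) a remote identity from an invented handle.
identity = Identity.by_username_and_domain("alice", "example.social", fetch=True)
if identity is None:
    print("webfinger could not resolve the handle")
else:
    print(identity.handle, identity.actor_uri)  # e.g. "alice@example.social" and its actor URI
```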
+ """ + + interactions: "models.QuerySet[PostInteraction]" + + class Visibilities(models.IntegerChoices): + public = 0 + local_only = 4 + unlisted = 1 + followers = 2 + mentioned = 3 + + class Types(models.TextChoices): + article = "Article" + audio = "Audio" + event = "Event" + image = "Image" + note = "Note" + page = "Page" + question = "Question" + video = "Video" + + id = models.BigIntegerField(primary_key=True, default=Snowflake.generate_post) + + # The author (attributedTo) of the post + author = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="posts", + ) + + # The state the post is in + # state = StateField(PostStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # If it is our post or not + local = models.BooleanField() + + # The canonical object ID + object_uri = models.CharField(max_length=2048, blank=True, null=True, unique=True) + + # Who should be able to see this Post + visibility = models.IntegerField( + choices=Visibilities.choices, + default=Visibilities.public, + ) + + # The main (HTML) content + content = models.TextField() + + type = models.CharField( + max_length=20, + choices=Types.choices, + default=Types.note, + ) + type_data = models.JSONField( + blank=True, + null=True, # , encoder=PostTypeDataEncoder, decoder=PostTypeDataDecoder + ) + + # If the contents of the post are sensitive, and the summary (content + # warning) to show if it is + sensitive = models.BooleanField(default=False) + summary = models.TextField(blank=True, null=True) + + # The public, web URL of this Post on the original server + url = models.CharField(max_length=2048, blank=True, null=True) + + # The Post it is replying to as an AP ID URI + # (as otherwise we'd have to pull entire threads to use IDs) + in_reply_to = models.CharField(max_length=500, blank=True, null=True, db_index=True) + + # The identities the post is directly to (who can see it if not public) + to = models.ManyToManyField( + "takahe.Identity", + related_name="posts_to", + blank=True, + ) + + # The identities mentioned in the post + mentions = models.ManyToManyField( + "takahe.Identity", + related_name="posts_mentioning", + blank=True, + ) + + # Hashtags in the post + hashtags = models.JSONField(blank=True, null=True) + + emojis = models.ManyToManyField( + "takahe.Emoji", + related_name="posts_using_emoji", + blank=True, + ) + + # Like/Boost/etc counts + stats = models.JSONField(blank=True, null=True) + + # When the post was originally created (as opposed to when we received it) + published = models.DateTimeField(default=timezone.now) + + # If the post has been edited after initial publication + edited = models.DateTimeField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "activities_post" + + class urls(urlman.Urls): + view = "{self.author.urls.view}posts/{self.id}/" + object_uri = "{self.author.actor_uri}posts/{self.id}/" + action_like = "{view}like/" + action_unlike = "{view}unlike/" + action_boost = "{view}boost/" + action_unboost = "{view}unboost/" + action_bookmark = "{view}bookmark/" + action_unbookmark = "{view}unbookmark/" + action_delete = "{view}delete/" + action_edit = "{view}edit/" + action_report = "{view}report/" + action_reply = "/compose/?reply_to={self.id}" + admin_edit = "/djadmin/activities/post/{self.id}/change/" + + def get_scheme(self, url): + return "https" + + def 
get_hostname(self, url): + return self.instance.author.domain.uri_domain + + def __str__(self): + return f"{self.author} #{self.id}" + + def get_absolute_url(self): + return self.urls.view + + def absolute_object_uri(self): + """ + Returns an object URI that is always absolute, for sending out to + other servers. + """ + if self.local: + return self.author.absolute_profile_uri() + f"posts/{self.id}/" + else: + return self.object_uri + + def in_reply_to_post(self) -> Optional["Post"]: + """ + Returns the actual Post object we're replying to, if we can find it + """ + if self.in_reply_to is None: + return None + return ( + Post.objects.filter(object_uri=self.in_reply_to) + .select_related("author") + .first() + ) + + @classmethod + def create_local( + cls, + author: Identity, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool = False, + visibility: int = Visibilities.public, + reply_to: Optional["Post"] = None, + attachments: list | None = None, + type_data: dict | None = None, + published: datetime.datetime | None = None, + ) -> "Post": + with transaction.atomic(): + # Find mentions in this post + mentions = cls.mentions_from_content(content, author) + # mentions = set() + if reply_to: + mentions.add(reply_to.author) + # Maintain local-only for replies + if reply_to.visibility == reply_to.Visibilities.local_only: + visibility = reply_to.Visibilities.local_only + # Find emoji in this post + emojis = Emoji.emojis_from_content(content, None) + # Strip all unwanted HTML and apply linebreaks filter, grabbing hashtags on the way + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + content = parser.html.replace("
<p>", "<p>
" + raw_prepend_content, 1) + hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + # Make the Post object + post = cls.objects.create( + author=author, + content=content, + summary=summary or None, + sensitive=bool(summary) or sensitive, + local=True, + visibility=visibility, + hashtags=hashtags, + in_reply_to=reply_to.object_uri if reply_to else None, + ) + post.object_uri = post.urls.object_uri + post.url = post.absolute_object_uri() + post.mentions.set(mentions) + post.emojis.set(emojis) + if published and published < timezone.now(): + post.published = published + if timezone.now() - published > datetime.timedelta(days=2): + post.state = "fanned_out" # add post quietly if it's old + # if attachments:# FIXME + # post.attachments.set(attachments) + # if question: # FIXME + # post.type = question["type"] + # post.type_data = PostTypeData(__root__=question).__root__ + if type_data: + post.type_data = type_data + post.save() + # Recalculate parent stats for replies + if reply_to: + reply_to.calculate_stats() + return post + + def edit_local( + self, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool | None = None, + visibility: int = Visibilities.public, + attachments: list | None = None, + attachment_attributes: list | None = None, + type_data: dict | None = None, + ): + with transaction.atomic(): + # Strip all HTML and apply linebreaks filter + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + self.content = parser.html.replace("
<p>", "<p>
" + raw_prepend_content, 1) + self.hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + self.summary = summary or None + self.sensitive = bool(summary) if sensitive is None else sensitive + self.visibility = visibility + self.edited = timezone.now() + self.mentions.set(self.mentions_from_content(content, self.author)) + self.emojis.set(Emoji.emojis_from_content(content, None)) + # self.attachments.set(attachments or []) # fixme + if type_data: + self.type_data = type_data + self.save() + + for attrs in attachment_attributes or []: + attachment = next( + (a for a in attachments or [] if str(a.id) == attrs.id), None + ) + if attachment is None: + continue + attachment.name = attrs.description + attachment.save() + + self.state = "edited" + self.state_changed = timezone.now() + self.state_next_attempt = None + self.state_locked_until = None + self.save() + + @classmethod + def mentions_from_content(cls, content, author) -> set[Identity]: + mention_hits = FediverseHtmlParser(content, find_mentions=True).mentions + mentions = set() + for handle in mention_hits: + handle = handle.lower() + if "@" in handle: + username, domain = handle.split("@", 1) + else: + username = handle + domain = author.domain_id + identity = Identity.by_username_and_domain( + username=username, + domain=domain, + fetch=True, + ) + if identity is not None: + mentions.add(identity) + return mentions + + def ensure_hashtags(self) -> None: + """ + Ensure any of the already parsed hashtags from this Post + have a corresponding Hashtag record. + """ + # Ensure hashtags + if self.hashtags: + for hashtag in self.hashtags: + tag, _ = Hashtag.objects.get_or_create( + hashtag=hashtag[: Hashtag.MAXIMUM_LENGTH], + ) + tag.transition_perform("outdated") + + def calculate_stats(self, save=True): + """ + Recalculates our stats dict + """ + from .models import PostInteraction + + self.stats = { + "likes": self.interactions.filter( + type=PostInteraction.Types.like, + state__in=["new", "fanned_out"], + ).count(), + "boosts": self.interactions.filter( + type=PostInteraction.Types.boost, + state__in=["new", "fanned_out"], + ).count(), + "replies": Post.objects.filter(in_reply_to=self.object_uri).count(), + } + if save: + self.save() + + +class EmojiQuerySet(models.QuerySet): + def usable(self, domain: Domain | None = None): + """ + Returns all usable emoji, optionally filtering by domain too. + """ + visible_q = models.Q(local=True) | models.Q(public=True) + if True: # Config.system.emoji_unreviewed_are_public: + visible_q |= models.Q(public__isnull=True) + qs = self.filter(visible_q) + + if domain: + if not domain.local: + qs = qs.filter(domain=domain) + + return qs + + +class EmojiManager(models.Manager): + def get_queryset(self): + return EmojiQuerySet(self.model, using=self._db) + + def usable(self, domain: Domain | None = None): + return self.get_queryset().usable(domain) + + +class Emoji(models.Model): + class Meta: + # managed = False + db_table = "activities_emoji" + + # Normalized Emoji without the ':' + shortcode = models.SlugField(max_length=100, db_index=True) + + domain = models.ForeignKey( + "takahe.Domain", null=True, blank=True, on_delete=models.CASCADE + ) + local = models.BooleanField(default=True) + + # Should this be shown in the public UI? 
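As an aside on Post.mentions_from_content above, the handle-splitting step can be shown standalone (illustration only, not part of this commit):

```python
def split_handle(handle: str, author_domain: str) -> tuple[str, str]:
    """Mirrors the username/domain split used when resolving mentions."""
    handle = handle.lower()
    if "@" in handle:
        username, domain = handle.split("@", 1)   # "alice@example.social" -> ("alice", "example.social")
    else:
        username, domain = handle, author_domain  # a bare mention stays on the author's domain
    return username, domain

assert split_handle("Alice@Example.Social", "neodb.example") == ("alice", "example.social")
assert split_handle("bob", "neodb.example") == ("bob", "neodb.example")
```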
+ public = models.BooleanField(null=True) + + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + mimetype = models.CharField(max_length=200) + + # Files may not be populated if it's remote and not cached on our side yet + file = models.ImageField( + # upload_to=partial(upload_emoji_namer, "emoji"), + null=True, + blank=True, + ) + + # A link to the custom emoji + remote_url = models.CharField(max_length=500, blank=True, null=True) + + # Used for sorting custom emoji in the picker + category = models.CharField(max_length=100, blank=True, null=True) + + # State of this Emoji + # state = StateField(EmojiStates) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = EmojiManager() + + @classmethod + def emojis_from_content(cls, content: str, domain: Domain | None) -> list["Emoji"]: + """ + Return a parsed and sanitized of emoji found in content without + the surrounding ':'. + """ + emoji_hits = FediverseHtmlParser( + content, find_emojis=True, emoji_domain=domain + ).emojis + emojis = sorted({emoji for emoji in emoji_hits}) + q = models.Q(local=True) | models.Q(public=True) | models.Q(public__isnull=True) + if domain and not domain.local: + q = q & models.Q(domain=domain) + return list( + cls.objects.filter(local=(domain is None) or domain.local) + .filter(q) + .filter(shortcode__in=emojis) + ) + + @classmethod + @cached(cache=TTLCache(maxsize=1000, ttl=60)) + def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji | None": + """ + Given an emoji shortcode and optional domain, looks up the single + emoji and returns it. Raises Emoji.DoesNotExist if there isn't one. + """ + try: + if domain is None or domain.local: + return cls.objects.get(local=True, shortcode=shortcode) + else: + return cls.objects.get(domain=domain, shortcode=shortcode) + except Emoji.DoesNotExist: + return None + + @property + def fullcode(self): + return f":{self.shortcode}:" + + @property + def is_usable(self) -> bool: + """ + Return True if this Emoji is usable. 
+ """ + return self.public or self.public is None + + def full_url(self, always_show=False) -> RelativeAbsoluteUrl: + if self.is_usable or always_show: + if self.file: + return AutoAbsoluteUrl(self.file.url) + elif self.remote_url: + return ProxyAbsoluteUrl( + f"/proxy/emoji/{self.pk}/", + remote_url=self.remote_url, + ) + return StaticAbsoluteUrl("img/blank-emoji-128.png") + + def as_html(self): + if self.is_usable: + return mark_safe( + f'Emoji {self.shortcode}' + ) + return self.fullcode + + +class HashtagQuerySet(models.QuerySet): + def public(self): + public_q = models.Q(public=True) + if True: # Config.system.hashtag_unreviewed_are_public: + public_q |= models.Q(public__isnull=True) + return self.filter(public_q) + + def hashtag_or_alias(self, hashtag: str): + return self.filter( + models.Q(hashtag=hashtag) | models.Q(aliases__contains=hashtag) + ) + + +class HashtagManager(models.Manager): + def get_queryset(self): + return HashtagQuerySet(self.model, using=self._db) + + def public(self): + return self.get_queryset().public() + + def hashtag_or_alias(self, hashtag: str): + return self.get_queryset().hashtag_or_alias(hashtag) + + +class Hashtag(models.Model): + class Meta: + # managed = False + db_table = "activities_hashtag" + + MAXIMUM_LENGTH = 100 + + # Normalized hashtag without the '#' + hashtag = models.SlugField(primary_key=True, max_length=100) + + # Friendly display override + name_override = models.CharField(max_length=100, null=True, blank=True) + + # Should this be shown in the public UI? + public = models.BooleanField(null=True) + + # State of this Hashtag + # state = StateField(HashtagStates) + state = models.CharField(max_length=100, default="outdated") + state_changed = models.DateTimeField(auto_now_add=True) + + # Metrics for this Hashtag + stats = models.JSONField(null=True, blank=True) + # Timestamp of last time the stats were updated + stats_updated = models.DateTimeField(null=True, blank=True) + + # List of other hashtags that are considered similar + aliases = models.JSONField(null=True, blank=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + objects = HashtagManager() + + class urls(urlman.Urls): + view = "/tags/{self.hashtag}/" + follow = "/tags/{self.hashtag}/follow/" + unfollow = "/tags/{self.hashtag}/unfollow/" + admin = "/admin/hashtags/" + admin_edit = "{admin}{self.hashtag}/" + admin_enable = "{admin_edit}enable/" + admin_disable = "{admin_edit}disable/" + timeline = "/tags/{self.hashtag}/" + + hashtag_regex = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)") + + def save(self, *args, **kwargs): + self.hashtag = self.hashtag.lstrip("#") + if self.name_override: + self.name_override = self.name_override.lstrip("#") + return super().save(*args, **kwargs) + + @property + def display_name(self): + return self.name_override or self.hashtag + + def __str__(self): + return self.display_name + + def usage_months(self, num: int = 12) -> dict[date, int]: + """ + Return the most recent num months of stats + """ + if not self.stats: + return {} + results = {} + for key, val in self.stats.items(): + parts = key.split("-") + if len(parts) == 2: + year = int(parts[0]) + month = int(parts[1]) + results[date(year, month, 1)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def usage_days(self, num: int = 7) -> dict[date, int]: + """ + Return the most recent num days of stats + """ + if not self.stats: + return {} + results = {} + for key, val in self.stats.items(): + parts = key.split("-") + if 
len(parts) == 3: + year = int(parts[0]) + month = int(parts[1]) + day = int(parts[2]) + results[date(year, month, day)] = val + return dict(sorted(results.items(), reverse=True)[:num]) + + def to_mastodon_json(self, following: bool | None = None): + value = { + "name": self.hashtag, + "url": self.urls.view.full(), # type: ignore + "history": [], + } + + if following is not None: + value["following"] = following + + return value + + +class PostInteraction(models.Model): + """ + Handles both boosts and likes + """ + + class Types(models.TextChoices): + like = "like" + boost = "boost" + vote = "vote" + pin = "pin" + + id = models.BigIntegerField( + primary_key=True, + default=Snowflake.generate_post_interaction, + ) + + # The state the boost is in + # state = StateField(PostInteractionStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + # The canonical object ID + object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True) + + # What type of interaction it is + type = models.CharField(max_length=100, choices=Types.choices) + + # The user who boosted/liked/etc. + identity = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # The post that was boosted/liked/etc + post = models.ForeignKey( + "takahe.Post", + on_delete=models.CASCADE, + related_name="interactions", + ) + + # Used to store any interaction extra text value like the vote + # in the question/poll case + value = models.CharField(max_length=50, blank=True, null=True) + + # When the activity was originally created (as opposed to when we received it) + # Mastodon only seems to send this for boosts, not likes + published = models.DateTimeField(default=timezone.now) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "activities_postinteraction" + + +class Block(models.Model): + """ + When one user (the source) mutes or blocks another (the target) + """ + + # state = StateField(BlockStates) + state = models.CharField(max_length=100, default="new") + state_changed = models.DateTimeField(auto_now_add=True) + + source = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="outbound_blocks", + ) + + target = models.ForeignKey( + "takahe.Identity", + on_delete=models.CASCADE, + related_name="inbound_blocks", + ) + + uri = models.CharField(blank=True, null=True, max_length=500) + + # If it is a mute, we will stop delivering any activities from target to + # source, but we will still deliver activities from source to target. + # A full block (mute=False) stops activities both ways. 
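The Hashtag.usage_months/usage_days parsers above expect a flat stats dict that mixes month keys ("YYYY-MM") and day keys ("YYYY-MM-DD"); a standalone illustration with invented counts:

```python
from datetime import date

stats = {"2023-07": 12, "2023-08": 30, "2023-08-09": 4, "2023-08-10": 7}  # invented numbers

monthly = {date(int(k.split("-")[0]), int(k.split("-")[1]), 1): v
           for k, v in stats.items() if len(k.split("-")) == 2}
daily = {date(*map(int, k.split("-"))): v
         for k, v in stats.items() if len(k.split("-")) == 3}

assert monthly[date(2023, 8, 1)] == 30
assert daily[date(2023, 8, 10)] == 7
```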
+ mute = models.BooleanField() + include_notifications = models.BooleanField(default=False) + + expires = models.DateTimeField(blank=True, null=True) + note = models.TextField(blank=True, null=True) + + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) + + class Meta: + # managed = False + db_table = "users_block" + + def __str__(self): + return f"#{self.pk}: {self.source} blocks {self.target}" + + ### Alternate fetchers/constructors ### + + @classmethod + def maybe_get( + cls, source, target, mute=False, require_active=False + ) -> Optional["Block"]: + """ + Returns a Block if it exists between source and target + """ + try: + if require_active: + return cls.objects.filter( + status__in=["new", "sent", "awaiting_expiry"] + ).get(source=source, target=target, mute=mute) + else: + return cls.objects.get(source=source, target=target, mute=mute) + except cls.DoesNotExist: + return None + + @classmethod + def create_local_block(cls, source, target) -> "Block": + """ + Creates or updates a full Block from a local Identity to the target + (which can be local or remote). + """ + if not source.local: + raise ValueError("You cannot block from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=False) + if block is not None: + if not block.state in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=False, + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block + + @classmethod + def create_local_mute( + cls, + source, + target, + duration=None, + include_notifications=False, + ) -> "Block": + """ + Creates or updates a muting Block from a local Identity to the target + (which can be local or remote). + """ + if not source.local: + raise ValueError("You cannot mute from a remote Identity") + block = cls.maybe_get(source=source, target=target, mute=True) + if block is not None: + if not block in ["new", "sent", "awaiting_expiry"]: + block.state = BlockStates.new # type:ignore + if duration: + block.expires = timezone.now() + datetime.timedelta(seconds=duration) + block.include_notifications = include_notifications + block.save() + else: + with transaction.atomic(): + block = cls.objects.create( + source=source, + target=target, + mute=True, + include_notifications=include_notifications, + expires=( + timezone.now() + datetime.timedelta(seconds=duration) + if duration + else None + ), + ) + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + return block diff --git a/takahe/tests.py b/takahe/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/takahe/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/takahe/uris.py b/takahe/uris.py new file mode 100644 index 00000000..e686108b --- /dev/null +++ b/takahe/uris.py @@ -0,0 +1,89 @@ +import hashlib +import sys +from urllib.parse import urljoin + +from django.conf import settings +from django.contrib.staticfiles.storage import staticfiles_storage + + +class RelativeAbsoluteUrl: + """ + Represents a URL that can have both "relative" and "absolute" forms + for various use either locally or remotely. 
+ """ + + absolute: str + relative: str + + def __init__(self, absolute: str, relative: str | None = None): + if "://" not in absolute: + raise ValueError(f"Absolute URL {absolute!r} is not absolute!") + self.absolute = absolute + self.relative = relative or absolute + + +class AutoAbsoluteUrl(RelativeAbsoluteUrl): + """ + Automatically makes the absolute variant by using either settings.MAIN_DOMAIN + or a passed identity's URI domain. + """ + + def __init__( + self, + relative: str, + identity=None, + ): + self.relative = relative + if identity: + absolute_prefix = f"https://{identity.domain.uri_domain}/" + else: + absolute_prefix = f"https://{settings.MAIN_DOMAIN}/" + self.absolute = urljoin(absolute_prefix, self.relative) + + +class ProxyAbsoluteUrl(AutoAbsoluteUrl): + """ + AutoAbsoluteUrl variant for proxy paths, that also attaches a remote URI hash + plus extension to the end if it can. + """ + + def __init__( + self, + relative: str, + identity=None, + remote_url: str | None = None, + ): + if remote_url: + # See if there is a file extension we can grab + extension = "bin" + remote_filename = remote_url.split("/")[-1] + if "." in remote_filename: + extension = remote_filename.split(".")[-1] + # When provided, attach a hash of the remote URL + # SHA1 chosen as it generally has the best performance in modern python, and security is not a concern + # Hash truncation is generally fine, as in the typical use case the hash is scoped to the identity PK. + relative += f"{hashlib.sha1(remote_url.encode('ascii')).hexdigest()[:10]}.{extension}" + super().__init__(relative, identity) + + +class StaticAbsoluteUrl(RelativeAbsoluteUrl): + """ + Creates static URLs given only the static-relative path + """ + + def __init__(self, path: str): + try: + static_url = staticfiles_storage.url(path) + except ValueError: + # Suppress static issues during the first collectstatic + # Yes, I know it's a big hack! 
Pull requests welcome :) + if "collectstatic" in sys.argv: + super().__init__("https://example.com/") + return + raise + if "://" in static_url: + super().__init__(static_url) + else: + super().__init__( + urljoin(f"https://{settings.MAIN_DOMAIN}/", static_url), static_url + ) diff --git a/takahe/utils.py b/takahe/utils.py new file mode 100644 index 00000000..f8498b9d --- /dev/null +++ b/takahe/utils.py @@ -0,0 +1,486 @@ +from typing import TYPE_CHECKING + +from django.conf import settings + +from .models import * + +if TYPE_CHECKING: + from users.models import APIdentity + from users.models import User as NeoUser + + +def _int(s: str): + try: + return int(s) + except: + return -1 + + +def _rating_to_emoji(score: int, star_mode=0): + """convert score(0~10) to mastodon star emoji code""" + if score is None or score == "" or score == 0: + return "" + solid_stars = score // 2 + half_star = int(bool(score % 2)) + empty_stars = 5 - solid_stars if not half_star else 5 - solid_stars - 1 + if star_mode == 1: + emoji_code = "🌕" * solid_stars + "🌗" * half_star + "🌑" * empty_stars + else: + emoji_code = ( + settings.STAR_SOLID * solid_stars + + settings.STAR_HALF * half_star + + settings.STAR_EMPTY * empty_stars + ) + emoji_code = emoji_code.replace("::", ": :") + emoji_code = " " + emoji_code + " " + return emoji_code + + +class Takahe: + Visibilities = Post.Visibilities + + @staticmethod + def get_domain(): + domain = settings.SITE_INFO["site_domain"] + d = Domain.objects.filter(domain=domain).first() + if not d: + logger.info(f"Creating takahe domain {domain}") + d = Domain.objects.create( + domain=domain, + local=True, + service_domain=None, + notes="NeoDB", + nodeinfo=None, + ) + return d + + @staticmethod + def get_node_name_for_domain(d: str): + domain = Domain.objects.filter(domain=d).first() + if domain and domain.nodeinfo: + return domain.nodeinfo.get("metadata", {}).get("nodeName") + + @staticmethod + def init_identity_for_local_user(u: "NeoUser"): + """ + When a new local NeoDB user is created, + create a takahe user with the NeoDB user pk, + create a takahe identity, + then create a NeoDB APIdentity with the takahe identity pk. 
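A quick worked example of the star arithmetic in _rating_to_emoji near the top of takahe/utils.py above (star_mode=1 path; standalone, not part of this commit):

```python
score = 7                    # ratings are 0..10
solid = score // 2           # 3 full stars
half = int(bool(score % 2))  # 1 half star
empty = 5 - solid - half     # 1 empty star
assert "🌕" * solid + "🌗" * half + "🌑" * empty == "🌕🌕🌕🌗🌑"
```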
+ """ + from users.models import APIdentity + + if not u.username: + logger.warning(f"User {u} has no username") + return None + user = User.objects.filter(pk=u.pk).first() + handler = "@" + u.username + if not user: + logger.info(f"Creating takahe user {u}") + user = User.objects.create(pk=u.pk, email=handler) + else: + if user.email != handler: + logger.warning(f"Updating takahe user {u} email to {handler}") + user.email = handler + user.save() + domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"]) + identity = Identity.objects.filter(username=u.username, local=True).first() + if not identity: + logger.info(f"Creating takahe identity {u}@{domain}") + identity = Identity.objects.create( + actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/", + username=u.username, + domain=domain, + name=u.username, + local=True, + discoverable=not u.preference.no_anonymous_view, + ) + identity.generate_keypair() + if not user.identities.filter(pk=identity.pk).exists(): + user.identities.add(identity) + apidentity = APIdentity.objects.filter(pk=identity.pk).first() + if not apidentity: + logger.info(f"Creating APIdentity for {identity}") + apidentity = APIdentity.objects.create( + user=u, + id=identity.pk, + local=True, + username=u.username, + domain_name=domain.domain, + deleted=identity.deleted, + ) + elif apidentity.username != identity.username: + logger.warning( + f"Updating APIdentity {apidentity} username to {identity.username}" + ) + apidentity.username = identity.username + apidentity.save() + if u.identity != apidentity: + logger.warning(f"Linking user {u} identity to {apidentity}") + u.identity = apidentity + u.save(update_fields=["identity"]) + return apidentity + + @staticmethod + def get_identity(pk: int): + return Identity.objects.get(pk=pk) + + @staticmethod + def get_identity_by_local_user(u: "NeoUser"): + return ( + Identity.objects.filter(pk=u.identity.pk, local=True).first() + if u and u.is_authenticated and u.identity + else None + ) + + @staticmethod + def get_or_create_apidentity(identity: Identity): + from users.models import APIdentity + + apid = APIdentity.objects.filter(pk=identity.pk).first() + if not apid: + if identity.local: + raise ValueError(f"local takahe identity {identity} missing APIdentity") + if not identity.domain: + raise ValueError(f"remote takahe identity {identity} missing domain") + apid = APIdentity.objects.create( + id=identity.pk, + local=False, + username=identity.username, + domain_name=identity.domain.domain, + deleted=identity.deleted, + ) + return apid + + @staticmethod + def get_local_user_by_identity(identity: Identity): + from users.models import User as NeoUser + + return NeoUser.objects.get(identity_id=identity.pk) if identity.local else None + + @staticmethod + def get_following_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="accepted" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="accepted" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_following_request_ids(identity_pk: int): + targets = Follow.objects.filter( + source_id=identity_pk, state="pending_approval" + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_requested_follower_ids(identity_pk: int): + targets = Follow.objects.filter( + target_id=identity_pk, state="pending_approval" + 
).values_list("target", flat=True) + return list(targets) + + @staticmethod + def update_follow_state( + source_pk: int, target_pk: int, from_states: list[str], to_state: str + ): + follow = Follow.objects.filter(source_id=source_pk, target_id=target_pk).first() + if ( + follow + and (not from_states or follow.state in from_states) + and follow.state != to_state + ): + follow.state = to_state + follow.save() + return follow + + @staticmethod + def follow(source_pk: int, target_pk: int): + try: + follow = Follow.objects.get(source_id=source_pk, target_id=target_pk) + if follow.state != "accepted": + follow.state = "unrequested" + follow.save() + except Follow.DoesNotExist: + source = Identity.objects.get(pk=source_pk) + follow = Follow.objects.create( + source_id=source_pk, + target_id=target_pk, + boosts=True, + uri="", + state="unrequested", + ) + follow.uri = source.actor_uri + f"follow/{follow.pk}/" + follow.save() + + @staticmethod + def unfollow(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "undone") + # InboxMessage.create_internal( + # { + # "type": "ClearTimeline", + # "object": target_identity.pk, + # "actor": self.identity.pk, + # } + # ) + + @staticmethod + def accept_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "accepting") + + @staticmethod + def reject_follow_request(source_pk: int, target_pk: int): + Takahe.update_follow_state(source_pk, target_pk, [], "rejecting") + + @staticmethod + def get_muting_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=True, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_blocking_ids(identity_pk: int) -> list[int]: + targets = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + return list(targets) + + @staticmethod + def get_rejecting_ids(identity_pk: int) -> list[int]: + pks1 = Block.objects.filter( + source_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("target", flat=True) + pks2 = Block.objects.filter( + target_id=identity_pk, + mute=False, + state__in=["new", "sent", "awaiting_expiry"], + ).values_list("source", flat=True) + return list(set(list(pks1) + list(pks2))) + + @staticmethod + def block_or_mute(source_pk: int, target_pk: int, is_mute: bool): + source = Identity.objects.get(pk=source_pk) + if not source.local: + raise ValueError(f"Cannot block/mute from remote identity {source}") + with transaction.atomic(): + block, _ = Block.objects.update_or_create( + defaults={"state": "new"}, + source_id=source_pk, + target_id=target_pk, + mute=is_mute, + ) + if block.state != "new" or not block.uri: + block.state = "new" + block.uri = source.actor_uri + f"block/{block.pk}/" + block.save() + if not is_mute: + Takahe.unfollow(source_pk, target_pk) + Takahe.reject_follow_request(target_pk, source_pk) + return block + + @staticmethod + def undo_block_or_mute(source_pk: int, target_pk: int, is_mute: bool): + Block.objects.filter( + source_id=source_pk, target_id=target_pk, mute=is_mute + ).update(state="undone") + + @staticmethod + def block(source_pk: int, target_pk: int): + return Takahe.block_or_mute(source_pk, target_pk, False) + + @staticmethod + def unblock(source_pk: int, target_pk: int): + return Takahe.undo_block_or_mute(source_pk, target_pk, False) + + 
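A hedged sketch of the follow lifecycle as driven through the Takahe facade above; the pks are invented, both identities are assumed to exist, and in a real deployment the state transition would normally be performed by takahe's own workers rather than called directly:

```python
Takahe.follow(source_pk=1, target_pk=2)                        # creates a Follow in "unrequested"
Takahe.update_follow_state(1, 2, ["unrequested"], "accepted")  # stand-in for the remote accept
assert 2 in Takahe.get_following_ids(1)                        # only "accepted" rows are returned
Takahe.unfollow(1, 2)                                          # marks the Follow as "undone"
```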
@staticmethod + def mute(source_pk: int, target_pk: int): + return Takahe.block_or_mute(source_pk, target_pk, True) + + @staticmethod + def unmute(source_pk: int, target_pk: int): + return Takahe.undo_block_or_mute(source_pk, target_pk, True) + + @staticmethod + def _force_state_cycle(): # for unit testing only + Follow.objects.filter( + state__in=["rejecting", "undone", "pending_removal"] + ).delete() + Follow.objects.all().update(state="accepted") + Block.objects.filter(state="new").update(state="sent") + Block.objects.exclude(state="sent").delete() + + @staticmethod + def post( + author_pk: int, + pre_conetent: str, + content: str, + visibility: Visibilities, + data: dict | None = None, + post_pk: int | None = None, + post_time: datetime.datetime | None = None, + ) -> int | None: + identity = Identity.objects.get(pk=author_pk) + post = ( + Post.objects.filter(author=identity, pk=post_pk).first() + if post_pk + else None + ) + if post: + post.edit_local( + pre_conetent, content, visibility=visibility, type_data=data + ) + else: + post = Post.create_local( + identity, + pre_conetent, + content, + visibility=visibility, + type_data=data, + published=post_time, + ) + return post.pk if post else None + + @staticmethod + def get_post_url(post_pk: int) -> str | None: + post = Post.objects.filter(pk=post_pk).first() if post_pk else None + return post.object_uri if post else None + + @staticmethod + def delete_mark(mark): + if mark.shelfmember and mark.shelfmember.post_id: + Post.objects.filter(pk=mark.shelfmember.post_id).update(state="deleted") + + @staticmethod + def post_mark(mark, share_as_new_post: bool): + from catalog.common import ItemCategory + from takahe.utils import Takahe + + user = mark.owner.user + tags = ( + "\n" + + user.preference.mastodon_append_tag.replace( + "[category]", str(ItemCategory(mark.item.category).label) + ) + if user.preference.mastodon_append_tag + else "" + ) + stars = _rating_to_emoji(mark.rating_grade, 0) + item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}" + + pre_conetent = ( + f'{mark.action_label}《{mark.item.display_title}》' + ) + content = f"{stars}\n{mark.comment_text or ''}{tags}" + data = { + "object": { + "relatedWith": [mark.item.ap_object_ref, mark.shelfmember.ap_object] + } + } + if mark.comment: + data["object"]["relatedWith"].append(mark.comment.ap_object) + if mark.rating: + data["object"]["relatedWith"].append(mark.rating.ap_object) + if mark.visibility == 1: + v = Takahe.Visibilities.followers + elif mark.visibility == 2: + v = Takahe.Visibilities.mentioned + elif user.preference.mastodon_publish_public: + v = Takahe.Visibilities.public + else: + v = Takahe.Visibilities.unlisted + post_pk = Takahe.post( + mark.owner.pk, + pre_conetent, + content, + v, + data, + None if share_as_new_post else mark.shelfmember.post_id, + mark.shelfmember.created_time, + ) + if post_pk != mark.shelfmember.post_id: + mark.shelfmember.post_id = post_pk + mark.shelfmember.save(update_fields=["post_id"]) + if mark.comment and post_pk != mark.comment.post_id: + mark.comment.post_id = post_pk + mark.comment.save(update_fields=["post_id"]) + if mark.rating and post_pk != mark.rating.post_id: + mark.rating.post_id = post_pk + mark.rating.save(update_fields=["post_id"]) + + @staticmethod + def interact_post(post_pk: int, identity_pk: int, type: str): + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return + interaction = PostInteraction.objects.get_or_create( + type=type, + 
identity_id=identity_pk, + post=post, + )[0] + if interaction.state not in ["new", "fanned_out"]: + interaction.state = "new" + interaction.save() + post.calculate_stats() + return interaction + + @staticmethod + def uninteract_post(post_pk: int, identity_pk: int, type: str): + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return + for interaction in PostInteraction.objects.filter( + type=type, + identity_id=identity_pk, + post=post, + ): + interaction.state = "undone" + interaction.save() + post.calculate_stats() + + @staticmethod + def like_post(post_pk: int, identity_pk: int): + return Takahe.interact_post(post_pk, identity_pk, "like") + + @staticmethod + def unlike_post(post_pk: int, identity_pk: int): + return Takahe.uninteract_post(post_pk, identity_pk, "like") + + @staticmethod + def post_liked_by(post_pk: int, identity_pk: int) -> bool: + interaction = Takahe.get_user_interaction(post_pk, identity_pk, "like") + return interaction is not None and interaction.state in ["new", "fanned_out"] + + @staticmethod + def get_user_interaction(post_pk: int, identity_pk: int, type: str): + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return None + return PostInteraction.objects.filter( + type=type, + identity_id=identity_pk, + post=post, + ).first() + + @staticmethod + def get_post_stats(post_pk: int) -> dict: + post = Post.objects.filter(pk=post_pk).first() + if not post: + logger.warning(f"Cannot find post {post_pk}") + return {} + return post.stats or {} diff --git a/takahe/views.py b/takahe/views.py new file mode 100644 index 00000000..91ea44a2 --- /dev/null +++ b/takahe/views.py @@ -0,0 +1,3 @@ +from django.shortcuts import render + +# Create your views here. 
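Taken together, the facade in takahe/utils.py is meant to be used roughly like this; a hedged sketch with invented identity pks and text, assuming a configured NeoDB environment (the parameter is spelled pre_conetent in this diff):

```python
post_pk = Takahe.post(
    author_pk=1,
    pre_conetent="读过《Example》",    # HTML fragment prepended to the first paragraph
    content="🌕🌕🌕🌗🌑\na short note",
    visibility=Takahe.Visibilities.unlisted,
)
if post_pk:
    Takahe.like_post(post_pk, identity_pk=2)
    assert Takahe.post_liked_by(post_pk, 2)
    print(Takahe.get_post_stats(post_pk))  # e.g. {"likes": 1, "boosts": 0, "replies": 0}
```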
diff --git a/users/account.py b/users/account.py index 1f6499bb..5a6c1eb4 100644 --- a/users/account.py +++ b/users/account.py @@ -396,6 +396,7 @@ def register(request): ) messages.add_message(request, messages.INFO, _("已发送验证邮件,请查收。")) if username_changed: + request.user.initiatialize() messages.add_message(request, messages.INFO, _("用户名已设置。")) if email_cleared: messages.add_message(request, messages.INFO, _("电子邮件地址已取消关联。")) @@ -480,9 +481,9 @@ def auth_logout(request): def clear_data_task(user_id): user = User.objects.get(pk=user_id) user_str = str(user) - remove_data_by_user(user) + if user.identity: + remove_data_by_user(user.identity) user.clear() - user.save() logger.warning(f"User {user_str} data cleared.") diff --git a/users/management/commands/refresh_following.py b/users/management/commands/refresh_following.py deleted file mode 100644 index c57329d2..00000000 --- a/users/management/commands/refresh_following.py +++ /dev/null @@ -1,21 +0,0 @@ -from datetime import timedelta - -from django.core.management.base import BaseCommand -from django.utils import timezone -from tqdm import tqdm - -from users.models import User - - -class Command(BaseCommand): - help = "Refresh following data for all users" - - def handle(self, *args, **options): - count = 0 - for user in tqdm(User.objects.all()): - user.following = user.merged_following_ids() - if user.following: - count += 1 - user.save(update_fields=["following"]) - - print(f"{count} users updated") diff --git a/users/migrations/0012_apidentity.py b/users/migrations/0012_apidentity.py new file mode 100644 index 00000000..ab04b30e --- /dev/null +++ b/users/migrations/0012_apidentity.py @@ -0,0 +1,63 @@ +# Generated by Django 4.2.4 on 2023-08-09 13:37 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + # replaces = [ + # ("users", "0012_user_local"), + # ("users", "0013_user_identity"), + # ("users", "0014_remove_user_identity_apidentity_user"), + # ("users", "0015_alter_apidentity_user"), + # ] + + dependencies = [ + ("users", "0011_preference_hidden_categories"), + ("takahe", "0001_initial"), + ] + + operations = [ + migrations.CreateModel( + name="APIdentity", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("local", models.BooleanField()), + ("username", models.CharField(blank=True, max_length=500, null=True)), + ( + "domain_name", + models.CharField(blank=True, max_length=500, null=True), + ), + ("deleted", models.DateTimeField(blank=True, null=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="identity", + to=settings.AUTH_USER_MODEL, + ), + ), + ], + options={ + "indexes": [ + models.Index( + fields=["local", "username"], + name="users_apide_local_2d8170_idx", + ), + models.Index( + fields=["domain_name", "username"], + name="users_apide_domain__53ffa5_idx", + ), + ], + }, + ), + ] diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py new file mode 100644 index 00000000..551faeee --- /dev/null +++ b/users/migrations/0013_init_identity.py @@ -0,0 +1,77 @@ +# Generated by Django 4.2.4 on 2023-08-09 16:54 + +from django.conf import settings +from django.db import migrations, models, transaction +from loguru import logger +from tqdm import tqdm + +from takahe.models import Domain as TakaheDomain +from takahe.models import Identity as TakaheIdentity 
+from takahe.models import User as TakaheUser + +domain = settings.SITE_INFO["site_domain"] +service_domain = settings.SITE_INFO.get("site_service_domain") + + +def init_domain(apps, schema_editor): + d = TakaheDomain.objects.filter(domain=domain).first() + if not d: + logger.info(f"Creating takahe domain {domain}") + TakaheDomain.objects.create( + domain=domain, + local=True, + service_domain=service_domain, + notes="NeoDB", + nodeinfo={}, + ) + else: + logger.info(f"Takahe domain {domain} already exists") + + +def init_identity(apps, schema_editor): + User = apps.get_model("users", "User") + APIdentity = apps.get_model("users", "APIdentity") + tdomain = TakaheDomain.objects.filter(domain=domain).first() + if User.objects.filter(username__isnull=True).exists(): + raise ValueError("null username detected, aborting migration") + if TakaheUser.objects.exists(): + raise ValueError("existing Takahe users detected, aborting migration") + if TakaheIdentity.objects.exists(): + raise ValueError("existing Takahe identities detected, aborting migration") + if APIdentity.objects.exists(): + raise ValueError("existing APIdentity data detected, aborting migration") + logger.info(f"Creating takahe users/identities") + for user in tqdm(User.objects.all()): + username = user.username + handler = "@" + username + identity = APIdentity.objects.create( + pk=user.pk, + user=user, + local=True, + username=username, + domain_name=domain, + deleted=None if user.is_active else user.updated, + ) + takahe_user = TakaheUser.objects.create(pk=user.pk, email=handler) + takahe_identity = TakaheIdentity.objects.create( + pk=user.pk, + actor_uri=f"https://{service_domain or domain}/@{username}@{domain}/", + username=username, + domain=tdomain, + name=username, + local=True, + discoverable=not user.preference.no_anonymous_view, + ) + takahe_user.identities.add(takahe_identity) + + +class Migration(migrations.Migration): + + dependencies = [ + ("users", "0012_apidentity"), + ] + + operations = [ + migrations.RunPython(init_domain), + migrations.RunPython(init_identity), + ] diff --git a/users/models/__init__.py b/users/models/__init__.py index d1e45854..14d42a2e 100644 --- a/users/models/__init__.py +++ b/users/models/__init__.py @@ -1,3 +1,4 @@ +from .apidentity import APIdentity from .preference import Preference from .report import Report from .user import User diff --git a/users/models/apidentity.py b/users/models/apidentity.py new file mode 100644 index 00000000..a22c68d2 --- /dev/null +++ b/users/models/apidentity.py @@ -0,0 +1,192 @@ +from functools import cached_property + +from django.conf import settings +from django.db import models +from loguru import logger + +from takahe.utils import Takahe + +from .user import User + + +class APIdentity(models.Model): + """ + An identity/actor in ActivityPub service. 
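One note before the model definition continues: for existing accounts, migration 0013 above creates the takahe User, the takahe Identity and the APIdentity all on the NeoDB user's primary key, so the layers can be joined by pk alone. A hedged illustration of that invariant (values invented, migration path only; identities created later at runtime receive Snowflake pks instead):

```python
pk = 42  # an existing NeoDB user's pk, invented for illustration
takahe_user = TakaheUser.objects.get(pk=pk)          # login record in the takahe database
takahe_identity = TakaheIdentity.objects.get(pk=pk)  # local actor
ap_identity = APIdentity.objects.get(pk=pk)          # NeoDB-side 1:1 proxy
assert ap_identity.user_id == takahe_identity.pk == takahe_user.pk == pk
```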
+
+    This model is a 1:1 mapping to the Takahe Identity model.
+    """
+
+    user = models.OneToOneField("User", models.CASCADE, related_name="identity")
+    local = models.BooleanField()
+    username = models.CharField(max_length=500, blank=True, null=True)
+    domain_name = models.CharField(max_length=500, blank=True, null=True)
+    deleted = models.DateTimeField(null=True, blank=True)
+
+    class Meta:
+        indexes = [
+            models.Index(fields=["local", "username"]),
+            models.Index(fields=["domain_name", "username"]),
+        ]
+
+    @cached_property
+    def takahe_identity(self):
+        return Takahe.get_identity(self.pk)
+
+    @property
+    def is_active(self):
+        return self.user.is_active and self.takahe_identity.deleted is None
+
+    @property
+    def name(self):
+        return self.takahe_identity.name
+
+    @property
+    def discoverable(self):
+        return self.takahe_identity.discoverable
+
+    @property
+    def actor_uri(self):
+        return self.takahe_identity.actor_uri
+
+    @property
+    def icon_uri(self):
+        return self.takahe_identity.icon_uri
+
+    @property
+    def display_name(self):
+        return self.takahe_identity.name
+
+    @property
+    def avatar(self):
+        return self.user.avatar  # FIXME
+
+    @property
+    def url(self):
+        return f"/users/{self.handler}/"
+
+    @property
+    def preference(self):
+        return self.user.preference
+
+    @property
+    def handler(self):
+        if self.local:
+            return self.username
+        else:
+            return f"{self.username}@{self.domain_name}"
+
+    @property
+    def following(self):
+        return Takahe.get_following_ids(self.pk)
+
+    @property
+    def muting(self):
+        return Takahe.get_muting_ids(self.pk)
+
+    @property
+    def blocking(self):
+        return Takahe.get_blocking_ids(self.pk)
+
+    @property
+    def rejecting(self):
+        return Takahe.get_rejecting_ids(self.pk)
+
+    @property
+    def ignoring(self):
+        return self.muting + self.rejecting
+
+    def follow(self, target: "APIdentity"):
+        Takahe.follow(self.pk, target.pk)
+
+    def unfollow(self, target: "APIdentity"):  # this also cancels follow request
+        Takahe.unfollow(self.pk, target.pk)
+
+    def requested_followers(self):
+        return Takahe.get_requested_follower_ids(self.pk)
+
+    def following_request(self):
+        return Takahe.get_following_request_ids(self.pk)
+
+    def accept_follow_request(self, target: "APIdentity"):
+        Takahe.accept_follow_request(self.pk, target.pk)
+
+    def reject_follow_request(self, target: "APIdentity"):
+        Takahe.reject_follow_request(self.pk, target.pk)
+
+    def block(self, target: "APIdentity"):
+        Takahe.block(self.pk, target.pk)
+
+    def unblock(self, target: "APIdentity"):
+        Takahe.unblock(self.pk, target.pk)
+
+    def mute(self, target: "APIdentity"):
+        Takahe.mute(self.pk, target.pk)
+
+    def unmute(self, target: "APIdentity"):
+        Takahe.unmute(self.pk, target.pk)
+
+    def is_rejecting(self, target: "APIdentity"):
+        return self != target and (
+            target.is_blocked_by(self) or target.is_blocking(self)
+        )
+
+    def is_blocking(self, target: "APIdentity"):
+        return target.pk in self.blocking
+
+    def is_blocked_by(self, target: "APIdentity"):
+        return target.is_blocking(self)
+
+    def is_muting(self, target: "APIdentity"):
+        return target.pk in self.muting
+
+    def is_following(self, target: "APIdentity"):
+        return target.pk in self.following
+
+    def is_followed_by(self, target: "APIdentity"):
+        return target.is_following(self)
+
+    def is_visible_to_user(self, viewing_user: User):
+        return (
+            not viewing_user.is_authenticated
+            or viewing_user == self.user
+            or (
+                not self.is_blocking(viewing_user.identity)
+                and not self.is_blocked_by(viewing_user.identity)
+            )
+        )
+
+    @classmethod
+    def get_by_handler(cls, handler: str) -> "APIdentity":
+        s = handler.split("@")
+        if len(s) == 1:
+            return cls.objects.get(username=s[0], local=True, deleted__isnull=True)
+        elif len(s) == 2:
+            return cls.objects.get(
+                user__mastodon_username=s[0],
+                user__mastodon_site=s[1],
+                deleted__isnull=True,
+            )
+        elif len(s) == 3 and s[0] == "":
+            return cls.objects.get(
+                username=s[1], domain_name=s[2], local=False, deleted__isnull=True
+            )
+        else:
+            raise cls.DoesNotExist(f"Invalid handler {handler}")
+
+    @cached_property
+    def activity_manager(self):
+        from social.models import ActivityManager
+
+        return ActivityManager(self)
+
+    @cached_property
+    def shelf_manager(self):
+        from journal.models import ShelfManager
+
+        return ShelfManager(self)
+
+    @cached_property
+    def tag_manager(self):
+        from journal.models import TagManager
+
+        return TagManager(self)
diff --git a/users/models/preference.py b/users/models/preference.py
index 6cc96ef9..ac5a0b45 100644
--- a/users/models/preference.py
+++ b/users/models/preference.py
@@ -20,6 +20,7 @@ from loguru import logger
 from common.utils import GenerateDateUUIDMediaFilePath
 from management.models import Announcement
 from mastodon.api import *
+from takahe.utils import Takahe
 
 from .user import User
diff --git a/users/models/report.py b/users/models/report.py
index caabd49c..4a65d2a2 100644
--- a/users/models/report.py
+++ b/users/models/report.py
@@ -1,24 +1,9 @@
-import hashlib
-import re
-from functools import cached_property
-
 from django.conf import settings
-from django.contrib.auth.models import AbstractUser
-from django.core import validators
-from django.core.exceptions import ValidationError
-from django.core.serializers.json import DjangoJSONEncoder
 from django.db import models
-from django.db.models import F, Q, Value
-from django.db.models.functions import Concat, Lower
-from django.templatetags.static import static
-from django.urls import reverse
-from django.utils import timezone
-from django.utils.deconstruct import deconstructible
 from django.utils.translation import gettext_lazy as _
 from loguru import logger
 
 from common.utils import GenerateDateUUIDMediaFilePath
-from management.models import Announcement
 from mastodon.api import *
 
 from .user import User
diff --git a/users/models/user.py b/users/models/user.py
index 7f77db0c..68a8c322 100644
--- a/users/models/user.py
+++ b/users/models/user.py
@@ -5,10 +5,9 @@ from typing import TYPE_CHECKING
 
 from django.contrib.auth.models import AbstractUser
 from django.contrib.auth.validators import UnicodeUsernameValidator
-from django.core import validators
 from django.core.exceptions import ValidationError
 from django.db import models
-from django.db.models import F, Q, Value
+from django.db.models import F, Manager, Q, Value
 from django.db.models.functions import Concat, Lower
 from django.templatetags.static import static
 from django.urls import reverse
@@ -19,8 +18,10 @@ from loguru import logger
 
 from management.models import Announcement
 from mastodon.api import *
+from takahe.utils import Takahe
 
 if TYPE_CHECKING:
+    from .apidentity import APIdentity
     from .preference import Preference
 
 _RESERVED_USERNAMES = [
@@ -48,6 +49,7 @@ class UsernameValidator(UnicodeUsernameValidator):
 
 
 class User(AbstractUser):
+    identity: "APIdentity"
     preference: "Preference"
     username_validator = UsernameValidator()
    username = models.CharField(
@@ -142,15 +144,6 @@ class User(AbstractUser):
         ),
     ]
 
-    @staticmethod
-    def register(**param):
-        from .preference import Preference
-
-        new_user = User(**param)
-        new_user.save()
-
Preference.objects.create(user=new_user) - return new_user - @cached_property def mastodon_acct(self): return ( @@ -185,7 +178,9 @@ class User(AbstractUser): @property def handler(self): - return self.mastodon_acct or self.username or f"~{self.pk}" + return ( + f"{self.username}" if self.username else self.mastodon_acct or f"~{self.pk}" + ) @property def url(self): @@ -194,105 +189,6 @@ class User(AbstractUser): def __str__(self): return f'{self.pk}:{self.username or ""}:{self.mastodon_acct}' - @property - def ignoring(self): - return self.muting + self.rejecting - - def follow(self, target: "User"): - if ( - target is None - or target.locked - or self.is_following(target) - or self.is_blocking(target) - or self.is_blocked_by(target) - ): - return False - self.local_following.add(target) - self.following.append(target.pk) - self.save(update_fields=["following"]) - return True - - def unfollow(self, target: "User"): - if target and target in self.local_following.all(): - self.local_following.remove(target) - if ( - target.pk in self.following - and target.mastodon_acct not in self.mastodon_following - ): - self.following.remove(target.pk) - self.save(update_fields=["following"]) - return True - return False - - def remove_follower(self, target: "User"): - if target is None or self not in target.local_following.all(): - return False - target.local_following.remove(self) - if ( - self.pk in target.following - and self.mastodon_acct not in target.mastodon_following - ): - target.following.remove(self.pk) - target.save(update_fields=["following"]) - return True - - def block(self, target: "User"): - if target is None or target in self.local_blocking.all(): - return False - self.local_blocking.add(target) - if target.pk in self.following: - self.following.remove(target.pk) - self.save(update_fields=["following"]) - if self.pk in target.following: - target.following.remove(self.pk) - target.save(update_fields=["following"]) - if target in self.local_following.all(): - self.local_following.remove(target) - if self in target.local_following.all(): - target.local_following.remove(self) - if target.pk not in self.rejecting: - self.rejecting.append(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk not in target.rejecting: - target.rejecting.append(self.pk) - target.save(update_fields=["rejecting"]) - return True - - def unblock(self, target: "User"): - if target and target in self.local_blocking.all(): - self.local_blocking.remove(target) - if not self.is_blocked_by(target): - if target.pk in self.rejecting: - self.rejecting.remove(target.pk) - self.save(update_fields=["rejecting"]) - if self.pk in target.rejecting: - target.rejecting.remove(self.pk) - target.save(update_fields=["rejecting"]) - return True - return False - - def mute(self, target: "User"): - if ( - target is None - or target in self.local_muting.all() - or target.mastodon_acct in self.mastodon_mutes - ): - return False - self.local_muting.add(target) - if target.pk not in self.muting: - self.muting.append(target.pk) - self.save() - return True - - def unmute(self, target: "User"): - if target and target in self.local_muting.all(): - self.local_muting.remove(target) - if target.pk in self.muting: - self.muting.remove(target.pk) - self.save() - return True - return False - def clear(self): if self.mastodon_site == "removed" and not self.is_active: return @@ -313,40 +209,13 @@ class User(AbstractUser): self.mastodon_blocks = [] self.mastodon_domain_blocks = [] self.mastodon_account = {} + self.save() + 
self.identity.deleted = timezone.now() + self.identity.save() - def merge_relationships(self): - self.muting = self.merged_muting_ids() - self.rejecting = self.merged_rejecting_ids() - # caculate following after rejecting is merged - self.following = self.merged_following_ids() - - @classmethod - def merge_rejected_by(cls): - """ - Caculate rejecting field to include blocked by for external users - Should be invoked after invoking merge_relationships() for all users - """ - # FIXME this is quite inifficient, should only invoked in async task - external_users = list( - cls.objects.filter(mastodon_username__isnull=False, is_active=True) - ) - reject_changed = [] - follow_changed = [] - for u in external_users: - for v in external_users: - if v.pk in u.rejecting and u.pk not in v.rejecting: - v.rejecting.append(u.pk) - if v not in reject_changed: - reject_changed.append(v) - if u.pk in v.following: - v.following.remove(u.pk) - if v not in follow_changed: - follow_changed.append(v) - for u in reject_changed: - u.save(update_fields=["rejecting"]) - for u in follow_changed: - u.save(update_fields=["following"]) - return len(follow_changed) + len(reject_changed) + def sync_relationships(self): + # FIXME + pass def refresh_mastodon_data(self): """Try refresh account data from mastodon server, return true if refreshed successfully, note it will not save to db""" @@ -390,112 +259,13 @@ class User(AbstractUser): self.mastodon_domain_blocks = get_related_acct_list( self.mastodon_site, self.mastodon_token, "/api/v1/domain_blocks" ) - self.merge_relationships() + self.sync_relationships() updated = True elif code == 401: logger.error(f"Refresh mastodon data error 401 for {self}") self.mastodon_token = "" return updated - def merged_following_ids(self): - fl = [] - for m in self.mastodon_following: - target = User.get(m) - if target and ( - (not target.mastodon_locked) - or self.mastodon_acct in target.mastodon_followers - ): - fl.append(target.pk) - for user in self.local_following.all(): - if user.pk not in fl and not user.locked and not user.is_blocking(self): - fl.append(user.pk) - fl = [x for x in fl if x not in self.rejecting] - return sorted(fl) - - def merged_muting_ids(self): - external_muting_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site")) - .filter(acct__in=self.mastodon_mutes) - .values_list("pk", flat=True) - ) - l = list( - set( - external_muting_user_ids - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def merged_rejecting_ids(self): - domain_blocked_user_ids = list( - User.objects.filter( - mastodon_site__in=self.mastodon_domain_blocks - ).values_list("pk", flat=True) - ) - external_blocking_user_ids = list( - User.objects.all() - .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site")) - .filter(acct__in=self.mastodon_blocks) - .values_list("pk", flat=True) - ) - l = list( - set( - domain_blocked_user_ids - + external_blocking_user_ids - + list(self.local_blocking.all().values_list("pk", flat=True)) - + list(self.local_blocked_by.all().values_list("pk", flat=True)) # type: ignore - + list(self.local_muting.all().values_list("pk", flat=True)) - ) - ) - return sorted(l) - - def is_blocking(self, target): - return ( - ( - target in self.local_blocking.all() - or target.mastodon_acct in self.mastodon_blocks - or target.mastodon_site in self.mastodon_domain_blocks - ) - if target.is_authenticated - else self.preference.no_anonymous_view - ) - - def is_blocked_by(self, 
target): - return target.is_authenticated and target.is_blocking(self) - - def is_muting(self, target): - return target.pk in self.muting or target.mastodon_acct in self.mastodon_mutes - - def is_following(self, target): - return ( - self.mastodon_acct in target.mastodon_followers - if target.locked - else target.pk in self.following - # or target.mastodon_acct in self.mastodon_following - # or self.mastodon_acct in target.mastodon_followers - ) - - def is_followed_by(self, target): - return target.is_following(self) - - def get_mark_for_item(self, item): - params = {item.__class__.__name__.lower() + "_id": item.id, "owner": self} - mark = item.mark_class.objects.filter(**params).first() - return mark - - def get_max_visibility(self, viewer): - if not viewer.is_authenticated: - return 0 - elif viewer == self: - return 2 - elif viewer.is_blocked_by(self): - return -1 - elif viewer.is_following(self): - return 1 - else: - return 0 - @property def unread_announcements(self): unread_announcements = Announcement.objects.filter( @@ -503,59 +273,71 @@ class User(AbstractUser): ).order_by("-pk") return unread_announcements + @property + def activity_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.activity_manager + + @property + def shelf_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.shelf_manager + + @property + def tag_manager(self): + if not self.identity: + raise ValueError("User has no identity") + return self.identity.tag_manager + @classmethod def get(cls, name, case_sensitive=False): if isinstance(name, str): - sp = name.split("@") if name.startswith("~"): try: query_kwargs = {"pk": int(name[1:])} except: return None - elif len(sp) == 1: + elif name.startswith("@"): query_kwargs = { - "username__iexact" if case_sensitive else "username": name - } - elif len(sp) == 2: - query_kwargs = { - "mastodon_username__iexact" - if case_sensitive - else "mastodon_username": sp[0], - "mastodon_site__iexact" - if case_sensitive - else "mastodon_site": sp[1], + "username__iexact" if case_sensitive else "username": name[1:] } else: - return None + sp = name.split("@") + if len(sp) == 2: + query_kwargs = { + "mastodon_username__iexact" + if case_sensitive + else "mastodon_username": sp[0], + "mastodon_site__iexact" + if case_sensitive + else "mastodon_site": sp[1], + } + else: + return None elif isinstance(name, int): query_kwargs = {"pk": name} else: return None return User.objects.filter(**query_kwargs).first() - @property - def tags(self): - from journal.models import TagManager + @classmethod + def register(cls, **param): + from .preference import Preference - return TagManager.all_tags_for_user(self) + new_user = cls(**param) + new_user.save() + Preference.objects.create(user=new_user) + if new_user.username: # TODO make username required in registeration + new_user.initialize() + return new_user - @cached_property - def tag_manager(self): - from journal.models import TagManager + def initialize(self): + Takahe.init_identity_for_local_user(self) - return TagManager.get_manager_for_user(self) - @cached_property - def shelf_manager(self): - from journal.models import ShelfManager - - return ShelfManager.get_manager_for_user(self) - - @cached_property - def activity_manager(self): - from social.models import ActivityManager - - return ActivityManager.get_manager_for_user(self) +# TODO the following models should be deprecated soon class Follow(models.Model): diff --git a/users/tasks.py 
b/users/tasks.py index 06d684a1..c6cb4c8a 100644 --- a/users/tasks.py +++ b/users/tasks.py @@ -42,6 +42,4 @@ def refresh_all_mastodon_data_task(ttl_hours): else: logger.warning(f"Missing token for {user}") logger.info(f"{count} users updated") - c = User.merge_rejected_by() - logger.info(f"{c} users's rejecting list updated") logger.info(f"Mastodon data refresh done") diff --git a/users/tests.py b/users/tests.py index 3e801a29..e5513aaf 100644 --- a/users/tests.py +++ b/users/tests.py @@ -1,168 +1,70 @@ from django.test import TestCase +from takahe.utils import Takahe + from .models import * -from .models.user import Block, Follow, Mute class UserTest(TestCase): - def setUp(self): - self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice") - self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob") + databases = "__all__" - def test_local_follow(self): - self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue( - Follow.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertEqual(self.alice.merged_following_ids(), [self.bob.pk]) - self.assertEqual(self.alice.following, [self.bob.pk]) + def setUp(self): + self.alice = User.register( + mastodon_site="MySpace", mastodon_username="Alice", username="alice" + ).identity + self.bob = User.register( + mastodon_site="KKCity", mastodon_username="Bob", username="bob" + ).identity + + def test_follow(self): + self.alice.follow(self.bob) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_following(self.bob)) self.assertTrue(self.bob.is_followed_by(self.alice)) - - self.assertFalse(self.alice.follow(self.bob)) - self.assertEqual( - Follow.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) self.assertEqual(self.alice.following, [self.bob.pk]) - self.assertTrue(self.alice.unfollow(self.bob)) - self.assertFalse( - Follow.objects.filter(owner=self.alice, target=self.bob).exists() - ) + self.alice.unfollow(self.bob) + Takahe._force_state_cycle() self.assertFalse(self.alice.is_following(self.bob)) self.assertFalse(self.bob.is_followed_by(self.alice)) self.assertEqual(self.alice.following, []) - def test_locked(self): - self.bob.mastodon_locked = True - self.bob.save() - self.assertFalse(self.alice.follow(self.bob)) - self.bob.mastodon_locked = False - self.bob.save() - self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue(self.alice.is_following(self.bob)) - self.bob.mastodon_locked = True - self.bob.save() - self.assertFalse(self.alice.is_following(self.bob)) - - def test_external_follow(self): - self.alice.mastodon_following.append(self.bob.mastodon_acct) - self.alice.merge_relationships() - self.alice.save() - self.assertTrue(self.alice.is_following(self.bob)) - self.assertEqual(self.alice.following, [self.bob.pk]) - self.assertFalse(self.alice.follow(self.bob)) - - self.alice.mastodon_following.remove(self.bob.mastodon_acct) - self.alice.merge_relationships() - self.alice.save() - self.assertFalse(self.alice.is_following(self.bob)) - self.assertEqual(self.alice.following, []) - self.assertTrue(self.alice.follow(self.bob)) - self.assertTrue(self.alice.is_following(self.bob)) - - def test_local_mute(self): + def test_mute(self): self.alice.mute(self.bob) - self.assertTrue(Mute.objects.filter(owner=self.alice, target=self.bob).exists()) - self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk]) - self.assertEqual(self.alice.ignoring, [self.bob.pk]) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_muting(self.bob)) - - self.alice.mute(self.bob) - 
self.assertEqual( - Mute.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) self.assertEqual(self.alice.ignoring, [self.bob.pk]) + self.assertEqual(self.alice.rejecting, []) - self.alice.unmute(self.bob) - self.assertFalse( - Mute.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertFalse(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.ignoring, []) - self.assertEqual(self.alice.merged_muting_ids(), []) - - def test_external_mute(self): - self.alice.mastodon_mutes.append(self.bob.mastodon_acct) - self.alice.save() - self.assertTrue(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk]) - - self.alice.mastodon_mutes.remove(self.bob.mastodon_acct) - self.assertFalse(self.alice.is_muting(self.bob)) - self.assertEqual(self.alice.merged_muting_ids(), []) - - def test_local_block_follow(self): + def test_block(self): self.alice.block(self.bob) - self.assertEqual(self.bob.follow(self.alice), False) - self.alice.unblock(self.bob) - self.assertEqual(self.bob.follow(self.alice), True) - self.assertEqual(self.bob.following, [self.alice.pk]) - self.alice.block(self.bob) - self.assertEqual(self.bob.following, []) - - def test_local_block(self): - self.alice.block(self.bob) - self.assertTrue( - Block.objects.filter(owner=self.alice, target=self.bob).exists() - ) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.assertEqual(self.alice.ignoring, [self.bob.pk]) + Takahe._force_state_cycle() self.assertTrue(self.alice.is_blocking(self.bob)) self.assertTrue(self.bob.is_blocked_by(self.alice)) - - self.alice.block(self.bob) - self.assertEqual( - Block.objects.filter(owner=self.alice, target=self.bob).count(), 1 - ) + self.assertEqual(self.alice.rejecting, [self.bob.pk]) self.assertEqual(self.alice.ignoring, [self.bob.pk]) self.alice.unblock(self.bob) - self.assertFalse( - Block.objects.filter(owner=self.alice, target=self.bob).exists() - ) + Takahe._force_state_cycle() self.assertFalse(self.alice.is_blocking(self.bob)) self.assertFalse(self.bob.is_blocked_by(self.alice)) + self.assertEqual(self.alice.rejecting, []) self.assertEqual(self.alice.ignoring, []) - self.assertEqual(self.alice.merged_rejecting_ids(), []) - def test_external_block(self): - self.bob.follow(self.alice) - self.assertEqual(self.bob.following, [self.alice.pk]) - self.alice.mastodon_blocks.append(self.bob.mastodon_acct) - self.alice.save() - self.assertTrue(self.alice.is_blocking(self.bob)) - self.assertTrue(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.alice.merge_relationships() - self.assertEqual(self.alice.rejecting, [self.bob.pk]) - self.alice.save() - self.assertEqual(self.bob.following, [self.alice.pk]) - self.assertEqual(self.bob.rejecting, []) - self.assertEqual(User.merge_rejected_by(), 2) - self.bob.refresh_from_db() - self.assertEqual(self.bob.rejecting, [self.alice.pk]) - self.assertEqual(self.bob.following, []) + # def test_external_domain_block(self): + # self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site) + # self.alice.save() + # self.assertTrue(self.alice.is_blocking(self.bob)) + # self.assertTrue(self.bob.is_blocked_by(self.alice)) + # self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) + # self.alice.merge_relationships() + # self.assertEqual(self.alice.rejecting, [self.bob.pk]) + # self.alice.save() + # self.assertEqual(User.merge_rejected_by(), 1) + # self.bob.refresh_from_db() + # 
self.assertEqual(self.bob.rejecting, [self.alice.pk]) - self.alice.mastodon_blocks.remove(self.bob.mastodon_acct) - self.assertFalse(self.alice.is_blocking(self.bob)) - self.assertFalse(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), []) - - def test_external_domain_block(self): - self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site) - self.alice.save() - self.assertTrue(self.alice.is_blocking(self.bob)) - self.assertTrue(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk]) - self.alice.merge_relationships() - self.assertEqual(self.alice.rejecting, [self.bob.pk]) - self.alice.save() - self.assertEqual(User.merge_rejected_by(), 1) - self.bob.refresh_from_db() - self.assertEqual(self.bob.rejecting, [self.alice.pk]) - - self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site) - self.assertFalse(self.alice.is_blocking(self.bob)) - self.assertFalse(self.bob.is_blocked_by(self.alice)) - self.assertEqual(self.alice.merged_rejecting_ids(), []) + # self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site) + # self.assertFalse(self.alice.is_blocking(self.bob)) + # self.assertFalse(self.bob.is_blocked_by(self.alice)) + # self.assertEqual(self.alice.merged_rejecting_ids(), [])
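Editor's note on the handler resolution added in this patch: the snippet below is an illustrative sketch only (not part of the diff). It assumes the APIdentity.get_by_handler() and User.register() signatures introduced above, and uses the same placeholder account ("alice" on "MySpace") as UserTest.setUp; it shows how the handler forms are expected to resolve once a local user has been registered.

# illustrative sketch, not part of the patch
from users.models import APIdentity, User

alice = User.register(
    username="alice", mastodon_username="Alice", mastodon_site="MySpace"
).identity

# "username" is expected to resolve a local identity
assert APIdentity.get_by_handler("alice") == alice
# "user@site" is expected to resolve via the linked Mastodon account
assert APIdentity.get_by_handler("Alice@MySpace") == alice
# "@user@domain" is reserved for remote (local=False) identities and
# raises APIdentity.DoesNotExist when no such identity is known.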