"
+ elif tag in self.REWRITE_TO_BR:
+ self.flush_data()
+ if not self._fresh_p:
+ self.html_output += "
"
+ self.text_output += "\n"
+ elif tag == "a":
+ self.flush_data()
+ self._pending_a = {"attrs": dict(attrs), "content": ""}
+ self._fresh_p = tag in self.REWRITE_TO_P
+
+ def handle_endtag(self, tag: str) -> None:
+ self._fresh_p = False
+ if tag in self.REWRITE_TO_P:
+ self.flush_data()
+ self.html_output += "
", "
" + raw_prepend_content, 1) + hashtags = ( + sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags]) + or None + ) + # Make the Post object + post = cls.objects.create( + author=author, + content=content, + summary=summary or None, + sensitive=bool(summary) or sensitive, + local=True, + visibility=visibility, + hashtags=hashtags, + in_reply_to=reply_to.object_uri if reply_to else None, + ) + post.object_uri = post.urls.object_uri + post.url = post.absolute_object_uri() + post.mentions.set(mentions) + post.emojis.set(emojis) + if published and published < timezone.now(): + post.published = published + if timezone.now() - published > datetime.timedelta(days=2): + post.state = "fanned_out" # add post quietly if it's old + # if attachments:# FIXME + # post.attachments.set(attachments) + # if question: # FIXME + # post.type = question["type"] + # post.type_data = PostTypeData(__root__=question).__root__ + if type_data: + post.type_data = type_data + post.save() + # Recalculate parent stats for replies + if reply_to: + reply_to.calculate_stats() + return post + + def edit_local( + self, + raw_prepend_content: str, + content: str, + summary: str | None = None, + sensitive: bool | None = None, + visibility: int = Visibilities.public, + attachments: list | None = None, + attachment_attributes: list | None = None, + type_data: dict | None = None, + ): + with transaction.atomic(): + # Strip all HTML and apply linebreaks filter + parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True) + self.content = parser.html.replace("
", "
" + raw_prepend_content, 1)
+ self.hashtags = (
+ sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags])
+ or None
+ )
+ self.summary = summary or None
+ self.sensitive = bool(summary) if sensitive is None else sensitive
+ self.visibility = visibility
+ self.edited = timezone.now()
+ self.mentions.set(self.mentions_from_content(content, self.author))
+ self.emojis.set(Emoji.emojis_from_content(content, None))
+ # self.attachments.set(attachments or []) # fixme
+ if type_data:
+ self.type_data = type_data
+ self.save()
+
+ for attrs in attachment_attributes or []:
+ attachment = next(
+ (a for a in attachments or [] if str(a.id) == attrs.id), None
+ )
+ if attachment is None:
+ continue
+ attachment.name = attrs.description
+ attachment.save()
+
+ self.state = "edited"
+ self.state_changed = timezone.now()
+ self.state_next_attempt = None
+ self.state_locked_until = None
+ self.save()
+
+ @classmethod
+ def mentions_from_content(cls, content, author) -> set[Identity]:
+ mention_hits = FediverseHtmlParser(content, find_mentions=True).mentions
+ mentions = set()
+ for handle in mention_hits:
+ handle = handle.lower()
+ if "@" in handle:
+ username, domain = handle.split("@", 1)
+ else:
+ username = handle
+ domain = author.domain_id
+ identity = Identity.by_username_and_domain(
+ username=username,
+ domain=domain,
+ fetch=True,
+ )
+ if identity is not None:
+ mentions.add(identity)
+ return mentions
+
+ def ensure_hashtags(self) -> None:
+ """
+ Ensure any of the already parsed hashtags from this Post
+ have a corresponding Hashtag record.
+ """
+ # Ensure hashtags
+ if self.hashtags:
+ for hashtag in self.hashtags:
+ tag, _ = Hashtag.objects.get_or_create(
+ hashtag=hashtag[: Hashtag.MAXIMUM_LENGTH],
+ )
+ tag.transition_perform("outdated")
+
+ def calculate_stats(self, save=True):
+ """
+ Recalculates our stats dict
+ """
+ from .models import PostInteraction
+
+ self.stats = {
+ "likes": self.interactions.filter(
+ type=PostInteraction.Types.like,
+ state__in=["new", "fanned_out"],
+ ).count(),
+ "boosts": self.interactions.filter(
+ type=PostInteraction.Types.boost,
+ state__in=["new", "fanned_out"],
+ ).count(),
+ "replies": Post.objects.filter(in_reply_to=self.object_uri).count(),
+ }
+ if save:
+ self.save()
+
+
+class EmojiQuerySet(models.QuerySet):
+ def usable(self, domain: Domain | None = None):
+ """
+ Returns all usable emoji, optionally filtering by domain too.
+ """
+ visible_q = models.Q(local=True) | models.Q(public=True)
+ if True: # Config.system.emoji_unreviewed_are_public:
+ visible_q |= models.Q(public__isnull=True)
+ qs = self.filter(visible_q)
+
+ if domain:
+ if not domain.local:
+ qs = qs.filter(domain=domain)
+
+ return qs
+
+
+class EmojiManager(models.Manager):
+ def get_queryset(self):
+ return EmojiQuerySet(self.model, using=self._db)
+
+ def usable(self, domain: Domain | None = None):
+ return self.get_queryset().usable(domain)
+
+
+class Emoji(models.Model):
+ class Meta:
+ # managed = False
+ db_table = "activities_emoji"
+
+ # Normalized Emoji without the ':'
+ shortcode = models.SlugField(max_length=100, db_index=True)
+
+ domain = models.ForeignKey(
+ "takahe.Domain", null=True, blank=True, on_delete=models.CASCADE
+ )
+ local = models.BooleanField(default=True)
+
+ # Should this be shown in the public UI?
+ public = models.BooleanField(null=True)
+
+ object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True)
+
+ mimetype = models.CharField(max_length=200)
+
+ # Files may not be populated if it's remote and not cached on our side yet
+ file = models.ImageField(
+ # upload_to=partial(upload_emoji_namer, "emoji"),
+ null=True,
+ blank=True,
+ )
+
+ # A link to the custom emoji
+ remote_url = models.CharField(max_length=500, blank=True, null=True)
+
+ # Used for sorting custom emoji in the picker
+ category = models.CharField(max_length=100, blank=True, null=True)
+
+ # State of this Emoji
+ # state = StateField(EmojiStates)
+
+ created = models.DateTimeField(auto_now_add=True)
+ updated = models.DateTimeField(auto_now=True)
+
+ objects = EmojiManager()
+
+ @classmethod
+ def emojis_from_content(cls, content: str, domain: Domain | None) -> list["Emoji"]:
+ """
+        Return a parsed and sanitized list of emoji shortcodes found in
+        content, without the surrounding ':'.
+ """
+ emoji_hits = FediverseHtmlParser(
+ content, find_emojis=True, emoji_domain=domain
+ ).emojis
+ emojis = sorted({emoji for emoji in emoji_hits})
+ q = models.Q(local=True) | models.Q(public=True) | models.Q(public__isnull=True)
+ if domain and not domain.local:
+ q = q & models.Q(domain=domain)
+ return list(
+ cls.objects.filter(local=(domain is None) or domain.local)
+ .filter(q)
+ .filter(shortcode__in=emojis)
+ )
+
+ @classmethod
+ @cached(cache=TTLCache(maxsize=1000, ttl=60))
+ def get_by_domain(cls, shortcode, domain: Domain | None) -> "Emoji | None":
+ """
+        Given an emoji shortcode and optional domain, looks up the single
+        emoji and returns it, or None if there isn't one.
+ """
+ try:
+ if domain is None or domain.local:
+ return cls.objects.get(local=True, shortcode=shortcode)
+ else:
+ return cls.objects.get(domain=domain, shortcode=shortcode)
+ except Emoji.DoesNotExist:
+ return None
+
+ @property
+ def fullcode(self):
+ return f":{self.shortcode}:"
+
+ @property
+ def is_usable(self) -> bool:
+ """
+ Return True if this Emoji is usable.
+ """
+ return self.public or self.public is None
+
+ def full_url(self, always_show=False) -> RelativeAbsoluteUrl:
+ if self.is_usable or always_show:
+ if self.file:
+ return AutoAbsoluteUrl(self.file.url)
+ elif self.remote_url:
+ return ProxyAbsoluteUrl(
+ f"/proxy/emoji/{self.pk}/",
+ remote_url=self.remote_url,
+ )
+ return StaticAbsoluteUrl("img/blank-emoji-128.png")
+
+ def as_html(self):
+ if self.is_usable:
+ return mark_safe(
+                f'<img src="{self.full_url().relative}" class="emoji" data-emoji="{self.shortcode}">'
+ )
+ return self.fullcode
+
+
+class HashtagQuerySet(models.QuerySet):
+ def public(self):
+ public_q = models.Q(public=True)
+ if True: # Config.system.hashtag_unreviewed_are_public:
+ public_q |= models.Q(public__isnull=True)
+ return self.filter(public_q)
+
+ def hashtag_or_alias(self, hashtag: str):
+ return self.filter(
+ models.Q(hashtag=hashtag) | models.Q(aliases__contains=hashtag)
+ )
+
+
+class HashtagManager(models.Manager):
+ def get_queryset(self):
+ return HashtagQuerySet(self.model, using=self._db)
+
+ def public(self):
+ return self.get_queryset().public()
+
+ def hashtag_or_alias(self, hashtag: str):
+ return self.get_queryset().hashtag_or_alias(hashtag)
+
+
+class Hashtag(models.Model):
+ class Meta:
+ # managed = False
+ db_table = "activities_hashtag"
+
+ MAXIMUM_LENGTH = 100
+
+ # Normalized hashtag without the '#'
+ hashtag = models.SlugField(primary_key=True, max_length=100)
+
+ # Friendly display override
+ name_override = models.CharField(max_length=100, null=True, blank=True)
+
+ # Should this be shown in the public UI?
+ public = models.BooleanField(null=True)
+
+ # State of this Hashtag
+ # state = StateField(HashtagStates)
+ state = models.CharField(max_length=100, default="outdated")
+ state_changed = models.DateTimeField(auto_now_add=True)
+
+ # Metrics for this Hashtag
+ stats = models.JSONField(null=True, blank=True)
+ # Timestamp of last time the stats were updated
+ stats_updated = models.DateTimeField(null=True, blank=True)
+
+ # List of other hashtags that are considered similar
+ aliases = models.JSONField(null=True, blank=True)
+
+ created = models.DateTimeField(auto_now_add=True)
+ updated = models.DateTimeField(auto_now=True)
+
+ objects = HashtagManager()
+
+ class urls(urlman.Urls):
+ view = "/tags/{self.hashtag}/"
+ follow = "/tags/{self.hashtag}/follow/"
+ unfollow = "/tags/{self.hashtag}/unfollow/"
+ admin = "/admin/hashtags/"
+ admin_edit = "{admin}{self.hashtag}/"
+ admin_enable = "{admin_edit}enable/"
+ admin_disable = "{admin_edit}disable/"
+ timeline = "/tags/{self.hashtag}/"
+
+ hashtag_regex = re.compile(r"\B#([a-zA-Z0-9(_)]+\b)(?!;)")
+
+ def save(self, *args, **kwargs):
+ self.hashtag = self.hashtag.lstrip("#")
+ if self.name_override:
+ self.name_override = self.name_override.lstrip("#")
+ return super().save(*args, **kwargs)
+
+ @property
+ def display_name(self):
+ return self.name_override or self.hashtag
+
+ def __str__(self):
+ return self.display_name
+
+ def usage_months(self, num: int = 12) -> dict[date, int]:
+ """
+ Return the most recent num months of stats
+ """
+ if not self.stats:
+ return {}
+ results = {}
+ for key, val in self.stats.items():
+ parts = key.split("-")
+ if len(parts) == 2:
+ year = int(parts[0])
+ month = int(parts[1])
+ results[date(year, month, 1)] = val
+ return dict(sorted(results.items(), reverse=True)[:num])
+
+ def usage_days(self, num: int = 7) -> dict[date, int]:
+ """
+ Return the most recent num days of stats
+ """
+ if not self.stats:
+ return {}
+ results = {}
+ for key, val in self.stats.items():
+ parts = key.split("-")
+ if len(parts) == 3:
+ year = int(parts[0])
+ month = int(parts[1])
+ day = int(parts[2])
+ results[date(year, month, day)] = val
+ return dict(sorted(results.items(), reverse=True)[:num])
+
+ def to_mastodon_json(self, following: bool | None = None):
+ value = {
+ "name": self.hashtag,
+ "url": self.urls.view.full(), # type: ignore
+ "history": [],
+ }
+
+ if following is not None:
+ value["following"] = following
+
+ return value
+
+
+class PostInteraction(models.Model):
+ """
+ Handles both boosts and likes
+ """
+
+ class Types(models.TextChoices):
+ like = "like"
+ boost = "boost"
+ vote = "vote"
+ pin = "pin"
+
+ id = models.BigIntegerField(
+ primary_key=True,
+ default=Snowflake.generate_post_interaction,
+ )
+
+ # The state the boost is in
+ # state = StateField(PostInteractionStates)
+ state = models.CharField(max_length=100, default="new")
+ state_changed = models.DateTimeField(auto_now_add=True)
+
+ # The canonical object ID
+ object_uri = models.CharField(max_length=500, blank=True, null=True, unique=True)
+
+ # What type of interaction it is
+ type = models.CharField(max_length=100, choices=Types.choices)
+
+ # The user who boosted/liked/etc.
+ identity = models.ForeignKey(
+ "takahe.Identity",
+ on_delete=models.CASCADE,
+ related_name="interactions",
+ )
+
+ # The post that was boosted/liked/etc
+ post = models.ForeignKey(
+ "takahe.Post",
+ on_delete=models.CASCADE,
+ related_name="interactions",
+ )
+
+ # Used to store any interaction extra text value like the vote
+ # in the question/poll case
+ value = models.CharField(max_length=50, blank=True, null=True)
+
+ # When the activity was originally created (as opposed to when we received it)
+ # Mastodon only seems to send this for boosts, not likes
+ published = models.DateTimeField(default=timezone.now)
+
+ created = models.DateTimeField(auto_now_add=True)
+ updated = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ # managed = False
+ db_table = "activities_postinteraction"
+
+
+class Block(models.Model):
+ """
+ When one user (the source) mutes or blocks another (the target)
+ """
+
+ # state = StateField(BlockStates)
+ state = models.CharField(max_length=100, default="new")
+ state_changed = models.DateTimeField(auto_now_add=True)
+
+ source = models.ForeignKey(
+ "takahe.Identity",
+ on_delete=models.CASCADE,
+ related_name="outbound_blocks",
+ )
+
+ target = models.ForeignKey(
+ "takahe.Identity",
+ on_delete=models.CASCADE,
+ related_name="inbound_blocks",
+ )
+
+ uri = models.CharField(blank=True, null=True, max_length=500)
+
+ # If it is a mute, we will stop delivering any activities from target to
+ # source, but we will still deliver activities from source to target.
+ # A full block (mute=False) stops activities both ways.
+ mute = models.BooleanField()
+ include_notifications = models.BooleanField(default=False)
+
+ expires = models.DateTimeField(blank=True, null=True)
+ note = models.TextField(blank=True, null=True)
+
+ created = models.DateTimeField(auto_now_add=True)
+ updated = models.DateTimeField(auto_now=True)
+
+ class Meta:
+ # managed = False
+ db_table = "users_block"
+
+ def __str__(self):
+ return f"#{self.pk}: {self.source} blocks {self.target}"
+
+ ### Alternate fetchers/constructors ###
+
+ @classmethod
+ def maybe_get(
+ cls, source, target, mute=False, require_active=False
+ ) -> Optional["Block"]:
+ """
+ Returns a Block if it exists between source and target
+ """
+ try:
+ if require_active:
+                return cls.objects.filter(
+                    state__in=["new", "sent", "awaiting_expiry"]
+                ).get(source=source, target=target, mute=mute)
+ else:
+ return cls.objects.get(source=source, target=target, mute=mute)
+ except cls.DoesNotExist:
+ return None
+
+ @classmethod
+ def create_local_block(cls, source, target) -> "Block":
+ """
+ Creates or updates a full Block from a local Identity to the target
+ (which can be local or remote).
+ """
+ if not source.local:
+ raise ValueError("You cannot block from a remote Identity")
+ block = cls.maybe_get(source=source, target=target, mute=False)
+ if block is not None:
+            if block.state not in ["new", "sent", "awaiting_expiry"]:
+ block.state = BlockStates.new # type:ignore
+ block.save()
+ else:
+ with transaction.atomic():
+ block = cls.objects.create(
+ source=source,
+ target=target,
+ mute=False,
+ )
+ block.uri = source.actor_uri + f"block/{block.pk}/"
+ block.save()
+ return block
+
+ @classmethod
+ def create_local_mute(
+ cls,
+ source,
+ target,
+ duration=None,
+ include_notifications=False,
+ ) -> "Block":
+ """
+ Creates or updates a muting Block from a local Identity to the target
+ (which can be local or remote).
+ """
+ if not source.local:
+ raise ValueError("You cannot mute from a remote Identity")
+ block = cls.maybe_get(source=source, target=target, mute=True)
+ if block is not None:
+            if block.state not in ["new", "sent", "awaiting_expiry"]:
+ block.state = BlockStates.new # type:ignore
+ if duration:
+ block.expires = timezone.now() + datetime.timedelta(seconds=duration)
+ block.include_notifications = include_notifications
+ block.save()
+ else:
+ with transaction.atomic():
+ block = cls.objects.create(
+ source=source,
+ target=target,
+ mute=True,
+ include_notifications=include_notifications,
+ expires=(
+ timezone.now() + datetime.timedelta(seconds=duration)
+ if duration
+ else None
+ ),
+ )
+ block.uri = source.actor_uri + f"block/{block.pk}/"
+ block.save()
+ return block
diff --git a/takahe/tests.py b/takahe/tests.py
new file mode 100644
index 00000000..7ce503c2
--- /dev/null
+++ b/takahe/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/takahe/uris.py b/takahe/uris.py
new file mode 100644
index 00000000..e686108b
--- /dev/null
+++ b/takahe/uris.py
@@ -0,0 +1,89 @@
+import hashlib
+import sys
+from urllib.parse import urljoin
+
+from django.conf import settings
+from django.contrib.staticfiles.storage import staticfiles_storage
+
+
+class RelativeAbsoluteUrl:
+ """
+    Represents a URL that can have both "relative" and "absolute" forms
+    for use either locally or remotely.
+ """
+
+ absolute: str
+ relative: str
+
+ def __init__(self, absolute: str, relative: str | None = None):
+ if "://" not in absolute:
+ raise ValueError(f"Absolute URL {absolute!r} is not absolute!")
+ self.absolute = absolute
+ self.relative = relative or absolute
+
+
+class AutoAbsoluteUrl(RelativeAbsoluteUrl):
+ """
+ Automatically makes the absolute variant by using either settings.MAIN_DOMAIN
+ or a passed identity's URI domain.
+ """
+
+ def __init__(
+ self,
+ relative: str,
+ identity=None,
+ ):
+ self.relative = relative
+ if identity:
+ absolute_prefix = f"https://{identity.domain.uri_domain}/"
+ else:
+ absolute_prefix = f"https://{settings.MAIN_DOMAIN}/"
+ self.absolute = urljoin(absolute_prefix, self.relative)
+
+
+class ProxyAbsoluteUrl(AutoAbsoluteUrl):
+ """
+ AutoAbsoluteUrl variant for proxy paths, that also attaches a remote URI hash
+ plus extension to the end if it can.
+ """
+
+ def __init__(
+ self,
+ relative: str,
+ identity=None,
+ remote_url: str | None = None,
+ ):
+ if remote_url:
+ # See if there is a file extension we can grab
+ extension = "bin"
+ remote_filename = remote_url.split("/")[-1]
+ if "." in remote_filename:
+ extension = remote_filename.split(".")[-1]
+ # When provided, attach a hash of the remote URL
+ # SHA1 chosen as it generally has the best performance in modern python, and security is not a concern
+ # Hash truncation is generally fine, as in the typical use case the hash is scoped to the identity PK.
+ relative += f"{hashlib.sha1(remote_url.encode('ascii')).hexdigest()[:10]}.{extension}"
+ super().__init__(relative, identity)
+
+
+class StaticAbsoluteUrl(RelativeAbsoluteUrl):
+ """
+ Creates static URLs given only the static-relative path
+ """
+
+ def __init__(self, path: str):
+ try:
+ static_url = staticfiles_storage.url(path)
+ except ValueError:
+ # Suppress static issues during the first collectstatic
+ # Yes, I know it's a big hack! Pull requests welcome :)
+ if "collectstatic" in sys.argv:
+ super().__init__("https://example.com/")
+ return
+ raise
+ if "://" in static_url:
+ super().__init__(static_url)
+ else:
+ super().__init__(
+ urljoin(f"https://{settings.MAIN_DOMAIN}/", static_url), static_url
+ )
diff --git a/takahe/utils.py b/takahe/utils.py
new file mode 100644
index 00000000..f8498b9d
--- /dev/null
+++ b/takahe/utils.py
@@ -0,0 +1,486 @@
+from typing import TYPE_CHECKING
+
+from django.conf import settings
+
+from .models import *
+
+if TYPE_CHECKING:
+ from users.models import APIdentity
+ from users.models import User as NeoUser
+
+
+def _int(s: str):
+ try:
+ return int(s)
+ except:
+ return -1
+
+
+def _rating_to_emoji(score: int, star_mode=0):
+ """convert score(0~10) to mastodon star emoji code"""
+ if score is None or score == "" or score == 0:
+ return ""
+ solid_stars = score // 2
+ half_star = int(bool(score % 2))
+ empty_stars = 5 - solid_stars if not half_star else 5 - solid_stars - 1
+ if star_mode == 1:
+ emoji_code = "🌕" * solid_stars + "🌗" * half_star + "🌑" * empty_stars
+ else:
+ emoji_code = (
+ settings.STAR_SOLID * solid_stars
+ + settings.STAR_HALF * half_star
+ + settings.STAR_EMPTY * empty_stars
+ )
+ emoji_code = emoji_code.replace("::", ": :")
+ emoji_code = " " + emoji_code + " "
+ return emoji_code
+
+
+class Takahe:
+ Visibilities = Post.Visibilities
+
+ @staticmethod
+ def get_domain():
+ domain = settings.SITE_INFO["site_domain"]
+ d = Domain.objects.filter(domain=domain).first()
+ if not d:
+ logger.info(f"Creating takahe domain {domain}")
+ d = Domain.objects.create(
+ domain=domain,
+ local=True,
+ service_domain=None,
+ notes="NeoDB",
+ nodeinfo=None,
+ )
+ return d
+
+ @staticmethod
+ def get_node_name_for_domain(d: str):
+ domain = Domain.objects.filter(domain=d).first()
+ if domain and domain.nodeinfo:
+ return domain.nodeinfo.get("metadata", {}).get("nodeName")
+
+ @staticmethod
+ def init_identity_for_local_user(u: "NeoUser"):
+ """
+ When a new local NeoDB user is created,
+ create a takahe user with the NeoDB user pk,
+ create a takahe identity,
+ then create a NeoDB APIdentity with the takahe identity pk.
+ """
+ from users.models import APIdentity
+
+ if not u.username:
+ logger.warning(f"User {u} has no username")
+ return None
+ user = User.objects.filter(pk=u.pk).first()
+ handler = "@" + u.username
+ if not user:
+ logger.info(f"Creating takahe user {u}")
+ user = User.objects.create(pk=u.pk, email=handler)
+ else:
+ if user.email != handler:
+ logger.warning(f"Updating takahe user {u} email to {handler}")
+ user.email = handler
+ user.save()
+ domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"])
+ identity = Identity.objects.filter(username=u.username, local=True).first()
+ if not identity:
+ logger.info(f"Creating takahe identity {u}@{domain}")
+ identity = Identity.objects.create(
+ actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/",
+ username=u.username,
+ domain=domain,
+ name=u.username,
+ local=True,
+ discoverable=not u.preference.no_anonymous_view,
+ )
+ identity.generate_keypair()
+ if not user.identities.filter(pk=identity.pk).exists():
+ user.identities.add(identity)
+ apidentity = APIdentity.objects.filter(pk=identity.pk).first()
+ if not apidentity:
+ logger.info(f"Creating APIdentity for {identity}")
+ apidentity = APIdentity.objects.create(
+ user=u,
+ id=identity.pk,
+ local=True,
+ username=u.username,
+ domain_name=domain.domain,
+ deleted=identity.deleted,
+ )
+ elif apidentity.username != identity.username:
+ logger.warning(
+ f"Updating APIdentity {apidentity} username to {identity.username}"
+ )
+ apidentity.username = identity.username
+ apidentity.save()
+ if u.identity != apidentity:
+ logger.warning(f"Linking user {u} identity to {apidentity}")
+ u.identity = apidentity
+ u.save(update_fields=["identity"])
+ return apidentity
+
+ @staticmethod
+ def get_identity(pk: int):
+ return Identity.objects.get(pk=pk)
+
+ @staticmethod
+ def get_identity_by_local_user(u: "NeoUser"):
+ return (
+ Identity.objects.filter(pk=u.identity.pk, local=True).first()
+ if u and u.is_authenticated and u.identity
+ else None
+ )
+
+ @staticmethod
+ def get_or_create_apidentity(identity: Identity):
+ from users.models import APIdentity
+
+ apid = APIdentity.objects.filter(pk=identity.pk).first()
+ if not apid:
+ if identity.local:
+ raise ValueError(f"local takahe identity {identity} missing APIdentity")
+ if not identity.domain:
+ raise ValueError(f"remote takahe identity {identity} missing domain")
+ apid = APIdentity.objects.create(
+ id=identity.pk,
+ local=False,
+ username=identity.username,
+ domain_name=identity.domain.domain,
+ deleted=identity.deleted,
+ )
+ return apid
+
+ @staticmethod
+ def get_local_user_by_identity(identity: Identity):
+ from users.models import User as NeoUser
+
+ return NeoUser.objects.get(identity_id=identity.pk) if identity.local else None
+
+ @staticmethod
+ def get_following_ids(identity_pk: int):
+ targets = Follow.objects.filter(
+ source_id=identity_pk, state="accepted"
+ ).values_list("target", flat=True)
+ return list(targets)
+
+ @staticmethod
+    def get_follower_ids(identity_pk: int):
+        sources = Follow.objects.filter(
+            target_id=identity_pk, state="accepted"
+        ).values_list("source", flat=True)
+        return list(sources)
+
+ @staticmethod
+ def get_following_request_ids(identity_pk: int):
+ targets = Follow.objects.filter(
+ source_id=identity_pk, state="pending_approval"
+ ).values_list("target", flat=True)
+ return list(targets)
+
+ @staticmethod
+    def get_requested_follower_ids(identity_pk: int):
+        sources = Follow.objects.filter(
+            target_id=identity_pk, state="pending_approval"
+        ).values_list("source", flat=True)
+        return list(sources)
+
+ @staticmethod
+ def update_follow_state(
+ source_pk: int, target_pk: int, from_states: list[str], to_state: str
+ ):
+ follow = Follow.objects.filter(source_id=source_pk, target_id=target_pk).first()
+ if (
+ follow
+ and (not from_states or follow.state in from_states)
+ and follow.state != to_state
+ ):
+ follow.state = to_state
+ follow.save()
+ return follow
+
+ @staticmethod
+ def follow(source_pk: int, target_pk: int):
+ try:
+ follow = Follow.objects.get(source_id=source_pk, target_id=target_pk)
+ if follow.state != "accepted":
+ follow.state = "unrequested"
+ follow.save()
+ except Follow.DoesNotExist:
+ source = Identity.objects.get(pk=source_pk)
+ follow = Follow.objects.create(
+ source_id=source_pk,
+ target_id=target_pk,
+ boosts=True,
+ uri="",
+ state="unrequested",
+ )
+ follow.uri = source.actor_uri + f"follow/{follow.pk}/"
+ follow.save()
+
+ @staticmethod
+ def unfollow(source_pk: int, target_pk: int):
+ Takahe.update_follow_state(source_pk, target_pk, [], "undone")
+ # InboxMessage.create_internal(
+ # {
+ # "type": "ClearTimeline",
+ # "object": target_identity.pk,
+ # "actor": self.identity.pk,
+ # }
+ # )
+
+ @staticmethod
+ def accept_follow_request(source_pk: int, target_pk: int):
+ Takahe.update_follow_state(source_pk, target_pk, [], "accepting")
+
+ @staticmethod
+ def reject_follow_request(source_pk: int, target_pk: int):
+ Takahe.update_follow_state(source_pk, target_pk, [], "rejecting")
+
+ @staticmethod
+ def get_muting_ids(identity_pk: int) -> list[int]:
+ targets = Block.objects.filter(
+ source_id=identity_pk,
+ mute=True,
+ state__in=["new", "sent", "awaiting_expiry"],
+ ).values_list("target", flat=True)
+ return list(targets)
+
+ @staticmethod
+ def get_blocking_ids(identity_pk: int) -> list[int]:
+ targets = Block.objects.filter(
+ source_id=identity_pk,
+ mute=False,
+ state__in=["new", "sent", "awaiting_expiry"],
+ ).values_list("target", flat=True)
+ return list(targets)
+
+ @staticmethod
+ def get_rejecting_ids(identity_pk: int) -> list[int]:
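+        # union of identities this one blocks and identities blocking it (mutes excluded);
+        # a block in either direction rejects interaction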
+ pks1 = Block.objects.filter(
+ source_id=identity_pk,
+ mute=False,
+ state__in=["new", "sent", "awaiting_expiry"],
+ ).values_list("target", flat=True)
+ pks2 = Block.objects.filter(
+ target_id=identity_pk,
+ mute=False,
+ state__in=["new", "sent", "awaiting_expiry"],
+ ).values_list("source", flat=True)
+ return list(set(list(pks1) + list(pks2)))
+
+ @staticmethod
+ def block_or_mute(source_pk: int, target_pk: int, is_mute: bool):
+ source = Identity.objects.get(pk=source_pk)
+ if not source.local:
+ raise ValueError(f"Cannot block/mute from remote identity {source}")
+ with transaction.atomic():
+ block, _ = Block.objects.update_or_create(
+ defaults={"state": "new"},
+ source_id=source_pk,
+ target_id=target_pk,
+ mute=is_mute,
+ )
+ if block.state != "new" or not block.uri:
+ block.state = "new"
+ block.uri = source.actor_uri + f"block/{block.pk}/"
+ block.save()
+ if not is_mute:
+ Takahe.unfollow(source_pk, target_pk)
+ Takahe.reject_follow_request(target_pk, source_pk)
+ return block
+
+ @staticmethod
+ def undo_block_or_mute(source_pk: int, target_pk: int, is_mute: bool):
+ Block.objects.filter(
+ source_id=source_pk, target_id=target_pk, mute=is_mute
+ ).update(state="undone")
+
+ @staticmethod
+ def block(source_pk: int, target_pk: int):
+ return Takahe.block_or_mute(source_pk, target_pk, False)
+
+ @staticmethod
+ def unblock(source_pk: int, target_pk: int):
+ return Takahe.undo_block_or_mute(source_pk, target_pk, False)
+
+ @staticmethod
+ def mute(source_pk: int, target_pk: int):
+ return Takahe.block_or_mute(source_pk, target_pk, True)
+
+ @staticmethod
+ def unmute(source_pk: int, target_pk: int):
+ return Takahe.undo_block_or_mute(source_pk, target_pk, True)
+
+ @staticmethod
+ def _force_state_cycle(): # for unit testing only
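+        # roughly emulates Takahe's stator workers advancing Follow/Block state machines,
+        # so tests can assert on settled "accepted"/"sent" states without background workers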
+ Follow.objects.filter(
+ state__in=["rejecting", "undone", "pending_removal"]
+ ).delete()
+ Follow.objects.all().update(state="accepted")
+ Block.objects.filter(state="new").update(state="sent")
+ Block.objects.exclude(state="sent").delete()
+
+ @staticmethod
+ def post(
+ author_pk: int,
+        pre_content: str,
+ content: str,
+ visibility: Visibilities,
+ data: dict | None = None,
+ post_pk: int | None = None,
+ post_time: datetime.datetime | None = None,
+ ) -> int | None:
+ identity = Identity.objects.get(pk=author_pk)
+ post = (
+ Post.objects.filter(author=identity, pk=post_pk).first()
+ if post_pk
+ else None
+ )
+ if post:
+ post.edit_local(
+                pre_content, content, visibility=visibility, type_data=data
+ )
+ else:
+ post = Post.create_local(
+ identity,
+                pre_content,
+ content,
+ visibility=visibility,
+ type_data=data,
+ published=post_time,
+ )
+ return post.pk if post else None
+
+ @staticmethod
+ def get_post_url(post_pk: int) -> str | None:
+ post = Post.objects.filter(pk=post_pk).first() if post_pk else None
+ return post.object_uri if post else None
+
+ @staticmethod
+ def delete_mark(mark):
+ if mark.shelfmember and mark.shelfmember.post_id:
+ Post.objects.filter(pk=mark.shelfmember.post_id).update(state="deleted")
+
+ @staticmethod
+ def post_mark(mark, share_as_new_post: bool):
+ from catalog.common import ItemCategory
+ from takahe.utils import Takahe
+
+ user = mark.owner.user
+ tags = (
+ "\n"
+ + user.preference.mastodon_append_tag.replace(
+ "[category]", str(ItemCategory(mark.item.category).label)
+ )
+ if user.preference.mastodon_append_tag
+ else ""
+ )
+ stars = _rating_to_emoji(mark.rating_grade, 0)
+ item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}"
+
+        pre_content = (
+            f'{mark.action_label}<a href="{item_link}">《{mark.item.display_title}》</a>'
+        )
+ content = f"{stars}\n{mark.comment_text or ''}{tags}"
+ data = {
+ "object": {
+ "relatedWith": [mark.item.ap_object_ref, mark.shelfmember.ap_object]
+ }
+ }
+ if mark.comment:
+ data["object"]["relatedWith"].append(mark.comment.ap_object)
+ if mark.rating:
+ data["object"]["relatedWith"].append(mark.rating.ap_object)
+ if mark.visibility == 1:
+ v = Takahe.Visibilities.followers
+ elif mark.visibility == 2:
+ v = Takahe.Visibilities.mentioned
+ elif user.preference.mastodon_publish_public:
+ v = Takahe.Visibilities.public
+ else:
+ v = Takahe.Visibilities.unlisted
+ post_pk = Takahe.post(
+ mark.owner.pk,
+            pre_content,
+ content,
+ v,
+ data,
+ None if share_as_new_post else mark.shelfmember.post_id,
+ mark.shelfmember.created_time,
+ )
+ if post_pk != mark.shelfmember.post_id:
+ mark.shelfmember.post_id = post_pk
+ mark.shelfmember.save(update_fields=["post_id"])
+ if mark.comment and post_pk != mark.comment.post_id:
+ mark.comment.post_id = post_pk
+ mark.comment.save(update_fields=["post_id"])
+ if mark.rating and post_pk != mark.rating.post_id:
+ mark.rating.post_id = post_pk
+ mark.rating.save(update_fields=["post_id"])
+
+ @staticmethod
+ def interact_post(post_pk: int, identity_pk: int, type: str):
+ post = Post.objects.filter(pk=post_pk).first()
+ if not post:
+ logger.warning(f"Cannot find post {post_pk}")
+ return
+ interaction = PostInteraction.objects.get_or_create(
+ type=type,
+ identity_id=identity_pk,
+ post=post,
+ )[0]
+ if interaction.state not in ["new", "fanned_out"]:
+ interaction.state = "new"
+ interaction.save()
+ post.calculate_stats()
+ return interaction
+
+ @staticmethod
+ def uninteract_post(post_pk: int, identity_pk: int, type: str):
+ post = Post.objects.filter(pk=post_pk).first()
+ if not post:
+ logger.warning(f"Cannot find post {post_pk}")
+ return
+ for interaction in PostInteraction.objects.filter(
+ type=type,
+ identity_id=identity_pk,
+ post=post,
+ ):
+ interaction.state = "undone"
+ interaction.save()
+ post.calculate_stats()
+
+ @staticmethod
+ def like_post(post_pk: int, identity_pk: int):
+ return Takahe.interact_post(post_pk, identity_pk, "like")
+
+ @staticmethod
+ def unlike_post(post_pk: int, identity_pk: int):
+ return Takahe.uninteract_post(post_pk, identity_pk, "like")
+
+ @staticmethod
+ def post_liked_by(post_pk: int, identity_pk: int) -> bool:
+ interaction = Takahe.get_user_interaction(post_pk, identity_pk, "like")
+ return interaction is not None and interaction.state in ["new", "fanned_out"]
+
+ @staticmethod
+ def get_user_interaction(post_pk: int, identity_pk: int, type: str):
+ post = Post.objects.filter(pk=post_pk).first()
+ if not post:
+ logger.warning(f"Cannot find post {post_pk}")
+ return None
+ return PostInteraction.objects.filter(
+ type=type,
+ identity_id=identity_pk,
+ post=post,
+ ).first()
+
+ @staticmethod
+ def get_post_stats(post_pk: int) -> dict:
+ post = Post.objects.filter(pk=post_pk).first()
+ if not post:
+ logger.warning(f"Cannot find post {post_pk}")
+ return {}
+ return post.stats or {}
diff --git a/takahe/views.py b/takahe/views.py
new file mode 100644
index 00000000..91ea44a2
--- /dev/null
+++ b/takahe/views.py
@@ -0,0 +1,3 @@
+from django.shortcuts import render
+
+# Create your views here.
diff --git a/users/account.py b/users/account.py
index 1f6499bb..5a6c1eb4 100644
--- a/users/account.py
+++ b/users/account.py
@@ -396,6 +396,7 @@ def register(request):
)
messages.add_message(request, messages.INFO, _("已发送验证邮件,请查收。"))
if username_changed:
+        request.user.initialize()
messages.add_message(request, messages.INFO, _("用户名已设置。"))
if email_cleared:
messages.add_message(request, messages.INFO, _("电子邮件地址已取消关联。"))
@@ -480,9 +481,9 @@ def auth_logout(request):
def clear_data_task(user_id):
user = User.objects.get(pk=user_id)
user_str = str(user)
- remove_data_by_user(user)
+ if user.identity:
+ remove_data_by_user(user.identity)
user.clear()
- user.save()
logger.warning(f"User {user_str} data cleared.")
diff --git a/users/management/commands/refresh_following.py b/users/management/commands/refresh_following.py
deleted file mode 100644
index c57329d2..00000000
--- a/users/management/commands/refresh_following.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from datetime import timedelta
-
-from django.core.management.base import BaseCommand
-from django.utils import timezone
-from tqdm import tqdm
-
-from users.models import User
-
-
-class Command(BaseCommand):
- help = "Refresh following data for all users"
-
- def handle(self, *args, **options):
- count = 0
- for user in tqdm(User.objects.all()):
- user.following = user.merged_following_ids()
- if user.following:
- count += 1
- user.save(update_fields=["following"])
-
- print(f"{count} users updated")
diff --git a/users/migrations/0012_apidentity.py b/users/migrations/0012_apidentity.py
new file mode 100644
index 00000000..ab04b30e
--- /dev/null
+++ b/users/migrations/0012_apidentity.py
@@ -0,0 +1,63 @@
+# Generated by Django 4.2.4 on 2023-08-09 13:37
+
+import django.db.models.deletion
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+ # replaces = [
+ # ("users", "0012_user_local"),
+ # ("users", "0013_user_identity"),
+ # ("users", "0014_remove_user_identity_apidentity_user"),
+ # ("users", "0015_alter_apidentity_user"),
+ # ]
+
+ dependencies = [
+ ("users", "0011_preference_hidden_categories"),
+ ("takahe", "0001_initial"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="APIdentity",
+ fields=[
+ (
+ "id",
+ models.BigAutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("local", models.BooleanField()),
+ ("username", models.CharField(blank=True, max_length=500, null=True)),
+ (
+ "domain_name",
+ models.CharField(blank=True, max_length=500, null=True),
+ ),
+ ("deleted", models.DateTimeField(blank=True, null=True)),
+ (
+ "user",
+ models.OneToOneField(
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="identity",
+ to=settings.AUTH_USER_MODEL,
+ ),
+ ),
+ ],
+ options={
+ "indexes": [
+ models.Index(
+ fields=["local", "username"],
+ name="users_apide_local_2d8170_idx",
+ ),
+ models.Index(
+ fields=["domain_name", "username"],
+ name="users_apide_domain__53ffa5_idx",
+ ),
+ ],
+ },
+ ),
+ ]
diff --git a/users/migrations/0013_init_identity.py b/users/migrations/0013_init_identity.py
new file mode 100644
index 00000000..551faeee
--- /dev/null
+++ b/users/migrations/0013_init_identity.py
@@ -0,0 +1,77 @@
+# Generated by Django 4.2.4 on 2023-08-09 16:54
+
+from django.conf import settings
+from django.db import migrations, models, transaction
+from loguru import logger
+from tqdm import tqdm
+
+from takahe.models import Domain as TakaheDomain
+from takahe.models import Identity as TakaheIdentity
+from takahe.models import User as TakaheUser
+
+domain = settings.SITE_INFO["site_domain"]
+service_domain = settings.SITE_INFO.get("site_service_domain")
+
+
+def init_domain(apps, schema_editor):
+ d = TakaheDomain.objects.filter(domain=domain).first()
+ if not d:
+ logger.info(f"Creating takahe domain {domain}")
+ TakaheDomain.objects.create(
+ domain=domain,
+ local=True,
+ service_domain=service_domain,
+ notes="NeoDB",
+ nodeinfo={},
+ )
+ else:
+ logger.info(f"Takahe domain {domain} already exists")
+
+
+def init_identity(apps, schema_editor):
+ User = apps.get_model("users", "User")
+ APIdentity = apps.get_model("users", "APIdentity")
+ tdomain = TakaheDomain.objects.filter(domain=domain).first()
+ if User.objects.filter(username__isnull=True).exists():
+ raise ValueError("null username detected, aborting migration")
+ if TakaheUser.objects.exists():
+ raise ValueError("existing Takahe users detected, aborting migration")
+ if TakaheIdentity.objects.exists():
+ raise ValueError("existing Takahe identities detected, aborting migration")
+ if APIdentity.objects.exists():
+ raise ValueError("existing APIdentity data detected, aborting migration")
+ logger.info(f"Creating takahe users/identities")
+ for user in tqdm(User.objects.all()):
+ username = user.username
+ handler = "@" + username
+ identity = APIdentity.objects.create(
+ pk=user.pk,
+ user=user,
+ local=True,
+ username=username,
+ domain_name=domain,
+ deleted=None if user.is_active else user.updated,
+ )
+ takahe_user = TakaheUser.objects.create(pk=user.pk, email=handler)
+ takahe_identity = TakaheIdentity.objects.create(
+ pk=user.pk,
+ actor_uri=f"https://{service_domain or domain}/@{username}@{domain}/",
+ username=username,
+ domain=tdomain,
+ name=username,
+ local=True,
+ discoverable=not user.preference.no_anonymous_view,
+ )
+ takahe_user.identities.add(takahe_identity)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("users", "0012_apidentity"),
+ ]
+
+ operations = [
+ migrations.RunPython(init_domain),
+ migrations.RunPython(init_identity),
+ ]
diff --git a/users/models/__init__.py b/users/models/__init__.py
index d1e45854..14d42a2e 100644
--- a/users/models/__init__.py
+++ b/users/models/__init__.py
@@ -1,3 +1,4 @@
+from .apidentity import APIdentity
from .preference import Preference
from .report import Report
from .user import User
diff --git a/users/models/apidentity.py b/users/models/apidentity.py
new file mode 100644
index 00000000..a22c68d2
--- /dev/null
+++ b/users/models/apidentity.py
@@ -0,0 +1,192 @@
+from functools import cached_property
+
+from django.conf import settings
+from django.db import models
+from loguru import logger
+
+from takahe.utils import Takahe
+
+from .user import User
+
+
+class APIdentity(models.Model):
+ """
+    An identity (actor) in the ActivityPub network.
+
+    This model is a 1:1 mapping to the Takahe Identity model.
+ """
+
+ user = models.OneToOneField("User", models.CASCADE, related_name="identity")
+ local = models.BooleanField()
+ username = models.CharField(max_length=500, blank=True, null=True)
+ domain_name = models.CharField(max_length=500, blank=True, null=True)
+ deleted = models.DateTimeField(null=True, blank=True)
+
+ class Meta:
+ indexes = [
+ models.Index(fields=["local", "username"]),
+ models.Index(fields=["domain_name", "username"]),
+ ]
+
+ @cached_property
+ def takahe_identity(self):
+ return Takahe.get_identity(self.pk)
+
+ @property
+ def is_active(self):
+ return self.user.is_active and self.takahe_identity.deleted is None
+
+ @property
+ def name(self):
+ return self.takahe_identity.name
+
+ @property
+ def discoverable(self):
+ return self.takahe_identity.discoverable
+
+ @property
+ def actor_uri(self):
+ return self.takahe_identity.actor_uri
+
+ @property
+ def icon_uri(self):
+ return self.takahe_identity.icon_uri
+
+ @property
+ def display_name(self):
+ return self.takahe_identity.name
+
+ @property
+ def avatar(self):
+        return self.user.avatar  # FIXME
+
+ @property
+ def url(self):
+ return f"/users/{self.handler}/"
+
+ @property
+ def preference(self):
+ return self.user.preference
+
+ @property
+ def handler(self):
+ if self.local:
+ return self.username
+ else:
+ return f"{self.username}@{self.domain_name}"
+
+ @property
+ def following(self):
+ return Takahe.get_following_ids(self.pk)
+
+ @property
+ def muting(self):
+ return Takahe.get_muting_ids(self.pk)
+
+ @property
+ def blocking(self):
+ return Takahe.get_blocking_ids(self.pk)
+
+ @property
+ def rejecting(self):
+ return Takahe.get_rejecting_ids(self.pk)
+
+ @property
+ def ignoring(self):
+ return self.muting + self.rejecting
+
+ def follow(self, target: "APIdentity"):
+ Takahe.follow(self.pk, target.pk)
+
+ def unfollow(self, target: "APIdentity"): # this also cancels follow request
+ Takahe.unfollow(self.pk, target.pk)
+
+    def requested_followers(self):
+        return Takahe.get_requested_follower_ids(self.pk)
+
+    def following_request(self):
+        return Takahe.get_following_request_ids(self.pk)
+
+ def accept_follow_request(self, target: "APIdentity"):
+ Takahe.accept_follow_request(self.pk, target.pk)
+
+ def reject_follow_request(self, target: "APIdentity"):
+ Takahe.reject_follow_request(self.pk, target.pk)
+
+ def block(self, target: "APIdentity"):
+ Takahe.block(self.pk, target.pk)
+
+ def unblock(self, target: "APIdentity"):
+ Takahe.unblock(self.pk, target.pk)
+
+ def mute(self, target: "APIdentity"):
+ Takahe.mute(self.pk, target.pk)
+
+ def unmute(self, target: "APIdentity"):
+ Takahe.unmute(self.pk, target.pk)
+
+ def is_rejecting(self, target: "APIdentity"):
+ return self != target and (
+ target.is_blocked_by(self) or target.is_blocking(self)
+ )
+
+ def is_blocking(self, target: "APIdentity"):
+ return target.pk in self.blocking
+
+ def is_blocked_by(self, target: "APIdentity"):
+ return target.is_blocking(self)
+
+ def is_muting(self, target: "APIdentity"):
+ return target.pk in self.muting
+
+ def is_following(self, target: "APIdentity"):
+ return target.pk in self.following
+
+ def is_followed_by(self, target: "APIdentity"):
+ return target.is_following(self)
+
+ def is_visible_to_user(self, viewing_user: User):
+ return (
+ viewing_user.is_authenticated
+ or viewing_user == self.user
+ or (
+ not self.is_blocking(viewing_user.identity)
+ and not self.is_blocked_by(viewing_user.identity)
+ )
+ )
+
+ @classmethod
+ def get_by_handler(cls, handler: str) -> "APIdentity":
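+        # accepted handler forms: "username" (local), "user@mastodon.site" (mastodon-bound user),
+        # and "@user@domain" (remote identity)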
+ s = handler.split("@")
+ if len(s) == 1:
+ return cls.objects.get(username=s[0], local=True, deleted__isnull=True)
+ elif len(s) == 2:
+ return cls.objects.get(
+ user__mastodon_username=s[0],
+ user__mastodon_site=s[1],
+ deleted__isnull=True,
+ )
+        elif len(s) == 3 and s[0] == "":
+            return cls.objects.get(
+                username=s[1], domain_name=s[2], local=False, deleted__isnull=True
+            )
+ else:
+ raise cls.DoesNotExist(f"Invalid handler {handler}")
+
+ @cached_property
+ def activity_manager(self):
+ from social.models import ActivityManager
+
+ return ActivityManager(self)
+
+ @cached_property
+ def shelf_manager(self):
+ from journal.models import ShelfManager
+
+ return ShelfManager(self)
+
+ @cached_property
+ def tag_manager(self):
+ from journal.models import TagManager
+
+ return TagManager(self)
diff --git a/users/models/preference.py b/users/models/preference.py
index 6cc96ef9..ac5a0b45 100644
--- a/users/models/preference.py
+++ b/users/models/preference.py
@@ -20,6 +20,7 @@ from loguru import logger
from common.utils import GenerateDateUUIDMediaFilePath
from management.models import Announcement
from mastodon.api import *
+from takahe.utils import Takahe
from .user import User
diff --git a/users/models/report.py b/users/models/report.py
index caabd49c..4a65d2a2 100644
--- a/users/models/report.py
+++ b/users/models/report.py
@@ -1,24 +1,9 @@
-import hashlib
-import re
-from functools import cached_property
-
from django.conf import settings
-from django.contrib.auth.models import AbstractUser
-from django.core import validators
-from django.core.exceptions import ValidationError
-from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
-from django.db.models import F, Q, Value
-from django.db.models.functions import Concat, Lower
-from django.templatetags.static import static
-from django.urls import reverse
-from django.utils import timezone
-from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
from loguru import logger
from common.utils import GenerateDateUUIDMediaFilePath
-from management.models import Announcement
from mastodon.api import *
from .user import User
diff --git a/users/models/user.py b/users/models/user.py
index 7f77db0c..68a8c322 100644
--- a/users/models/user.py
+++ b/users/models/user.py
@@ -5,10 +5,9 @@ from typing import TYPE_CHECKING
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.validators import UnicodeUsernameValidator
-from django.core import validators
from django.core.exceptions import ValidationError
from django.db import models
-from django.db.models import F, Q, Value
+from django.db.models import F, Manager, Q, Value
from django.db.models.functions import Concat, Lower
from django.templatetags.static import static
from django.urls import reverse
@@ -19,8 +18,10 @@ from loguru import logger
from management.models import Announcement
from mastodon.api import *
+from takahe.utils import Takahe
if TYPE_CHECKING:
+ from .apidentity import APIdentity
from .preference import Preference
_RESERVED_USERNAMES = [
@@ -48,6 +49,7 @@ class UsernameValidator(UnicodeUsernameValidator):
class User(AbstractUser):
+ identity: "APIdentity"
preference: "Preference"
username_validator = UsernameValidator()
username = models.CharField(
@@ -142,15 +144,6 @@ class User(AbstractUser):
),
]
- @staticmethod
- def register(**param):
- from .preference import Preference
-
- new_user = User(**param)
- new_user.save()
- Preference.objects.create(user=new_user)
- return new_user
-
@cached_property
def mastodon_acct(self):
return (
@@ -185,7 +178,9 @@ class User(AbstractUser):
@property
def handler(self):
- return self.mastodon_acct or self.username or f"~{self.pk}"
+ return (
+ f"{self.username}" if self.username else self.mastodon_acct or f"~{self.pk}"
+ )
@property
def url(self):
@@ -194,105 +189,6 @@ class User(AbstractUser):
def __str__(self):
return f'{self.pk}:{self.username or ""}:{self.mastodon_acct}'
- @property
- def ignoring(self):
- return self.muting + self.rejecting
-
- def follow(self, target: "User"):
- if (
- target is None
- or target.locked
- or self.is_following(target)
- or self.is_blocking(target)
- or self.is_blocked_by(target)
- ):
- return False
- self.local_following.add(target)
- self.following.append(target.pk)
- self.save(update_fields=["following"])
- return True
-
- def unfollow(self, target: "User"):
- if target and target in self.local_following.all():
- self.local_following.remove(target)
- if (
- target.pk in self.following
- and target.mastodon_acct not in self.mastodon_following
- ):
- self.following.remove(target.pk)
- self.save(update_fields=["following"])
- return True
- return False
-
- def remove_follower(self, target: "User"):
- if target is None or self not in target.local_following.all():
- return False
- target.local_following.remove(self)
- if (
- self.pk in target.following
- and self.mastodon_acct not in target.mastodon_following
- ):
- target.following.remove(self.pk)
- target.save(update_fields=["following"])
- return True
-
- def block(self, target: "User"):
- if target is None or target in self.local_blocking.all():
- return False
- self.local_blocking.add(target)
- if target.pk in self.following:
- self.following.remove(target.pk)
- self.save(update_fields=["following"])
- if self.pk in target.following:
- target.following.remove(self.pk)
- target.save(update_fields=["following"])
- if target in self.local_following.all():
- self.local_following.remove(target)
- if self in target.local_following.all():
- target.local_following.remove(self)
- if target.pk not in self.rejecting:
- self.rejecting.append(target.pk)
- self.save(update_fields=["rejecting"])
- if self.pk not in target.rejecting:
- target.rejecting.append(self.pk)
- target.save(update_fields=["rejecting"])
- return True
-
- def unblock(self, target: "User"):
- if target and target in self.local_blocking.all():
- self.local_blocking.remove(target)
- if not self.is_blocked_by(target):
- if target.pk in self.rejecting:
- self.rejecting.remove(target.pk)
- self.save(update_fields=["rejecting"])
- if self.pk in target.rejecting:
- target.rejecting.remove(self.pk)
- target.save(update_fields=["rejecting"])
- return True
- return False
-
- def mute(self, target: "User"):
- if (
- target is None
- or target in self.local_muting.all()
- or target.mastodon_acct in self.mastodon_mutes
- ):
- return False
- self.local_muting.add(target)
- if target.pk not in self.muting:
- self.muting.append(target.pk)
- self.save()
- return True
-
- def unmute(self, target: "User"):
- if target and target in self.local_muting.all():
- self.local_muting.remove(target)
- if target.pk in self.muting:
- self.muting.remove(target.pk)
- self.save()
- return True
- return False
-
def clear(self):
if self.mastodon_site == "removed" and not self.is_active:
return
@@ -313,40 +209,13 @@ class User(AbstractUser):
self.mastodon_blocks = []
self.mastodon_domain_blocks = []
self.mastodon_account = {}
+ self.save()
+ self.identity.deleted = timezone.now()
+ self.identity.save()
- def merge_relationships(self):
- self.muting = self.merged_muting_ids()
- self.rejecting = self.merged_rejecting_ids()
- # caculate following after rejecting is merged
- self.following = self.merged_following_ids()
-
- @classmethod
- def merge_rejected_by(cls):
- """
- Caculate rejecting field to include blocked by for external users
- Should be invoked after invoking merge_relationships() for all users
- """
- # FIXME this is quite inifficient, should only invoked in async task
- external_users = list(
- cls.objects.filter(mastodon_username__isnull=False, is_active=True)
- )
- reject_changed = []
- follow_changed = []
- for u in external_users:
- for v in external_users:
- if v.pk in u.rejecting and u.pk not in v.rejecting:
- v.rejecting.append(u.pk)
- if v not in reject_changed:
- reject_changed.append(v)
- if u.pk in v.following:
- v.following.remove(u.pk)
- if v not in follow_changed:
- follow_changed.append(v)
- for u in reject_changed:
- u.save(update_fields=["rejecting"])
- for u in follow_changed:
- u.save(update_fields=["following"])
- return len(follow_changed) + len(reject_changed)
+ def sync_relationships(self):
+ # FIXME
+ pass
def refresh_mastodon_data(self):
"""Try refresh account data from mastodon server, return true if refreshed successfully, note it will not save to db"""
@@ -390,112 +259,13 @@ class User(AbstractUser):
self.mastodon_domain_blocks = get_related_acct_list(
self.mastodon_site, self.mastodon_token, "/api/v1/domain_blocks"
)
- self.merge_relationships()
+ self.sync_relationships()
updated = True
elif code == 401:
logger.error(f"Refresh mastodon data error 401 for {self}")
self.mastodon_token = ""
return updated
- def merged_following_ids(self):
- fl = []
- for m in self.mastodon_following:
- target = User.get(m)
- if target and (
- (not target.mastodon_locked)
- or self.mastodon_acct in target.mastodon_followers
- ):
- fl.append(target.pk)
- for user in self.local_following.all():
- if user.pk not in fl and not user.locked and not user.is_blocking(self):
- fl.append(user.pk)
- fl = [x for x in fl if x not in self.rejecting]
- return sorted(fl)
-
- def merged_muting_ids(self):
- external_muting_user_ids = list(
- User.objects.all()
- .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site"))
- .filter(acct__in=self.mastodon_mutes)
- .values_list("pk", flat=True)
- )
- l = list(
- set(
- external_muting_user_ids
- + list(self.local_muting.all().values_list("pk", flat=True))
- )
- )
- return sorted(l)
-
- def merged_rejecting_ids(self):
- domain_blocked_user_ids = list(
- User.objects.filter(
- mastodon_site__in=self.mastodon_domain_blocks
- ).values_list("pk", flat=True)
- )
- external_blocking_user_ids = list(
- User.objects.all()
- .annotate(acct=Concat("mastodon_username", Value("@"), "mastodon_site"))
- .filter(acct__in=self.mastodon_blocks)
- .values_list("pk", flat=True)
- )
- l = list(
- set(
- domain_blocked_user_ids
- + external_blocking_user_ids
- + list(self.local_blocking.all().values_list("pk", flat=True))
- + list(self.local_blocked_by.all().values_list("pk", flat=True)) # type: ignore
- + list(self.local_muting.all().values_list("pk", flat=True))
- )
- )
- return sorted(l)
-
- def is_blocking(self, target):
- return (
- (
- target in self.local_blocking.all()
- or target.mastodon_acct in self.mastodon_blocks
- or target.mastodon_site in self.mastodon_domain_blocks
- )
- if target.is_authenticated
- else self.preference.no_anonymous_view
- )
-
- def is_blocked_by(self, target):
- return target.is_authenticated and target.is_blocking(self)
-
- def is_muting(self, target):
- return target.pk in self.muting or target.mastodon_acct in self.mastodon_mutes
-
- def is_following(self, target):
- return (
- self.mastodon_acct in target.mastodon_followers
- if target.locked
- else target.pk in self.following
- # or target.mastodon_acct in self.mastodon_following
- # or self.mastodon_acct in target.mastodon_followers
- )
-
- def is_followed_by(self, target):
- return target.is_following(self)
-
- def get_mark_for_item(self, item):
- params = {item.__class__.__name__.lower() + "_id": item.id, "owner": self}
- mark = item.mark_class.objects.filter(**params).first()
- return mark
-
- def get_max_visibility(self, viewer):
- if not viewer.is_authenticated:
- return 0
- elif viewer == self:
- return 2
- elif viewer.is_blocked_by(self):
- return -1
- elif viewer.is_following(self):
- return 1
- else:
- return 0
-
@property
def unread_announcements(self):
unread_announcements = Announcement.objects.filter(
@@ -503,59 +273,71 @@ class User(AbstractUser):
).order_by("-pk")
return unread_announcements
+ @property
+ def activity_manager(self):
+ if not self.identity:
+ raise ValueError("User has no identity")
+ return self.identity.activity_manager
+
+ @property
+ def shelf_manager(self):
+ if not self.identity:
+ raise ValueError("User has no identity")
+ return self.identity.shelf_manager
+
+ @property
+ def tag_manager(self):
+ if not self.identity:
+ raise ValueError("User has no identity")
+ return self.identity.tag_manager
+
@classmethod
def get(cls, name, case_sensitive=False):
if isinstance(name, str):
- sp = name.split("@")
if name.startswith("~"):
try:
query_kwargs = {"pk": int(name[1:])}
except:
return None
- elif len(sp) == 1:
+ elif name.startswith("@"):
query_kwargs = {
- "username__iexact" if case_sensitive else "username": name
- }
- elif len(sp) == 2:
- query_kwargs = {
- "mastodon_username__iexact"
- if case_sensitive
- else "mastodon_username": sp[0],
- "mastodon_site__iexact"
- if case_sensitive
- else "mastodon_site": sp[1],
+ "username__iexact" if case_sensitive else "username": name[1:]
}
else:
- return None
+ sp = name.split("@")
+ if len(sp) == 2:
+ query_kwargs = {
+ "mastodon_username__iexact"
+ if case_sensitive
+ else "mastodon_username": sp[0],
+ "mastodon_site__iexact"
+ if case_sensitive
+ else "mastodon_site": sp[1],
+ }
+ else:
+ return None
elif isinstance(name, int):
query_kwargs = {"pk": name}
else:
return None
return User.objects.filter(**query_kwargs).first()
- @property
- def tags(self):
- from journal.models import TagManager
+ @classmethod
+ def register(cls, **param):
+ from .preference import Preference
- return TagManager.all_tags_for_user(self)
+ new_user = cls(**param)
+ new_user.save()
+ Preference.objects.create(user=new_user)
+ if new_user.username: # TODO make username required in registration
+ new_user.initialize()
+ return new_user
- @cached_property
- def tag_manager(self):
- from journal.models import TagManager
+ def initialize(self):
+ Takahe.init_identity_for_local_user(self)
- return TagManager.get_manager_for_user(self)
- @cached_property
- def shelf_manager(self):
- from journal.models import ShelfManager
-
- return ShelfManager.get_manager_for_user(self)
-
- @cached_property
- def activity_manager(self):
- from social.models import ActivityManager
-
- return ActivityManager.get_manager_for_user(self)
+# TODO the following models should be deprecated soon
class Follow(models.Model):
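
As a reading aid for the refactored lookup and registration path above, here is a minimal usage sketch. It is not part of the patch; the `users.models` import path and the `mastodon.example` instance are assumptions, and `register()` can only complete `initialize()` when a Takahe backend is configured.

    from users.models import User  # assumed import path

    # register() creates the User row, its Preference, and, because a username
    # is supplied, calls initialize() to create the matching Takahe identity.
    user = User.register(
        mastodon_site="mastodon.example",   # hypothetical instance
        mastodon_username="alice",
        username="alice",
    )

    # The lookup forms accepted by the refactored User.get():
    assert User.get("~%d" % user.pk) == user            # "~<pk>"
    assert User.get("@alice") == user                   # "@<local username>"
    assert User.get("alice@mastodon.example") == user   # "<mastodon_username>@<mastodon_site>"
    assert User.get(user.pk) == user                    # a plain int pk also works
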
diff --git a/users/tasks.py b/users/tasks.py
index 06d684a1..c6cb4c8a 100644
--- a/users/tasks.py
+++ b/users/tasks.py
@@ -42,6 +42,4 @@ def refresh_all_mastodon_data_task(ttl_hours):
else:
logger.warning(f"Missing token for {user}")
logger.info(f"{count} users updated")
- c = User.merge_rejected_by()
- logger.info(f"{c} users's rejecting list updated")
logger.info(f"Mastodon data refresh done")
diff --git a/users/tests.py b/users/tests.py
index 3e801a29..e5513aaf 100644
--- a/users/tests.py
+++ b/users/tests.py
@@ -1,168 +1,70 @@
from django.test import TestCase
+from takahe.utils import Takahe
+
from .models import *
-from .models.user import Block, Follow, Mute
class UserTest(TestCase):
- def setUp(self):
- self.alice = User.register(mastodon_site="MySpace", mastodon_username="Alice")
- self.bob = User.register(mastodon_site="KKCity", mastodon_username="Bob")
+ databases = "__all__"
- def test_local_follow(self):
- self.assertTrue(self.alice.follow(self.bob))
- self.assertTrue(
- Follow.objects.filter(owner=self.alice, target=self.bob).exists()
- )
- self.assertEqual(self.alice.merged_following_ids(), [self.bob.pk])
- self.assertEqual(self.alice.following, [self.bob.pk])
+ def setUp(self):
+ self.alice = User.register(
+ mastodon_site="MySpace", mastodon_username="Alice", username="alice"
+ ).identity
+ self.bob = User.register(
+ mastodon_site="KKCity", mastodon_username="Bob", username="bob"
+ ).identity
+
+ def test_follow(self):
+ self.alice.follow(self.bob)
+ Takahe._force_state_cycle()
self.assertTrue(self.alice.is_following(self.bob))
self.assertTrue(self.bob.is_followed_by(self.alice))
-
- self.assertFalse(self.alice.follow(self.bob))
- self.assertEqual(
- Follow.objects.filter(owner=self.alice, target=self.bob).count(), 1
- )
self.assertEqual(self.alice.following, [self.bob.pk])
- self.assertTrue(self.alice.unfollow(self.bob))
- self.assertFalse(
- Follow.objects.filter(owner=self.alice, target=self.bob).exists()
- )
+ self.alice.unfollow(self.bob)
+ Takahe._force_state_cycle()
self.assertFalse(self.alice.is_following(self.bob))
self.assertFalse(self.bob.is_followed_by(self.alice))
self.assertEqual(self.alice.following, [])
- def test_locked(self):
- self.bob.mastodon_locked = True
- self.bob.save()
- self.assertFalse(self.alice.follow(self.bob))
- self.bob.mastodon_locked = False
- self.bob.save()
- self.assertTrue(self.alice.follow(self.bob))
- self.assertTrue(self.alice.is_following(self.bob))
- self.bob.mastodon_locked = True
- self.bob.save()
- self.assertFalse(self.alice.is_following(self.bob))
-
- def test_external_follow(self):
- self.alice.mastodon_following.append(self.bob.mastodon_acct)
- self.alice.merge_relationships()
- self.alice.save()
- self.assertTrue(self.alice.is_following(self.bob))
- self.assertEqual(self.alice.following, [self.bob.pk])
- self.assertFalse(self.alice.follow(self.bob))
-
- self.alice.mastodon_following.remove(self.bob.mastodon_acct)
- self.alice.merge_relationships()
- self.alice.save()
- self.assertFalse(self.alice.is_following(self.bob))
- self.assertEqual(self.alice.following, [])
- self.assertTrue(self.alice.follow(self.bob))
- self.assertTrue(self.alice.is_following(self.bob))
-
- def test_local_mute(self):
+ def test_mute(self):
self.alice.mute(self.bob)
- self.assertTrue(Mute.objects.filter(owner=self.alice, target=self.bob).exists())
- self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk])
- self.assertEqual(self.alice.ignoring, [self.bob.pk])
+ Takahe._force_state_cycle()
self.assertTrue(self.alice.is_muting(self.bob))
-
- self.alice.mute(self.bob)
- self.assertEqual(
- Mute.objects.filter(owner=self.alice, target=self.bob).count(), 1
- )
self.assertEqual(self.alice.ignoring, [self.bob.pk])
+ self.assertEqual(self.alice.rejecting, [])
- self.alice.unmute(self.bob)
- self.assertFalse(
- Mute.objects.filter(owner=self.alice, target=self.bob).exists()
- )
- self.assertFalse(self.alice.is_muting(self.bob))
- self.assertEqual(self.alice.ignoring, [])
- self.assertEqual(self.alice.merged_muting_ids(), [])
-
- def test_external_mute(self):
- self.alice.mastodon_mutes.append(self.bob.mastodon_acct)
- self.alice.save()
- self.assertTrue(self.alice.is_muting(self.bob))
- self.assertEqual(self.alice.merged_muting_ids(), [self.bob.pk])
-
- self.alice.mastodon_mutes.remove(self.bob.mastodon_acct)
- self.assertFalse(self.alice.is_muting(self.bob))
- self.assertEqual(self.alice.merged_muting_ids(), [])
-
- def test_local_block_follow(self):
+ def test_block(self):
self.alice.block(self.bob)
- self.assertEqual(self.bob.follow(self.alice), False)
- self.alice.unblock(self.bob)
- self.assertEqual(self.bob.follow(self.alice), True)
- self.assertEqual(self.bob.following, [self.alice.pk])
- self.alice.block(self.bob)
- self.assertEqual(self.bob.following, [])
-
- def test_local_block(self):
- self.alice.block(self.bob)
- self.assertTrue(
- Block.objects.filter(owner=self.alice, target=self.bob).exists()
- )
- self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
- self.assertEqual(self.alice.ignoring, [self.bob.pk])
+ Takahe._force_state_cycle()
self.assertTrue(self.alice.is_blocking(self.bob))
self.assertTrue(self.bob.is_blocked_by(self.alice))
-
- self.alice.block(self.bob)
- self.assertEqual(
- Block.objects.filter(owner=self.alice, target=self.bob).count(), 1
- )
+ self.assertEqual(self.alice.rejecting, [self.bob.pk])
self.assertEqual(self.alice.ignoring, [self.bob.pk])
self.alice.unblock(self.bob)
- self.assertFalse(
- Block.objects.filter(owner=self.alice, target=self.bob).exists()
- )
+ Takahe._force_state_cycle()
self.assertFalse(self.alice.is_blocking(self.bob))
self.assertFalse(self.bob.is_blocked_by(self.alice))
+ self.assertEqual(self.alice.rejecting, [])
self.assertEqual(self.alice.ignoring, [])
- self.assertEqual(self.alice.merged_rejecting_ids(), [])
- def test_external_block(self):
- self.bob.follow(self.alice)
- self.assertEqual(self.bob.following, [self.alice.pk])
- self.alice.mastodon_blocks.append(self.bob.mastodon_acct)
- self.alice.save()
- self.assertTrue(self.alice.is_blocking(self.bob))
- self.assertTrue(self.bob.is_blocked_by(self.alice))
- self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
- self.alice.merge_relationships()
- self.assertEqual(self.alice.rejecting, [self.bob.pk])
- self.alice.save()
- self.assertEqual(self.bob.following, [self.alice.pk])
- self.assertEqual(self.bob.rejecting, [])
- self.assertEqual(User.merge_rejected_by(), 2)
- self.bob.refresh_from_db()
- self.assertEqual(self.bob.rejecting, [self.alice.pk])
- self.assertEqual(self.bob.following, [])
+ # def test_external_domain_block(self):
+ # self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site)
+ # self.alice.save()
+ # self.assertTrue(self.alice.is_blocking(self.bob))
+ # self.assertTrue(self.bob.is_blocked_by(self.alice))
+ # self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
+ # self.alice.merge_relationships()
+ # self.assertEqual(self.alice.rejecting, [self.bob.pk])
+ # self.alice.save()
+ # self.assertEqual(User.merge_rejected_by(), 1)
+ # self.bob.refresh_from_db()
+ # self.assertEqual(self.bob.rejecting, [self.alice.pk])
- self.alice.mastodon_blocks.remove(self.bob.mastodon_acct)
- self.assertFalse(self.alice.is_blocking(self.bob))
- self.assertFalse(self.bob.is_blocked_by(self.alice))
- self.assertEqual(self.alice.merged_rejecting_ids(), [])
-
- def test_external_domain_block(self):
- self.alice.mastodon_domain_blocks.append(self.bob.mastodon_site)
- self.alice.save()
- self.assertTrue(self.alice.is_blocking(self.bob))
- self.assertTrue(self.bob.is_blocked_by(self.alice))
- self.assertEqual(self.alice.merged_rejecting_ids(), [self.bob.pk])
- self.alice.merge_relationships()
- self.assertEqual(self.alice.rejecting, [self.bob.pk])
- self.alice.save()
- self.assertEqual(User.merge_rejected_by(), 1)
- self.bob.refresh_from_db()
- self.assertEqual(self.bob.rejecting, [self.alice.pk])
-
- self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site)
- self.assertFalse(self.alice.is_blocking(self.bob))
- self.assertFalse(self.bob.is_blocked_by(self.alice))
- self.assertEqual(self.alice.merged_rejecting_ids(), [])
+ # self.alice.mastodon_domain_blocks.remove(self.bob.mastodon_site)
+ # self.assertFalse(self.alice.is_blocking(self.bob))
+ # self.assertFalse(self.bob.is_blocked_by(self.alice))
+ # self.assertEqual(self.alice.merged_rejecting_ids(), [])
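
The rewritten tests above all follow one shape: register two users, take their `.identity`, perform the relationship action, run `Takahe._force_state_cycle()`, then assert on the identity's relationship lists. A hypothetical extra case in that style, not part of the patch (assuming the same test settings, the `users.models` import path, and that `_force_state_cycle()` synchronously flushes pending state transitions, as the tests above rely on):

    from django.test import TestCase

    from takahe.utils import Takahe
    from users.models import User  # assumed import path

    class BlockPatternTest(TestCase):
        databases = "__all__"

        def setUp(self):
            self.carol = User.register(
                mastodon_site="MySpace", mastodon_username="Carol", username="carol"
            ).identity
            self.dave = User.register(
                mastodon_site="KKCity", mastodon_username="Dave", username="dave"
            ).identity

        def test_block_rejects_and_ignores(self):
            self.carol.block(self.dave)
            Takahe._force_state_cycle()  # flush pending Takahe state transitions
            self.assertIn(self.dave.pk, self.carol.rejecting)
            self.assertIn(self.dave.pk, self.carol.ignoring)
            self.assertTrue(self.dave.is_blocked_by(self.carol))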