2023-12-22 23:59:48 -05:00
|
|
|
import io
|
2024-06-03 07:27:44 -04:00
|
|
|
from datetime import timedelta
|
2023-07-20 21:59:49 -04:00
|
|
|
from typing import TYPE_CHECKING
|
|
|
|
|
2023-12-28 23:04:28 -05:00
|
|
|
import blurhash
|
2023-07-20 21:59:49 -04:00
|
|
|
from django.conf import settings
|
2023-08-22 21:55:02 +00:00
|
|
|
from django.core.cache import cache
|
2023-12-22 23:59:48 -05:00
|
|
|
from django.core.files.images import ImageFile
|
2024-06-10 17:28:20 -04:00
|
|
|
from django.core.signing import b62_encode
|
2024-06-03 07:27:44 -04:00
|
|
|
from django.db.models import Count
|
|
|
|
from django.utils import timezone
|
2023-12-22 23:59:48 -05:00
|
|
|
from PIL import Image
|
2025-01-11 17:20:02 -05:00
|
|
|
from loguru import logger
|
2023-07-20 21:59:49 -04:00
|
|
|
from .models import *
|
|
|
|
|
|
|
|
if TYPE_CHECKING:
|
|
|
|
from users.models import User as NeoUser
|
|
|
|
|
|
|
|
|
|
|
|
class Takahe:
|
|
|
|
Visibilities = Post.Visibilities
|
|
|
|
|
|
|
|
@staticmethod
def get_domain():
    """Return the takahe Domain row for this site, creating it if absent."""
    domain = settings.SITE_INFO["site_domain"]
    found = Domain.objects.filter(domain=domain).first()
    if found:
        return found
    logger.info(f"Creating takahe domain {domain}")
    return Domain.objects.create(
        domain=domain,
        local=True,
        service_domain=None,
        notes="NeoDB",
        nodeinfo=None,
    )
|
|
|
|
|
|
|
|
@staticmethod
def get_node_name_for_domain(d: str):
    """Return the advertised node name for domain ``d``, or None if unknown."""
    found = Domain.objects.filter(domain=d).first()
    if not found or not found.nodeinfo:
        return None
    return found.nodeinfo.get("metadata", {}).get("nodeName")
|
|
|
|
|
2023-12-03 15:30:21 -05:00
|
|
|
@staticmethod
def sync_password(u: "NeoUser"):
    """Copy the NeoDB user's password hash onto the matching takahe user.

    Raises ValueError when no takahe user shares the NeoDB user's pk.
    """
    user = User.objects.filter(pk=u.pk).first()
    if not user:
        raise ValueError(f"Cannot find takahe user {u}")
    if user.password != u.password:
        logger.info(f"Updating takahe user {u} password")
        user.password = u.password
        user.save()
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def init_identity_for_local_user(u: "NeoUser"):
    """
    When a new local NeoDB user is created,
    create a takahe user with the NeoDB user pk,
    create a takahe identity,
    then create a NeoDB APIdentity with the takahe identity pk.
    """
    # local import avoids a circular dependency with users.models
    from users.models import APIdentity

    logger.info(f"User {u} initialize identity")
    if not u.username:
        logger.warning(f"User {u} has no username")
        return None
    # all writes happen on the takahe database connection atomically
    with transaction.atomic(using="takahe"):
        user = User.objects.filter(pk=u.pk).first()
        # takahe "email" field is used to store the @username handle
        handler = "@" + u.username
        if not user:
            logger.info(f"Creating takahe user {u}")
            # takahe user pk deliberately mirrors the NeoDB user pk
            user = User.objects.create(pk=u.pk, email=handler, password=u.password)
        else:
            if user.email != handler:
                logger.warning(f"Updating takahe user {u} email to {handler}")
                user.email = handler
                user.save()
        domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"])
        identity = Identity.objects.filter(username=u.username, local=True).first()
        if not identity:
            logger.info(f"Creating takahe identity {u}@{domain}")
            identity = Identity.objects.create(
                actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/",
                profile_uri=u.absolute_url,
                username=u.username,
                domain=domain,
                name=u.username,
                local=True,
                discoverable=True,
            )
        # generate signing keys only when the identity has neither key yet
        if not identity.private_key and not identity.public_key:
            identity.generate_keypair()
            identity.ensure_uris()
        if not user.identities.filter(pk=identity.pk).exists():
            user.identities.add(identity)
        # APIdentity pk deliberately mirrors the takahe identity pk
        apidentity = APIdentity.objects.filter(pk=identity.pk).first()
        if not apidentity:
            logger.info(f"Creating APIdentity for {identity}")
            apidentity = APIdentity.objects.create(
                user=u,
                id=identity.pk,
                local=True,
                username=u.username,
                domain_name=domain.domain,
                deleted=identity.deleted,
            )
        elif apidentity.username != identity.username:
            logger.warning(
                f"Updating APIdentity {apidentity} username to {identity.username}"
            )
            apidentity.username = identity.username
            apidentity.save()
        if u.identity != apidentity:
            logger.warning(f"Linking user {u} identity to {apidentity}")
            u.identity = apidentity
            u.save(update_fields=["identity"])
        return apidentity
|
2023-07-20 21:59:49 -04:00
|
|
|
|
2023-08-13 18:00:10 -04:00
|
|
|
@staticmethod
def get_identity_by_handler(username: str, domain: str) -> Identity | None:
    """Case-insensitive lookup of an Identity by username and domain."""
    matches = Identity.objects.filter(
        username__iexact=username, domain__domain__iexact=domain
    )
    return matches.first()
|
|
|
|
|
2023-12-10 19:38:00 -05:00
|
|
|
@staticmethod
def delete_identity(identity_pk: int):
    """Mark an identity deleted and schedule stator to process the deletion."""
    identity = Identity.objects.filter(pk=identity_pk).first()
    if identity is None:
        logger.warning(f"Cannot find identity {identity_pk}")
        return
    logger.warning(f"Deleting identity {identity}")
    identity.state = "deleted"
    identity.deleted = timezone.now()
    identity.state_next_attempt = timezone.now()
    identity.save()
|
|
|
|
|
2023-09-03 21:25:30 +00:00
|
|
|
@staticmethod
def create_internal_message(message: dict):
    """Queue an internal message for takahe's async processing."""
    InboxMessage.create_internal(message)
|
|
|
|
|
2023-08-13 18:00:10 -04:00
|
|
|
@staticmethod
def fetch_remote_identity(handler: str) -> None:
    """Queue an async FetchIdentity job for the given @user@domain handle.

    Fire-and-forget: the actual fetch happens in takahe's background
    workers, so nothing is returned. (The previous ``int | None``
    annotation was wrong -- the function never returned a value.)
    """
    InboxMessage.create_internal({"type": "FetchIdentity", "handle": handler})
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def get_identity(pk: int):
    """Fetch an Identity by pk; raises Identity.DoesNotExist when missing."""
    return Identity.objects.get(pk=pk)
|
|
|
|
|
|
|
|
@staticmethod
def get_identity_by_local_user(u: "NeoUser"):
    """Return the local takahe Identity linked to ``u``, or None."""
    if not (u and u.is_authenticated and u.identity):
        return None
    return Identity.objects.filter(pk=u.identity.pk, local=True).first()
|
|
|
|
|
|
|
|
@staticmethod
def get_or_create_remote_apidentity(identity: Identity):
    """Return the NeoDB APIdentity mirroring a takahe Identity, creating one
    for remote identities when missing.

    Raises ValueError when a *local* identity has no APIdentity (those are
    made in init_identity_for_local_user) or when a remote identity lacks
    a domain.
    """
    # local import avoids a circular dependency with users.models
    from users.models import APIdentity

    apid = APIdentity.objects.filter(pk=identity.pk).first()
    if not apid:
        if identity.local:
            raise ValueError(f"local takahe identity {identity} missing APIdentity")
        if not identity.domain_id:
            raise ValueError(f"remote takahe identity {identity} missing domain")
        # get_or_create guards against a concurrent creation race
        apid = APIdentity.objects.get_or_create(
            id=identity.pk,
            defaults={
                "user": None,
                "local": False,
                "username": identity.username,
                "domain_name": identity.domain_id,
                "deleted": identity.deleted,
                "anonymous_viewable": False,
            },
        )[0]
    return apid
|
|
|
|
|
|
|
|
@staticmethod
def get_local_user_by_identity(identity: Identity):
    """Return the NeoDB user owning a local identity; None for remote ones."""
    from users.models import User as NeoUser

    if not identity.local:
        return None
    return NeoUser.objects.get(identity_id=identity.pk)
|
|
|
|
|
2023-12-25 16:47:49 -05:00
|
|
|
@staticmethod
def get_is_following(identity_pk: int, target_pk: int):
    """True when an accepted follow exists from identity to target."""
    edge = Follow.objects.filter(
        source_id=identity_pk, target_id=target_pk, state="accepted"
    )
    return edge.exists()
|
|
|
|
|
|
|
|
@staticmethod
def get_is_follow_requesting(identity_pk: int, target_pk: int):
    """True when a follow request from identity to target is still pending."""
    pending = ["unrequested", "pending_approval"]
    return Follow.objects.filter(
        source_id=identity_pk, target_id=target_pk, state__in=pending
    ).exists()
|
|
|
|
|
|
|
|
@staticmethod
def get_is_muting(identity_pk: int, target_pk: int):
    """True when identity has an active mute on target."""
    active = ["new", "sent", "awaiting_expiry"]
    return Block.objects.filter(
        source_id=identity_pk,
        target_id=target_pk,
        state__in=active,
        mute=True,
    ).exists()
|
|
|
|
|
|
|
|
@staticmethod
def get_is_blocking(identity_pk: int, target_pk: int):
    """True when identity has an active (non-mute) block on target."""
    active = ["new", "sent", "awaiting_expiry"]
    return Block.objects.filter(
        source_id=identity_pk,
        target_id=target_pk,
        state__in=active,
        mute=False,
    ).exists()
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def get_following_ids(identity_pk: int):
    """Pks of identities that ``identity_pk`` follows (accepted only)."""
    accepted = Follow.objects.filter(source_id=identity_pk, state="accepted")
    return list(accepted.values_list("target", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def get_follower_ids(identity_pk: int):
    """Pks of identities following ``identity_pk`` (accepted only)."""
    accepted = Follow.objects.filter(target_id=identity_pk, state="accepted")
    return list(accepted.values_list("source", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def get_following_request_ids(identity_pk: int):
    """Pks of identities with an outstanding follow request from this identity."""
    pending = Follow.objects.filter(
        source_id=identity_pk, state__in=["unrequested", "pending_approval"]
    )
    return list(pending.values_list("target", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def get_requested_follower_ids(identity_pk: int):
    """Pks of identities whose follow requests to this identity await approval."""
    awaiting = Follow.objects.filter(
        target_id=identity_pk, state="pending_approval"
    )
    return list(awaiting.values_list("source", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def update_follow_state(
    source_pk: int, target_pk: int, from_states: list[str], to_state: str
):
    """Transition an existing Follow edge to ``to_state``.

    The transition only happens when the edge exists, is currently in one
    of ``from_states`` (an empty list means "any state"), and is not
    already in ``to_state``. Returns the Follow (changed or not) or None
    when no such edge exists.
    """
    follow = Follow.objects.filter(source_id=source_pk, target_id=target_pk).first()
    if follow is None:
        return None
    state_allowed = not from_states or follow.state in from_states
    if state_allowed and follow.state != to_state:
        follow.state = to_state
        follow.save()
    return follow
|
|
|
|
|
|
|
|
@staticmethod
def follow(source_pk: int, target_pk: int, force_accept: bool = False):
    """Create or revive a follow edge from source to target.

    ``force_accept`` puts the edge directly into "accepted" (skipping the
    approval round-trip); otherwise it starts as "unrequested" and the
    background state machine drives the handshake. The Follow is saved as
    a side effect; nothing is returned.
    """
    try:
        follow = Follow.objects.get(source_id=source_pk, target_id=target_pk)
        if follow.state != "accepted":
            follow.state = "accepted" if force_accept else "unrequested"
            follow.save()
    except Follow.DoesNotExist:
        source = Identity.objects.get(pk=source_pk)
        # created with an empty uri first, because the real uri embeds the
        # new row's pk which is only known after create()
        follow = Follow.objects.create(
            source_id=source_pk,
            target_id=target_pk,
            boosts=True,
            uri="",
            state="accepted" if force_accept else "unrequested",
        )
        follow.uri = source.actor_uri + f"follow/{follow.pk}/"
        follow.save()
|
|
|
|
|
|
|
|
@staticmethod
def unfollow(source_pk: int, target_pk: int):
    """Mark any follow edge from source to target as undone.

    (A stale commented-out ClearTimeline snippet referencing undefined
    names was removed.)
    """
    Takahe.update_follow_state(source_pk, target_pk, [], "undone")
|
|
|
|
|
|
|
|
@staticmethod
def accept_follow_request(source_pk: int, target_pk: int):
    """Begin accepting a pending follow request (stator completes it)."""
    Takahe.update_follow_state(source_pk, target_pk, [], "accepting")
|
|
|
|
|
|
|
|
@staticmethod
def reject_follow_request(source_pk: int, target_pk: int):
    """Begin rejecting a pending follow request (stator completes it)."""
    Takahe.update_follow_state(source_pk, target_pk, [], "rejecting")
|
|
|
|
|
|
|
|
@staticmethod
def get_muting_ids(identity_pk: int) -> list[int]:
    """Pks of identities this identity actively mutes."""
    muted = Block.objects.filter(
        source_id=identity_pk,
        mute=True,
        state__in=["new", "sent", "awaiting_expiry"],
    )
    return list(muted.values_list("target", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def get_blocking_ids(identity_pk: int) -> list[int]:
    """Pks of identities this identity actively blocks."""
    blocked = Block.objects.filter(
        source_id=identity_pk,
        mute=False,
        state__in=["new", "sent", "awaiting_expiry"],
    )
    return list(blocked.values_list("target", flat=True))
|
|
|
|
|
|
|
|
@staticmethod
def get_rejecting_ids(identity_pk: int) -> list[int]:
    """Pks blocked by, or blocking, this identity (both directions, deduped)."""
    active = ["new", "sent", "awaiting_expiry"]
    outgoing = Block.objects.filter(
        source_id=identity_pk,
        mute=False,
        state__in=active,
    ).values_list("target", flat=True)
    incoming = Block.objects.filter(
        target_id=identity_pk,
        mute=False,
        state__in=active,
    ).values_list("source", flat=True)
    return list(set(list(outgoing) + list(incoming)))
|
|
|
|
|
|
|
|
@staticmethod
def block_or_mute(source_pk: int, target_pk: int, is_mute: bool):
    """Create (or re-activate) a Block edge; ``is_mute`` selects mute vs block.

    A full block also severs follow relationships in both directions.
    Raises ValueError when the source identity is not local.
    Returns the Block row.
    """
    source = Identity.objects.get(pk=source_pk)
    if not source.local:
        raise ValueError(f"Cannot block/mute from remote identity {source}")
    # NOTE(review): this opens the transaction on the default database,
    # while init_identity_for_local_user uses the "takahe" alias -- confirm
    # the DB router sends these models to the same connection.
    with transaction.atomic():
        block, _ = Block.objects.update_or_create(
            defaults={"state": "new"},
            source_id=source_pk,
            target_id=target_pk,
            mute=is_mute,
        )
        # re-activate an old edge and (re)build its uri from the block pk
        if block.state != "new" or not block.uri:
            block.state = "new"
            block.uri = source.actor_uri + f"block/{block.pk}/"
            block.save()
        if not is_mute:
            # blocking (not muting) tears down follows both ways
            Takahe.unfollow(source_pk, target_pk)
            Takahe.reject_follow_request(target_pk, source_pk)
    return block
|
|
|
|
|
|
|
|
@staticmethod
def undo_block_or_mute(source_pk: int, target_pk: int, is_mute: bool):
    """Mark matching block/mute edges as undone (stator fans it out)."""
    edges = Block.objects.filter(
        source_id=source_pk, target_id=target_pk, mute=is_mute
    )
    edges.update(state="undone")
|
|
|
|
|
|
|
|
@staticmethod
def block(source_pk: int, target_pk: int):
    """Block target on behalf of source."""
    return Takahe.block_or_mute(source_pk, target_pk, False)
|
|
|
|
|
|
|
|
@staticmethod
def unblock(source_pk: int, target_pk: int):
    """Remove a block of target by source."""
    return Takahe.undo_block_or_mute(source_pk, target_pk, False)
|
|
|
|
|
|
|
|
@staticmethod
def mute(source_pk: int, target_pk: int):
    """Mute target on behalf of source."""
    return Takahe.block_or_mute(source_pk, target_pk, True)
|
|
|
|
|
|
|
|
@staticmethod
def unmute(source_pk: int, target_pk: int):
    """Remove a mute of target by source."""
    return Takahe.undo_block_or_mute(source_pk, target_pk, True)
|
|
|
|
|
|
|
|
@staticmethod
def _force_state_cycle():  # for unit testing only
    """Skip background state processing by forcing edges into settled states.

    Pending-removal/undone follows are purged, the remainder accepted; new
    blocks are marked sent and everything else deleted.
    """
    Follow.objects.filter(
        state__in=["rejecting", "undone", "pending_removal"]
    ).delete()
    Follow.objects.all().update(state="accepted")
    Block.objects.filter(state="new").update(state="sent")
    Block.objects.exclude(state="sent").delete()
|
|
|
|
|
2023-12-22 23:59:48 -05:00
|
|
|
@staticmethod
def upload_image(
    author_pk: int,
    filename: str,
    content: bytes,
    mimetype: str,
    description: str = "",
) -> PostAttachment:
    """Store an uploaded image as a takahe PostAttachment.

    Keeps the original bytes as the main file, derives a <=400x225 webp
    thumbnail and a 4x4 blurhash placeholder from it.

    Raises ValueError for images larger than 5 MB.
    """
    if len(content) > 1024 * 1024 * 5:
        raise ValueError("Image too large")
    main_file = ImageFile(io.BytesIO(content), name=filename)
    resized_image = Image.open(io.BytesIO(content))
    resized_image.thumbnail((400, 225), resample=Image.Resampling.BILINEAR)
    new_image_bytes = io.BytesIO()
    # save_all keeps every frame when the source is animated (e.g. GIF)
    resized_image.save(new_image_bytes, format="webp", save_all=True)
    thumbnail_file = ImageFile(new_image_bytes, name="image.webp")
    # renamed from `hash` to avoid shadowing the builtin
    blurhash_str = blurhash.encode(resized_image, 4, 4)
    # objects.create() already persists the row; the extra save() the old
    # code performed afterwards was redundant and has been dropped.
    return PostAttachment.objects.create(
        mimetype=mimetype,
        width=main_file.width,
        height=main_file.height,
        name=description or None,
        state="fetched",
        author_id=author_pk,
        file=main_file,
        thumbnail=thumbnail_file,
        blurhash=blurhash_str,
    )
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def post(
    author_pk: int,
    content: str,
    visibility: Visibilities,
    prepend_content: str = "",
    append_content: str = "",
    summary: str | None = None,
    sensitive: bool = False,
    data: dict | None = None,
    post_pk: int | None = None,
    post_time: datetime.datetime | None = None,
    edit_time: datetime.datetime | None = None,
    reply_to_pk: int | None = None,
    attachments: list | None = None,
) -> Post | None:
    """Create a local post, or edit the author's existing post ``post_pk``.

    Raises ValueError when ``post_pk`` or ``reply_to_pk`` is given but the
    referenced post cannot be found. Returns the created/edited Post.
    """
    identity = Identity.objects.get(pk=author_pk)
    # when editing, the post must belong to the author
    post = (
        Post.objects.filter(author=identity, pk=post_pk).first()
        if post_pk
        else None
    )
    if post_pk and not post:
        raise ValueError(f"Cannot find post to edit: {post_pk}")
    reply_to_post = (
        Post.objects.filter(pk=reply_to_pk).first() if reply_to_pk else None
    )
    if reply_to_pk and not reply_to_post:
        raise ValueError(f"Cannot find post to reply: {reply_to_pk}")
    if post:
        post.edit_local(
            content,
            prepend_content,
            append_content,
            summary,
            sensitive,
            visibility=visibility,
            type_data=data,
            published=post_time,
            edited=edit_time,
            attachments=attachments,
        )
    else:
        post = Post.create_local(
            identity,
            content,
            prepend_content,
            append_content,
            summary,
            sensitive,
            visibility=visibility,
            type_data=data,
            published=post_time,
            edited=edit_time,
            reply_to=reply_to_post,
            attachments=attachments,
        )
        # make the new post appear in the author's own timeline;
        # get_or_create keeps this idempotent
        TimelineEvent.objects.get_or_create(
            identity=identity,
            type="post",
            subject_post=post,
            subject_identity=identity,
            defaults={"published": post_time or timezone.now()},
        )
    return post
|
2023-07-20 21:59:49 -04:00
|
|
|
|
2023-08-15 15:46:11 -04:00
|
|
|
@staticmethod
def get_post(post_pk: int) -> Post | None:
    """Fetch a post by pk, or None when absent."""
    return Post.objects.filter(pk=post_pk).first()
|
|
|
|
|
2023-08-26 01:27:18 +00:00
|
|
|
@staticmethod
def get_posts(post_pks: list[int]):
    """Non-deleted posts for the given pks, with author/attachments prefetched."""
    qs = Post.objects.filter(pk__in=post_pks)
    qs = qs.exclude(state__in=["deleted", "deleted_fanned_out"])
    return qs.prefetch_related("author", "attachments")
|
2023-08-26 01:27:18 +00:00
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def get_post_url(post_pk: int) -> str | None:
    """Return the ActivityPub object uri of a post, or None."""
    if not post_pk:
        return None
    post = Post.objects.filter(pk=post_pk).first()
    return post.object_uri if post else None
|
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
@staticmethod
def update_post(post_pk, **kwargs):
    """Bulk-update fields on a post (bypasses model save hooks)."""
    Post.objects.filter(pk=post_pk).update(**kwargs)
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def delete_posts(post_pks):
    """Soft-delete posts and refresh reply counts on their parents.

    Parents are resolved via object_uri *before* the update; deleted
    posts are put into state "deleted" for stator to fan out, and any
    interactions on them are undone.
    """
    # collect distinct parent posts of the soon-to-be-deleted replies first
    parent_posts = list(
        Post.objects.filter(
            object_uri__in=Post.objects.filter(
                pk__in=post_pks, in_reply_to__isnull=False
            )
            .distinct("in_reply_to")
            .values_list("in_reply_to", flat=True)
        )
    )
    Post.objects.filter(pk__in=post_pks).update(state="deleted")
    # reply counts on parents changed; recompute their cached stats
    for post in parent_posts:
        post.calculate_stats()
    # TimelineEvent.objects.filter(subject_post__in=[post.pk]).delete()
    PostInteraction.objects.filter(post__in=post_pks).update(state="undone")
|
|
|
|
|
|
|
|
@staticmethod
|
2023-12-10 19:14:32 -05:00
|
|
|
def visibility_n2t(visibility: int, post_public_mode: int) -> Visibilities:
|
2023-11-20 12:13:43 -05:00
|
|
|
if visibility == 1:
|
|
|
|
return Takahe.Visibilities.followers
|
|
|
|
elif visibility == 2:
|
|
|
|
return Takahe.Visibilities.mentioned
|
2023-12-10 19:14:32 -05:00
|
|
|
elif post_public_mode == 4:
|
|
|
|
return Takahe.Visibilities.local_only
|
|
|
|
elif post_public_mode == 1:
|
2023-11-20 12:13:43 -05:00
|
|
|
return Takahe.Visibilities.unlisted
|
2023-12-10 19:14:32 -05:00
|
|
|
else:
|
|
|
|
return Takahe.Visibilities.public
|
2023-07-20 21:59:49 -04:00
|
|
|
|
2024-06-13 20:44:15 -04:00
|
|
|
@staticmethod
def visibility_t2n(visibility: int) -> int:
    """Map takahe visibility back to NeoDB visibility (2->1, 3->2, else 0)."""
    return {2: 1, 3: 2}.get(visibility, 0)
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def interact_post(post_pk: int, identity_pk: int, type: str, flip=False):
    """Record an interaction (e.g. like/boost) by an identity on a post.

    With ``flip=True`` an already-existing interaction is undone instead
    (toggle semantics). Returns the PostInteraction, or None when the
    post or identity cannot be found.
    """
    post = Post.objects.filter(pk=post_pk).first()
    if not post:
        logger.warning(f"Cannot find post {post_pk}")
        return
    identity = Identity.objects.filter(pk=identity_pk).first()
    if not identity:
        logger.warning(f"Cannot find identity {identity_pk}")
        return
    interaction, created = PostInteraction.objects.get_or_create(
        type=type,
        identity_id=identity_pk,
        post=post,
    )
    if flip and not created:
        # toggle off an existing interaction
        Takahe.update_state(interaction, "undone")
    elif interaction.state not in ["new", "fanned_out"]:
        # revive a previously-undone interaction
        Takahe.update_state(interaction, "new")
    post.calculate_stats()
    return interaction
|
|
|
|
|
|
|
|
@staticmethod
def uninteract_post(post_pk: int, identity_pk: int, type: str):
    """Undo all interactions of ``type`` by an identity on a post,
    then refresh the post's cached stats."""
    post = Post.objects.filter(pk=post_pk).first()
    if not post:
        logger.warning(f"Cannot find post {post_pk}")
        return
    for interaction in PostInteraction.objects.filter(
        type=type,
        identity_id=identity_pk,
        post=post,
    ):
        interaction.state = "undone"
        interaction.save()
    post.calculate_stats()
|
|
|
|
|
2023-08-15 15:46:11 -04:00
|
|
|
@staticmethod
def reply_post(
    post_pk: int, identity_pk: int, content: str, visibility: Visibilities
):
    """Create a reply to an existing post on behalf of an identity."""
    return Takahe.post(identity_pk, content, visibility, reply_to_pk=post_pk)
|
2023-08-15 15:46:11 -04:00
|
|
|
|
2023-12-29 16:03:31 -05:00
|
|
|
@staticmethod
def boost_post(post_pk: int, identity_pk: int):
    """Toggle a boost by the identity on the post."""
    return Takahe.interact_post(post_pk, identity_pk, "boost", flip=True)
|
2023-12-29 16:03:31 -05:00
|
|
|
|
|
|
|
@staticmethod
def post_boosted_by(post_pk: int, identity_pk: int) -> bool:
    """True when the identity has an active boost on the post."""
    found = Takahe.get_user_interaction(post_pk, identity_pk, "boost")
    if found is None:
        return False
    return found.state in ["new", "fanned_out"]
|
|
|
|
|
2023-07-20 21:59:49 -04:00
|
|
|
@staticmethod
def like_post(post_pk: int, identity_pk: int):
    """Record a like by the identity on the post."""
    return Takahe.interact_post(post_pk, identity_pk, "like")
|
|
|
|
|
|
|
|
@staticmethod
def unlike_post(post_pk: int, identity_pk: int):
    """Undo any like by the identity on the post."""
    return Takahe.uninteract_post(post_pk, identity_pk, "like")
|
|
|
|
|
|
|
|
@staticmethod
def post_liked_by(post_pk: int, identity_pk: int) -> bool:
    """True when the identity has an active like on the post."""
    found = Takahe.get_user_interaction(post_pk, identity_pk, "like")
    if found is None:
        return False
    return found.state in ["new", "fanned_out"]
|
|
|
|
|
|
|
|
@staticmethod
def get_user_interaction(post_pk: int, identity_pk: int, type: str):
    """Return the PostInteraction of ``type`` on a post by an identity, or None."""
    if not post_pk or not identity_pk:
        return None
    post = Post.objects.filter(pk=post_pk).first()
    if post is None:
        logger.warning(f"Cannot find post {post_pk}")
        return None
    return PostInteraction.objects.filter(
        type=type, identity_id=identity_pk, post=post
    ).first()
|
|
|
|
|
|
|
|
@staticmethod
def get_post_stats(post_pk: int) -> dict:
    """Return the post's cached stats dict ({} when missing or empty)."""
    post = Post.objects.filter(pk=post_pk).first()
    if post is None:
        logger.warning(f"Cannot find post {post_pk}")
        return {}
    return post.stats or {}
|
2023-08-15 15:46:11 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_replies_for_posts(post_pks: list[int], identity_pk: int | None):
    """Queryset of visible replies to the given posts, oldest first.

    When ``identity_pk`` resolves to an Identity, replies visible to that
    identity are returned; otherwise only unlisted/public replies.
    """
    post_uris = Post.objects.filter(pk__in=post_pks).values_list(
        "object_uri", flat=True
    )
    if not post_uris.exists():
        return Post.objects.none()
    identity = (
        Identity.objects.filter(pk=identity_pk).first() if identity_pk else None
    )
    # replies link to parents via in_reply_to == parent object_uri
    child_queryset = (
        Post.objects.not_hidden()
        .prefetch_related(
            # "attachments",
            "mentions",
            "emojis",
        )
        .select_related(
            "author",
            "author__domain",
        )
        .filter(in_reply_to__in=post_uris)
        .order_by("published")
    )
    if identity:
        child_queryset = child_queryset.visible_to(
            identity=identity, include_replies=True
        )
    else:
        child_queryset = child_queryset.unlisted(include_replies=True)
    return child_queryset
|
2023-08-22 17:13:52 +00:00
|
|
|
|
|
|
|
@staticmethod
def html2txt(html: str) -> str:
    """Strip fediverse HTML down to plain text ("" for empty input)."""
    return FediverseHtmlParser(html).plain_text if html else ""
|
|
|
|
|
|
|
|
@staticmethod
def txt2html(txt: str) -> str:
    """Render plain text into fediverse-safe HTML ("" for empty input)."""
    return FediverseHtmlParser(linebreaks_filter(txt)).html if txt else ""
|
|
|
|
|
|
|
|
@staticmethod
def update_state(obj: Post | PostInteraction | Relay | Identity, state: str):
    """Force a stator-managed object into ``state`` and clear its scheduling
    fields so the state machine picks it up fresh."""
    obj.state = state
    obj.state_changed = timezone.now()
    obj.state_next_attempt = None
    obj.state_locked_until = None
    changed = ["state", "state_changed", "state_next_attempt", "state_locked_until"]
    obj.save(update_fields=changed)
|
2023-08-22 21:55:02 +00:00
|
|
|
|
|
|
|
@staticmethod
def get_neodb_peers():
    """Domain pks of known NeoDB peer instances (for federated search).

    settings.SEARCH_PEERS overrides discovery entirely, with the special
    value ["-"] disabling federated search; otherwise peers are found via
    nodeinfo and cached for 30 minutes.
    """
    if settings.SEARCH_PEERS:  # '-' = disable federated search
        return [] if settings.SEARCH_PEERS == ["-"] else settings.SEARCH_PEERS
    cache_key = "neodb_peers"
    peers = cache.get(cache_key, None)
    if peers is None:
        peers = list(
            Domain.objects.filter(
                nodeinfo__protocols__contains="neodb",
                nodeinfo__metadata__nodeEnvironment="production",
                local=False,
            ).values_list("pk", flat=True)
        )
        cache.set(cache_key, peers, timeout=1800)
    return peers
|
2023-09-03 20:11:46 +00:00
|
|
|
|
|
|
|
@staticmethod
def verify_invite(token: str) -> bool:
    """Check whether ``token`` matches a currently-valid takahe invite."""
    if not token:
        return False
    invite = Invite.objects.filter(token=token).first()
    return invite is not None and invite.valid
|
2024-04-17 00:00:40 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_announcements():
    """Published announcements currently within their active window."""
    now = timezone.now()
    started = models.Q(start__lte=now) | models.Q(start__isnull=True)
    not_ended = models.Q(end__gte=now) | models.Q(end__isnull=True)
    return Announcement.objects.filter(
        started, not_ended, published=True
    ).order_by("-start", "-created")
|
|
|
|
|
|
|
|
@staticmethod
def get_announcements_for_user(u: "NeoUser"):
    """Active published announcements for ``u``, excluding already-seen ones.

    Users without a local takahe identity get the full active list.
    """
    identity = (
        Identity.objects.filter(pk=u.identity.pk, local=True).first()
        if u and u.is_authenticated and u.identity
        else None
    )
    user = identity.users.all().first() if identity else None
    # reuse the shared active-announcement query instead of duplicating
    # the date-window filter (it was copy-pasted here before)
    qs = Takahe.get_announcements()
    return qs.exclude(seen=user) if user else qs
|
2024-04-17 00:00:40 -04:00
|
|
|
|
|
|
|
@staticmethod
def mark_announcements_seen(u: "NeoUser"):
    """Mark every started, unseen announcement as seen by ``u``.

    No-op when the user has no local takahe identity. Note this window
    checks only ``start`` (not ``end``), so expired-but-started
    announcements are also marked.
    """
    identity = (
        Identity.objects.filter(pk=u.identity.pk, local=True).first()
        if u and u.is_authenticated and u.identity
        else None
    )
    user = identity.users.all().first() if identity else None
    if not user:
        return
    now = timezone.now()
    for a in (
        Announcement.objects.filter(
            models.Q(start__lte=now) | models.Q(start__isnull=True),
            published=True,
        )
        .order_by("-start", "-created")
        .exclude(seen=user)
    ):
        a.seen.add(user)
|
2024-04-19 20:24:34 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_events(identity_id: int, types: list[str]):
    """Timeline events of the given ``types`` for an identity, newest first.

    Events the identity caused itself are excluded; related posts,
    identities and interactions are eagerly loaded for display.
    """
    return (
        TimelineEvent.objects.select_related(
            "subject_post",
            "subject_post__author",
            "subject_post__author__domain",
            "subject_identity",
            "subject_identity__domain",
            "subject_post_interaction",
            "subject_post_interaction__identity",
            "subject_post_interaction__identity__domain",
        )
        .prefetch_related(
            "subject_post__attachments",
            "subject_post__mentions",
            "subject_post__emojis",
        )
        .filter(identity=identity_id)
        .filter(type__in=types)
        .exclude(subject_identity_id=identity_id)
        .order_by("-created")
    )
|
2024-06-03 07:27:44 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_no_discover_identities():
    """Pks of identities that opted out of discovery."""
    hidden = Identity.objects.filter(discoverable=False)
    return list(hidden.values_list("pk", flat=True))
|
|
|
|
|
2024-07-17 00:53:42 -04:00
|
|
|
@staticmethod
def get_public_posts(local_only=False):
    """Public and local-only posts, newest first; optionally local only."""
    qs = (
        Post.objects.exclude(state__in=["deleted", "deleted_fanned_out"])
        .filter(visibility__in=[0, 4])
        .order_by("-published")
    )
    return qs.filter(local=True) if local_only else qs
|
|
|
|
|
2024-06-20 15:10:06 -04:00
|
|
|
@staticmethod
def get_popular_posts(
    days: int = 30,
    min_interaction: int = 1,
    exclude_identities: list[int] | None = None,
    local_only=False,
):
    """Posts from NeoDB peers (and this site) ranked by interaction count.

    Considers public/unlisted/local-only posts published within ``days``
    days, with at least ``min_interaction`` interactions, excluding the
    given author pks. (The mutable-default ``exclude_identities=[]`` was
    replaced with None for safety; behavior is unchanged.)
    """
    since = timezone.now() - timedelta(days=days)
    domains = Takahe.get_neodb_peers() + [settings.SITE_DOMAIN]
    qs = (
        Post.objects.exclude(state__in=["deleted", "deleted_fanned_out"])
        .exclude(author_id__in=exclude_identities or [])
        .filter(
            author__domain__in=domains,
            visibility__in=[0, 1, 4],
            published__gte=since,
        )
        .annotate(num_interactions=Count("interactions"))
        .filter(num_interactions__gte=min_interaction)
        .order_by("-num_interactions", "-published")
    )
    if local_only:
        qs = qs.filter(local=True)
    return qs
|
2024-06-03 12:59:24 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_recent_posts(author_pk: int, viewer_pk: int | None = None):
    """The author's posts from the last 90 days, filtered by what the
    viewer is allowed to see (followers get non-mentioned posts)."""
    cutoff = timezone.now() - timedelta(days=90)
    qs = (
        Post.objects.exclude(state__in=["deleted", "deleted_fanned_out"])
        .filter(author_id=author_pk, published__gte=cutoff)
        .order_by("-published")
    )
    if viewer_pk and Takahe.get_is_following(viewer_pk, author_pk):
        qs = qs.exclude(visibility=3)
    else:
        qs = qs.filter(visibility__in=[0, 1, 4])
    return qs.prefetch_related("attachments", "author")
|
2024-06-04 16:51:51 -04:00
|
|
|
|
|
|
|
@staticmethod
def pin_hashtag_for_user(identity_pk: int, hashtag: str):
    """Feature ``hashtag`` on the identity's profile, fanning out when new."""
    tag = Hashtag.ensure_hashtag(hashtag)
    identity = Identity.objects.get(pk=identity_pk)
    # only `created` matters; the unused feature object was dropped
    _, created = identity.hashtag_features.get_or_create(hashtag=tag)
    if created:
        identity.fanout("tag_featured", subject_hashtag=tag)
|
|
|
|
|
|
|
|
@staticmethod
def unpin_hashtag_for_user(identity_pk: int, hashtag: str):
    """Remove a featured hashtag from the identity's profile and fan it out.

    NOTE(review): ``hashtag`` is passed as ``hashtag_id`` /
    ``subject_hashtag_id``, i.e. the tag string is assumed to be the
    Hashtag primary key -- confirm against the takahe schema.
    """
    identity = Identity.objects.get(pk=identity_pk)
    featured = HashtagFeature.objects.filter(
        identity=identity, hashtag_id=hashtag
    ).first()
    if featured:
        # fan out before deleting so the event still references the feature
        identity.fanout("tag_unfeatured", subject_hashtag_id=hashtag)
        featured.delete()
|
2024-06-10 17:28:20 -04:00
|
|
|
|
|
|
|
@staticmethod
def get_or_create_app(
    name: str,
    website: str,
    redirect_uris: str,
    owner_pk: int,
    scopes: str = "read write follow",
    client_id: str | None = None,
):
    """Fetch or register an OAuth Application keyed by client_id.

    When no client_id is given, one is generated embedding the owner's pk
    (b62-encoded, zero-padded to 11 chars) plus a random suffix, so an
    owner's apps share a recognizable prefix. A client_secret is generated
    on every call but only stored when the row is first created.
    """
    client_id = client_id or (
        "app-" + b62_encode(owner_pk).zfill(11) + "-" + secrets.token_urlsafe(16)
    )
    client_secret = secrets.token_urlsafe(40)
    return Application.objects.get_or_create(
        client_id=client_id,
        defaults={
            "name": name,
            "website": website,
            "client_secret": client_secret,
            "redirect_uris": redirect_uris,
            "scopes": scopes,
        },
    )[0]
|
|
|
|
|
|
|
|
@staticmethod
def get_apps(owner_pk: int):
    """List OAuth Applications associated with ``owner_pk``.

    NOTE(review): this filters on ``name`` while get_or_create_app embeds
    the owner prefix in ``client_id``, not in the name -- verify apps are
    also created with this naming pattern; otherwise the filter likely
    should be ``client_id__startswith``.
    """
    return Application.objects.filter(
        name__startswith="app-" + b62_encode(owner_pk).zfill(11)
    )
|
|
|
|
|
|
|
|
@staticmethod
def refresh_token(app: Application, owner_pk: int, user_pk) -> str:
    """Rotate the API token for (app, owner identity).

    Any existing token for the pair is deleted, then a fresh read/write
    token is issued and its string returned.
    """
    tk = Token.objects.filter(application=app, identity_id=owner_pk).first()
    if tk:
        tk.delete()
    return Token.objects.create(
        application=app,
        identity_id=owner_pk,
        user_id=user_pk,
        scopes=["read", "write"],
        token=secrets.token_urlsafe(43),
    ).token
|
|
|
|
|
|
|
|
@staticmethod
def get_token(token: str) -> Token | None:
    """Look up an API token by its string value."""
    return Token.objects.filter(token=token).first()
|
2024-06-15 23:38:33 -04:00
|
|
|
|
|
|
|
@staticmethod
def bookmark(post_pk: int, identity_pk: int):
    """Idempotently bookmark a post for an identity."""
    Bookmark.objects.get_or_create(post_id=post_pk, identity_id=identity_pk)
|