diff --git a/journal/models/comment.py b/journal/models/comment.py
index ccc26a54..e399c9e4 100644
--- a/journal/models/comment.py
+++ b/journal/models/comment.py
@@ -139,7 +139,7 @@ class Comment(Content):
 
     def to_post_params(self):
         item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{self.item_url}"
-        pre_conetent = (
+        prepend_content = (
             self.get_repost_template().format(
                 item=f'<a href="{item_link}">{self.item.display_title}</a>'
             )
@@ -148,7 +148,7 @@ class Comment(Content):
         spoiler_text, txt = render_spoiler_text(self.text, self.item)
         content = f"{txt}\n{self.get_repost_postfix()}"
         return {
-            "pre_conetent": pre_conetent,
+            "prepend_content": prepend_content,
             "content": content,
             "summary": spoiler_text,
             "sensitive": bool(spoiler_text),
diff --git a/journal/models/common.py b/journal/models/common.py
index da485e7b..7e7f575f 100644
--- a/journal/models/common.py
+++ b/journal/models/common.py
@@ -297,27 +297,7 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
             d["edited_time"] = datetime.fromisoformat(obj["updated"])
         p = cls.objects.create(**d)
         p.link_post_id(post.id)
-        if local:
-            # a local piece is reconstructred from a post, update post and fanout
-            if not post.type_data:
-                post.type_data = {}
-            # always_merger.merge(
-            #     post.type_data,
-            #     {
-            #         "object": {
-            #             "tag": [item.ap_object_ref],
-            #             "relatedWith": [p.ap_object],
-            #         }
-            #     },
-            # )
-            post.type_data = {
-                "object": {
-                    "tag": [item.ap_object_ref],
-                    "relatedWith": [p.ap_object],
-                }
-            }
-            post.save(update_fields=["type_data"])
-            Takahe.update_state(post, "edited")
+        # subclass may have to add additional code to update type_data in local post
         return p
 
     def sync_to_mastodon(self, delete_existing=False):
diff --git a/journal/models/note.py b/journal/models/note.py
index 9011ae30..bc518c57 100644
--- a/journal/models/note.py
+++ b/journal/models/note.py
@@ -2,6 +2,7 @@ import re
 from functools import cached_property
 from typing import override
 
+from deepmerge import always_merger
 from django.db import models
 from django.utils.translation import gettext_lazy as _
 from loguru import logger
@@ -15,7 +16,12 @@ from .renderers import render_text
 from .shelf import ShelfMember
 
 _progress = re.compile(
-    r"^\s*(?P<prefix>(p|pg|page|ch|chapter|pt|part|e|ep|episode|trk|track|cycle))?(\s|\.|#)*(?P<value>(\d[\d\:\.\-]*\d|\d))\s*(?P<postfix>(%))?\s*(\s|\n|\.|:)",
+    r"(.*\s)?(?P<prefix>(p|pg|page|ch|chapter|pt|part|e|ep|episode|trk|track|cycle))(\s|\.|#)*(?P<value>(\d[\d\:\.\-]*\d|\d))\s*(?P<postfix>(%))?(\s|\n|\.|。)?$",
+    re.IGNORECASE,
+)
+
+_progress2 = re.compile(
+    r"(.*\s)?(?P<value>(\d[\d\:\.\-]*\d|\d))\s*(?P<postfix>(%))?(\s|\n|\.|。)?$",
     re.IGNORECASE,
 )
 
@@ -106,9 +112,17 @@ class Note(Content):
     @override
     @classmethod
     def params_from_ap_object(cls, post, obj, piece):
+        content = obj.get("content", "").strip()
+        footer = []
+        if post.local:
+            # strip footer from local post if detected
+            lines = content.splitlines()
+            if len(lines) > 2 and lines[-2].strip() in ["—", "-"]:
+                content = "\n".join(lines[:-2])
+                footer = lines[-2:]
         params = {
             "title": obj.get("title", post.summary),
-            "content": obj.get("content", "").strip(),
+            "content": content,
             "sensitive": obj.get("sensitive", post.sensitive),
             "attachments": [],
         }
@@ -117,12 +131,14 @@ class Note(Content):
             params["progress_type"] = progress.get("type")
         if progress.get("value"):
             params["progress_value"] = progress.get("value")
-        if post.local:
-            progress_type, progress_value = cls.extract_progress(params["content"])
-            print(progress_type, progress_value)
+        if post.local and len(footer) == 2:
+            progress_type, progress_value = cls.extract_progress(footer[1])
             if progress_value:
                 params["progress_type"] = progress_type
                 params["progress_value"] = progress_value
+            elif not footer[1].startswith("https://"):
+                # add footer back if unable to recognize correct patterns
+                params["content"] += "\n" + "\n".join(footer)
         if post:
             for atta in post.attachments.all():
                 params["attachments"].append(
@@ -138,14 +154,16 @@ class Note(Content):
     @override
     @classmethod
     def update_by_ap_object(cls, owner, item, obj, post):
+        # new_piece = cls.get_by_post_id(post.id) is None
         p = super().update_by_ap_object(owner, item, obj, post)
-        if (
-            p
-            and p.local
-            and owner.user.preference.mastodon_default_repost
-            and owner.user.mastodon_username
-        ):
-            p.sync_to_mastodon()
+        if p and p.local:
+            # if local piece is created from a post, update post type_data and fanout
+            p.sync_to_timeline()
+            if (
+                owner.user.preference.mastodon_default_repost
+                and owner.user.mastodon_username
+            ):
+                p.sync_to_mastodon()
         return p
 
     @cached_property
@@ -153,9 +171,10 @@ class Note(Content):
         return ShelfMember.objects.filter(item=self.item, owner=self.owner).first()
 
     def to_mastodon_params(self):
+        footer = f"\n—\n《{self.item.display_title}》 {self.progress_display}\n{self.item.absolute_url}"
         params = {
             "spoiler_text": self.title,
-            "content": self.content,
+            "content": self.content + footer,
             "sensitive": self.sensitive,
             "reply_to_toot_url": (
                 self.shelfmember.get_mastodon_repost_url() if self.shelfmember else None
@@ -170,9 +189,11 @@ class Note(Content):
         return params
 
     def to_post_params(self):
+        footer = f'\n<p>—<br><a href="{self.item.absolute_url}">{self.item.display_title}</a> {self.progress_display}\n</p>'
        return {
             "summary": self.title,
             "content": self.content,
+            "append_content": footer,
             "sensitive": self.sensitive,
             "reply_to_pk": (
                 self.shelfmember.latest_post_id if self.shelfmember else None
@@ -183,8 +204,11 @@ class Note(Content):
     @classmethod
     def extract_progress(cls, content):
         m = _progress.match(content)
+        if not m:
+            m = _progress2.match(content)
         if m and m["value"]:
-            typ_ = "percentage" if m["postfix"] == "%" else m["prefix"]
+            m = m.groupdict()
+            typ_ = "percentage" if m["postfix"] == "%" else m.get("prefix", "")
             match typ_:
                 case "p" | "pg" | "page":
                     typ = Note.ProgressType.PAGE
diff --git a/journal/models/review.py b/journal/models/review.py
index 8cc89dca..dd3a7ab2 100644
--- a/journal/models/review.py
+++ b/journal/models/review.py
@@ -98,7 +98,7 @@ class Review(Content):
 
     def to_post_params(self):
         item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{self.item.url}"
-        pre_conetent = (
+        prepend_content = (
             self.get_repost_template().format(
                 item=f'<a href="{item_link}">{self.item.display_title}</a>'
             )
@@ -106,7 +106,7 @@ class Review(Content):
         )
         content = f"{render_rating(self.rating_grade, 1)}\n{self.get_repost_postfix()}"
         return {
-            "pre_conetent": pre_conetent,
+            "prepend_content": prepend_content,
             "content": content,
         }
 
diff --git a/journal/views/wrapped.py b/journal/views/wrapped.py
index 391014ba..4ed7b564 100644
--- a/journal/views/wrapped.py
+++ b/journal/views/wrapped.py
@@ -125,7 +125,6 @@ class WrappedShareView(LoginRequiredMixin, TemplateView):
             identity.pk,
             comment,
             Takahe.visibility_n2t(visibility, user.preference.post_public_mode),
-            "",
             attachments=[media],
         )
         classic_repost = user.preference.mastodon_repost_mode == 1
diff --git a/takahe/ap_handlers.py b/takahe/ap_handlers.py
index aae46630..30269cea 100644
--- a/takahe/ap_handlers.py
+++ b/takahe/ap_handlers.py
@@ -15,6 +15,7 @@ from journal.models import (
     Review,
     ShelfMember,
 )
+from users.middlewares import activate_language_for_user
 from users.models.apidentity import APIdentity
 
 from .models import Follow, Identity, Post, TimelineEvent
@@ -102,6 +103,7 @@ def post_edited(pk, post_data):
 def post_fetched(pk, post_data):
     post = Post.objects.get(pk=pk)
     owner = Takahe.get_or_create_remote_apidentity(post.author)
+    activate_language_for_user(owner.user)
     if not post.type_data and not post_data:
         logger.warning(f"Post {post} has no type_data")
         return
diff --git a/takahe/models.py b/takahe/models.py
index fbe4254a..6a73080e 100644
--- a/takahe/models.py
+++ b/takahe/models.py
@@ -1084,8 +1084,9 @@ class Post(models.Model):
     def create_local(
         cls,
         author: Identity,
-        raw_prepend_content: str,
         content: str,
+        raw_prepend_content: str,
+        raw_append_content: str,
         summary: str | None = None,
         sensitive: bool = False,
         visibility: int = Visibilities.public,
@@ -1107,7 +1108,10 @@ class Post(models.Model):
             emojis = Emoji.emojis_from_content(content, None)
             # Strip all unwanted HTML and apply linebreaks filter, grabbing hashtags on the way
             parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True)
-            content = parser.html.replace("<p>", "<p>" + raw_prepend_content, 1)
+            content = (
+                parser.html.replace("<p>", "<p>" + raw_prepend_content, 1)
+                + raw_append_content
+            )
             hashtags = (
                 sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags])
                 or None
@@ -1158,8 +1162,9 @@ class Post(models.Model):
 
     def edit_local(
         self,
-        raw_prepend_content: str,
         content: str,
+        raw_prepend_content: str,
+        raw_append_content: str,
         summary: str | None = None,
         sensitive: bool | None = None,
         visibility: int = Visibilities.public,
@@ -1172,7 +1177,10 @@ class Post(models.Model):
         with transaction.atomic():
             # Strip all HTML and apply linebreaks filter
             parser = FediverseHtmlParser(linebreaks_filter(content), find_hashtags=True)
-            self.content = parser.html.replace("<p>", "<p>" + raw_prepend_content, 1)
+            self.content = (
+                parser.html.replace("<p>", "<p>" + raw_prepend_content, 1)
+                + raw_append_content
+            )
             self.hashtags = (
                 sorted([tag[: Hashtag.MAXIMUM_LENGTH] for tag in parser.hashtags])
                 or None
diff --git a/takahe/utils.py b/takahe/utils.py
index 84422025..0649e962 100644
--- a/takahe/utils.py
+++ b/takahe/utils.py
@@ -444,7 +444,8 @@ class Takahe:
         author_pk: int,
         content: str,
         visibility: Visibilities,
-        pre_conetent: str = "",
+        prepend_content: str = "",
+        append_content: str = "",
         summary: str | None = None,
         sensitive: bool = False,
         data: dict | None = None,
@@ -469,8 +470,9 @@ class Takahe:
                 raise ValueError(f"Cannot find post to reply: {reply_to_pk}")
         if post:
             post.edit_local(
-                pre_conetent,
                 content,
+                prepend_content,
+                append_content,
                 summary,
                 sensitive,
                 visibility=visibility,
@@ -482,8 +484,9 @@ class Takahe:
         else:
             post = Post.create_local(
                 identity,
-                pre_conetent,
                 content,
+                prepend_content,
+                append_content,
                 summary,
                 sensitive,
                 visibility=visibility,
@@ -589,7 +592,7 @@ class Takahe:
             return existing_post
         action = _("created collection")
         item_link = collection.absolute_url
-        pre_conetent = f'{action} <a href="{item_link}">{collection.title}</a><br>'
+        prepend_content = f'{action} <a href="{item_link}">{collection.title}</a><br>'
         content = collection.plain_content
         if len(content) > 360:
             content = content[:357] + "..."
@@ -603,7 +606,8 @@ class Takahe:
             collection.owner.pk,
             content,
             visibility,
-            pre_conetent,
+            prepend_content,
+            "",
             None,
             False,
             data,
@@ -622,7 +626,7 @@ class Takahe:
         user = mark.owner.user
         stars = _rating_to_emoji(mark.rating_grade, 1)
         item_link = f"{settings.SITE_INFO['site_url']}/~neodb~{mark.item.url}"
-        pre_conetent = mark.get_action_for_feed(item_link=item_link)
+        prepend_content = mark.get_action_for_feed(item_link=item_link)
         spoiler, txt = Takahe.get_spoiler_text(mark.comment_text, mark.item)
         content = f"{stars} \n{txt}\n{mark.tag_text}"
         data = {
@@ -647,7 +651,8 @@ class Takahe:
             mark.owner.pk,
             content + append_content,
             v,
-            pre_conetent,
+            prepend_content,
+            "",
             spoiler,
             spoiler is not None,
             data,
diff --git a/users/middlewares.py b/users/middlewares.py
index defe2460..ce9a1735 100644
--- a/users/middlewares.py
+++ b/users/middlewares.py
@@ -3,14 +3,21 @@ from django.middleware.locale import LocaleMiddleware
 from django.utils import translation
 
 
+def activate_language_for_user(user, request=None):
+    user_language = None
+    if user and user.is_authenticated:
+        user_language = getattr(user, "language", "")
+    if not user_language:
+        if request:
+            user_language = translation.get_language_from_request(request)
+        else:
+            user_language = settings.LANGUAGE_CODE
+    # if user_language in dict(settings.LANGUAGES).keys():
+    translation.activate(user_language)
+    return translation.get_language()
+
+
 class LanguageMiddleware(LocaleMiddleware):
     def process_request(self, request):
-        user_language = None
         user = getattr(request, "user", None)
-        if user and user.is_authenticated:
-            user_language = getattr(user, "language", "")
-        if not user_language:
-            user_language = translation.get_language_from_request(request)
-        # if user_language in dict(settings.LANGUAGES).keys():
-        translation.activate(user_language)
-        request.LANGUAGE_CODE = translation.get_language()
+        request.LANGUAGE_CODE = activate_language_for_user(user, request)