index and search user journal

parent bea87f22be · commit 3061905452
57 changed files with 2516 additions and 1226 deletions
@@ -118,6 +118,7 @@ env = environ.FileAwareEnv(
     NEODB_SENTRY_DSN=(str, ""),
     NEODB_SENTRY_SAMPLE_RATE=(float, 0),
     NEODB_FANOUT_LIMIT_DAYS=(int, 9),
+    INDEX_ALIASES=(dict, {}),
 )

 # ====== End of user configuration variables ======

@@ -561,6 +562,8 @@ RQ_SHOW_ADMIN_LINK = DEBUG

+SEARCH_INDEX_NEW_ONLY = False
+INDEX_ALIASES = env("INDEX_ALIASES")

 DOWNLOADER_SAVEDIR = env("NEODB_DOWNLOADER_SAVE_DIR", default="/tmp")  # type: ignore

 DISABLE_MODEL_SIGNAL = False  # disable index and social feeds during importing/etc
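The new `INDEX_ALIASES` setting lets a deployment point logical index names at concrete Typesense collection names; it is consulted in `common/models/index.py` below, with optional `_read`/`_write` variants for zero-downtime reindexing. A minimal sketch of an env entry, assuming django-environ's `key=value,key=value` dict syntax; the collection names here are hypothetical:

```
INDEX_ALIASES=journal=journal_v1,journal_write=journal_v2
```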
@@ -216,6 +216,12 @@ class Edition(Item):
     def display_subtitle(self) -> str | None:
         return self.get_localized_subtitle()

+    def to_indexable_titles(self) -> list[str]:
+        titles = [t["text"] for t in self.localized_title if t]
+        titles += [t["text"] for t in self.localized_subtitle if t]
+        titles += [self.orig_title] if self.orig_title else []
+        return list(set(titles))
+
     @property
     def isbn10(self):
         return isbn_13_to_10(self.isbn)
@@ -10,6 +10,10 @@ class Collection(Item):
     journal_item: "JournalCollection"
     category = ItemCategory.Collection

     @property
     def url(self):
         return self.journal_item.url if self.journal_item else super().url
+
+    @property
+    def owner_id(self):
+        return self.journal_item.owner_id if self.journal_item else None
@@ -470,7 +470,7 @@ class Item(PolymorphicModel):
         res.save()

     def __str__(self):
-        return f"{self.__class__.__name__}|{self.pk}|{self.uuid} {self.primary_lookup_id_type}:{self.primary_lookup_id_value if self.primary_lookup_id_value else ''} ({self.title})"
+        return f"{self.__class__.__name__}|{self.pk}|{self.uuid} {self.primary_lookup_id_type}:{self.primary_lookup_id_value if self.primary_lookup_id_value else ''} ({self.display_title})"

     @classmethod
     def lookup_id_type_choices(cls):
@@ -567,6 +567,12 @@ class Item(PolymorphicModel):
         res.item = to_item
         res.save()

+    @property
+    def final_item(self) -> Self:
+        if self.merged_to_item:
+            return self.merged_to_item.final_item
+        return self
+
     def recast_to(self, model: "type[Any]") -> "Item":
         logger.warning(f"recast item {self} to {model}")
         if isinstance(self, model):
@@ -657,6 +663,12 @@ class Item(PolymorphicModel):
     def brief_description(self):
         return (str(self.display_description) or "")[:155]

+    def to_indexable_titles(self) -> list[str]:
+        titles = [t["text"] for t in self.localized_title if t]
+        if self.parent_item:
+            titles += self.parent_item.to_indexable_titles()
+        return list(set(titles))
+
     @classmethod
     def get_by_url(cls, url_or_b62: str, resolve_merge=False) -> "Self | None":
         b62 = url_or_b62.strip().split("/")[-1]
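`to_indexable_titles` feeds the search index: it gathers every localized title (plus, in the subclass overrides below, original titles and parent items' titles) and deduplicates them. A worked example with hypothetical data:

```python
# hypothetical item with an English and a Chinese title and no parent
item.localized_title = [{"lang": "en", "text": "Dune"}, {"lang": "zh", "text": "沙丘"}]
item.parent_item = None
item.to_indexable_titles()  # -> ["Dune", "沙丘"] (order unspecified; deduplicated via set())
```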
@@ -173,3 +173,8 @@ class Movie(Item):
         else:
             return None, None
         return super().lookup_id_cleanup(lookup_id_type, lookup_id_value)
+
+    def to_indexable_titles(self) -> list[str]:
+        titles = [t["text"] for t in self.localized_title if t]
+        titles += [self.orig_title] if self.orig_title else []
+        return list(set(titles))
@@ -1,4 +1,5 @@
+import re
 from urllib.parse import quote

 import django_rq
 from django.conf import settings
@@ -6,12 +7,14 @@ from django.contrib.auth.decorators import login_required
 from django.core.cache import cache
 from django.core.exceptions import BadRequest
 from django.shortcuts import redirect, render
 from django.urls import reverse
 from django.utils.translation import gettext as _
 from django.views.decorators.http import require_http_methods
+from rq.job import Job

 from catalog.common.models import ItemCategory, SiteName
 from catalog.common.sites import AbstractSite, SiteManager
+from common.models import int_
 from common.utils import (
     HTTPResponseHXRedirect,
     PageLinksGenerator,
@@ -37,7 +40,7 @@ def fetch_refresh(request, job_id):
         else:
             return HTTPResponseHXRedirect(item_url)
     else:
-        retry = int(request.GET.get("retry", 0)) + 1
+        retry = int_(request.GET.get("retry", 0)) + 1
         if retry > 10:
             return render(request, "_fetch_failed.html")
         else:
@@ -97,10 +100,10 @@ def visible_categories(request):

 @user_identity_required
 def search(request):
-    category = request.GET.get("c", default="all").strip().lower()
     keywords = request.GET.get("q", default="").strip()
     if re.match(r"^[@＠]", keywords):
         return query_identity(request, keywords.replace("＠", "@"))
+    category = request.GET.get("c", default="all").strip().lower()
     hide_category = False
     if category == "all" or not category:
         category = None
@@ -115,8 +118,7 @@ def search(request):
     categories = visible_categories(request)
     tag = request.GET.get("tag", default="").strip()
     tag = Tag.deep_cleanup_title(tag, default="")
-    p = request.GET.get("page", default="1")
-    p = int(p) if p.isdigit() else 1
+    p = int_(request.GET.get("page", default="1"), 1)
     if not (keywords or tag):
         return render(
             request,
@@ -158,7 +160,7 @@ def external_search(request):
     if category == "all":
         category = None
     keywords = request.GET.get("q", default="").strip()
-    page_number = int(request.GET.get("page", default=1))
+    page_number = int_(request.GET.get("page"), 1)
     items = ExternalSources.search(category, keywords, page_number) if keywords else []
     cache_key = f"search_{category if category!='movietv' else 'movie,tv'}_{keywords}"
     dedupe_urls = cache.get(cache_key, [])
@@ -52,7 +52,7 @@
   {% for tag in item.tags %}
     {% if forloop.counter <= 5 %}
       <span>
-        <a href="{% url 'catalog:search' %}?tag={{ tag }}">{{ tag }}</a>
+        <a href="{% url 'common:search' %}?tag={{ tag }}">{{ tag }}</a>
       </span>
     {% endif %}
   {% endfor %}
catalog/templates/_item_card_metadata_collection.html (new file, 16 lines)

@@ -0,0 +1,16 @@
+{% extends "_item_card_metadata_base.html" %}
+{% load humanize %}
+{% load i18n %}
+{% block brief %}
+  <div class="multi-fields">
+    {% if item.rating %}
+      <span class="solo-hidden">{{ item.rating | floatformat:1 }} <small>({{ item.rating_count }} {% trans "ratings" %})</small></span>
+    {% endif %}
+    {% include '_people.html' with people=item.host role='host' max=5 %}
+  </div>
+{% endblock brief %}
+{% block full %}
+  <div>
+    {% if not hide_brief %}{{ item.display_description | linebreaksbr }}{% endif %}
+  </div>
+{% endblock full %}
@@ -163,7 +163,7 @@
   <div class="tag-list">
     {% for t in popular_tags %}
      <span>
-       <a href="{% url 'catalog:search' %}?tag={{ t|urlencode }}">{{ t }}</a>
+       <a href="{% url 'common:search' %}?tag={{ t|urlencode }}">{{ t }}</a>
      </span>
     {% empty %}
      <div class="empty">{% trans "nothing so far." %}</div>
@@ -191,7 +191,7 @@
   <div class="tag-list solo-hidden">
     {% for tag in item.tags %}
      <span>
-       <a href="{% url 'catalog:search' %}?tag={{ tag }}">{{ tag }}</a>
+       <a href="{% url 'common:search' %}?tag={{ tag }}">{{ tag }}</a>
      </span>
     {% endfor %}
   </div>
catalog/templates/search_header.html (new file, 80 lines)

@@ -0,0 +1,80 @@
+{% load static %}
+{% load i18n %}
+{% load l10n %}
+{% load humanize %}
+{% load mastodon %}
+{% load duration %}
+{% load thumb %}
+<hgroup>
+  <h5>“{{ request.GET.q }}”</h5>
+  <div class="search-category-picker">
+    {% visible_categories as cats %}
+    {% if request.GET.c and request.GET.c != 'all' %}
+      <a href="?q={{ request.GET.q }}&c=all">{% trans "all" %}</a>
+    {% else %}
+      {% trans "all" %}
+    {% endif %}
+    {% if 'book' in cats %}
+      |
+      {% if request.GET.c != 'book' %}
+        <a href="?q={{ request.GET.q }}&c=book">{% trans "books" %}</a>
+      {% else %}
+        {% trans "books" %}
+      {% endif %}
+    {% endif %}
+    {% if 'movie' in cats or 'tv' in cats %}
+      |
+      {% if request.GET.c != 'movietv' %}
+        <a href="?q={{ request.GET.q }}&c=movietv">{% trans "movie & tv" %}</a>
+      {% else %}
+        {% trans "movie & tv" %}
+      {% endif %}
+    {% endif %}
+    {% if 'podcast' in cats %}
+      |
+      {% if request.GET.c != 'podcast' %}
+        <a href="?q={{ request.GET.q }}&c=podcast">{% trans "podcasts" %}</a>
+      {% else %}
+        {% trans "podcasts" %}
+      {% endif %}
+    {% endif %}
+    {% if 'music' in cats %}
+      |
+      {% if request.GET.c != 'music' %}
+        <a href="?q={{ request.GET.q }}&c=music">{% trans "music" %}</a>
+      {% else %}
+        {% trans "music" %}
+      {% endif %}
+    {% endif %}
+    {% if 'game' in cats %}
+      |
+      {% if request.GET.c != 'game' %}
+        <a href="?q={{ request.GET.q }}&c=game">{% trans "games" %}</a>
+      {% else %}
+        {% trans "games" %}
+      {% endif %}
+    {% endif %}
+    {% if 'performance' in cats %}
+      |
+      {% if request.GET.c != 'performance' %}
+        <a href="?q={{ request.GET.q }}&c=performance">{% trans "performances" %}</a>
+      {% else %}
+        {% trans "performances" %}
+      {% endif %}
+    {% endif %}
+    {% if user.is_authenticated %}
+      |
+      {% if request.GET.c != 'journal' %}
+        <a href="?q={{ request.GET.q }}&c=journal">{% trans "your journal" %}</a>
+      {% else %}
+        {% trans "your journal" %}
+      {% endif %}
+      |
+      {% if request.GET.c != 'timeline' %}
+        <a href="?q={{ request.GET.q }}&c=timeline">{% trans "your timeline" %}</a>
+      {% else %}
+        {% trans "your timeline" %}
+      {% endif %}
+    {% endif %}
+  </div>
+</hgroup>
@@ -20,65 +20,7 @@
 <div>
   <div>
     {% if request.GET.q %}
-      <hgroup>
-        <h5>“{{ request.GET.q }}”</h5>
-        <div>
-          {% visible_categories as cats %}
-          {% if request.GET.c and request.GET.c != 'all' %}
-            <a href="?q={{ request.GET.q }}&c=all">{% trans "all" %}</a>
-          {% else %}
-            {% trans "all" %}
-          {% endif %}
-          {% if 'book' in cats %}
-            |
-            {% if request.GET.c != 'book' %}
-              <a href="?q={{ request.GET.q }}&c=book">{% trans "books" %}</a>
-            {% else %}
-              {% trans "books" %}
-            {% endif %}
-          {% endif %}
-          {% if 'movie' in cats or 'tv' in cats %}
-            |
-            {% if request.GET.c != 'movietv' %}
-              <a href="?q={{ request.GET.q }}&c=movietv">{% trans "movie & tv" %}</a>
-            {% else %}
-              {% trans "movie & tv" %}
-            {% endif %}
-          {% endif %}
-          {% if 'podcast' in cats %}
-            |
-            {% if request.GET.c != 'podcast' %}
-              <a href="?q={{ request.GET.q }}&c=podcast">{% trans "podcasts" %}</a>
-            {% else %}
-              {% trans "podcasts" %}
-            {% endif %}
-          {% endif %}
-          {% if 'music' in cats %}
-            |
-            {% if request.GET.c != 'music' %}
-              <a href="?q={{ request.GET.q }}&c=music">{% trans "music" %}</a>
-            {% else %}
-              {% trans "music" %}
-            {% endif %}
-          {% endif %}
-          {% if 'game' in cats %}
-            |
-            {% if request.GET.c != 'game' %}
-              <a href="?q={{ request.GET.q }}&c=game">{% trans "games" %}</a>
-            {% else %}
-              {% trans "games" %}
-            {% endif %}
-          {% endif %}
-          {% if 'performance' in cats %}
-            |
-            {% if request.GET.c != 'performance' %}
-              <a href="?q={{ request.GET.q }}&c=performance">{% trans "performances" %}</a>
-            {% else %}
-              {% trans "performances" %}
-            {% endif %}
-          {% endif %}
-        </div>
-      </hgroup>
+      {% include "search_header.html" %}
     {% endif %}
     {% if request.GET.tag %}
      <h5>{% trans 'tag' %}: “{{ request.GET.tag }}”</h5>
@@ -250,6 +250,11 @@ class TVShow(Item):
     def get_season_count(self):
         return self.season_count or self.seasons.all().count()

+    def to_indexable_titles(self) -> list[str]:
+        titles = [t["text"] for t in self.localized_title if t]
+        titles += [self.orig_title] if self.orig_title else []
+        return list(set(titles))
+

 class TVSeason(Item):
     if TYPE_CHECKING:
@@ -434,6 +439,12 @@ class TVSeason(Item):
             and RE_LOCALIZED_SEASON_NUMBERS.sub("", t["text"]) != ""
         ]

+    def to_indexable_titles(self) -> list[str]:
+        titles = [t["text"] for t in self.localized_title if t]
+        titles += [self.orig_title] if self.orig_title else []
+        titles += self.parent_item.to_indexable_titles() if self.parent_item else []
+        return list(set(titles))
+
     def update_linked_items_from_external_resource(self, resource):
         for w in resource.required_resources:
             if w["model"] == "TVShow":
@@ -157,7 +157,7 @@ urlpatterns = [
         mark_list,
         name="mark_list",
     ),
-    path("search", search, name="search"),
+    path("search/", search, name="search_legacy"),
     path("search/external", external_search, name="external_search"),
     path("fetch_refresh/<str:job_id>", fetch_refresh, name="fetch_refresh"),
@@ -1,4 +1,5 @@
 from .cron import BaseJob, JobManager
+from .index import Index, SearchResult
 from .lang import (
     LANGUAGE_CHOICES,
     LOCALE_CHOICES,
@@ -9,4 +10,21 @@ from .lang import (
     detect_language,
     get_current_locales,
 )
-from .misc import uniq
+from .misc import int_, uniq
+
+__all__ = [
+    "BaseJob",
+    "JobManager",
+    "LANGUAGE_CHOICES",
+    "LOCALE_CHOICES",
+    "SCRIPT_CHOICES",
+    "SITE_DEFAULT_LANGUAGE",
+    "SITE_PREFERRED_LANGUAGES",
+    "SITE_PREFERRED_LOCALES",
+    "detect_language",
+    "get_current_locales",
+    "uniq",
+    "int_",
+    "Index",
+    "SearchResult",
+]
common/models/index.py (new file, 223 lines)

@@ -0,0 +1,223 @@
+from functools import cached_property
+from time import sleep
+from typing import Iterable, Self, TypeVar
+
+import typesense
+from django.conf import settings
+from loguru import logger
+from typesense.collection import Collection
+from typesense.exceptions import ObjectNotFound
+
+
+class SearchResult:
+    def __init__(self, index: "Index", response: dict):
+        self.index = index
+        self.response = response
+        self.page_size = response["request_params"]["per_page"]
+        self.total = response["found"]
+        self.page = response["page"]
+        self.pages = (self.total + self.page_size - 1) // self.page_size
+
+    def __repr__(self):
+        return f"SearchResult(search '{self.response['request_params']['q']}', found {self.response['found']} out of {self.response['out_of']}, page {self.response['page']})"
+
+    def __str__(self):
+        return f"SearchResult(search '{self.response['request_params']['q']}', found {self.response['found']} out of {self.response['out_of']}, page {self.response['page']})"
+
+    def get_facet(self, field):
+        f = next(
+            (f for f in self.response["facet_counts"] if f["field_name"] == field),
+            None,
+        )
+        if not f:
+            return {}
+        return {v["value"]: v["count"] for v in f["counts"]}
+
+    def __bool__(self):
+        return len(self.response["hits"]) > 0
+
+    def __len__(self):
+        return len(self.response["hits"])
+
+    def __iter__(self):
+        return iter(self.response["hits"])
+
+    def __getitem__(self, key):
+        return self.response["hits"][key]
+
+    def __contains__(self, item):
+        return item in self.response["hits"]
+
+
+SearchResultClass = TypeVar("SearchResultClass", bound=SearchResult)
+
+
+class Index:
+    name = ""  # must be set in subclass
+    schema = {"fields": []}  # must be set in subclass
+    max_pages = 100
+    default_search_params = {
+        # "query_by": ...,
+        "per_page": 20,
+        "highlight_fields": "",
+        "include_fields": "id",
+    }
+
+    _instance = None
+    _client: typesense.Client
+
+    @classmethod
+    def instance(cls) -> Self:
+        if not cls._instance:
+            cls._instance = cls()
+        return cls._instance
+
+    @classmethod
+    def get_client(cls):
+        return typesense.Client(settings.TYPESENSE_CONNECTION)
+
+    def __init__(self, *args, **kwargs):
+        self._client = self.get_client()
+
+    def _get_collection(self, for_write=False) -> Collection:
+        global _cached_collections
+        collection_id = self.name + ("_write" if for_write else "_read")
+        cname = settings.INDEX_ALIASES.get(collection_id) or settings.INDEX_ALIASES.get(
+            self.name, self.name
+        )
+        collection = self._client.collections[cname]
+        if not collection:
+            raise KeyError(f"Typesense: collection {collection_id} not found")
+        return collection
+
+    @cached_property
+    def read_collection(self) -> Collection:
+        return self._get_collection()
+
+    @cached_property
+    def write_collection(self) -> Collection:
+        return self._get_collection(True)
+
+    @classmethod
+    def get_schema(cls) -> dict:
+        cname = settings.INDEX_ALIASES.get(
+            cls.name + "_write"
+        ) or settings.INDEX_ALIASES.get(cls.name, cls.name)
+        schema = {"name": cname}
+        schema.update(cls.schema)
+        return schema
+
+    def check(self) -> dict:
+        if not self._client.operations.is_healthy():
+            raise ValueError("Typesense: server not healthy")
+        return self.read_collection.retrieve()
+
+    def create_collection(self):
+        self._client.collections.create(self.get_schema())
+
+    def delete_collection(self):
+        self.write_collection.delete()
+
+    def update_schema(self):
+        self.write_collection.update(self.get_schema())
+
+    def initialize_collection(self, max_wait=5) -> bool:
+        try:
+            wait = max_wait
+            while not self._client.operations.is_healthy() and wait:
+                logger.warning("Typesense: server not healthy")
+                sleep(1)
+                wait -= 1
+            if not wait:
+                logger.error("Typesense: timeout waiting for server")
+                return False
+            cname = settings.INDEX_ALIASES.get(
+                self.name + "_write"
+            ) or settings.INDEX_ALIASES.get(self.name, self.name)
+            collection = self._client.collections[cname]
+            if collection:
+                try:
+                    i = collection.retrieve()
+                    logger.debug(f"Typesense: {cname} has {i['num_documents']} docs")
+                except ObjectNotFound:
+                    self.create_collection()
+                    logger.info(f"Typesense: {cname} created")
+                return True
+            logger.error("Typesense: server unknown error")
+        except Exception as e:
+            logger.error(f"Typesense: server error {e}")
+        return False
+
+    def replace_docs(self, docs: Iterable[dict]):
+        if not docs:
+            return False
+        rs = self.write_collection.documents.import_(docs, {"action": "upsert"})
+        for r in rs:
+            e = r.get("error", None)
+            if e:
+                logger.error(f"Typesense: {self.name} import error {e}")
+                if settings.DEBUG:
+                    logger.error(f"Typesense: {r}")
+
+    def insert_docs(self, docs: Iterable[dict]):
+        if not docs:
+            return False
+        rs = self.write_collection.documents.import_(docs)
+        for r in rs:
+            e = r.get("error", None)
+            if e:
+                logger.error(f"Typesense: {self.name} import error {e}")
+                if settings.DEBUG:
+                    logger.error(f"Typesense: {r}")
+
+    def delete_docs(self, field: str, values: list[int] | str) -> int:
+        v: str = (
+            ("[" + ",".join(map(str, values)) + "]")
+            if isinstance(values, list)
+            else values
+        )
+        q = {"filter_by": f"{field}:{v}"}
+        r = self.write_collection.documents.delete(q)
+        return (r or {}).get("num_deleted", 0)
+
+    def patch_docs(self, partial_doc: dict, doc_filter: str):
+        self.write_collection.documents.update(partial_doc, {"filter_by": doc_filter})
+
+    def search(
+        self,
+        q: str,
+        page: int = 1,
+        page_size: int = 0,
+        query_by: list[str] = [],
+        sort_by: str = "",
+        filter_by: dict[str, list[str | int]] = {},
+        facet_by: list[str] = [],
+        result_class: type[SearchResultClass] = SearchResult,
+    ) -> SearchResultClass:
+        params = self.default_search_params.copy()
+        params["q"] = q
+        params["page"] = page if page > 0 and page <= self.max_pages else 1
+        if page_size:
+            params["per_page"] = page_size
+        filters = []
+        for field, values in filter_by.items():
+            if field == "_":
+                filters += values
+            elif values:
+                v = f"[{','.join(map(str, values))}]" if len(values) > 1 else values[0]
+                filters.append(f"{field}:{v}")
+        if filters:
+            params["filter_by"] = " && ".join(filters)
+        if facet_by:
+            params["facet_by"] = ",".join(facet_by)
+        if query_by:
+            params["query_by"] = ",".join(query_by)
+        if sort_by:
+            params["sort_by"] = sort_by
+        if settings.DEBUG:
+            logger.debug(f"Typesense: search {self.name} {params}")
+        r = self.read_collection.documents.search(params)
+        sr = result_class(self, r)
+        if settings.DEBUG:
+            logger.debug(f"Typesense: search result {sr}")
+        return sr
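`Index` wraps a Typesense collection behind a per-class singleton client: subclasses only declare `name` and `schema` and inherit collection management, bulk import, and search. A minimal sketch of a hypothetical subclass (not part of this commit):

```python
class NoteIndex(Index):  # hypothetical example, not in the codebase
    name = "note"
    schema = {
        "fields": [
            {"name": "content", "type": "string[]", "locale": "zh"},
            {"name": "owner_id", "type": "int64", "sort": False},
        ]
    }

idx = NoteIndex.instance()
idx.initialize_collection()  # create the collection if it does not exist yet
idx.replace_docs([{"id": "1", "content": ["hello"], "owner_id": 42}])
r = idx.search("hello", query_by=["content"], filter_by={"owner_id": [42]})
print(r.total, len(r))  # hits found vs. hits on this page
```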
@@ -4,3 +4,11 @@ def uniq(ls: list) -> list:
         if i not in r:
             r.append(i)
     return r
+
+
+def int_(x, default=0):
+    return (
+        int(x)
+        if isinstance(x, str) and x.isdigit()
+        else (x if isinstance(x, int) else default)
+    )
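`int_` is the reason so many `int(...)` casts change in this commit: it degrades malformed input to a default instead of raising `ValueError`. Behavior, per the definition above:

```python
int_("3")      # 3
int_("abc")    # 0
int_(None, 1)  # 1
int_("-2")     # 0 (isdigit() rejects the sign, so the default applies)
int_(7, 5)     # 7 (ints pass through unchanged)
```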
@@ -10,7 +10,7 @@
   </ul>
   <ul class="nav-search {% if request.GET.q %}unhide{% endif %}">
     <li>
-      <form role="search" method="get" action="{% url 'catalog:search' %}">
+      <form role="search" method="get" action="{% url 'common:search' %}">
        <input type="search"
               name="q"
               id="q"
@@ -19,7 +19,7 @@
               value="{{ request.GET.q|default:'' }}" />
        <select name="c">
          {% visible_categories as cats %}
-         <option value="all">{% trans 'Everything' %}</option>
+         <option value="all">{% trans 'All Items' %}</option>
          {% if 'book' in cats %}
            <option {% if request.GET.c == 'book' or '/book/' in request.path %}selected{% endif %}
                    value="book">{% trans 'Book' %}</option>
@@ -44,6 +44,12 @@
            <option {% if request.GET.c == 'performance' or '/performance/' in request.path %}selected{% endif %}
                    value="performance">{% trans 'Performance' %}</option>
          {% endif %}
+         {% if request.user.is_authenticated %}
+           <option {% if request.GET.c == 'journal' or '/users/' in request.path %}selected{% endif %}
+                   value="journal">{% trans 'Journal' %}</option>
+           <option {% if request.GET.c == 'timeline' or '/timeline/' in request.path %}selected{% endif %}
+                   value="timeline">{% trans 'Posts' %}</option>
+         {% endif %}
        </select>
        <input type="submit" value="" class="fa-solid" />
      </form>
@@ -52,7 +52,7 @@
   <div class="tag-list">
     {% for t in popular_tags %}
      <span>
-       <a href="{% url 'catalog:search' %}?tag={{ t|urlencode }}">{{ t }}</a>
+       <a href="{% url 'common:search' %}?tag={{ t|urlencode }}">{{ t }}</a>
      </span>
     {% empty %}
      <div class="empty">{% trans "nothing so far." %}</div>
@@ -5,6 +5,7 @@ from .views import *
 app_name = "common"
 urlpatterns = [
     path("", home),
+    path("search", search, name="search"),
     path("home/", home, name="home"),
     path("me/", me, name="me"),
     path("nodeinfo/2.0/", nodeinfo2),
@@ -13,6 +13,7 @@ from django.utils import timezone
 from django.utils.translation import gettext as _

 from .config import ITEMS_PER_PAGE, ITEMS_PER_PAGE_OPTIONS, PAGE_LINK_NUMBER
+from .models import int_

 if TYPE_CHECKING:
     from users.models import APIdentity, User
@@ -118,9 +119,9 @@ class CustomPaginator(Paginator):
         if request:
             try:
                 if request.GET.get("per_page"):
-                    per_page = int(request.GET.get("per_page"))
+                    per_page = int_(request.GET.get("per_page"))
                 elif request.COOKIES.get("per_page"):
-                    per_page = int(request.COOKIES.get("per_page"))
+                    per_page = int_(request.COOKIES.get("per_page"))
             except ValueError:
                 pass
             if per_page not in ITEMS_PER_PAGE_OPTIONS:
@@ -139,7 +140,7 @@ class PageLinksGenerator:
         self, current_page: int, total_pages: int, query: QueryDict | None = None
     ):
         length = PAGE_LINK_NUMBER
-        current_page = int(current_page)
+        current_page = int_(current_page)
         self.query_string = ""
         if query:
             q = query.copy()
@@ -6,6 +6,9 @@ from django.shortcuts import redirect, render
 from django.urls import reverse

 from boofilsic import __version__
+from catalog.views import search as catalog_search
+from journal.views import search as journal_search
+from social.views import search as timeline_search
 from takahe.utils import Takahe

 from .api import api
@@ -24,6 +27,16 @@ def me(request):
     return redirect(request.user.identity.url)


+def search(request):
+    match request.GET.get("c", default="all").strip().lower():
+        case "journal":
+            return journal_search(request)
+        case "timeline":
+            return timeline_search(request)
+        case _:
+            return catalog_search(request)
+
+
 def home(request):
     if request.user.is_authenticated:
         if not request.user.registration_complete:
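With the dispatcher above mounted at `path("search", ...)` in common/urls.py, routing follows the `c` query parameter, for example:

```
GET /search?q=dune             -> catalog_search
GET /search?q=dune&c=journal   -> journal_search
GET /search?q=dune&c=timeline  -> timeline_search
```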
@@ -64,7 +64,7 @@ if you are doing debug or development:
 Add alias to your shell for easier access
 
 ```
-alias neodb-manage='docker-compose --profile production run shell neodb-manage'
+alias neodb-manage='docker-compose --profile production run --rm shell neodb-manage'
 ```
 
 Toggle user's active, staff and super user status
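The added `--rm` makes docker-compose remove the one-off shell container after it exits, so repeated management-command runs don't accumulate stopped containers.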
@@ -1,17 +1,58 @@
+from argparse import RawTextHelpFormatter
+
 from django.core.management.base import BaseCommand
+from django.core.paginator import Paginator
+from tqdm import tqdm
 
 from catalog.models import Item
 from journal.importers.douban import DoubanImporter
-from journal.models import *
-from users.models import *
+from journal.models import JournalIndex, Piece
+from journal.models.common import Content
+from journal.models.itemlist import ListMember
+from takahe.models import Post
+from users.models import User
+
+_CONFIRM = "confirm deleting collection? [Y/N] "
+
+_HELP_TEXT = """
+integrity: check and fix remaining journal for merged and deleted items
+purge: delete invalid data (visibility=99)
+idx-info: show index information
+idx-init: check and create index if not exists
+idx-destroy: delete index
+idx-alt: update index schema
+idx-delete: delete docs in index
+idx-update: reindex docs
+idx-search: search docs in index
+"""
+
 
 class Command(BaseCommand):
     help = "journal app utilities"
 
+    def create_parser(self, *args, **kwargs):
+        parser = super(Command, self).create_parser(*args, **kwargs)
+        parser.formatter_class = RawTextHelpFormatter
+        return parser
+
     def add_arguments(self, parser):
+        parser.add_argument(
+            "action",
+            choices=[
+                "integrity",
+                "purge",
+                "idx-info",
+                "idx-init",
+                "idx-alt",
+                "idx-destroy",
+                "idx-update",
+                "idx-delete",
+                "idx-search",
+            ],
+            help=_HELP_TEXT,
+        )
         parser.add_argument(
             "--verbose",
             action="store_true",
@@ -21,14 +62,27 @@ class Command(BaseCommand):
             action="store_true",
         )
         parser.add_argument(
-            "--purge",
-            action="store_true",
-            help="purge invalid data (visibility=99)",
+            "--owner",
+            action="append",
         )
         parser.add_argument(
-            "--integrity",
-            action="store_true",
-            help="check and fix remaining journal for merged and deleted items",
+            "--query",
+        )
+        parser.add_argument(
+            "--batch-size",
+            default=1000,
+        )
+        parser.add_argument(
+            "--item-class",
+            action="append",
+        )
+        parser.add_argument(
+            "--piece-class",
+            action="append",
+        )
+        parser.add_argument(
+            "--yes",
+            action="store_true",
         )

     def integrity(self):
@@ -44,16 +98,128 @@ class Command(BaseCommand):
             if self.fix:
                 update_journal_for_merged_item(i.url)

-    def handle(self, *args, **options):
-        self.verbose = options["verbose"]
-        self.fix = options["fix"]
-        if options["integrity"]:
-            self.integrity()
+    def batch_index(self, index, typ, qs):
+        c = 0
+        pg = Paginator(qs.order_by("id"), self.batch_size)
+        for p in tqdm(pg.page_range):
+            if typ == "post":
+                docs = index.posts_to_docs(pg.get_page(p).object_list)
+            else:
+                pieces = [
+                    p for p in pg.get_page(p).object_list if p.latest_post is None
+                ]
+                docs = index.pieces_to_docs(pieces)
+            c += len(docs)
+            index.replace_docs(docs)
+        self.stdout.write(self.style.SUCCESS(f"indexed {c} docs."))

-        if options["purge"]:
-            for pcls in [Content, ListMember]:
-                for cls in pcls.__subclasses__():
-                    self.stdout.write(f"Cleaning up {cls}...")
-                    cls.objects.filter(visibility=99).delete()
-
-        self.stdout.write(self.style.SUCCESS(f"Done."))
+    def handle(
+        self,
+        action,
+        yes,
+        query,
+        owner,
+        piece_class,
+        item_class,
+        verbose,
+        fix,
+        batch_size,
+        *args,
+        **kwargs,
+    ):
+        self.verbose = verbose
+        self.fix = fix
+        self.batch_size = batch_size
+        index = JournalIndex.instance()
+
+        if owner:
+            owners = list(
+                APIdentity.objects.filter(username__in=owner, local=True).values_list(
+                    "id", flat=True
+                )
+            )
+        else:
+            owners = []
+
+        match action:
+            case "integrity":
+                self.integrity()
+                self.stdout.write(self.style.SUCCESS(f"Done."))
+
+            case "purge":
+                for pcls in [Content, ListMember]:
+                    for cls in pcls.__subclasses__():
+                        self.stdout.write(f"Cleaning up {cls}...")
+                        cls.objects.filter(visibility=99).delete()
+                self.stdout.write(self.style.SUCCESS(f"Done."))
+
+            case "idx-destroy":
+                if yes or input(_CONFIRM).upper().startswith("Y"):
+                    index.delete_collection()
+                    self.stdout.write(self.style.SUCCESS("deleted."))
+
+            case "idx-alt":
+                index.update_schema()
+                self.stdout.write(self.style.SUCCESS("updated."))
+
+            case "idx-init":
+                index.initialize_collection()
+                self.stdout.write(self.style.SUCCESS("initialized."))
+
+            case "idx-info":
+                try:
+                    r = index.check()
+                    self.stdout.write(str(r))
+                except Exception as e:
+                    self.stdout.write(self.style.ERROR(str(e)))
+
+            case "idx-delete":
+                if owners:
+                    c = index.delete_by_owner(owners)
+                else:
+                    c = index.delete_all()
+                self.stdout.write(self.style.SUCCESS(f"deleted {c} documents."))
+
+            case "idx-update":
+                pieces = Piece.objects.all()
+                posts = Post.objects.filter(local=True).exclude(
+                    state__in=["deleted", "deleted_fanned_out"]
+                )
+                if owners:
+                    pieces = pieces.filter(owner_id__in=owners)
+                    posts = posts.filter(author_id__in=owners)
+                # index all posts
+                self.batch_index(index, "post", posts)
+                # index remaining pieces without posts
+                self.batch_index(index, "piece", pieces)
+                # posts = posts.exclude(type_data__object__has_key="relatedWith")
+                # docs = index.posts_to_docs(posts)
+                # c = len(docs)
+                # index.insert_docs(docs)
+                # self.stdout.write(self.style.SUCCESS(f"indexed {c} posts."))
+
+            case "idx-search":
+                r = index.search(
+                    "" if query == "-" else query,
+                    filter_by={
+                        "owner_id": owners,
+                        "piece_class": piece_class,
+                        "item_class": item_class,
+                    },
+                    page_size=100,
+                )
+                self.stdout.write(self.style.SUCCESS(str(r)))
+                self.stdout.write(f"{r.facet_by_item_class}")
+                self.stdout.write(f"{r.facet_by_piece_class}")
+                self.stdout.write(self.style.SUCCESS("matched posts:"))
+                for post in r:
+                    self.stdout.write(str(post))
+                self.stdout.write(self.style.SUCCESS("matched pieces:"))
+                for pc in r.pieces:
+                    self.stdout.write(str(pc))
+                self.stdout.write(self.style.SUCCESS("matched items:"))
+                for i in r.items:
+                    self.stdout.write(str(i))
+
+            case _:
+                self.stdout.write(self.style.ERROR("action not found."))
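Assuming the command file is registered under the journal app as `journal` (the actual name depends on the file name under `journal/management/commands/`), typical invocations would look like:

```
neodb-manage journal idx-init
neodb-manage journal idx-update --owner alice --batch-size 500
neodb-manage journal idx-search "dune" --item-class Edition
neodb-manage journal idx-destroy --yes
```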
@@ -12,6 +12,7 @@ from .common import (
     q_piece_in_home_feed_of_user,
     q_piece_visible_to_user,
 )
+from .index import JournalIndex, QueryParser
 from .like import Like
 from .mark import Mark
 from .mixins import UserOwnedObjectMixin
@@ -34,6 +35,7 @@ __all__ = [
     "CollectionMember",
     "FeaturedCollection",
     "Comment",
+    "JournalIndex",
     "Piece",
     "PieceInteraction",
     "PiecePost",
@@ -47,6 +49,7 @@ __all__ = [
     "Like",
     "Mark",
     "Note",
+    "QueryParser",
     "Rating",
     "render_md",
     "Review",
@@ -1,6 +1,6 @@
 import re
 from functools import cached_property
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any

 from django.conf import settings
 from django.db import models
@@ -41,6 +41,9 @@ class CollectionMember(ListMember):
             "href": self.absolute_url,
         }

+    def to_indexable_doc(self) -> dict[str, Any]:
+        return {}
+

 class Collection(List):
     if TYPE_CHECKING:
@@ -65,6 +68,9 @@ class Collection(List):
         to=APIdentity, related_name="featured_collections", through="FeaturedCollection"
     )

+    def __str__(self):
+        return f"Collection:{self.uuid}@{self.owner_id}:{self.title}"
+
     @property
     def html_content(self):
         html = render_md(self.brief)
@@ -112,12 +118,49 @@ class Collection(List):
             self.catalog_item.cover = self.cover
             self.catalog_item.save()
         super().save(*args, **kwargs)
-        Takahe.post_collection(self)
+        self.sync_to_timeline()
+        self.update_index()

-    def delete(self, *args, **kwargs):
-        if self.local:
-            Takahe.delete_posts(self.all_post_ids)
-        return super().delete(*args, **kwargs)
+    def get_ap_data(self):
+        return {
+            "object": {
+                # "tag": [item.ap_object_ref for item in collection.items],
+                "relatedWith": [self.ap_object],
+            }
+        }
+
+    def sync_to_timeline(self, update_mode: int = 0):
+        existing_post = self.latest_post
+        owner: APIdentity = self.owner
+        user = owner.user
+        v = Takahe.visibility_n2t(self.visibility, user.preference.post_public_mode)
+        if existing_post and (update_mode == 1 or v != existing_post.visibility):
+            Takahe.delete_posts([existing_post.pk])
+            existing_post = None
+        data = self.get_ap_data()
+        # if existing_post and existing_post.type_data == data:
+        #     return existing_post
+        action = _("created collection")
+        item_link = self.absolute_url
+        prepend_content = f'{action} <a href="{item_link}">{self.title}</a><br>'
+        content = self.plain_content
+        if len(content) > 360:
+            content = content[:357] + "..."
+        post = Takahe.post(
+            self.owner.pk,
+            content,
+            v,
+            prepend_content,
+            "",
+            None,
+            False,
+            data,
+            existing_post.pk if existing_post else None,
+            self.created_time,
+        )
+        if post and post != existing_post:
+            self.link_post_id(post.pk)
+        return post

     @property
     def ap_object(self):
@@ -133,6 +176,24 @@ class Collection(List):
             "href": self.absolute_url,
         }

+    def to_indexable_doc(self) -> dict[str, Any]:
+        content = [self.title, self.brief]
+        item_id = []
+        item_title = []
+        item_class = set()
+        for m in self.members.all():
+            item_id.append(m.item.pk)
+            item_title += m.item.to_indexable_titles()
+            item_class |= {m.item.__class__.__name__}
+            if m.note:
+                content.append(m.note)
+        return {
+            "item_id": item_id,
+            "item_class": list(item_class),
+            "item_title": item_title,
+            "content": content,
+        }
+

 class FeaturedCollection(Piece):
     owner = models.ForeignKey(APIdentity, on_delete=models.CASCADE)
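Note how a collection's document aggregates its members, so a search for any member's title can surface the collection itself. A hypothetical two-member collection would produce roughly:

```python
{
    "item_id": [12, 34],                               # hypothetical member item pks
    "item_class": ["Edition", "Movie"],
    "item_title": ["Dune", "沙丘", "Dune: Part Two"],  # all members' indexable titles
    "content": ["My sci-fi picks", "a brief intro", "a note on the book"],
}
```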
@@ -1,5 +1,6 @@
 from datetime import datetime
 from functools import cached_property
+from typing import Any

 from django.conf import settings
 from django.db import models
@@ -147,3 +148,20 @@ class Comment(Content):
             "summary": spoiler_text,
             "sensitive": bool(spoiler_text),
         }
+
+    @cached_property
+    def sibling_shelfmember(self):
+        from .shelf import ShelfMember
+
+        return ShelfMember.objects.filter(owner=self.owner, item=self.item).first()
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        if self.sibling_shelfmember:
+            return {}
+        return {
+            "item_id": [self.item.id],
+            "item_class": [self.item.__class__.__name__],
+            "item_title": self.item.to_indexable_titles(),
+            "rating": self.rating_grade or 0,
+            "content": [self.text],
+        }
@@ -25,6 +25,7 @@ from takahe.utils import Takahe
 from users.middlewares import activate_language_for_user
 from users.models import APIdentity, User

+from .index import JournalIndex
 from .mixins import UserOwnedObjectMixin

 if TYPE_CHECKING:
@@ -112,8 +113,9 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):

     def delete(self, *args, **kwargs):
         if self.local:
-            Takahe.delete_posts(self.all_post_ids)
+            self.delete_from_timeline()
             self.delete_crossposts()
+        self.delete_index()
         return super().delete(*args, **kwargs)

     @property
@@ -424,6 +426,9 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
             }
         }

+    def delete_from_timeline(self):
+        Takahe.delete_posts(self.all_post_ids)
+
     def sync_to_timeline(self, update_mode: int = 0):
         """update_mode: 0 update if exists otherwise create; 1: delete if exists and create; 2: only create"""
         user = self.owner.user
@@ -452,6 +457,25 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
             self.link_post_id(post.pk)
         return post

+    def update_index(self):
+        index = JournalIndex.instance()
+        doc = index.piece_to_doc(self)
+        if doc:
+            try:
+                index.delete_by_piece([self.pk])
+                index.replace_docs([doc])
+            except Exception as e:
+                logger.error(f"Indexing {self} error {e}")
+
+    def delete_index(self):
+        index = JournalIndex.instance()
+        index.delete_by_piece([self.pk])
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        raise NotImplementedError(
+            f"{self.__class__} should override this to make itself searchable"
+        )
+

 class PiecePost(models.Model):
     post_id: int
@@ -530,3 +554,6 @@ class Debris(Content):
                 item=c.item,
                 remote_id=c.remote_id if hasattr(c, "remote_id") else None,
             )
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        return {}
journal/models/index.py (new file, 346 lines)

@@ -0,0 +1,346 @@
+import re
+from functools import cached_property, reduce
+from typing import TYPE_CHECKING, Iterable
+
+from django.db.models import QuerySet
+
+from catalog.models import Item
+from common.models import Index, SearchResult, int_, uniq
+from takahe.models import Post
+from takahe.utils import Takahe
+
+if TYPE_CHECKING:
+    from journal.models import Piece
+
+
+def _get_item_ids(doc):
+    from journal.models import Collection
+
+    if doc.get("piece_class") != ["Collection"]:
+        return doc["item_id"]
+    return list(
+        Collection.objects.filter(id__in=doc["piece_id"]).values_list(
+            "catalog_item_id", flat=True
+        )
+    )
+
+
+class JournalSearchResult(SearchResult):
+    @cached_property
+    def items(self):
+        if not self:
+            return Item.objects.none()
+        ids = uniq(
+            reduce(
+                lambda a, b: a + b,
+                [
+                    _get_item_ids(hit["document"])
+                    for hit in self.response["hits"]
+                    if "item_id" in hit["document"]
+                ],
+                [],
+            )
+        )
+        items = Item.objects.filter(pk__in=ids, is_deleted=False)
+        items = [j for j in [i.final_item for i in items] if not j.is_deleted]
+        return items
+
+    @cached_property
+    def pieces(self):
+        from journal.models import Piece
+
+        if not self:
+            return Piece.objects.none()
+        ids = reduce(
+            lambda a, b: a + b,
+            [
+                hit["document"]["piece_id"]
+                for hit in self.response["hits"]
+                if "piece_id" in hit["document"]
+            ],
+            [],
+        )
+        ps = Piece.objects.filter(pk__in=ids)
+        return ps
+
+    @cached_property
+    def posts(self):
+        if not self:
+            return Post.objects.none()
+        ids = reduce(
+            lambda a, b: a + b,
+            [
+                hit["document"]["post_id"]
+                for hit in self.response["hits"]
+                if "post_id" in hit["document"]
+            ],
+            [],
+        )
+        ps = Post.objects.filter(pk__in=ids).exclude(
+            state__in=["deleted", "deleted_fanned_out"]
+        )
+        return ps
+
+    @property
+    def facet_by_item_class(self):
+        return self.get_facet("item_class")
+
+    @property
+    def facet_by_piece_class(self):
+        return self.get_facet("piece_class")
+
+    def __iter__(self):
+        return iter(self.posts)
+
+    def __getitem__(self, key):
+        return self.posts[key]
+
+    def __contains__(self, item):
+        return item in self.posts
+
+
+class JournalIndex(Index):
+    name = "journal"
+    schema = {
+        "fields": [
+            {
+                "name": "post_id",
+                "type": "int64[]",
+                "sort": False,
+                "optional": True,
+            },
+            {
+                "name": "piece_id",
+                "type": "int64[]",
+                "sort": False,
+                "optional": True,
+            },
+            {
+                "name": "piece_class",
+                "type": "string[]",
+                "facet": True,
+                "optional": True,
+            },
+            {
+                "name": "item_id",
+                "type": "int64[]",
+                "optional": True,
+            },
+            {
+                "name": "item_class",
+                "type": "string[]",
+                "facet": True,
+                "optional": True,
+            },
+            {
+                "name": "item_title",
+                "type": "string[]",
+                "locale": "zh",
+                "optional": True,
+            },
+            {
+                "name": "content",
+                "type": "string[]",
+                "locale": "zh",
+                "optional": True,
+            },
+            {
+                "name": "shelf_type",
+                "type": "string",
+                "optional": True,
+            },
+            {
+                "name": "rating",
+                "type": "int32",
+                "range_index": True,
+                "optional": True,
+            },
+            {
+                "name": "tag",
+                "type": "string[]",
+                "locale": "zh",
+                "optional": True,
+            },
+            {
+                "name": "created",
+                "type": "int64",
+            },
+            {
+                "name": "owner_id",
+                "type": "int64",
+                "sort": False,
+            },
+            {
+                "name": "visibility",
+                "type": "int32",
+                "sort": False,
+            },
+            {
+                "name": "viewer_id",
+                "type": "int64[]",
+                "sort": False,
+                "optional": True,
+            },
+        ]
+    }
+    default_search_params = {
+        "query_by": "content, item_title, tag",
+        "sort_by": "created:desc",
+        "per_page": 20,
+        "highlight_fields": "",
+        "include_fields": "post_id, piece_id, item_id, owner_id, piece_class",
+        "facet_by": "item_class, piece_class",
+    }
+
+    @classmethod
+    def piece_to_doc(cls, piece: "Piece") -> dict:
+        d = piece.to_indexable_doc()
+        if not d:
+            return {}
+        doc = {
+            "id": (
+                str(piece.latest_post_id)
+                if piece.latest_post_id
+                else "p" + str(piece.pk)
+            ),
+            "piece_id": [piece.pk],
+            "piece_class": [piece.__class__.__name__],
+            "created": int(piece.created_time.timestamp()),  # type: ignore
+            "owner_id": piece.owner_id,
+            "visibility": piece.visibility,
+        }
+        if piece.latest_post:
+            # fk is not enforced, so post might be deleted
+            doc["post_id"] = [piece.latest_post_id]
+            doc["viewer_id"] = list(
+                piece.latest_post.interactions.values_list("identity_id", flat=True)
+            )
+        doc.update(d)
+        return doc
+
+    @classmethod
+    def pieces_to_docs(cls, pieces: "Iterable[Piece]") -> list[dict]:
+        docs = [cls.piece_to_doc(p) for p in pieces]
+        return [d for d in docs if d]
+
+    @classmethod
+    def post_to_doc(cls, post: Post) -> dict:
+        pc = post.piece
+        doc = {}
+        if pc:
+            doc = cls.piece_to_doc(pc)
+        if not doc:
+            doc = {
+                "id": str(post.pk),
+                "post_id": [post.pk],
+                "piece_class": ["Post"],
+                "content": [post.content],
+                "created": int(post.created.timestamp()),
+                "owner_id": post.author_id,
+                "viewer_id": list(
+                    post.interactions.values_list("identity_id", flat=True)
+                ),
+                "visibility": Takahe.visibility_t2n(post.visibility),
+            }
+        return doc
+
+    @classmethod
+    def posts_to_docs(cls, posts: QuerySet[Post]) -> list[dict]:
+        return [cls.post_to_doc(p) for p in posts]
+
+    def delete_all(self):
+        return self.delete_docs("owner_id", ">0")
+
+    def delete_by_owner(self, owner_ids):
+        return self.delete_docs("owner_id", owner_ids)
+
+    def delete_by_piece(self, piece_ids):
+        return self.delete_docs("piece_id", piece_ids)
+
+    def delete_by_post(self, post_ids):
+        return self.delete_docs("post_id", post_ids)
+
+    def replace_pieces(self, pieces: "Iterable[Piece] | QuerySet[Piece]"):
+        if isinstance(pieces, QuerySet):
+            pids = pieces.values_list("pk", flat=True)
+        else:
+            pids = [p.pk for p in pieces]
+        if not pids:
+            return
+        self.delete_by_piece(pids)
+        self.insert_docs(self.pieces_to_docs(pieces))
+
+    def search(
+        self,
+        q: str,
+        page: int = 1,
+        page_size: int = 0,
+        query_by: list[str] = [],
+        sort_by: str = "",
+        filter_by: dict[str, list[str | int]] = {},
+        facet_by: list[str] = [],
+        result_class=JournalSearchResult,
+    ) -> JournalSearchResult:
+        r = super().search(
+            q=q,
+            page=page,
+            page_size=page_size,
+            query_by=query_by,
+            sort_by=sort_by,
+            filter_by=filter_by,
+            facet_by=facet_by,
+            result_class=result_class,
+        )
+        return r
+
+
+class QueryParser:
+    fields = ["status", "rating", "tag", "category", "type"]
+
+    @classmethod
+    def re(cls):
+        return re.compile(
+            r"\b(?P<field>" + "|".join(cls.fields) + r"):(?P<value>[^ ]+)"
+        )
+
+    def __init__(self, query: str):
+        self.query = str(query) if query else ""
+        r = self.re()
+        self.filters = {
+            m.group("field").strip().lower(): m.group("value").strip().lower()
+            for m in r.finditer(query)
+        }
+        self.q = r.sub("", query).strip()
+        self.filter_by = {}
+        self.query_by = ["content", "item_title", "tag"]
+
+        v = list(
+            set(self.filters.get("status", "").split(","))
+            & {"wishlist", "progress", "complete"}
+        )
+        if v:
+            self.filter_by["shelf_type"] = v
+
+        v = list(
+            set(self.filters.get("type", "").replace("mark", "shelfmember").split(","))
+            & {"shelfmember", "rating", "comment", "review", "collection", "note"}
+        )
+        if v:
+            self.filter_by["piece_class"] = v
+        # else:
+        #     # hide collection by default unless specified
+        #     self.filter_by["piece_class"] = ["!collection"]
+
+        v = [i for i in set(self.filters.get("tag", "").split(",")) if i]
+        if v:
+            self.filter_by["tag"] = v
+            self.query_by.remove("tag")
+
+        v = self.filters.get("rating", "").split("..")
+        if len(v) == 2:
+            v = map(int_, v)
+            self.filter_by["rating"] = ["..".join(map(str, v))]
+        elif len(v) == 1:
+            v = int_(v[0])
+            if v:
+                self.filter_by["rating"] = [v]
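`QueryParser` translates the user-facing query syntax into `filter_by`/`query_by` arguments for `JournalIndex.search`. A hypothetical query:

```python
qp = QueryParser("dune status:complete,progress rating:3..5 tag:scifi")
qp.q          # "dune"
qp.filter_by  # {"shelf_type": ["complete", "progress"],  (set order unspecified)
              #  "tag": ["scifi"], "rating": ["3..5"]}
qp.query_by   # ["content", "item_title"]  ("tag" dropped because tag: was given)
```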
@@ -218,7 +218,7 @@ class Mark:
         if shelf_type is None:
             # take item off shelf
             if self.shelfmember:
-                Takahe.delete_posts(self.shelfmember.all_post_ids)
+                self.shelfmember.delete_from_timeline()
                 self.shelfmember.log_and_delete()
             if self.comment:
                 self.comment.delete()
@@ -281,6 +281,7 @@ class Mark:
         post = self.shelfmember.sync_to_timeline(update_mode)
         if share_to_mastodon:
             self.shelfmember.sync_to_social_accounts(update_mode)
+        self.shelfmember.update_index()
         # auto add bookmark
         if (
             post
@@ -1,6 +1,6 @@
 import re
 from functools import cached_property
-from typing import override
+from typing import Any, override

 from deepmerge import always_merger
 from django.db import models
@@ -161,6 +161,7 @@ class Note(Content):
         p.sync_to_timeline()
         if owner.user.preference.mastodon_default_repost and owner.user.mastodon:
             p.sync_to_social_accounts()
+        p.update_index()
         return p

     @cached_property
@@ -292,3 +293,11 @@ class Note(Content):
             case _:
                 v = []
         return v
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        return {
+            "item_id": [self.item.id],
+            "item_class": [self.item.__class__.__name__],
+            "item_title": self.item.to_indexable_titles(),
+            "content": [self.title or "", self.content],
+        }
@@ -139,3 +139,7 @@ class Rating(Content):
     def get_item_rating(item: Item, owner: APIdentity) -> int | None:
         rating = Rating.objects.filter(owner=owner, item=item).first()
         return (rating.grade or None) if rating else None
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        # rating is not indexed individually but with shelfmember
+        return {}
@@ -1,6 +1,7 @@
 import re
 from datetime import datetime
 from functools import cached_property
+from typing import Any

 from django.conf import settings
 from django.db import models
@@ -163,4 +164,13 @@ class Review(Content):
         review.sync_to_timeline(update_mode)
         if share_to_mastodon:
             review.sync_to_social_accounts(update_mode)
+        review.update_index()
         return review
+
+    def to_indexable_doc(self) -> dict[str, Any]:
+        return {
+            "item_id": [self.item.id],
+            "item_class": [self.item.__class__.__name__],
+            "item_title": self.item.to_indexable_titles(),
+            "content": [self.title, self.body],
+        }
@@ -1,6 +1,6 @@
 from datetime import datetime
 from functools import cached_property
-from typing import TYPE_CHECKING, override
+from typing import TYPE_CHECKING, Any, override

 from django.conf import settings
 from django.db import connection, models
@@ -419,6 +419,31 @@ class ShelfMember(ListMember):
             self.sibling_comment.link_post_id(post.id)
         return post

+    def to_indexable_doc(self) -> dict[str, Any]:
+        ids = [self.pk]
+        classes = [self.__class__.__name__]
+        content = []
+        rating = 0
+        if self.sibling_rating:
+            # ids.append(self.sibling_rating.pk)
+            classes.append("Rating")
+            rating = self.sibling_rating.grade
+        if self.sibling_comment:
+            # ids.append(self.sibling_comment.pk)
+            classes.append("Comment")
+            content = [self.sibling_comment.text]
+        return {
+            "piece_id": ids,
+            "piece_class": classes,
+            "item_id": [self.item.id],
+            "item_class": [self.item.__class__.__name__],
+            "item_title": self.item.to_indexable_titles(),
+            "shelf_type": self.shelf_type,
+            "rating": rating,
+            "tag": self.tags,
+            "content": content,
+        }
+
     @cached_property
     def sibling_comment(self) -> "Comment | None":
         from .comment import Comment
@@ -503,6 +528,9 @@ class Shelf(List):
     def __str__(self):
         return f"Shelf:{self.owner.username}:{self.shelf_type}"

+    def to_indexable_doc(self) -> dict[str, Any]:
+        return {}
+

 class ShelfLogEntry(models.Model):
     owner = models.ForeignKey(APIdentity, on_delete=models.PROTECT)
@@ -37,6 +37,9 @@ class TagMember(ListMember):
             "href": self.absolute_url,
         }

+    def to_indexable_doc(self):
+        return {}
+

 TagValidators = [RegexValidator(regex=r"\s+", inverse_match=True)]

@@ -83,6 +86,9 @@ class Tag(List):
         if self.pinned:
             Takahe.pin_hashtag_for_user(self.owner.pk, new_title)

+    def to_indexable_doc(self):
+        return {}
+

 class TagManager:
     @staticmethod
@@ -60,6 +60,7 @@ def update_journal_for_merged_item(
         try:
             p.item = new_item
             p.save(update_fields=["item_id"])
+            p.update_index()
         except IntegrityError:
             if delete_duplicated:
                 logger.warning(
@@ -15,7 +15,7 @@
     <i class="fa-solid fa-square-minus"></i>
   </a>
 </span>
-{% elif request.user.is_authenticated %}
+{% elif request.user.is_authenticated and item.class_name != 'collection' %}
 {% wish_item_action item as action %}
 <span>
   {% if not action.taken %}
@@ -56,7 +56,7 @@
 {% for tag in mark.tags %}
   {% if forloop.counter <= 5 %}
     <span>
-      <a href="{% url 'catalog:search' %}?tag={{ tag }}">{{ tag }}</a>
+      <a href="{% url 'common:search' %}?tag={{ tag }}">{{ tag }}</a>
     </span>
   {% endif %}
 {% endfor %}
journal/templates/search_journal.html (new file, 36 lines)

@@ -0,0 +1,36 @@
+{% load static %}
+{% load i18n %}
+{% load l10n %}
+{% load humanize %}
+{% load mastodon %}
+{% load duration %}
+{% load thumb %}
+<!DOCTYPE html>
+<html lang="zh" class="classic-page nav-page-search">
+  <head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>{{ site_name }} - {{ request.GET.q }} - {% trans 'Search Results' %}</title>
+    {% include "common_libs.html" %}
+  </head>
+  <body>
+    {% include '_header.html' %}
+    <main>
+      <div class="grid__main">
+        {% include 'search_header.html' %}
+        <div class="item-card-list">
+          {% for item in items %}
+            {% include '_list_item.html' %}
+          {% empty %}
+            <p>{% trans "No items matching the search query." %}</p>
+          {% endfor %}
+        </div>
+      </div>
+      {% include "_pagination.html" %}
+      {% block sidebar %}
+        {% include "_sidebar.html" with show_profile=1 identity=user.identity %}
+      {% endblock %}
+    </main>
+    {% include '_footer.html' %}
+  </body>
+</html>
@@ -29,5 +29,6 @@ from .post import (
)
from .profile import profile, user_calendar_data
from .review import ReviewFeed, review_edit, review_retrieve, user_review_list
from .search import search
from .tag import user_tag_edit, user_tag_list, user_tag_member_list
from .wrapped import WrappedShareView, WrappedView
@@ -7,6 +7,7 @@ from django.utils.translation import gettext as _
from django.views.decorators.http import require_http_methods

from catalog.models import Item
from common.models import int_
from common.utils import AuthedHttpRequest, get_uuid_or_404
from users.models import User

@@ -29,7 +30,7 @@ def add_to_collection(request: AuthedHttpRequest, item_uuid):
            },
        )
    else:
        cid = int(request.POST.get("collection_id", default=0))
        cid = int_(request.POST.get("collection_id"))
        if not cid:
            cid = Collection.objects.create(
                owner=request.user.identity,

@@ -140,7 +141,7 @@ def collection_share(request: AuthedHttpRequest, collection_uuid):
        if user.mastodon:
            user.mastodon.boost_later(collection.latest_post.url)
    else:
        visibility = VisibilityType(request.POST.get("visibility", default=0))
        visibility = VisibilityType(int_(request.POST.get("visibility")))
        link = (
            collection.latest_post.url
            if collection.latest_post

@@ -261,7 +262,7 @@ def collection_update_member_order(request: AuthedHttpRequest, collection_uuid):
    ids = request.POST.get("member_ids", "").strip()
    if not ids:
        raise BadRequest(_("Invalid parameter"))
    ordered_member_ids = [int(i) for i in ids.split(",")]
    ordered_member_ids = [int_(i) for i in ids.split(",")]
    collection.update_member_order(ordered_member_ids)
    return collection_retrieve_items(request, collection_uuid, True)
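A recurring change in this commit: bare int(...) casts on request parameters are replaced with common.models int_. int("abc") raises ValueError, so a malformed query string could 500 a view; int_ falls back to a default instead. A sketch of the helper's presumable shape (the real implementation lives in common/models and may differ in detail):

def int_(x, default=0):
    """Parse x as int; return default on None or garbage instead of raising."""
    try:
        return int(x)
    except (TypeError, ValueError):
        return default

int_("42")       # 42
int_(None)       # 0
int_("abc", -1)  # -1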
@@ -192,6 +192,7 @@ def comment(request: AuthedHttpRequest, item_uuid):
        comment.sync_to_timeline(update_mode)
        if share_to_mastodon:
            comment.sync_to_social_accounts(update_mode)
        comment.update_index()
    return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
@@ -112,4 +112,5 @@ def note_edit(request: AuthedHttpRequest, item_uuid: str, note_uuid: str = ""):
        note.sync_to_timeline(update_mode)
        if form.cleaned_data["share_to_mastodon"]:
            note.sync_to_social_accounts(update_mode)
        note.update_index()
    return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
31 journal/views/search.py Normal file
@@ -0,0 +1,31 @@
from django.contrib.auth.decorators import login_required
from django.shortcuts import render

from common.models.misc import int_
from common.utils import PageLinksGenerator
from journal.models import JournalIndex, QueryParser


@login_required
def search(request):
    identity_id = request.user.identity.pk
    page = int_(request.GET.get("page"))
    q = QueryParser(request.GET.get("q", default=""))
    q.filter_by["owner_id"] = [identity_id]  # only search for current user
    q.filter_by["item_id"] = [">0"]  # only search for records with items
    index = JournalIndex.instance()
    r = index.search(
        q.q,
        filter_by=q.filter_by,
        query_by=q.query_by,
        sort_by="_text_match:desc",
        page=page,
    )
    return render(
        request,
        "search_journal.html",
        {
            "items": r.items,
            "pagination": PageLinksGenerator(r.page, r.pages, request.GET),
        },
    )
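The view above is thin: QueryParser splits the raw query into free text plus structured filters, and JournalIndex does the actual lookup. A hedged sketch of the assumed flow for a request like ?q=tag:scifi dune&page=2 (QueryParser's exact grammar is not shown in this commit):

# Assumed behavior, for illustration only.
identity_id = 42  # example pk of the requesting user's identity
q = QueryParser("tag:scifi dune")
# q.q         -> "dune"              (free-text part, assumed)
# q.filter_by -> {"tag": ["scifi"]}  (structured part, assumed)
q.filter_by["owner_id"] = [identity_id]  # never leak other users' journals
q.filter_by["item_id"] = [">0"]          # drop pieces with no catalog item
r = JournalIndex.instance().search(
    q.q,
    filter_by=q.filter_by,
    query_by=q.query_by,
    sort_by="_text_match:desc",  # best text match first
    page=2,
)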
@@ -18,6 +18,7 @@ from catalog.models import (
    PodcastEpisode,
    item_content_types,
)
from common.utils import int_
from journal.models import Comment, ShelfType
from journal.models.common import VisibilityType
from mastodon.models.bluesky import EmbedObj

@@ -116,7 +117,7 @@ class WrappedShareView(LoginRequiredMixin, TemplateView):
    def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> HttpResponse:
        img = base64.b64decode(request.POST.get("img", ""))
        comment = request.POST.get("comment", "")
        visibility = VisibilityType(int(request.POST.get("visibility", 0)))
        visibility = VisibilityType(int_(request.POST.get("visibility")))
        user: User = request.user  # type: ignore
        identity = user.identity
        media = Takahe.upload_image(
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -130,7 +130,7 @@ httpcore==1.0.7
    # via httpx
httpx==0.27.2
    # via atproto
identify==2.6.3
identify==2.6.4
    # via pre-commit
idna==3.10
    # via anyio

@@ -167,7 +167,7 @@ markupsafe==3.0.2
mergedeep==1.3.4
    # via mkdocs
    # via mkdocs-get-deps
mistune==3.0.2
mistune==3.1.0
mkdocs==1.6.1
    # via mkdocs-material
mkdocs-get-deps==0.2.0

@@ -281,7 +281,7 @@ tinycss2==1.1.1
    # via bleach
tqdm==4.67.1
    # via djlint
types-pyyaml==6.0.12.20241221
types-pyyaml==6.0.12.20241230
    # via django-stubs
typesense==0.21.0
typing-extensions==4.12.2
@@ -117,7 +117,7 @@ lxml==5.3.0
markdown==3.7
    # via django-markdownx
markdownify==0.14.1
mistune==3.0.2
mistune==3.1.0
multidict==6.1.0
    # via aiohttp
    # via yarl
@@ -31,7 +31,7 @@
      </small>
    </h5>
    <div class="feed">
      <div hx-get="{% url 'social:data' %}?typ={{ feed_type }}"
      <div hx-get="{% url 'social:data' %}?typ={{ feed_type }}&q={{ request.GET.q }}"
           hx-trigger="intersect once delay:0.1s"
           hx-swap="outerHTML">
        <i class="fa-solid fa-compact-disc fa-spin loading"></i>
@@ -125,19 +125,21 @@
      {% if forloop.last %}
        <div class="htmx-indicator"
             style="margin-left: 60px"
             hx-get="{% url 'social:data' %}?last={{ event.pk }}&typ={{ feed_type }}"
             {% if request.GET.q %} hx-get="{% url 'social:data' %}?q={{ request.GET.q }}&page={{ nextpage }}" {% else %} hx-get="{% url 'social:data' %}?last={{ event.pk }}&typ={{ feed_type }}" {% endif %}
             hx-trigger="revealed"
             hx-swap="outerHTML">
          <i class="fa-solid fa-compact-disc fa-spin loading"></i>
        </div>
      {% endif %}
    {% empty %}
      {% if request.GET.last %}
        <div class="empty">{% trans 'nothing more.' %}</div>
      {% else %}
        <div class="empty">
      <div class="empty">
        {% if request.GET.last or request.GET.nextpage %}
          {% trans 'nothing more.' %}
        {% elif request.GET.q %}
          {% trans 'no matching activities.' %}
        {% else %}
          {% url 'users:data' as import_url %}
          {% blocktrans %}Find and mark some books/movies/podcasts/games, <a href="{{ import_url }}">import your data</a> from Goodreads/Letterboxd/Douban, follow some fellow {{ site_name }} users on the fediverse, so their recent activities and yours will show up here.{% endblocktrans %}
        </div>
      {% endif %}
        {% endif %}
      </div>
    {% endfor %}
35 social/templates/search_feed.html Normal file
@@ -0,0 +1,35 @@
{% load static %}
{% load i18n %}
{% load l10n %}
{% load mastodon %}
{% load thumb %}
<!DOCTYPE html>
<html lang="zh" class="feed-page nav-page-feed">
  <head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>{{ site_name }} - {% trans 'Activities from those you follow' %}</title>
    {% include "common_libs.html" %}
    <script src="{{ cdn_url }}/npm/shikwasa@2.2.1/dist/shikwasa.min.js"></script>
    <link href="{{ cdn_url }}/npm/shikwasa@2.2.1/dist/style.min.css"
          rel="stylesheet"></link>
    <script src="{% static 'js/podcast.js' %}"></script>
  </head>
  <body>
    {% include "_header.html" %}
    <main>
      <div class="grid__main">
        {% include 'search_header.html' %}
        <div class="feed">
          <div hx-get="{% url 'social:data' %}?q={{ request.GET.q }}"
               hx-trigger="intersect once delay:0.1s"
               hx-swap="outerHTML">
            <i class="fa-solid fa-compact-disc fa-spin loading"></i>
          </div>
        </div>
      </div>
      {% include "_sidebar.html" with show_progress=1 identity=request.user.identity %}
    </main>
    {% include "_footer.html" %}
  </body>
</html>
163 social/views.py
@@ -3,22 +3,17 @@ from django.shortcuts import redirect, render
from django.urls import reverse
from django.views.decorators.http import require_http_methods

from catalog.models import *
from journal.models import *
from takahe.models import PostInteraction, TimelineEvent
from catalog.models import Edition, Item, ItemCategory, PodcastEpisode
from common.models.misc import int_
from journal.models import JournalIndex, Piece, QueryParser, ShelfType
from takahe.models import Post, PostInteraction, TimelineEvent
from takahe.utils import Takahe

from .models import *
from users.models import APIdentity

PAGE_SIZE = 10


@require_http_methods(["GET"])
@login_required
def feed(request, typ=0):
    if not request.user.registration_complete:
        return redirect(reverse("users:register"))
    user = request.user
def _sidebar_context(user):
    podcast_ids = [
        p.item_id
        for p in user.shelf_manager.get_latest_members(

@@ -44,59 +39,97 @@ def feed(request, typ=0):
            )[:10]
        ]
    )
    return render(
        request,
        "feed.html",
        {
            "feed_type": typ,
            "recent_podcast_episodes": recent_podcast_episodes,
            "books_in_progress": books_in_progress,
            "tvshows_in_progress": tvshows_in_progress,
        },
    )
    return {
        "recent_podcast_episodes": recent_podcast_episodes,
        "books_in_progress": books_in_progress,
        "tvshows_in_progress": tvshows_in_progress,
    }


@require_http_methods(["GET"])
@login_required
def feed(request, typ=0):
    if not request.user.registration_complete:
        return redirect(reverse("users:register"))
    user = request.user
    data = _sidebar_context(user)
    data["feed_type"] = typ
    return render(request, "feed.html", data)


def focus(request):
    return feed(request, typ=1)


@require_http_methods(["GET"])
@login_required
def search(request):
    if not request.user.registration_complete:
        return redirect(reverse("users:register"))
    user = request.user
    data = _sidebar_context(user)
    return render(request, "search_feed.html", data)


@login_required
@require_http_methods(["GET"])
def data(request):
    since_id = int(request.GET.get("last", 0))
    typ = int(request.GET.get("typ", 0))
    since_id = int_(request.GET.get("last", 0))
    typ = int_(request.GET.get("typ", 0))
    q = request.GET.get("q")
    identity_id = request.user.identity.pk
    events = TimelineEvent.objects.filter(
        identity_id=identity_id,
        type__in=[TimelineEvent.Types.post, TimelineEvent.Types.boost],
    )
    match typ:
        case 1:
            events = events.filter(
                subject_post__type_data__object__has_key="relatedWith"
    page = int_(request.GET.get("page", 1))
    if q:
        q = QueryParser(request.GET.get("q", default=""))
        index = JournalIndex.instance()
        q.filter_by["owner_id"] = [identity_id]
        q.filter_by["post_id"] = [">0"]
        r = index.search(
            q.q,
            filter_by=q.filter_by,
            query_by=q.query_by,
            sort_by="created:desc",
            page=page,
            page_size=PAGE_SIZE,
        )
        events = [
            SearchResultEvent(p)
            for p in r.posts.select_related("author")
            .prefetch_related("attachments")
            .order_by("-id")
        ]
    else:
        events = TimelineEvent.objects.filter(
            identity_id=identity_id,
            type__in=[TimelineEvent.Types.post, TimelineEvent.Types.boost],
        )
        match typ:
            case 1:
                events = events.filter(
                    subject_post__type_data__object__has_key="relatedWith"
                )
            case _:  # default: no replies
                events = events.filter(subject_post__in_reply_to__isnull=True)
        if since_id:
            events = events.filter(id__lt=since_id)
        events = list(
            events.select_related(
                "subject_post",
                "subject_post__author",
                # "subject_post__author__domain",
                "subject_identity",
                # "subject_identity__domain",
                "subject_post_interaction",
                "subject_post_interaction__identity",
                # "subject_post_interaction__identity__domain",
            )
        case _:  # default: no replies
            events = events.filter(subject_post__in_reply_to__isnull=True)
    if since_id:
        events = events.filter(id__lt=since_id)
    events = list(
        events.select_related(
            "subject_post",
            "subject_post__author",
            # "subject_post__author__domain",
            "subject_identity",
            # "subject_identity__domain",
            "subject_post_interaction",
            "subject_post_interaction__identity",
            # "subject_post_interaction__identity__domain",
            .prefetch_related(
                "subject_post__attachments",
                # "subject_post__mentions",
                # "subject_post__emojis",
            )
            .order_by("-id")[:PAGE_SIZE]
        )
        .prefetch_related(
            "subject_post__attachments",
            # "subject_post__mentions",
            # "subject_post__emojis",
        )
        .order_by("-id")[:PAGE_SIZE]
    )
    interactions = PostInteraction.objects.filter(
        identity_id=identity_id,
        post_id__in=[event.subject_post_id for event in events],

@@ -105,15 +138,19 @@ def data(request):
    ).values_list("post_id", "type")
    for event in events:
        if event.subject_post_id:
            event.subject_post.liked_by_current_user = (
            event.subject_post.liked_by_current_user = (  # type: ignore
                event.subject_post_id,
                "like",
            ) in interactions
            event.subject_post.boosted_by_current_user = (
                event.subject_post_id,
                "boost",
            ) in interactions
    return render(request, "feed_events.html", {"feed_type": typ, "events": events})
            event.subject_post.boosted_by_current_user = (  # type: ignore
                event.subject_post_id,
                "boost",
            ) in interactions
    return render(
        request,
        "feed_events.html",
        {"feed_type": typ, "events": events, "nextpage": page + 1},
    )

@@ -182,6 +219,16 @@ class NotificationEvent:
        self.template += "_" + cls


class SearchResultEvent:
    def __init__(self, post: Post):
        self.type = "post"
        self.subject_post = post
        self.subject_post_id = post.id
        self.created = post.created
        self.published = post.published
        self.identity = post.author


@login_required
@require_http_methods(["GET"])
def events(request):
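SearchResultEvent, defined near the bottom of this file, is a small adapter: it gives a Post returned by the index the same attribute shape (type, subject_post, subject_post_id, created, ...) that feed_events.html expects from a TimelineEvent row, so one template renders both the timeline and search results. A sketch of its use inside the q branch of data() above:

# Posts from the search index (`r` is the index result from the view),
# wrapped so the feed template can render them with the accessors it
# already uses for TimelineEvent.
events = [SearchResultEvent(p) for p in r.posts]
events[0].subject_post     # the Post itself
events[0].subject_post_id  # feeds the like/boost interaction lookup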
152 takahe/utils.py
@@ -68,57 +68,57 @@ class Takahe:
        if not u.username:
            logger.warning(f"User {u} has no username")
            return None
        user = User.objects.filter(pk=u.pk).first()
        handler = "@" + u.username
        if not user:
            logger.info(f"Creating takahe user {u}")
            user = User.objects.create(pk=u.pk, email=handler, password=u.password)
        else:
            if user.email != handler:
                logger.warning(f"Updating takahe user {u} email to {handler}")
                user.email = handler
                user.save()
        domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"])
        # TODO add transaction protection here
        identity = Identity.objects.filter(username=u.username, local=True).first()
        if not identity:
            logger.info(f"Creating takahe identity {u}@{domain}")
            identity = Identity.objects.create(
                actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/",
                profile_uri=u.absolute_url,
                username=u.username,
                domain=domain,
                name=u.username,
                local=True,
                discoverable=True,
            )
        if not identity.private_key and not identity.public_key:
            identity.generate_keypair()
            identity.ensure_uris()
        if not user.identities.filter(pk=identity.pk).exists():
            user.identities.add(identity)
        apidentity = APIdentity.objects.filter(pk=identity.pk).first()
        if not apidentity:
            logger.info(f"Creating APIdentity for {identity}")
            apidentity = APIdentity.objects.create(
                user=u,
                id=identity.pk,
                local=True,
                username=u.username,
                domain_name=domain.domain,
                deleted=identity.deleted,
            )
        elif apidentity.username != identity.username:
            logger.warning(
                f"Updating APIdentity {apidentity} username to {identity.username}"
            )
            apidentity.username = identity.username
            apidentity.save()
        if u.identity != apidentity:
            logger.warning(f"Linking user {u} identity to {apidentity}")
            u.identity = apidentity
            u.save(update_fields=["identity"])
        return apidentity
        with transaction.atomic(using="takahe"):
            user = User.objects.filter(pk=u.pk).first()
            handler = "@" + u.username
            if not user:
                logger.info(f"Creating takahe user {u}")
                user = User.objects.create(pk=u.pk, email=handler, password=u.password)
            else:
                if user.email != handler:
                    logger.warning(f"Updating takahe user {u} email to {handler}")
                    user.email = handler
                    user.save()
            domain = Domain.objects.get(domain=settings.SITE_INFO["site_domain"])
            identity = Identity.objects.filter(username=u.username, local=True).first()
            if not identity:
                logger.info(f"Creating takahe identity {u}@{domain}")
                identity = Identity.objects.create(
                    actor_uri=f"https://{domain.uri_domain}/@{u.username}@{domain.domain}/",
                    profile_uri=u.absolute_url,
                    username=u.username,
                    domain=domain,
                    name=u.username,
                    local=True,
                    discoverable=True,
                )
            if not identity.private_key and not identity.public_key:
                identity.generate_keypair()
                identity.ensure_uris()
            if not user.identities.filter(pk=identity.pk).exists():
                user.identities.add(identity)
            apidentity = APIdentity.objects.filter(pk=identity.pk).first()
            if not apidentity:
                logger.info(f"Creating APIdentity for {identity}")
                apidentity = APIdentity.objects.create(
                    user=u,
                    id=identity.pk,
                    local=True,
                    username=u.username,
                    domain_name=domain.domain,
                    deleted=identity.deleted,
                )
            elif apidentity.username != identity.username:
                logger.warning(
                    f"Updating APIdentity {apidentity} username to {identity.username}"
                )
                apidentity.username = identity.username
                apidentity.save()
            if u.identity != apidentity:
                logger.warning(f"Linking user {u} identity to {apidentity}")
                u.identity = apidentity
                u.save(update_fields=["identity"])
            return apidentity

    @staticmethod
    def get_identity_by_handler(username: str, domain: str) -> Identity | None:
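The rewrite wraps the whole get-or-create sequence in transaction.atomic(using="takahe"), resolving the old "# TODO add transaction protection here". Takahe's tables live on a separate database alias, so the using= argument routes the transaction to that connection. The general Django pattern, reduced to a sketch with placeholder names:

from django.db import transaction

# All writes inside the block commit together on the "takahe" connection,
# or roll back together if any statement raises.
with transaction.atomic(using="takahe"):
    user = User.objects.filter(pk=pk).first()  # placeholder lookup
    if not user:
        user = User.objects.create(pk=pk, email=email, password=pw)
    # ...dependent writes on the same alias go here...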
@@ -541,56 +541,6 @@ class Takahe:
            case _:
                return 0

    @staticmethod
    def post_collection(collection: "Collection"):
        existing_post = collection.latest_post
        owner: APIdentity = collection.owner
        user = owner.user
        if not user:
            raise ValueError(f"Cannot find user for collection {collection}")
        visibility = Takahe.visibility_n2t(
            collection.visibility, user.preference.post_public_mode
        )
        if existing_post and visibility != existing_post.visibility:
            Takahe.delete_posts([existing_post.pk])
            existing_post = None
        data = {
            "object": {
                # "tag": [item.ap_object_ref for item in collection.items],
                "relatedWith": [collection.ap_object],
            }
        }
        if existing_post and existing_post.type_data == data:
            return existing_post
        action = _("created collection")
        item_link = collection.absolute_url
        prepend_content = f'{action} <a href="{item_link}">{collection.title}</a><br>'
        content = collection.plain_content
        if len(content) > 360:
            content = content[:357] + "..."
        data = {
            "object": {
                # "tag": [item.ap_object_ref for item in collection.items],
                "relatedWith": [collection.ap_object],
            }
        }
        post = Takahe.post(
            collection.owner.pk,
            content,
            visibility,
            prepend_content,
            "",
            None,
            False,
            data,
            existing_post.pk if existing_post else None,
            collection.created_time,
        )
        if not post:
            return
        collection.link_post_id(post.pk)
        return post

    @staticmethod
    def interact_post(post_pk: int, identity_pk: int, type: str, flip=False):
        post = Post.objects.filter(pk=post_pk).first()
@@ -47,7 +47,7 @@ class Command(BaseCommand):
            self.stdout.write(
                user.username.ljust(20)
                + str(user.date_joined.date()).ljust(12)
                + str(user.last_login.date()).ljust(12)
                + str(user.last_login.date() if user.last_login else "").ljust(12)
                + str(list(user.social_accounts.all())),
            )
@@ -15,6 +15,7 @@ from loguru import logger

from common.utils import AuthedHttpRequest
from journal.models import remove_data_by_user
from journal.models.index import JournalIndex
from mastodon.models import Email, Mastodon
from mastodon.models.common import Platform, SocialAccount
from mastodon.models.email import EmailAccount

@@ -228,6 +229,8 @@ def clear_data_task(user_id):
    remove_data_by_user(user.identity)
    Takahe.delete_identity(user.identity.pk)
    user.clear()
    index = JournalIndex(user)
    index.delete_by_owner(user.identity.pk)
    logger.warning(f"User {user_str} data cleared.")
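Purging the departed user's documents from the journal index is the step that keeps search from returning hits for rows that no longer exist. The index backend here is Typesense (see the lock file above), where a delete-by-filter looks roughly like this; the collection name and connection details are assumptions, not taken from this commit:

import typesense

client = typesense.Client(
    {
        "nodes": [{"host": "localhost", "port": "8108", "protocol": "http"}],
        "api_key": "xyz",  # placeholder credentials
        "connection_timeout_seconds": 2,
    }
)
identity_pk = 42  # example owner id
# Hypothetical collection name "journal"; NeoDB's real index name isn't shown here.
client.collections["journal"].documents.delete(
    {"filter_by": f"owner_id:={identity_pk}"}
)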
@@ -8,6 +8,7 @@ from django.urls import reverse
from django.utils.translation import gettext as _

from common.config import *
from common.models import int_
from common.utils import (
    AuthedHttpRequest,
    HTTPResponseHXRedirect,

@@ -40,7 +41,7 @@ def fetch_refresh(request):
        i = APIdentity.get_by_handle(handle)
        return HTTPResponseHXRedirect(i.url)
    except Exception:
        retry = int(request.GET.get("retry", 0)) + 1
        retry = int_(request.GET.get("retry")) + 1
        if retry > 10:
            return render(request, "users/fetch_identity_failed.html")
        else: