add language to federated Note

parent c5df89af11
commit 550956a463

25 changed files with 119 additions and 87 deletions
@@ -25,7 +25,7 @@ repos:
       - id: mixed-line-ending
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.2
+    rev: v0.9.4
     hooks:
       - id: ruff
         args: [ "--fix" ]
@@ -1,6 +1,7 @@
 import logging
 import os
 import sys
+from urllib import parse

 import environ
 from django.utils.translation import gettext_lazy as _
@@ -126,7 +127,7 @@ env = environ.FileAwareEnv(
 # ====== End of user configuration variables ======

 SECRET_KEY = env("NEODB_SECRET_KEY")
-DEBUG = env("NEODB_DEBUG")
+DEBUG: bool = env("NEODB_DEBUG")  # type:ignore
 DATABASES = {
     "takahe": env.db_url("TAKAHE_DB_URL"),
     "default": env.db_url("NEODB_DB_URL"),
@@ -137,7 +138,7 @@ DATABASES["takahe"]["OPTIONS"] = {"client_encoding": "UTF8"}
 DATABASES["takahe"]["TEST"] = {"DEPENDENCIES": []}
 REDIS_URL = env("NEODB_REDIS_URL")
 CACHES = {"default": env.cache_url("NEODB_REDIS_URL")}
-_parsed_redis_url = env.url("NEODB_REDIS_URL")
+_parsed_redis_url: parse.ParseResult = env.url("NEODB_REDIS_URL")  # type:ignore
 RQ_QUEUES = {
     q: {
         "HOST": _parsed_redis_url.hostname,
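Note: django-environ's env.url() parses the variable with urllib.parse and returns a ParseResult, which is what the new parse.ParseResult annotations (and the existing .hostname accesses) rely on. A quick illustration of the shape being relied on:

    from urllib import parse

    u = parse.urlparse("redis://localhost:6379/0")
    assert u.scheme == "redis" and u.hostname == "localhost" and u.port == 6379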
@@ -148,7 +149,7 @@ RQ_QUEUES = {
     for q in ["mastodon", "export", "import", "fetch", "crawl", "ap", "cron"]
 }

-_parsed_search_url = env.url("NEODB_SEARCH_URL")
+_parsed_search_url: parse.ParseResult = env.url("NEODB_SEARCH_URL")  # type:ignore
 SEARCH_BACKEND = None
 TYPESENSE_CONNECTION = {}
 if _parsed_search_url.scheme == "typesense":
@@ -171,7 +172,7 @@ if _parsed_search_url.scheme == "typesense":
 # MEILISEARCH_KEY = _parsed_search_url.password

 DEFAULT_FROM_EMAIL = env("NEODB_EMAIL_FROM")
-_parsed_email_url = env.url("NEODB_EMAIL_URL")
+_parsed_email_url: parse.ParseResult = env.url("NEODB_EMAIL_URL")  # type:ignore
 if _parsed_email_url.scheme == "anymail":
     # "anymail://<anymail_backend_name>?<anymail_args>"
     # see https://anymail.dev/
@@ -198,12 +199,12 @@ THREADS_APP_SECRET = env("THREADS_APP_SECRET")
 ENABLE_LOGIN_BLUESKY = env("NEODB_ENABLE_LOGIN_BLUESKY")
 ENABLE_LOGIN_THREADS = env("NEODB_ENABLE_LOGIN_THREADS")

-SITE_DOMAIN = env("NEODB_SITE_DOMAIN").lower()
+SITE_DOMAIN: str = env("NEODB_SITE_DOMAIN").lower()  # type:ignore
 SITE_INFO = {
     "neodb_version": NEODB_VERSION,
     "site_name": env("NEODB_SITE_NAME"),
     "site_domain": SITE_DOMAIN,
-    "site_url": env("NEODB_SITE_URL", default="https://" + SITE_DOMAIN),
+    "site_url": env("NEODB_SITE_URL", default="https://" + SITE_DOMAIN),  # type:ignore
     "site_logo": env("NEODB_SITE_LOGO"),
     "site_icon": env("NEODB_SITE_ICON"),
     "user_icon": env("NEODB_USER_ICON"),
@@ -211,7 +212,7 @@ SITE_INFO = {
     "site_intro": env("NEODB_SITE_INTRO"),
     "site_description": env("NEODB_SITE_DESCRIPTION"),
     "site_head": env("NEODB_SITE_HEAD"),
-    "site_links": [{"title": k, "url": v} for k, v in env("NEODB_SITE_LINKS").items()],
+    "site_links": [{"title": k, "url": v} for k, v in env("NEODB_SITE_LINKS").items()],  # type:ignore
     "cdn_url": "https://cdn.jsdelivr.net" if DEBUG else "/jsdelivr",
     # "cdn_url": "https://cdn.jsdelivr.net",
     # "cdn_url": "https://fastly.jsdelivr.net",
@@ -221,7 +222,7 @@ INVITE_ONLY = env("NEODB_INVITE_ONLY")

 # By default, NeoDB will relay with relay.neodb.net so that public user ratings/etc can be shared across instances
 # If you are running a development server, set this to True to disable this behavior
-DISABLE_DEFAULT_RELAY = env("NEODB_DISABLE_DEFAULT_RELAY", default=DEBUG)
+DISABLE_DEFAULT_RELAY = env("NEODB_DISABLE_DEFAULT_RELAY", default=DEBUG)  # type:ignore

 MIN_MARKS_FOR_DISCOVER = env("NEODB_MIN_MARKS_FOR_DISCOVER")

@@ -230,7 +231,7 @@ DISCOVER_FILTER_LANGUAGE = env("NEODB_DISCOVER_FILTER_LANGUAGE")
 DISCOVER_SHOW_LOCAL_ONLY = env("NEODB_DISCOVER_SHOW_LOCAL_ONLY")
 DISCOVER_SHOW_POPULAR_POSTS = env("NEODB_DISCOVER_SHOW_POPULAR_POSTS")

-MASTODON_ALLOWED_SITES = env("NEODB_LOGIN_MASTODON_WHITELIST")
+MASTODON_ALLOWED_SITES: str = env("NEODB_LOGIN_MASTODON_WHITELIST")  # type:ignore

 # Allow user to login via any Mastodon/Pleroma sites
 MASTODON_ALLOW_ANY_SITE = len(MASTODON_ALLOWED_SITES) == 0
@@ -282,7 +283,7 @@ DOWNLOADER_REQUEST_TIMEOUT = env("NEODB_DOWNLOADER_REQUEST_TIMEOUT")
 DOWNLOADER_CACHE_TIMEOUT = env("NEODB_DOWNLOADER_CACHE_TIMEOUT")
 DOWNLOADER_RETRIES = env("NEODB_DOWNLOADER_RETRIES")

-DISABLE_CRON_JOBS = env("NEODB_DISABLE_CRON_JOBS")
+DISABLE_CRON_JOBS: list[str] = env("NEODB_DISABLE_CRON_JOBS")  # type: ignore
 SEARCH_PEERS = env("NEODB_SEARCH_PEERS")
 SEARCH_SITES = env("NEODB_SEARCH_SITES")

@@ -339,7 +340,7 @@ INSTALLED_APPS += [
     "legacy.apps.LegacyConfig",
 ]

-for app in env("NEODB_EXTRA_APPS"):
+for app in env("NEODB_EXTRA_APPS"):  # type:ignore
     INSTALLED_APPS.append(app)

 MIDDLEWARE = [
@@ -606,7 +607,7 @@ DEACTIVATE_AFTER_UNREACHABLE_DAYS = 365

 DEFAULT_RELAY_SERVER = "https://relay.neodb.net/inbox"

-SENTRY_DSN = env("NEODB_SENTRY_DSN")
+SENTRY_DSN: str = env("NEODB_SENTRY_DSN")  # type:ignore
 if SENTRY_DSN:
     import sentry_sdk
     from sentry_sdk.integrations.django import DjangoIntegration
@@ -627,5 +628,5 @@ if SENTRY_DSN:
         ],
         release=NEODB_VERSION,
         send_default_pii=True,
-        traces_sample_rate=env("NEODB_SENTRY_SAMPLE_RATE"),
+        traces_sample_rate=env("NEODB_SENTRY_SAMPLE_RATE"),  # type:ignore
     )
@@ -39,7 +39,6 @@ from catalog.common import (
 from catalog.common.models import (
     LIST_OF_ONE_PLUS_STR_SCHEMA,
     LOCALE_CHOICES_JSONFORM,
-    ItemType,
     LanguageListField,
 )
 from common.models import uniq
@@ -121,7 +120,6 @@ class Edition(Item):
         OTHER = "other", _("Other")

     schema = EditionSchema
-    type = ItemType.Edition
     category = ItemCategory.Book
     url_path = "book"

@@ -7,7 +7,6 @@ from .sites import *
 __all__ = ( # noqa
     "IdType",
     "SiteName",
-    "ItemType",
     "ItemCategory",
     "AvailableItemCategory",
     "Item",
@@ -156,15 +156,16 @@ class BasicDownloader:
         "Cache-Control": "no-cache",
     }

-    def __init__(self, url, headers=None):
+    timeout = settings.DOWNLOADER_REQUEST_TIMEOUT
+
+    def __init__(self, url, headers: dict | None = None, timeout: float | None = None):
         self.url = url
         self.response_type = RESPONSE_OK
         self.logs = []
         if headers:
             self.headers = headers
-
-    def get_timeout(self):
-        return settings.DOWNLOADER_REQUEST_TIMEOUT
+        if timeout:
+            self.timeout = timeout

     def validate_response(self, response) -> int:
         if response is None:
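Note: the removed get_timeout() method becomes a class attribute that instances may shadow, so a per-request timeout can be passed without an extra method call. A minimal sketch of the pattern (settings value stubbed as a constant):

    class Downloader:
        timeout = 10.0  # class-level default, stands in for settings.DOWNLOADER_REQUEST_TIMEOUT

        def __init__(self, timeout: float | None = None):
            if timeout:
                self.timeout = timeout  # instance attribute shadows the class default

    assert Downloader().timeout == 10.0
    assert Downloader(timeout=2).timeout == 2

Per the `if timeout:` guard, a falsy timeout (0 or None) keeps the class default.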
@@ -183,7 +184,7 @@ class BasicDownloader:
         if not _mock_mode:
             resp = cast(
                 DownloaderResponse,
-                requests.get(url, headers=self.headers, timeout=self.get_timeout()),
+                requests.get(url, headers=self.headers, timeout=self.timeout),
             )
             resp.__class__ = DownloaderResponse
             if settings.DOWNLOADER_SAVEDIR:
@@ -223,7 +224,7 @@ class BasicDownloader2(BasicDownloader):
         if not _mock_mode:
             resp = cast(
                 DownloaderResponse2,
-                httpx.get(url, headers=self.headers, timeout=self.get_timeout()),
+                httpx.get(url, headers=self.headers, timeout=self.timeout),
             )
             resp.__class__ = DownloaderResponse2
             if settings.DOWNLOADER_SAVEDIR:
@@ -255,7 +255,7 @@ class LocalizedTitleSchema(Schema):


 class ItemInSchema(Schema):
-    type: str
+    type: str = Field(alias="get_type")
     title: str = Field(alias="display_title")
     description: str = Field(default="", alias="display_description")
     localized_title: list[LocalizedTitleSchema] = []
@@ -346,7 +346,6 @@ class Item(PolymorphicModel):
         collections: QuerySet["Collection"]
         merged_from_items: QuerySet["Item"]
         merged_to_item_id: int
-    type: ItemType  # subclass must specify this
     schema = ItemSchema
     category: ItemCategory  # subclass must specify this
     url_path = "item"  # subclass must specify this
@@ -599,6 +598,9 @@ class Item(PolymorphicModel):
     def api_url(self):
         return f"/api{self.url}"

+    def get_type(self) -> str:
+        return self.__class__.__name__
+
     @property
     def class_name(self) -> str:
         return self.__class__.__name__.lower()
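Note: with the per-subclass type attributes removed, the API field is now derived from the class name: ItemInSchema aliases type to the new Item.get_type() method, and the schema getter resolves the alias against the model and calls it when it is callable. A minimal sketch of that kind of alias resolution (the helper below is illustrative, not django-ninja's actual internals):

    def resolve_aliased_field(obj, alias: str):
        value = getattr(obj, alias)
        return value() if callable(value) else value  # methods are invoked, plain attrs returned

    class Edition:
        def get_type(self) -> str:
            return self.__class__.__name__

    assert resolve_aliased_field(Edition(), "get_type") == "Edition"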
@@ -9,7 +9,6 @@ from catalog.common import (
     Item,
     ItemCategory,
     ItemInSchema,
-    ItemType,
     PrimaryLookupIdDescriptor,
     jsondata,
 )
@@ -43,7 +42,6 @@ class GameSchema(GameInSchema, BaseSchema):


 class Game(Item):
-    type = ItemType.Game
     schema = GameSchema
     category = ItemCategory.Game
     url_path = "game"
@@ -8,8 +8,8 @@ from tqdm import tqdm
 from catalog.common.sites import SiteManager
 from catalog.models import Edition, Item, Podcast, TVSeason, TVShow
 from catalog.search.external import ExternalSources
+from catalog.sites.fedi import FediverseInstance
 from common.models import detect_language, uniq
-from takahe.utils import Takahe


 class Command(BaseCommand):
@@ -62,7 +62,7 @@ class Command(BaseCommand):

     def external_search(self, q, cat):
         sites = SiteManager.get_sites_for_search()
-        peers = Takahe.get_neodb_peers()
+        peers = FediverseInstance.get_peers_for_search()
         self.stdout.write(f"Searching {cat} '{q}' ...")
         self.stdout.write(f"Peers: {peers}")
         self.stdout.write(f"Sites: {sites}")
@@ -12,7 +12,6 @@ from .common.models import (
     ItemCategory,
     ItemInSchema,
     ItemSchema,
-    ItemType,
     SiteName,
     item_categories,
     item_content_types,
@@ -115,7 +114,6 @@ __all__ = [
     "ItemCategory",
     "ItemInSchema",
     "ItemSchema",
-    "ItemType",
     "SiteName",
     "item_categories",
     "item_content_types",
@@ -7,7 +7,6 @@ from catalog.common import (
     Item,
     ItemCategory,
     ItemInSchema,
-    ItemType,
     PrimaryLookupIdDescriptor,
     jsondata,
 )
@@ -34,7 +33,6 @@ class MovieSchema(MovieInSchema, BaseSchema):


 class Movie(Item):
-    type = ItemType.Movie
     schema = MovieSchema
     category = ItemCategory.Movie
     url_path = "movie"
@@ -10,7 +10,6 @@ from catalog.common import (
     Item,
     ItemCategory,
     ItemInSchema,
-    ItemType,
     PrimaryLookupIdDescriptor,
     jsondata,
 )
@@ -34,7 +33,6 @@ class AlbumSchema(AlbumInSchema, BaseSchema):

 class Album(Item):
     schema = AlbumSchema
-    type = ItemType.Album
     url_path = "album"
     category = ItemCategory.Music
     barcode = PrimaryLookupIdDescriptor(IdType.GTIN)
@@ -12,7 +12,6 @@ from catalog.common import (
     Item,
     ItemCategory,
     ItemSchema,
-    ItemType,
     jsondata,
 )
 from catalog.common.models import LanguageListField
@@ -105,7 +104,6 @@ class Performance(Item):
     if TYPE_CHECKING:
         productions: models.QuerySet["PerformanceProduction"]
     schema = PerformanceSchema
-    type = ItemType.Performance
     child_class = "PerformanceProduction"
     category = ItemCategory.Performance
     url_path = "performance"
@@ -249,7 +247,6 @@ class Performance(Item):

 class PerformanceProduction(Item):
     schema = PerformanceProductionSchema
-    type = ItemType.PerformanceProduction
     category = ItemCategory.Performance
     url_path = "performance/production"
     show = models.ForeignKey(
@@ -15,7 +15,6 @@ from catalog.common import (
 )
 from catalog.common.models import (
     LIST_OF_ONE_PLUS_STR_SCHEMA,
-    ItemType,
     LanguageListField,
 )

@@ -48,7 +47,6 @@ class PodcastEpisodeSchema(PodcastEpisodeInSchema, BaseSchema):
 class Podcast(Item):
     if TYPE_CHECKING:
         episodes: models.QuerySet["PodcastEpisode"]
-    type = ItemType.Podcast
     schema = PodcastSchema
     category = ItemCategory.Podcast
     child_class = "PodcastEpisode"
@@ -125,7 +123,6 @@ class Podcast(Item):

 class PodcastEpisode(Item):
     schema = PodcastEpisodeSchema
-    type = ItemType.PodcastEpisode
     category = ItemCategory.Podcast
     url_path = "podcast/episode"
     # uid = models.UUIDField(default=uuid.uuid4, editable=False, db_index=True)
@@ -66,28 +66,39 @@ class FediverseInstance(AbstractSite):

     @classmethod
     def url_to_id(cls, url: str):
-        u = url.split("://", 1)[1].split("/", 1)
+        u = url.split("://", 1)[1].split("?", 1)[0].split("/", 1)
         return "https://" + u[0].lower() + "/" + u[1]

     @classmethod
     def validate_url_fallback(cls, url: str):
+        from takahe.utils import Takahe
+
         val = URLValidator()
         try:
             val(url)
-            if url.split("://", 1)[1].split("/", 1)[0].lower() in settings.SITE_DOMAINS:
+            u = cls.url_to_id(url)
+            host = u.split("://", 1)[1].split("/", 1)[0].lower()
+            if host in settings.SITE_DOMAINS:
                 # disallow local instance URLs
                 return False
-            return cls.get_json_from_url(url) is not None
-        except Exception:
+            if host in Takahe.get_blocked_peers():
+                return False
+            return cls.get_json_from_url(u) is not None
+        except Exception as e:
+            logger.error(f"Fedi item url validation error: {url} {e}")
             return False

     @classmethod
     def get_json_from_url(cls, url):
-        j = CachedDownloader(url, headers=cls.request_header).download().json()
-        if j.get("type") not in cls.supported_types.keys():
+        j = (
+            CachedDownloader(url, headers=cls.request_header, timeout=2)
+            .download()
+            .json()
+        )
+        if not isinstance(j, dict) or j.get("type") not in cls.supported_types.keys():
             raise ValueError("Not a supported format or type")
         if j.get("id") != url:
-            logger.warning(f"ID mismatch: {j.get('id')} != {url}")
+            raise ValueError(f"ID mismatch: {j.get('id')} != {url}")
         return j

     def scrape(self):
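Note: url_to_id now also drops any query string before normalizing, so URLs that differ only in query parameters map to the same ActivityPub id; the host is lowercased as before. A standalone sketch of the normalization:

    def url_to_id(url: str) -> str:
        u = url.split("://", 1)[1].split("?", 1)[0].split("/", 1)
        return "https://" + u[0].lower() + "/" + u[1]

    assert url_to_id("HTTPS://NeoDB.Social/book/abc?utm_source=x") == "https://neodb.social/book/abc"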
@@ -168,12 +179,18 @@ class FediverseInstance(AbstractSite):
         )
         return results[offset : offset + page_size]

+    @classmethod
+    def get_peers_for_search(cls) -> list[str]:
+        from takahe.utils import Takahe
+
+        if settings.SEARCH_PEERS:  # '-' = disable federated search
+            return [] if settings.SEARCH_PEERS == ["-"] else settings.SEARCH_PEERS
+        return Takahe.get_neodb_peers()
+
     @classmethod
     def search_tasks(
         cls, q: str, page: int = 1, category: str | None = None, page_size=5
     ):
-        from takahe.utils import Takahe
-
-        peers = Takahe.get_neodb_peers()
+        peers = cls.get_peers_for_search()
         c = category if category != "movietv" else "movie,tv"
         return [cls.peer_search_task(host, q, page, c, page_size) for host in peers]
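Note: the NEODB_SEARCH_PEERS short-circuit moves here from Takahe.get_neodb_peers (removed in the takahe/utils.py hunk further down), so every caller gets the same precedence rule. Condensed as a pure function:

    def peers_for_search(search_peers: list[str], discovered: list[str]) -> list[str]:
        if search_peers:  # explicit NEODB_SEARCH_PEERS wins; ["-"] disables federated search
            return [] if search_peers == ["-"] else search_peers
        return discovered  # otherwise fall back to discovered NeoDB peers

    assert peers_for_search(["-"], ["peer.example"]) == []
    assert peers_for_search([], ["peer.example"]) == ["peer.example"]
    assert peers_for_search(["a.example"], ["peer.example"]) == ["a.example"]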
@@ -40,7 +40,6 @@ from catalog.common import (
     ItemCategory,
     ItemInSchema,
     ItemSchema,
-    ItemType,
     PrimaryLookupIdDescriptor,
     jsondata,
 )
@@ -98,7 +97,6 @@ class TVShow(Item):
     if TYPE_CHECKING:
         seasons: QuerySet["TVSeason"]
     schema = TVShowSchema
-    type = ItemType.TVShow
     child_class = "TVSeason"
     category = ItemCategory.TV
     url_path = "tv"
@@ -263,7 +261,6 @@ class TVSeason(Item):
     if TYPE_CHECKING:
         episodes: models.QuerySet["TVEpisode"]
     schema = TVSeasonSchema
-    type = ItemType.TVSeason
     category = ItemCategory.TV
     url_path = "tv/season"
     child_class = "TVEpisode"
@@ -483,7 +480,6 @@ class TVSeason(Item):

 class TVEpisode(Item):
     schema = TVEpisodeSchema
-    type = ItemType.TVEpisode
     category = ItemCategory.TV
     url_path = "tv/episode"
     season = models.ForeignKey(
@@ -7,7 +7,7 @@ import tempfile
 from django.conf import settings
 from django.utils import timezone

-from catalog.common.downloaders import ProxiedImageDownloader
+from catalog.common import ProxiedImageDownloader
 from common.utils import GenerateDateUUIDMediaFilePath
 from journal.models import ShelfMember
 from journal.models.collection import Collection
@@ -155,6 +155,7 @@ class Collection(List):
             data,
             existing_post.pk if existing_post else None,
             self.created_time,
+            language=owner.user.macrolanguage,
         )
         if post and post != existing_post:
             self.link_post_id(post.pk)
@@ -497,6 +497,7 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
             "post_time": self.created_time,  # type:ignore subclass must have this
             "edit_time": self.edited_time,  # type:ignore subclass must have this
             "data": self.get_ap_data(),
+            "language": user.macrolanguage,
         }
         params.update(self.to_post_params())
         post = Takahe.post(**params)
@@ -65,7 +65,7 @@ dev-dependencies = [
     "djlint>=1.36.4",
     # "isort~=5.13.2",
     "lxml-stubs>=0.5.1",
-    "pyright>=1.1.389",
+    "pyright>=1.1.393",
     "ruff>=0.9.1",
     "mkdocs-material>=9.5.42",
 ]
@@ -23,18 +23,18 @@ asgiref==3.8.1
     # via django
     # via django-cors-headers
     # via django-stubs
-atproto==0.0.56
-attrs==24.3.0
+atproto==0.0.58
+attrs==25.1.0
     # via aiohttp
-babel==2.16.0
+babel==2.17.0
     # via mkdocs-material
-beautifulsoup4==4.12.3
+beautifulsoup4==4.13.1
     # via markdownify
 bleach==5.0.1
     # via django-bleach
 blurhash-python==1.2.2
-cachetools==5.5.0
-certifi==2024.12.14
+cachetools==5.5.1
+certifi==2025.1.31
     # via httpcore
     # via httpx
     # via requests
@@ -90,7 +90,7 @@ django-compressor==4.5.1
 django-cors-headers==4.6.0
 django-environ==0.12.0
 django-hijack==3.7.1
-django-jsonform==2.23.1
+django-jsonform==2.23.2
 django-maintenance-mode==0.21.1
 django-markdownx==4.0.7
 django-ninja==1.3.0
@@ -113,7 +113,7 @@ editorconfig==0.17.0
     # via jsbeautifier
 et-xmlfile==2.0.0
     # via openpyxl
-filelock==3.16.1
+filelock==3.17.0
     # via virtualenv
 filetype==1.2.0
 frozenlist==1.5.0
@@ -128,7 +128,7 @@ httpcore==1.0.7
     # via httpx
 httpx==0.27.2
     # via atproto
-identify==2.6.5
+identify==2.6.6
     # via pre-commit
 idna==3.10
     # via anyio
@@ -164,12 +164,12 @@ markupsafe==3.0.2
 mergedeep==1.3.4
     # via mkdocs
     # via mkdocs-get-deps
-mistune==3.1.0
+mistune==3.1.1
 mkdocs==1.6.1
     # via mkdocs-material
 mkdocs-get-deps==0.2.0
     # via mkdocs
-mkdocs-material==9.5.50
+mkdocs-material==9.6.1
 mkdocs-material-extensions==1.3.1
     # via mkdocs-material
 multidict==6.1.0
@@ -195,7 +195,7 @@ platformdirs==4.3.6
     # via mkdocs-get-deps
     # via virtualenv
 podcastparser==0.6.10
-pre-commit==4.0.1
+pre-commit==4.1.0
 propcache==0.2.1
     # via aiohttp
     # via yarl
@@ -204,23 +204,23 @@ protobuf==5.29.3
 psycopg2-binary==2.9.10
 pycparser==2.22
     # via cffi
-pydantic==2.10.5
+pydantic==2.10.6
     # via atproto
     # via django-ninja
 pydantic-core==2.27.2
     # via pydantic
 pygments==2.19.1
     # via mkdocs-material
-pymdown-extensions==10.14
+pymdown-extensions==10.14.3
     # via mkdocs-material
-pyright==1.1.392.post0
+pyright==1.1.393
 python-dateutil==2.9.0.post0
     # via dateparser
     # via django-auditlog
     # via ghp-import
 python-fsutil==0.14.1
     # via django-maintenance-mode
-pytz==2024.2
+pytz==2025.1
     # via dateparser
     # via django-tz-detect
 pyyaml==6.0.2
@@ -251,7 +251,7 @@ rjsmin==1.2.2
     # via django-compressor
 rq==2.1.0
     # via django-rq
-ruff==0.9.2
+ruff==0.9.4
 sentry-sdk==2.20.0
 setproctitle==1.3.4
 six==1.17.0
@@ -279,6 +279,7 @@ typesense==0.21.0
 typing-extensions==4.12.2
     # via anyio
     # via atproto
+    # via beautifulsoup4
     # via django-stubs
     # via django-stubs-ext
     # via pydantic
@@ -22,16 +22,16 @@ anyio==4.8.0
 asgiref==3.8.1
     # via django
     # via django-cors-headers
-atproto==0.0.56
-attrs==24.3.0
+atproto==0.0.58
+attrs==25.1.0
     # via aiohttp
-beautifulsoup4==4.12.3
+beautifulsoup4==4.13.1
     # via markdownify
 bleach==5.0.1
     # via django-bleach
 blurhash-python==1.2.2
-cachetools==5.5.0
-certifi==2024.12.14
+cachetools==5.5.1
+certifi==2025.1.31
     # via httpcore
     # via httpx
     # via requests
@@ -74,7 +74,7 @@ django-compressor==4.5.1
 django-cors-headers==4.6.0
 django-environ==0.12.0
 django-hijack==3.7.1
-django-jsonform==2.23.1
+django-jsonform==2.23.2
 django-maintenance-mode==0.21.1
 django-markdownx==4.0.7
 django-ninja==1.3.0
@@ -117,7 +117,7 @@ lxml==5.3.0
 markdown==3.7
     # via django-markdownx
 markdownify==0.14.1
-mistune==3.1.0
+mistune==3.1.1
 multidict==6.1.0
     # via aiohttp
     # via yarl
@@ -137,7 +137,7 @@ protobuf==5.29.3
 psycopg2-binary==2.9.10
 pycparser==2.22
     # via cffi
-pydantic==2.10.5
+pydantic==2.10.6
     # via atproto
     # via django-ninja
 pydantic-core==2.27.2
@@ -147,7 +147,7 @@ python-dateutil==2.9.0.post0
     # via django-auditlog
 python-fsutil==0.14.1
     # via django-maintenance-mode
-pytz==2024.2
+pytz==2025.1
     # via dateparser
     # via django-tz-detect
 rcssmin==1.1.2
@@ -188,6 +188,7 @@ typesense==0.21.0
 typing-extensions==4.12.2
     # via anyio
     # via atproto
+    # via beautifulsoup4
     # via pydantic
     # via pydantic-core
 tzlocal==5.2
@@ -1203,6 +1203,7 @@ class Post(models.Model):
         type_data: dict | None = None,
         published: datetime.datetime | None = None,
         edited: datetime.datetime | None = None,
+        language: str = "",
     ) -> "Post":
         with transaction.atomic():
             # Find mentions in this post
@@ -1233,6 +1234,7 @@ class Post(models.Model):
             "visibility": visibility,
             "hashtags": hashtags,
             "in_reply_to": reply_to.object_uri if reply_to else None,
+            "language": language,
         }
         if edited:
             post_obj["edited"] = edited
@@ -1281,6 +1283,7 @@ class Post(models.Model):
         type_data: dict | None = None,
         published: datetime.datetime | None = None,
         edited: datetime.datetime | None = None,
+        language: str | None = None,
     ):
         with transaction.atomic():
             # Strip all HTML and apply linebreaks filter
@@ -1301,6 +1304,8 @@ class Post(models.Model):
         self.emojis.set(Emoji.emojis_from_content(content, None))
         if attachments is not None:
             self.attachments.set(attachments or [])  # type: ignore
+        if language is not None:
+            self.language = language
         if type_data:
             self.type_data = type_data
         self.save()
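Note: create_local takes language: str = "" (a language is always written, possibly empty), while edit_local takes language: str | None = None, so an edit keeps the stored language unless one is explicitly supplied. A sketch of that asymmetry in the edit path:

    def edit(post: dict, language: str | None = None) -> dict:
        if language is not None:  # None means "keep existing", "" means "clear"
            post["language"] = language
        return post

    assert edit({"language": "zh"})["language"] == "zh"
    assert edit({"language": "zh"}, language="en")["language"] == "en"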
@@ -431,6 +431,7 @@ class Takahe:
         edit_time: datetime.datetime | None = None,
         reply_to_pk: int | None = None,
         attachments: list | None = None,
+        language: str = "",
     ) -> Post | None:
         identity = Identity.objects.get(pk=author_pk)
         post = (
@@ -457,6 +458,7 @@ class Takahe:
                 published=post_time,
                 edited=edit_time,
                 attachments=attachments,
+                language=language,
             )
         else:
             post = Post.create_local(
@@ -472,6 +474,7 @@ class Takahe:
                 edited=edit_time,
                 reply_to=reply_to_post,
                 attachments=attachments,
+                language=language,
             )
         TimelineEvent.objects.get_or_create(
             identity=identity,
@@ -693,8 +696,6 @@ class Takahe:

     @staticmethod
     def get_neodb_peers():
-        if settings.SEARCH_PEERS:  # '-' = disable federated search
-            return [] if settings.SEARCH_PEERS == ["-"] else settings.SEARCH_PEERS
         cache_key = "neodb_peers"
         peers = cache.get(cache_key, None)
         if peers is None:
@@ -709,6 +710,20 @@ class Takahe:
             cache.set(cache_key, peers, timeout=1800)
         return peers

+    @staticmethod
+    def get_blocked_peers():
+        cache_key = "blocked_peers"
+        peers = cache.get(cache_key, None)
+        if peers is None:
+            peers = list(
+                Domain.objects.filter(
+                    local=False,
+                    blocked=True,
+                ).values_list("pk", flat=True)
+            )
+            cache.set(cache_key, peers, timeout=1800)
+        return peers
+
     @staticmethod
     def verify_invite(token: str) -> bool:
         if not token:
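Note: get_blocked_peers mirrors the cache-aside pattern of get_neodb_peers above; Takahe's Domain model uses the domain name itself as its primary key, which is why values_list("pk", flat=True) yields hostnames that validate_url_fallback can match against. A minimal sketch of the caching (cache stubbed with a dict for illustration):

    _cache: dict = {}

    def get_blocked_peers(fetch) -> list[str]:
        peers = _cache.get("blocked_peers")
        if peers is None:
            peers = fetch()  # e.g. Domain.objects.filter(local=False, blocked=True)...
            _cache["blocked_peers"] = peers  # real code adds timeout=1800
        return peers

    assert get_blocked_peers(lambda: ["bad.example"]) == ["bad.example"]
    assert get_blocked_peers(lambda: []) == ["bad.example"]  # served from cache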
@@ -810,8 +825,10 @@ class Takahe:
         exclude_identities: list[int] = [],
         local_only=False,
     ):
+        from catalog.sites.fedi import FediverseInstance
+
         since = timezone.now() - timedelta(days=days)
-        domains = Takahe.get_neodb_peers() + [settings.SITE_DOMAIN]
+        domains = FediverseInstance.get_peers_for_search() + [settings.SITE_DOMAIN]
         qs = (
             Post.objects.exclude(state__in=["deleted", "deleted_fanned_out"])
             .exclude(author_id__in=exclude_identities)
@@ -14,7 +14,9 @@ def activate_language_for_user(user: "User | None", request=None):
     user_language = getattr(user, "language", "")
     if not user_language:
         if request:
-            user_language = translation.get_language_from_request(request)
+            user_language = request.GET.get("lang")
+            if not user_language:
+                user_language = translation.get_language_from_request(request)
         else:
             user_language = settings.LANGUAGE_CODE
     # if user_language in dict(settings.LANGUAGES).keys():
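Note: this lets a visitor force a language with an explicit ?lang= query parameter before Accept-Language negotiation; a signed-in user's profile language still wins overall. The precedence, condensed into a pure function:

    def pick_language(user_lang: str, query_lang, header_lang: str, default: str) -> str:
        if user_lang:
            return user_lang
        return query_lang or header_lang or default

    assert pick_language("", "fr", "en", "en") == "fr"    # ?lang= wins
    assert pick_language("", None, "de", "en") == "de"    # then Accept-Language
    assert pick_language("ja", "fr", "en", "en") == "ja"  # profile setting wins overall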
@@ -126,6 +126,10 @@ class User(AbstractUser):
         ]
         indexes = [models.Index("is_active", name="index_user_is_active")]

+    @property
+    def macrolanguage(self) -> str:  # ISO 639 macrolanguage
+        return self.language.split("-")[0] if self.language else ""
+
     @cached_property
     def mastodon(self) -> "MastodonAccount | None":
         return MastodonAccount.objects.filter(user=self).first()
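Note: the new property reduces a full ISO 639 language tag to its primary subtag, which is the value now stamped on federated posts. Standalone example of the same expression:

    def macrolanguage(language: str) -> str:
        return language.split("-")[0] if language else ""

    assert macrolanguage("zh-hans") == "zh"
    assert macrolanguage("pt-BR") == "pt"
    assert macrolanguage("") == ""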