add lint by ruff

parent 2320cbbc91
commit f2abc7a7c0
43 changed files with 90 additions and 85 deletions
@@ -20,8 +20,14 @@ repos:
       - id: requirements-txt-fixer
       - id: mixed-line-ending

+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.5
+    hooks:
+      - id: ruff
+      # - id: ruff-format
+
   - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+    rev: 5.13.2
     hooks:
       - id: isort
         args: ["--profile=black"]
@@ -10,7 +10,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 try:
     with open(os.path.join(BASE_DIR, "build_version")) as f:
         NEODB_VERSION = __version__ + "-" + f.read().strip()
-except:
+except Exception:
     NEODB_VERSION = __version__ + "-unknown"

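Most of the edits in this commit replace bare "except:" clauses with "except Exception:", which is what ruff's E722 rule flags. A minimal sketch of why the narrower form is safer (illustrative code, not from this repository): a bare except also swallows KeyboardInterrupt and SystemExit, so Ctrl-C and sys.exit() get silently eaten, while "except Exception:" lets them propagate.

# Illustration of ruff E722 (bare except); hypothetical helper, not project code.
def read_build_version(path: str) -> str:
    try:
        with open(path) as f:
            return f.read().strip()
    except Exception:
        # a bare "except:" here would also swallow KeyboardInterrupt and SystemExit
        return "unknown"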
@@ -1,5 +1,6 @@
+from typing import Any, Callable, List, Optional, Tuple, Type
+
 from django.http import HttpResponse
-from django.utils.translation import gettext_lazy as _
 from ninja import Schema

 from common.api import *
@@ -4,7 +4,7 @@ from .models import *
 from .scrapers import *
 from .sites import *

-__all__ = (
+__all__ = (  # noqa
     "IdType",
     "SiteName",
     "ItemType",
@@ -172,7 +172,7 @@ class BasicDownloader:
                         encoding="utf-8",
                     ) as fp:
                         fp.write(resp.text)
-            except:
+            except Exception:
                 _logger.warn("Save downloaded data failed.")
         else:
             resp = MockResponse(self.url)
@@ -385,7 +385,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
         if to_item.merged_to_item is not None:
             raise ValueError("cannot merge to item which is merged to another item")
         if to_item.__class__ != self.__class__:
-            raise ValueError(f"cannot merge to item in a different model")
+            raise ValueError("cannot merge to item in a different model")
         self.log_action({"!merged": [str(self.merged_to_item), str(to_item)]})
         self.merged_to_item = to_item
         self.save()
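The other recurring fix is ruff's F541 (f-string without any placeholders): an f prefix on a literal with no {} fields does nothing and usually signals a forgotten interpolation. A small sketch, not taken from the repository:

# F541: the f prefix only earns its keep when something is interpolated.
model = "Edition"
plain = "cannot merge to item in a different model"            # fine, no prefix needed
useful = f"cannot merge {model} to item in a different model"  # fine, has a placeholder
# f"cannot merge to item in a different model" would be flagged: f-string with no placeholder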
@@ -448,7 +448,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
         b62 = r[0]
         try:
             item = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62)))
-        except:
+        except Exception:
             item = None
         return item

@@ -618,7 +618,7 @@ class ExternalResource(models.Model):
         try:
             site = self.get_site()
             return site.SITE_NAME if site else SiteName.Unknown
-        except:
+        except Exception:
             _logger.warning(f"Unknown site for {self}")
             return SiteName.Unknown

@@ -10,7 +10,7 @@ import json
 import logging
 import re
 from dataclasses import dataclass, field
-from typing import Callable, Type
+from typing import Any, Callable, Type, TypeVar

 import django_rq
 import requests
@@ -268,8 +268,6 @@ class AbstractSite:
         return p


-from typing import Any, Callable, Type, TypeVar
-
 T = TypeVar("T")


@@ -313,7 +311,7 @@ class SiteManager:
                 )
                 if cls:
                     url = url2
-            except:
+            except Exception:
                 pass
         if cls is None:
             cls = next(
@@ -327,7 +325,7 @@ class SiteManager:

     @staticmethod
     def get_site_by_id(id_type: IdType, id_value: str) -> AbstractSite | None:
-        if not id_type in SiteManager.registry:
+        if id_type not in SiteManager.registry:
             return None
         cls = SiteManager.registry[id_type]
         return cls(id_value=id_value)
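The membership-test edits correspond to ruff's E713: "not x in y" is parsed as "not (x in y)", so it is exactly equivalent to the more direct "x not in y". A quick illustrative check, not project code:

# E713: "not x in y" parses as "not (x in y)", so both forms give the same result.
registry = {"isbn": object(), "imdb": object()}
old_style = not "douban" in registry   # works, but ruff E713 flags the spelling
new_style = "douban" not in registry   # idiomatic equivalent
assert old_style == new_style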
@@ -316,7 +316,7 @@ class Fediverse:
             url = f"https://{host}{item['url']}"  # FIXME update API and use abs urls
             try:
                 cat = ItemCategory(item["category"])
-            except:
+            except Exception:
                 cat = ""
             results.append(
                 SearchResultItem(
@@ -30,7 +30,7 @@ class DbIndexer:

     @classmethod
     def search(cls, q, page=1, categories=None, tag=None, sort=None):
-        result = lambda: None
+        result = lambda: None  # noqa
         result.items = Item.objects.filter(title__contains=q)[:10]
         result.num_pages = 1
         result.count = len(result.items)
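The # noqa added here keeps ruff's E731 (do not assign a lambda to a variable) quiet: the lambda is only used as a cheap mutable object to hang items, num_pages and count attributes on. A hedged alternative that would avoid the suppression entirely is types.SimpleNamespace; this is a sketch of that option, not the project's implementation:

# E731-free stand-in for "result = lambda: None"; attribute names mirror the search() result above.
from types import SimpleNamespace

result = SimpleNamespace(items=[], num_pages=1, count=0)
result.count = len(result.items)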
@@ -83,10 +83,10 @@ def query_index(keywords, categories=None, tag=None, page=1, prepare_external=Tr
             if hasattr(i, "works"):
                 my_key += [i[0] for i in i.works.all().values_list("id")]
             if len(my_key):
-                l = len(keys) + len(my_key)
+                sl = len(keys) + len(my_key)
                 keys.update(my_key)
                 # check and skip dup with same imdb or isbn or works id
-                if len(keys) < l:
+                if len(keys) < sl:
                     duplicated_items.append(i)
                 else:
                     items.append(i)
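The renames from l to sl here (and to ol/nl further down) address ruff's E741: the single letters l, O and I are treated as ambiguous names because they are easily mistaken for 1 and 0. Any longer name satisfies the rule; an illustrative stand-alone sketch of the same dedup check:

# E741: "l" reads like "1" in many fonts; a descriptive name avoids the ambiguity.
keys = {1, 2, 3}
my_key = [3, 4]
key_count_before = len(keys) + len(my_key)   # what the flagged "l = ..." was tracking
keys.update(my_key)
has_duplicates = len(keys) < key_count_before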
@@ -135,7 +135,7 @@ def enqueue_fetch(url, is_refetch, user=None):
     try:
         job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
         in_progress = job.get_status() in ["queued", "started"]
-    except:
+    except Exception:
         in_progress = False
     if not in_progress:
         django_rq.get_queue("fetch").enqueue(
@@ -80,7 +80,7 @@ def enqueue_update_index(item_ids):
             )
             if job.get_status() in ["queued", "scheduled"]:
                 job.cancel()
-        except:
+        except Exception:
             pass
     # using rq's built-in scheduler here, it can be switched to other similar implementations
     django_rq.get_queue(_PENDING_INDEX_QUEUE).enqueue_in(
@@ -184,7 +184,6 @@ class Indexer:
     @classmethod
     def check(cls):
         client = typesense.Client(settings.TYPESENSE_CONNECTION)
-        wait = 5
         if not client.operations.is_healthy():
             raise ValueError("Typesense: server not healthy")
         idx = client.collections[settings.TYPESENSE_INDEX_NAME]
@@ -209,7 +208,7 @@ class Indexer:
                 f"Typesense: index {settings.TYPESENSE_INDEX_NAME} has {i['num_documents']} documents"
             )
             return
-        except:
+        except Exception:
             client.collections.create(cls.config())
             logger.info(
                 f"Typesense: index {settings.TYPESENSE_INDEX_NAME} created"
@@ -29,7 +29,7 @@ def fetch_refresh(request, job_id):
     try:
         job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
         item_url = job.return_value()
-    except:
+    except Exception:
         item_url = "-"
     if item_url:
         if item_url == "-":
@@ -109,7 +109,7 @@ def search(request):
         try:
             categories = [ItemCategory(category)]
             hide_category = True
-        except:
+        except Exception:
             categories = visible_categories(request)
     tag = request.GET.get("tag", default="").strip()
     p = request.GET.get("page", default="1")
@@ -134,7 +134,7 @@ def search(request):
     if request.GET.get("r"):
         return redirect(keywords)

-    items, num_pages, _, dup_items = query_index(keywords, categories, tag, p)
+    items, num_pages, __, dup_items = query_index(keywords, categories, tag, p)
     return render(
         request,
         "search_results.html",
@@ -62,7 +62,7 @@ class AppleMusic(AbstractSite):
                 content = BasicDownloader(url, headers=self.headers).download().html()
                 _logger.info(f"got localized content from {url}")
                 break
-            except:
+            except Exception:
                 pass
         if content is None:
             raise ParseError(self, f"localized content for {self.url}")
@@ -71,7 +71,7 @@ class Bandcamp(AbstractSite):
             release_date = (
                 release_datetime.strftime("%Y-%m-%d") if release_datetime else None
             )
-        except:
+        except Exception:
             release_date = None
         duration = None
         company = None
@@ -144,7 +144,7 @@ class Goodreads_Work(AbstractSite):
         author = self.query_str(content, "//h2/a/text()")
         try:
             first_published = self.query_str(content, "//h2/span/text()")
-        except:
+        except Exception:
             first_published = None
         pd = ResourceContent(
             metadata={
@@ -44,7 +44,7 @@ class RSS(AbstractSite):
         req.add_header("User-Agent", settings.NEODB_USER_AGENT)
         try:
             feed = podcastparser.parse(url, urllib.request.urlopen(req, timeout=3))
-        except:
+        except Exception:
             url = url.replace("https://", "http://")
             req = urllib.request.Request(url)
             req.add_header("User-Agent", settings.NEODB_USER_AGENT)
@@ -52,7 +52,7 @@ class RSS(AbstractSite):
             feed = podcastparser.parse(
                 url, urllib.request.urlopen(req, timeout=3)
             )
-        except:
+        except Exception:
             return None
         if settings.DOWNLOADER_SAVEDIR:
             pickle.dump(
@@ -50,7 +50,7 @@ class Steam(AbstractSite):
             brief = self.query_str(
                 content, "//div[@class='game_description_snippet']/text()"
             )
-        except:
+        except Exception:
             brief = ""
         # try Steam images if no image from IGDB
         if pd.cover_image is None:
@@ -214,13 +214,13 @@ def assign_parent(request, item_path, item_uuid):
 @login_required
 def remove_unused_seasons(request, item_path, item_uuid):
     item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-    l = list(item.seasons.all())
-    for s in l:
+    sl = list(item.seasons.all())
+    for s in sl:
         if not s.journal_exists():
             s.delete()
-    l = [s.id for s in l]
-    l2 = [s.id for s in item.seasons.all()]
-    item.log_action({"!remove_unused_seasons": [l, l2]})
+    ol = [s.id for s in sl]
+    nl = [s.id for s in item.seasons.all()]
+    item.log_action({"!remove_unused_seasons": [ol, nl]})
     return redirect(item.url)


@@ -21,7 +21,7 @@ class BaseJob:
             job.cancel()
             registry = ScheduledJobRegistry(queue=django_rq.get_queue("cron"))
             registry.remove(job)
-        except:
+        except Exception:
             pass

     @classmethod
@@ -56,7 +56,7 @@ class Setup:
             .exists()
         ):
             logger.warning(
-                f"Local identities are found for other domains, there might be a configuration issue."
+                "Local identities are found for other domains, there might be a configuration issue."
             )

         TakaheConfig.objects.update_or_create(
@@ -109,7 +109,7 @@ class Setup:
                 logger.info("Default relay is disabled, unsubscribing...")
                 Takahe.update_state(relay, "unsubscribing")
             else:
-                logger.info(f"Default relay is disabled.")
+                logger.info("Default relay is disabled.")
         else:
             if relay:
                 logger.debug(f"Default relay is enabled, state: {relay.state}")
@@ -20,14 +20,14 @@ def _cc(text):
 @stringfilter
 def highlight(text, search):
     otext = _cc(text.lower())
-    l = len(text)
-    if l != len(otext):
+    sl = len(text)
+    if sl != len(otext):
         return text  # in rare cases, the lowered&converted text has a different length
     rtext = ""
     words = list(set([w for w in _cc(search.strip().lower()).split(" ") if w]))
     words.sort(key=len, reverse=True)
     i = 0
-    while i < l:
+    while i < sl:
         m = None
         for w in words:
             if otext[i : i + len(w)] == w:
@@ -15,5 +15,5 @@ def thumb(source, alias):
             return source.url
         else:
             return thumbnail_url(source, alias)
-    except Exception as e:
+    except Exception:
         return ""
@@ -522,7 +522,7 @@ def update_tag(request, tag_uuid: str, t_in: TagInSchema):
         tag.title = title
         tag.visibility = visibility
         tag.save()
-    except:
+    except Exception:
         return 409, {"message": "Tag with same title exists"}
     return tag

@@ -222,12 +222,12 @@ class DoubanImporter:
             rating = cells[5]
             try:
                 rating_grade = int(rating) * 2 if rating else None
-            except:
+            except Exception:
                 rating_grade = None
             tags = cells[6] if len(cells) >= 7 else ""
             try:
                 tags = tags.split(",") if tags else []
-            except:
+            except Exception:
                 tags = []
             comment = cells[7] if len(cells) >= 8 else None
             self.processed += 1
@@ -235,7 +235,7 @@ class DoubanImporter:
                 if type(time) == str:
                     time = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")
                 time = time.replace(tzinfo=_tz_sh)
-            except:
+            except Exception:
                 time = None
             r = self.import_mark(url, shelf_type, comment, rating_grade, tags, time)
             if r == 1:
@@ -385,7 +385,7 @@ class DoubanImporter:
             Review.objects.update_or_create(
                 owner=self.user.identity, item=item, defaults=params
             )
-        except:
+        except Exception:
             logger.warning(f"{prefix} update multiple review {review_url}")
             r = (
                 Review.objects.filter(owner=self.user.identity, item=item)
@@ -57,7 +57,7 @@ class LetterboxdImporter(Task):
         try:
             site.get_resource_ready()
             return site.get_item()
-        except:
+        except Exception:
             imdb_url = str(iu[0])  # type:ignore
             logger.warning(
                 f"Fetching {url}: TMDB {site.url} failed, try IMDB {imdb_url}"
@@ -39,7 +39,7 @@ class OPMLImporter:
             logger.info(f"{self.user} import {feed.url}")
             try:
                 res = RSS(feed.url).get_resource_ready()
-            except:
+            except Exception:
                 res = None
             if not res or not res.item:
                 logger.warning(f"{self.user} feed error {feed.url}")
@@ -174,17 +174,17 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
         b62 = r[0]
         try:
             obj = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62)))
-        except:
+        except Exception:
             obj = None
         return obj

     @classmethod
     def update_by_ap_object(cls, owner, item, obj, post_id, visibility):
-        raise NotImplementedError()
+        raise NotImplementedError("subclass must implement this")

     @property
     def ap_object(self):
-        raise NotImplementedError()
+        raise NotImplementedError("subclass must implement this")

     def link_post_id(self, post_id: int):
         PiecePost.objects.get_or_create(piece=self, post_id=post_id)
@@ -94,7 +94,6 @@ class List(Piece):
             member.delete()

     def update_member_order(self, ordered_member_ids):
-        members = self.ordered_members
         for m in self.members.all():
             try:
                 i = ordered_member_ids.index(m.id)
@@ -224,7 +224,7 @@ class Mark:
                 log_entry.timestamp = created_time
                 try:
                     log_entry.save(update_fields=["timestamp"])
-                except:
+                except Exception:
                     log_entry.delete()
                 shelfmember_changed = True
         if shelfmember_changed:
@@ -21,7 +21,7 @@ class UserOwnedObjectMixin:
     owner: ForeignKey[APIdentity, Piece]
     visibility: int

-    def is_visible_to(self: "Piece | Self", viewing_user: User) -> bool:  # type: ignore
+    def is_visible_to(self: "Piece", viewing_user: User) -> bool:  # noqa # type: ignore
         owner = self.owner
         if not owner or not owner.is_active:
             return False
@@ -67,7 +67,7 @@ class Rating(Content):
             "created_time": datetime.fromisoformat(obj["published"]),
             "edited_time": datetime.fromisoformat(obj["updated"]),
         }
-        p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d)
+        p = cls.objects.update_or_create(owner=owner, item=item, defaults=d)[0]
         p.link_post_id(post_id)
         return p

@@ -35,12 +35,12 @@ def render_md(s: str) -> str:


 def _spolier(s: str) -> str:
-    l = s.split(">!", 1)
-    if len(l) == 1:
+    sl = s.split(">!", 1)
+    if len(sl) == 1:
         return escape(s)
-    r = l[1].split("!<", 1)
+    r = sl[1].split("!<", 1)
     return (
-        escape(l[0])
+        escape(sl[0])
         + '<span class="spoiler" _="on click toggle .revealed on me">'
         + escape(r[0])
         + "</span>"
@@ -12,7 +12,7 @@ register = template.Library()
 @register.simple_tag(takes_context=True)
 def user_visibility_of(context, piece: UserOwnedObjectMixin):
     user = context["request"].user
-    return piece.is_visible_to(user)
+    return piece.is_visible_to(user)  # type: ignore


 @register.simple_tag()
@@ -101,7 +101,7 @@ def mark(request: AuthedHttpRequest, item_uuid):
                     share_to_mastodon=share_to_mastodon,
                     created_time=mark_date,
                 )
-            except PermissionDenied as e:
+            except PermissionDenied:
                 _logger.warn(f"post to mastodon error 401 {request.user}")
                 return render_relogin(request)
             except ValueError as e:
@@ -143,13 +143,13 @@ def mark_log(request: AuthedHttpRequest, item_uuid, log_id):
 @login_required
 def comment(request: AuthedHttpRequest, item_uuid):
     item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-    if not item.class_name in ["podcastepisode", "tvepisode"]:
+    if item.class_name not in ["podcastepisode", "tvepisode"]:
         raise BadRequest("Commenting this type of items is not supported yet.")
     comment = Comment.objects.filter(owner=request.user.identity, item=item).first()
     if request.method == "GET":
         return render(
             request,
-            f"comment.html",
+            "comment.html",
             {
                 "item": item,
                 "comment": comment,
@@ -169,7 +169,7 @@ def comment(request: AuthedHttpRequest, item_uuid):
             try:
                 pos = datetime.strptime(position, "%H:%M:%S")
                 position = pos.hour * 3600 + pos.minute * 60 + pos.second
-            except:
+            except Exception:
                 if settings.DEBUG:
                     raise
                 position = None
@@ -29,7 +29,7 @@ class MastodonSiteCheck(BaseJob):
                 api_domain = site.api_domain or site.domain_name
                 domain, api_domain, v = detect_server_info(api_domain)
                 site.last_reachable_date = timezone.now()
-            except:
+            except Exception:
                 logger.warning(
                     f"Failed to detect server info for {site.domain_name}/{site.api_domain}"
                 )
@@ -1,6 +1,7 @@
 [tool.pyright]
 exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*", "neodb-takahe" ]
 reportIncompatibleVariableOverride = false
+reportUnusedImport = false

 [tool.djlint]
 ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031"
@@ -20,5 +21,5 @@ plugins = ["mypy_django_plugin.main"]
 django_settings_module = "boofilsic.settings"

 [tool.ruff]
-ignore = ['E501']
-exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ]
+exclude = ["neodb-takahe/*", "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ]
+lint.ignore = ["F401", "F403", "F405"]
@@ -3,7 +3,8 @@ coverage
 django-debug-toolbar
 django-stubs
 djlint~=1.34.0
-isort~=5.12.0
+isort~=5.13.2
 lxml-stubs
 pre-commit
 pyright==1.1.350
+ruff
@@ -1705,7 +1705,7 @@ class Block(models.Model):
             raise ValueError("You cannot block from a remote Identity")
         block = cls.maybe_get(source=source, target=target, mute=False)
         if block is not None:
-            if not block.state in ["new", "sent", "awaiting_expiry"]:
+            if block.state not in ["new", "sent", "awaiting_expiry"]:
                 block.state = BlockStates.new  # type:ignore
                 block.save()
         else:
@@ -1735,7 +1735,7 @@ class Block(models.Model):
             raise ValueError("You cannot mute from a remote Identity")
         block = cls.maybe_get(source=source, target=target, mute=True)
         if block is not None:
-            if not block in ["new", "sent", "awaiting_expiry"]:
+            if block not in ["new", "sent", "awaiting_expiry"]:
                 block.state = BlockStates.new  # type:ignore
             if duration:
                 block.expires = timezone.now() + datetime.timedelta(seconds=duration)
@@ -79,7 +79,7 @@ def connect(request):
     login_email = request.POST.get("email", "")
     try:
         EmailValidator()(login_email)
-    except:
+    except Exception:
         return render(
             request,
             "common/error.html",
@@ -299,11 +299,11 @@ def send_verification_link(user_id, action, email):
         subject = f'{settings.SITE_INFO["site_name"]} - {_("注册新账号")}'
         url = settings.SITE_INFO["site_url"] + "/account/register_email?c=" + v
         msg = f"你好,\n本站没有与{email}关联的账号。你希望注册一个新账号吗?\n"
-        msg += f"\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n"
-        msg += f"\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n"
+        msg += "\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n"
+        msg += "\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n"
         if settings.ALLOW_EMAIL_ONLY_ACCOUNT:
             msg += f"\n如果你不便使用联邦宇宙身份,也可以点击以下链接使用电子邮件注册一个新账号,以后再关联到联邦宇宙。\n{url}\n"
-            msg += f"\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。"
+            msg += "\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。"
         else:
             raise ValueError("Invalid action")
         try:
@@ -451,7 +451,7 @@ def swap_login(request, token, site, refresh_token):
             )
         else:
             try:
-                existing_user = User.objects.get(
+                User.objects.get(
                     mastodon_username__iexact=username, mastodon_site__iexact=site
                 )
                 messages.add_message(
@@ -40,4 +40,4 @@ class MastodonUserSync(BaseJob):
             logger.warning(f"Skip {user} detail because of inactivity.")
             skip_detail = True
         user.refresh_mastodon_data(skip_detail)
-        logger.info(f"Mastodon User Sync finished.")
+        logger.info("Mastodon User Sync finished.")
@@ -237,14 +237,14 @@ class APIdentity(models.Model):
         '@id@site' - remote activitypub identity 'id@site'
         """
         s = handler.split("@")
-        l = len(s)
-        if l == 1 or (l == 2 and s[0] == ""):
+        sl = len(s)
+        if sl == 1 or (sl == 2 and s[0] == ""):
             return cls.objects.get(
-                username__iexact=s[0] if l == 1 else s[1],
+                username__iexact=s[0] if sl == 1 else s[1],
                 local=True,
                 deleted__isnull=True,
             )
-        elif l == 2:
+        elif sl == 2:
             if match_linked:
                 return cls.objects.get(
                     user__mastodon_username__iexact=s[0],
@@ -256,7 +256,7 @@ class APIdentity(models.Model):
             if i:
                 return i
             raise cls.DoesNotExist(f"Identity not found @{handler}")
-        elif l == 3 and s[0] == "":
+        elif sl == 3 and s[0] == "":
             i = cls.get_remote(s[1], s[2])
             if i:
                 return i
@@ -78,4 +78,4 @@ class Task(models.Model):
         msg.error(task.user, f"[{task.type}] {task.message}")

     def run(self) -> None:
-        raise NotImplemented
+        raise NotImplementedError("subclass must implement this")
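This one is a genuine bug fix rather than a style change: ruff's F901 flags "raise NotImplemented" because NotImplemented is the sentinel that binary dunder methods return, not an exception class, so raising it fails with a TypeError at runtime. A minimal sketch (the class names here are made up):

# F901: NotImplemented is a sentinel value; NotImplementedError is the exception to raise.
class BaseTask:
    def run(self) -> None:
        raise NotImplementedError("subclass must implement this")

class BrokenTask:
    def run(self) -> None:
        raise NotImplemented  # TypeError at runtime: exceptions must derive from BaseException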
@@ -409,7 +409,7 @@ class User(AbstractUser):
         if name.startswith("~"):
             try:
                 query_kwargs = {"pk": int(name[1:])}
-            except:
+            except Exception:
                 return None
         elif name.startswith("@"):
             query_kwargs = {
@@ -79,7 +79,7 @@ def fetch_refresh(request):
     try:
         i = APIdentity.get_by_handle(handle)
         return HTTPResponseHXRedirect(i.url)
-    except:
+    except Exception:
         retry = int(request.GET.get("retry", 0)) + 1
         if retry > 10:
             return render(request, "users/fetch_identity_failed.html")