add lint by ruff

Your Name 2024-04-06 00:13:50 -04:00 committed by Henri Dickson
parent 2320cbbc91
commit f2abc7a7c0
43 changed files with 90 additions and 85 deletions

@@ -20,8 +20,14 @@ repos:
      - id: requirements-txt-fixer
      - id: mixed-line-ending
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.3.5
+    hooks:
+      - id: ruff
+      # - id: ruff-format
  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
+    rev: 5.13.2
    hooks:
      - id: isort
        args: ["--profile=black"]
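The new hook runs ruff's default checks (a subset of the pycodestyle "E" errors plus the pyflakes "F" rules) on every commit; ruff-format is added but left commented out. An illustrative sketch, not code from this repo, of what the default selection flags:

    import os          # F401: imported but unused (ignored by this repo's config, see pyproject.toml)

    def first(l):      # E741: ambiguous variable name 'l'
        try:
            return l[0]
        except:        # E722: bare 'except'
            return None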

@@ -10,7 +10,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
try:
with open(os.path.join(BASE_DIR, "build_version")) as f:
NEODB_VERSION = __version__ + "-" + f.read().strip()
-except:
+except Exception:
NEODB_VERSION = __version__ + "-unknown"
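The `except:` → `except Exception:` change recurs throughout this commit; it fixes ruff's E722 (bare except). A bare except also swallows SystemExit and KeyboardInterrupt, which derive from BaseException rather than Exception. A minimal sketch of the difference:

    import sys

    try:
        sys.exit(1)
    except:  # E722: also catches SystemExit, so this swallows the exit
        print("exit swallowed")

    try:
        sys.exit(1)
    except Exception:  # SystemExit derives from BaseException, not Exception, so it propagates
        print("never reached")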

@@ -1,5 +1,6 @@
from typing import Any, Callable, List, Optional, Tuple, Type
+from django.http import HttpResponse
from django.utils.translation import gettext_lazy as _
from ninja import Schema
from common.api import *

@@ -4,7 +4,7 @@ from .models import *
from .scrapers import *
from .sites import *
-__all__ = (
+__all__ = ( # noqa
"IdType",
"SiteName",
"ItemType",

@@ -172,7 +172,7 @@ class BasicDownloader:
encoding="utf-8",
) as fp:
fp.write(resp.text)
-except:
+except Exception:
_logger.warn("Save downloaded data failed.")
else:
resp = MockResponse(self.url)

@@ -385,7 +385,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
if to_item.merged_to_item is not None:
raise ValueError("cannot merge to item which is merged to another item")
if to_item.__class__ != self.__class__:
-raise ValueError(f"cannot merge to item in a different model")
+raise ValueError("cannot merge to item in a different model")
self.log_action({"!merged": [str(self.merged_to_item), str(to_item)]})
self.merged_to_item = to_item
self.save()
@@ -448,7 +448,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
b62 = r[0]
try:
item = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62)))
-except:
+except Exception:
item = None
return item
@@ -618,7 +618,7 @@ class ExternalResource(models.Model):
try:
site = self.get_site()
return site.SITE_NAME if site else SiteName.Unknown
-except:
+except Exception:
_logger.warning(f"Unknown site for {self}")
return SiteName.Unknown
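The dropped `f` prefixes in this file fix ruff's F541 (f-string without any placeholders): a string with no `{...}` fields gains nothing from the prefix. Illustrative sketch:

    name = "book"
    a = f"cannot merge"         # F541: no placeholder, the f-prefix is dead weight
    b = "cannot merge"          # equivalent plain literal
    c = f"cannot merge {name}"  # placeholder present, f-string is warranted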

@@ -10,7 +10,7 @@ import json
import logging
import re
from dataclasses import dataclass, field
-from typing import Callable, Type
+from typing import Any, Callable, Type, TypeVar
import django_rq
import requests
@@ -268,8 +268,6 @@ class AbstractSite:
return p
-from typing import Any, Callable, Type, TypeVar
T = TypeVar("T")
@@ -313,7 +311,7 @@ class SiteManager:
)
if cls:
url = url2
-except:
+except Exception:
pass
if cls is None:
cls = next(
@@ -327,7 +325,7 @@
@staticmethod
def get_site_by_id(id_type: IdType, id_value: str) -> AbstractSite | None:
-if not id_type in SiteManager.registry:
+if id_type not in SiteManager.registry:
return None
cls = SiteManager.registry[id_type]
return cls(id_value=id_value)
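The `not id_type in ...` rewrite fixes ruff's E713 (membership test should use `not in`). The two forms evaluate identically, but `not in` reads as a single operator and cannot be misread as `(not id_type) in ...`. A sketch with illustrative names:

    registry = {"isbn": "BookSite"}  # stand-in for SiteManager.registry
    id_type = "imdb"
    assert (not id_type in registry) == (id_type not in registry)  # same result; the second form is idiomatic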

@@ -316,7 +316,7 @@ class Fediverse:
url = f"https://{host}{item['url']}" # FIXME update API and use abs urls
try:
cat = ItemCategory(item["category"])
-except:
+except Exception:
cat = ""
results.append(
SearchResultItem(

@@ -30,7 +30,7 @@ class DbIndexer:
@classmethod
def search(cls, q, page=1, categories=None, tag=None, sort=None):
-result = lambda: None
+result = lambda: None # noqa
result.items = Item.objects.filter(title__contains=q)[:10]
result.num_pages = 1
result.count = len(result.items)
@@ -83,10 +83,10 @@ def query_index(keywords, categories=None, tag=None, page=1, prepare_external=Tr
if hasattr(i, "works"):
my_key += [i[0] for i in i.works.all().values_list("id")]
if len(my_key):
-l = len(keys) + len(my_key)
+sl = len(keys) + len(my_key)
keys.update(my_key)
# check and skip dup with same imdb or isbn or works id
-if len(keys) < l:
+if len(keys) < sl:
duplicated_items.append(i)
else:
items.append(i)
@@ -135,7 +135,7 @@ def enqueue_fetch(url, is_refetch, user=None):
try:
job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
in_progress = job.get_status() in ["queued", "started"]
-except:
+except Exception:
in_progress = False
if not in_progress:
django_rq.get_queue("fetch").enqueue(
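Two rules drive the renames in this file: E741 flags the single-character names `l`, `O`, and `I` as ambiguous (hence `l` → `sl`), and E731 flags assigning a lambda to a name (suppressed here with `# noqa` to keep the existing shim). A hedged sketch of what ruff prefers:

    l = 10                  # E741: 'l' reads like '1' in many fonts
    count = 10              # unambiguous replacement

    result = lambda: None   # E731: lambda bound to a name
    def result_fn():        # the def ruff would suggest instead
        return None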

@@ -80,7 +80,7 @@ def enqueue_update_index(item_ids):
)
if job.get_status() in ["queued", "scheduled"]:
job.cancel()
-except:
+except Exception:
pass
# using rq's built-in scheduler here, it can be switched to other similar implementations
django_rq.get_queue(_PENDING_INDEX_QUEUE).enqueue_in(
@@ -184,7 +184,6 @@ class Indexer:
@classmethod
def check(cls):
client = typesense.Client(settings.TYPESENSE_CONNECTION)
-wait = 5
if not client.operations.is_healthy():
raise ValueError("Typesense: server not healthy")
idx = client.collections[settings.TYPESENSE_INDEX_NAME]
@@ -209,7 +208,7 @@ class Indexer:
f"Typesense: index {settings.TYPESENSE_INDEX_NAME} has {i['num_documents']} documents"
)
return
-except:
+except Exception:
client.collections.create(cls.config())
logger.info(
f"Typesense: index {settings.TYPESENSE_INDEX_NAME} created"

@@ -29,7 +29,7 @@ def fetch_refresh(request, job_id):
try:
job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
item_url = job.return_value()
-except:
+except Exception:
item_url = "-"
if item_url:
if item_url == "-":
@@ -109,7 +109,7 @@ def search(request):
try:
categories = [ItemCategory(category)]
hide_category = True
-except:
+except Exception:
categories = visible_categories(request)
tag = request.GET.get("tag", default="").strip()
p = request.GET.get("page", default="1")
@@ -134,7 +134,7 @@ def search(request):
if request.GET.get("r"):
return redirect(keywords)
-items, num_pages, _, dup_items = query_index(keywords, categories, tag, p)
+items, num_pages, __, dup_items = query_index(keywords, categories, tag, p)
return render(
request,
"search_results.html",

@@ -62,7 +62,7 @@ class AppleMusic(AbstractSite):
content = BasicDownloader(url, headers=self.headers).download().html()
_logger.info(f"got localized content from {url}")
break
-except:
+except Exception:
pass
if content is None:
raise ParseError(self, f"localized content for {self.url}")

@@ -71,7 +71,7 @@ class Bandcamp(AbstractSite):
release_date = (
release_datetime.strftime("%Y-%m-%d") if release_datetime else None
)
-except:
+except Exception:
release_date = None
duration = None
company = None

@@ -144,7 +144,7 @@ class Goodreads_Work(AbstractSite):
author = self.query_str(content, "//h2/a/text()")
try:
first_published = self.query_str(content, "//h2/span/text()")
-except:
+except Exception:
first_published = None
pd = ResourceContent(
metadata={

@@ -44,7 +44,7 @@ class RSS(AbstractSite):
req.add_header("User-Agent", settings.NEODB_USER_AGENT)
try:
feed = podcastparser.parse(url, urllib.request.urlopen(req, timeout=3))
-except:
+except Exception:
url = url.replace("https://", "http://")
req = urllib.request.Request(url)
req.add_header("User-Agent", settings.NEODB_USER_AGENT)
@@ -52,7 +52,7 @@ class RSS(AbstractSite):
feed = podcastparser.parse(
url, urllib.request.urlopen(req, timeout=3)
)
-except:
+except Exception:
return None
if settings.DOWNLOADER_SAVEDIR:
pickle.dump(

@@ -50,7 +50,7 @@ class Steam(AbstractSite):
brief = self.query_str(
content, "//div[@class='game_description_snippet']/text()"
)
-except:
+except Exception:
brief = ""
# try Steam images if no image from IGDB
if pd.cover_image is None:

@@ -214,13 +214,13 @@ def assign_parent(request, item_path, item_uuid):
@login_required
def remove_unused_seasons(request, item_path, item_uuid):
item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-l = list(item.seasons.all())
-for s in l:
+sl = list(item.seasons.all())
+for s in sl:
if not s.journal_exists():
s.delete()
-l = [s.id for s in l]
-l2 = [s.id for s in item.seasons.all()]
-item.log_action({"!remove_unused_seasons": [l, l2]})
+ol = [s.id for s in sl]
+nl = [s.id for s in item.seasons.all()]
+item.log_action({"!remove_unused_seasons": [ol, nl]})
return redirect(item.url)

@@ -21,7 +21,7 @@ class BaseJob:
job.cancel()
registry = ScheduledJobRegistry(queue=django_rq.get_queue("cron"))
registry.remove(job)
-except:
+except Exception:
pass
@classmethod

@@ -56,7 +56,7 @@ class Setup:
.exists()
):
logger.warning(
-f"Local identities are found for other domains, there might be a configuration issue."
+"Local identities are found for other domains, there might be a configuration issue."
)
TakaheConfig.objects.update_or_create(
@@ -109,7 +109,7 @@ class Setup:
logger.info("Default relay is disabled, unsubscribing...")
Takahe.update_state(relay, "unsubscribing")
else:
-logger.info(f"Default relay is disabled.")
+logger.info("Default relay is disabled.")
else:
if relay:
logger.debug(f"Default relay is enabled, state: {relay.state}")

@@ -20,14 +20,14 @@ def _cc(text):
@stringfilter
def highlight(text, search):
otext = _cc(text.lower())
-l = len(text)
-if l != len(otext):
+sl = len(text)
+if sl != len(otext):
return text # in rare cases, the lowered&converted text has a different length
rtext = ""
words = list(set([w for w in _cc(search.strip().lower()).split(" ") if w]))
words.sort(key=len, reverse=True)
i = 0
-while i < l:
+while i < sl:
m = None
for w in words:
if otext[i : i + len(w)] == w:

@@ -15,5 +15,5 @@ def thumb(source, alias):
return source.url
else:
return thumbnail_url(source, alias)
-except Exception as e:
+except Exception:
return ""

@@ -522,7 +522,7 @@ def update_tag(request, tag_uuid: str, t_in: TagInSchema):
tag.title = title
tag.visibility = visibility
tag.save()
-except:
+except Exception:
return 409, {"message": "Tag with same title exists"}
return tag

@@ -222,12 +222,12 @@ class DoubanImporter:
rating = cells[5]
try:
rating_grade = int(rating) * 2 if rating else None
-except:
+except Exception:
rating_grade = None
tags = cells[6] if len(cells) >= 7 else ""
try:
tags = tags.split(",") if tags else []
-except:
+except Exception:
tags = []
comment = cells[7] if len(cells) >= 8 else None
self.processed += 1
@@ -235,7 +235,7 @@ class DoubanImporter:
if type(time) == str:
time = datetime.strptime(time, "%Y-%m-%d %H:%M:%S")
time = time.replace(tzinfo=_tz_sh)
-except:
+except Exception:
time = None
r = self.import_mark(url, shelf_type, comment, rating_grade, tags, time)
if r == 1:
@@ -385,7 +385,7 @@ class DoubanImporter:
Review.objects.update_or_create(
owner=self.user.identity, item=item, defaults=params
)
-except:
+except Exception:
logger.warning(f"{prefix} update multiple review {review_url}")
r = (
Review.objects.filter(owner=self.user.identity, item=item)

@@ -57,7 +57,7 @@ class LetterboxdImporter(Task):
try:
site.get_resource_ready()
return site.get_item()
-except:
+except Exception:
imdb_url = str(iu[0]) # type:ignore
logger.warning(
f"Fetching {url}: TMDB {site.url} failed, try IMDB {imdb_url}"

@@ -39,7 +39,7 @@ class OPMLImporter:
logger.info(f"{self.user} import {feed.url}")
try:
res = RSS(feed.url).get_resource_ready()
-except:
+except Exception:
res = None
if not res or not res.item:
logger.warning(f"{self.user} feed error {feed.url}")

@@ -174,17 +174,17 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin):
b62 = r[0]
try:
obj = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62)))
-except:
+except Exception:
obj = None
return obj
@classmethod
def update_by_ap_object(cls, owner, item, obj, post_id, visibility):
-raise NotImplementedError()
+raise NotImplementedError("subclass must implement this")
@property
def ap_object(self):
-raise NotImplementedError()
+raise NotImplementedError("subclass must implement this")
def link_post_id(self, post_id: int):
PiecePost.objects.get_or_create(piece=self, post_id=post_id)

@@ -94,7 +94,6 @@ class List(Piece):
member.delete()
def update_member_order(self, ordered_member_ids):
-members = self.ordered_members
for m in self.members.all():
try:
i = ordered_member_ids.index(m.id)

@@ -224,7 +224,7 @@ class Mark:
log_entry.timestamp = created_time
try:
log_entry.save(update_fields=["timestamp"])
-except:
+except Exception:
log_entry.delete()
shelfmember_changed = True
if shelfmember_changed:

@@ -21,7 +21,7 @@ class UserOwnedObjectMixin:
owner: ForeignKey[APIdentity, Piece]
visibility: int
-def is_visible_to(self: "Piece | Self", viewing_user: User) -> bool: # type: ignore
+def is_visible_to(self: "Piece", viewing_user: User) -> bool: # noqa # type: ignore
owner = self.owner
if not owner or not owner.is_active:
return False

@@ -67,7 +67,7 @@ class Rating(Content):
"created_time": datetime.fromisoformat(obj["published"]),
"edited_time": datetime.fromisoformat(obj["updated"]),
}
-p, _ = cls.objects.update_or_create(owner=owner, item=item, defaults=d)
+p = cls.objects.update_or_create(owner=owner, item=item, defaults=d)[0]
p.link_post_id(post_id)
return p

@@ -35,12 +35,12 @@ def render_md(s: str) -> str:
def _spolier(s: str) -> str:
-l = s.split(">!", 1)
-if len(l) == 1:
+sl = s.split(">!", 1)
+if len(sl) == 1:
return escape(s)
-r = l[1].split("!<", 1)
+r = sl[1].split("!<", 1)
return (
-escape(l[0])
+escape(sl[0])
+ '<span class="spoiler" _="on click toggle .revealed on me">'
+ escape(r[0])
+ "</span>"

@@ -12,7 +12,7 @@ register = template.Library()
@register.simple_tag(takes_context=True)
def user_visibility_of(context, piece: UserOwnedObjectMixin):
user = context["request"].user
-return piece.is_visible_to(user)
+return piece.is_visible_to(user) # type: ignore
@register.simple_tag()

@@ -101,7 +101,7 @@ def mark(request: AuthedHttpRequest, item_uuid):
share_to_mastodon=share_to_mastodon,
created_time=mark_date,
)
-except PermissionDenied as e:
+except PermissionDenied:
_logger.warn(f"post to mastodon error 401 {request.user}")
return render_relogin(request)
except ValueError as e:
@@ -143,13 +143,13 @@ def mark_log(request: AuthedHttpRequest, item_uuid, log_id):
@login_required
def comment(request: AuthedHttpRequest, item_uuid):
item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-if not item.class_name in ["podcastepisode", "tvepisode"]:
+if item.class_name not in ["podcastepisode", "tvepisode"]:
raise BadRequest("Commenting this type of items is not supported yet.")
comment = Comment.objects.filter(owner=request.user.identity, item=item).first()
if request.method == "GET":
return render(
request,
f"comment.html",
"comment.html",
{
"item": item,
"comment": comment,
@@ -169,7 +169,7 @@ def comment(request: AuthedHttpRequest, item_uuid):
try:
pos = datetime.strptime(position, "%H:%M:%S")
position = pos.hour * 3600 + pos.minute * 60 + pos.second
-except:
+except Exception:
if settings.DEBUG:
raise
position = None

@@ -29,7 +29,7 @@ class MastodonSiteCheck(BaseJob):
api_domain = site.api_domain or site.domain_name
domain, api_domain, v = detect_server_info(api_domain)
site.last_reachable_date = timezone.now()
-except:
+except Exception:
logger.warning(
f"Failed to detect server info for {site.domain_name}/{site.api_domain}"
)

@@ -1,6 +1,7 @@
[tool.pyright]
exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*", "neodb-takahe" ]
reportIncompatibleVariableOverride = false
+reportUnusedImport = false
[tool.djlint]
ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031"
@@ -20,5 +21,5 @@ plugins = ["mypy_django_plugin.main"]
django_settings_module = "boofilsic.settings"
[tool.ruff]
-ignore = ['E501']
-exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ]
+exclude = ["neodb-takahe/*", "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ]
+lint.ignore = ["F401", "F403", "F405"]
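The `[tool.ruff]` migration moves rule codes under the namespaced `lint.ignore` key and silences the three star-import rules project-wide; the old `ignore = ['E501']` can go because line length is not part of ruff's default rule selection anyway. What the ignored codes mean, in an illustrative snippet (names are stand-ins):

    from typing import List       # F401: 'List' imported but unused
    from catalog.common import *  # F403: star import obscures which names exist
    x = IdType.ISBN               # F405: 'IdType' may be undefined, or defined from star imports

Given how heavily this codebase leans on star imports (see the `__all__ = ( # noqa` hunk above), ignoring F401/F403/F405 globally is the pragmatic choice.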

@@ -3,7 +3,8 @@ coverage
django-debug-toolbar
django-stubs
djlint~=1.34.0
-isort~=5.12.0
+isort~=5.13.2
lxml-stubs
pre-commit
pyright==1.1.350
+ruff

@@ -1705,7 +1705,7 @@ class Block(models.Model):
raise ValueError("You cannot block from a remote Identity")
block = cls.maybe_get(source=source, target=target, mute=False)
if block is not None:
-if not block.state in ["new", "sent", "awaiting_expiry"]:
+if block.state not in ["new", "sent", "awaiting_expiry"]:
block.state = BlockStates.new # type:ignore
block.save()
else:
@@ -1735,7 +1735,7 @@ class Block(models.Model):
raise ValueError("You cannot mute from a remote Identity")
block = cls.maybe_get(source=source, target=target, mute=True)
if block is not None:
-if not block in ["new", "sent", "awaiting_expiry"]:
+if block not in ["new", "sent", "awaiting_expiry"]:
block.state = BlockStates.new # type:ignore
if duration:
block.expires = timezone.now() + datetime.timedelta(seconds=duration)

@@ -79,7 +79,7 @@ def connect(request):
login_email = request.POST.get("email", "")
try:
EmailValidator()(login_email)
-except:
+except Exception:
return render(
request,
"common/error.html",
@@ -299,11 +299,11 @@ def send_verification_link(user_id, action, email):
subject = f'{settings.SITE_INFO["site_name"]} - {_("注册新账号")}'
url = settings.SITE_INFO["site_url"] + "/account/register_email?c=" + v
msg = f"你好,\n本站没有与{email}关联的账号。你希望注册一个新账号吗?\n"
-msg += f"\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n"
-msg += f"\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n"
+msg += "\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n"
+msg += "\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n"
if settings.ALLOW_EMAIL_ONLY_ACCOUNT:
msg += f"\n如果你不便使用联邦宇宙身份,也可以点击以下链接使用电子邮件注册一个新账号,以后再关联到联邦宇宙。\n{url}\n"
-msg += f"\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。"
+msg += "\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。"
else:
raise ValueError("Invalid action")
try:
@@ -451,7 +451,7 @@ def swap_login(request, token, site, refresh_token):
)
else:
try:
-existing_user = User.objects.get(
+User.objects.get(
mastodon_username__iexact=username, mastodon_site__iexact=site
)
messages.add_message(
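Dropping the `existing_user =` binding fixes ruff's F841 (local variable assigned but never used): the query is kept only for its side effect of raising `User.DoesNotExist`, which routes control flow. The removed `wait = 5` in the Typesense check above is the same rule. A minimal sketch with illustrative names:

    def check_duplicate(qs, username):
        existing = qs.get(username=username)  # F841: 'existing' is never read afterwards
        qs.get(username=username)             # fixed: the call (and its DoesNotExist) is all that matters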

@@ -40,4 +40,4 @@ class MastodonUserSync(BaseJob):
logger.warning(f"Skip {user} detail because of inactivity.")
skip_detail = True
user.refresh_mastodon_data(skip_detail)
-logger.info(f"Mastodon User Sync finished.")
+logger.info("Mastodon User Sync finished.")

@@ -237,14 +237,14 @@ class APIdentity(models.Model):
'@id@site' - remote activitypub identity 'id@site'
"""
s = handler.split("@")
-l = len(s)
-if l == 1 or (l == 2 and s[0] == ""):
+sl = len(s)
+if sl == 1 or (sl == 2 and s[0] == ""):
return cls.objects.get(
-username__iexact=s[0] if l == 1 else s[1],
+username__iexact=s[0] if sl == 1 else s[1],
local=True,
deleted__isnull=True,
)
-elif l == 2:
+elif sl == 2:
if match_linked:
return cls.objects.get(
user__mastodon_username__iexact=s[0],
@@ -256,7 +256,7 @@ class APIdentity(models.Model):
if i:
return i
raise cls.DoesNotExist(f"Identity not found @{handler}")
-elif l == 3 and s[0] == "":
+elif sl == 3 and s[0] == "":
i = cls.get_remote(s[1], s[2])
if i:
return i

@@ -78,4 +78,4 @@ class Task(models.Model):
msg.error(task.user, f"[{task.type}] {task.message}")
def run(self) -> None:
-raise NotImplemented
+raise NotImplementedError("subclass must implement this")
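The `raise NotImplemented` → `raise NotImplementedError(...)` change fixes ruff's F901. `NotImplemented` is the sentinel value that binary dunder methods return for unsupported operands; it is not an exception class, so raising it fails with a TypeError at call time:

    class Task:
        def run(self) -> None:
            raise NotImplemented  # F901: TypeError — exceptions must derive from BaseException

    class FixedTask:
        def run(self) -> None:
            raise NotImplementedError("subclass must implement this")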

@@ -409,7 +409,7 @@ class User(AbstractUser):
if name.startswith("~"):
try:
query_kwargs = {"pk": int(name[1:])}
-except:
+except Exception:
return None
elif name.startswith("@"):
query_kwargs = {

@@ -79,7 +79,7 @@ def fetch_refresh(request):
try:
i = APIdentity.get_by_handle(handle)
return HTTPResponseHXRedirect(i.url)
-except:
+except Exception:
retry = int(request.GET.get("retry", 0)) + 1
if retry > 10:
return render(request, "users/fetch_identity_failed.html")