diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 97ee507a..91985527 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -20,8 +20,14 @@ repos: - id: requirements-txt-fixer - id: mixed-line-ending + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.5 + hooks: + - id: ruff + # - id: ruff-format + - repo: https://github.com/pycqa/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort args: ["--profile=black"] diff --git a/boofilsic/settings.py b/boofilsic/settings.py index c1ec1a38..23a73e58 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -10,7 +10,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) try: with open(os.path.join(BASE_DIR, "build_version")) as f: NEODB_VERSION = __version__ + "-" + f.read().strip() -except: +except Exception: NEODB_VERSION = __version__ + "-unknown" diff --git a/catalog/api.py b/catalog/api.py index 869c0813..23c34ed9 100644 --- a/catalog/api.py +++ b/catalog/api.py @@ -1,5 +1,6 @@ +from typing import Any, Callable, List, Optional, Tuple, Type + from django.http import HttpResponse -from django.utils.translation import gettext_lazy as _ from ninja import Schema from common.api import * diff --git a/catalog/common/__init__.py b/catalog/common/__init__.py index fe04dde8..b39a38ed 100644 --- a/catalog/common/__init__.py +++ b/catalog/common/__init__.py @@ -4,7 +4,7 @@ from .models import * from .scrapers import * from .sites import * -__all__ = ( +__all__ = ( # noqa "IdType", "SiteName", "ItemType", diff --git a/catalog/common/downloaders.py b/catalog/common/downloaders.py index dde9f2b5..2078086a 100644 --- a/catalog/common/downloaders.py +++ b/catalog/common/downloaders.py @@ -172,7 +172,7 @@ class BasicDownloader: encoding="utf-8", ) as fp: fp.write(resp.text) - except: + except Exception: _logger.warn("Save downloaded data failed.") else: resp = MockResponse(self.url) diff --git a/catalog/common/models.py b/catalog/common/models.py index 
65a8d565..9da4040e 100644 --- a/catalog/common/models.py +++ b/catalog/common/models.py @@ -385,7 +385,7 @@ class Item(SoftDeleteMixin, PolymorphicModel): if to_item.merged_to_item is not None: raise ValueError("cannot merge to item which is merged to another item") if to_item.__class__ != self.__class__: - raise ValueError(f"cannot merge to item in a different model") + raise ValueError("cannot merge to item in a different model") self.log_action({"!merged": [str(self.merged_to_item), str(to_item)]}) self.merged_to_item = to_item self.save() @@ -448,7 +448,7 @@ class Item(SoftDeleteMixin, PolymorphicModel): b62 = r[0] try: item = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62))) - except: + except Exception: item = None return item @@ -618,7 +618,7 @@ class ExternalResource(models.Model): try: site = self.get_site() return site.SITE_NAME if site else SiteName.Unknown - except: + except Exception: _logger.warning(f"Unknown site for {self}") return SiteName.Unknown diff --git a/catalog/common/sites.py b/catalog/common/sites.py index c52c25d5..3c447e77 100644 --- a/catalog/common/sites.py +++ b/catalog/common/sites.py @@ -10,7 +10,7 @@ import json import logging import re from dataclasses import dataclass, field -from typing import Callable, Type +from typing import Any, Callable, Type, TypeVar import django_rq import requests @@ -268,8 +268,6 @@ class AbstractSite: return p -from typing import Any, Callable, Type, TypeVar - T = TypeVar("T") @@ -313,7 +311,7 @@ class SiteManager: ) if cls: url = url2 - except: + except Exception: pass if cls is None: cls = next( @@ -327,7 +325,7 @@ class SiteManager: @staticmethod def get_site_by_id(id_type: IdType, id_value: str) -> AbstractSite | None: - if not id_type in SiteManager.registry: + if id_type not in SiteManager.registry: return None cls = SiteManager.registry[id_type] return cls(id_value=id_value) diff --git a/catalog/search/external.py b/catalog/search/external.py index 40a84f02..297d6b5a 100644 --- 
a/catalog/search/external.py +++ b/catalog/search/external.py @@ -316,7 +316,7 @@ class Fediverse: url = f"https://{host}{item['url']}" # FIXME update API and use abs urls try: cat = ItemCategory(item["category"]) - except: + except Exception: cat = "" results.append( SearchResultItem( diff --git a/catalog/search/models.py b/catalog/search/models.py index 3fa13d16..2ef4efe1 100644 --- a/catalog/search/models.py +++ b/catalog/search/models.py @@ -30,7 +30,7 @@ class DbIndexer: @classmethod def search(cls, q, page=1, categories=None, tag=None, sort=None): - result = lambda: None + result = lambda: None # noqa result.items = Item.objects.filter(title__contains=q)[:10] result.num_pages = 1 result.count = len(result.items) @@ -83,10 +83,10 @@ def query_index(keywords, categories=None, tag=None, page=1, prepare_external=Tr if hasattr(i, "works"): my_key += [i[0] for i in i.works.all().values_list("id")] if len(my_key): - l = len(keys) + len(my_key) + sl = len(keys) + len(my_key) keys.update(my_key) # check and skip dup with same imdb or isbn or works id - if len(keys) < l: + if len(keys) < sl: duplicated_items.append(i) else: items.append(i) @@ -135,7 +135,7 @@ def enqueue_fetch(url, is_refetch, user=None): try: job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch")) in_progress = job.get_status() in ["queued", "started"] - except: + except Exception: in_progress = False if not in_progress: django_rq.get_queue("fetch").enqueue( diff --git a/catalog/search/typesense.py b/catalog/search/typesense.py index c4f43441..961f4d9b 100644 --- a/catalog/search/typesense.py +++ b/catalog/search/typesense.py @@ -80,7 +80,7 @@ def enqueue_update_index(item_ids): ) if job.get_status() in ["queued", "scheduled"]: job.cancel() - except: + except Exception: pass # using rq's built-in scheduler here, it can be switched to other similar implementations django_rq.get_queue(_PENDING_INDEX_QUEUE).enqueue_in( @@ -184,7 +184,6 @@ class Indexer: @classmethod def check(cls): 
client = typesense.Client(settings.TYPESENSE_CONNECTION) - wait = 5 if not client.operations.is_healthy(): raise ValueError("Typesense: server not healthy") idx = client.collections[settings.TYPESENSE_INDEX_NAME] @@ -209,7 +208,7 @@ class Indexer: f"Typesense: index {settings.TYPESENSE_INDEX_NAME} has {i['num_documents']} documents" ) return - except: + except Exception: client.collections.create(cls.config()) logger.info( f"Typesense: index {settings.TYPESENSE_INDEX_NAME} created" diff --git a/catalog/search/views.py b/catalog/search/views.py index af50aa7c..b98b94eb 100644 --- a/catalog/search/views.py +++ b/catalog/search/views.py @@ -29,7 +29,7 @@ def fetch_refresh(request, job_id): try: job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch")) item_url = job.return_value() - except: + except Exception: item_url = "-" if item_url: if item_url == "-": @@ -109,7 +109,7 @@ def search(request): try: categories = [ItemCategory(category)] hide_category = True - except: + except Exception: categories = visible_categories(request) tag = request.GET.get("tag", default="").strip() p = request.GET.get("page", default="1") @@ -134,7 +134,7 @@ def search(request): if request.GET.get("r"): return redirect(keywords) - items, num_pages, _, dup_items = query_index(keywords, categories, tag, p) + items, num_pages, __, dup_items = query_index(keywords, categories, tag, p) return render( request, "search_results.html", diff --git a/catalog/sites/apple_music.py b/catalog/sites/apple_music.py index 291fea8e..4d9b6006 100644 --- a/catalog/sites/apple_music.py +++ b/catalog/sites/apple_music.py @@ -62,7 +62,7 @@ class AppleMusic(AbstractSite): content = BasicDownloader(url, headers=self.headers).download().html() _logger.info(f"got localized content from {url}") break - except: + except Exception: pass if content is None: raise ParseError(self, f"localized content for {self.url}") diff --git a/catalog/sites/bandcamp.py b/catalog/sites/bandcamp.py index 
b99775c7..ce04e5cd 100644 --- a/catalog/sites/bandcamp.py +++ b/catalog/sites/bandcamp.py @@ -71,7 +71,7 @@ class Bandcamp(AbstractSite): release_date = ( release_datetime.strftime("%Y-%m-%d") if release_datetime else None ) - except: + except Exception: release_date = None duration = None company = None diff --git a/catalog/sites/goodreads.py b/catalog/sites/goodreads.py index 530e3b28..2013a741 100644 --- a/catalog/sites/goodreads.py +++ b/catalog/sites/goodreads.py @@ -144,7 +144,7 @@ class Goodreads_Work(AbstractSite): author = self.query_str(content, "//h2/a/text()") try: first_published = self.query_str(content, "//h2/span/text()") - except: + except Exception: first_published = None pd = ResourceContent( metadata={ diff --git a/catalog/sites/rss.py b/catalog/sites/rss.py index fa76d7a6..e7195ef4 100644 --- a/catalog/sites/rss.py +++ b/catalog/sites/rss.py @@ -44,7 +44,7 @@ class RSS(AbstractSite): req.add_header("User-Agent", settings.NEODB_USER_AGENT) try: feed = podcastparser.parse(url, urllib.request.urlopen(req, timeout=3)) - except: + except Exception: url = url.replace("https://", "http://") req = urllib.request.Request(url) req.add_header("User-Agent", settings.NEODB_USER_AGENT) @@ -52,7 +52,7 @@ class RSS(AbstractSite): feed = podcastparser.parse( url, urllib.request.urlopen(req, timeout=3) ) - except: + except Exception: return None if settings.DOWNLOADER_SAVEDIR: pickle.dump( diff --git a/catalog/sites/steam.py b/catalog/sites/steam.py index ab42224b..002c09a5 100644 --- a/catalog/sites/steam.py +++ b/catalog/sites/steam.py @@ -50,7 +50,7 @@ class Steam(AbstractSite): brief = self.query_str( content, "//div[@class='game_description_snippet']/text()" ) - except: + except Exception: brief = "" # try Steam images if no image from IGDB if pd.cover_image is None: diff --git a/catalog/views_edit.py b/catalog/views_edit.py index fd41adea..4fd69905 100644 --- a/catalog/views_edit.py +++ b/catalog/views_edit.py @@ -214,13 +214,13 @@ def 
assign_parent(request, item_path, item_uuid): @login_required def remove_unused_seasons(request, item_path, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - l = list(item.seasons.all()) - for s in l: + sl = list(item.seasons.all()) + for s in sl: if not s.journal_exists(): s.delete() - l = [s.id for s in l] - l2 = [s.id for s in item.seasons.all()] - item.log_action({"!remove_unused_seasons": [l, l2]}) + ol = [s.id for s in sl] + nl = [s.id for s in item.seasons.all()] + item.log_action({"!remove_unused_seasons": [ol, nl]}) return redirect(item.url) diff --git a/common/models.py b/common/models.py index d4324674..abe35518 100644 --- a/common/models.py +++ b/common/models.py @@ -21,7 +21,7 @@ class BaseJob: job.cancel() registry = ScheduledJobRegistry(queue=django_rq.get_queue("cron")) registry.remove(job) - except: + except Exception: pass @classmethod diff --git a/common/setup.py b/common/setup.py index 545e0e50..da211c08 100644 --- a/common/setup.py +++ b/common/setup.py @@ -56,7 +56,7 @@ class Setup: .exists() ): logger.warning( - f"Local identities are found for other domains, there might be a configuration issue." + "Local identities are found for other domains, there might be a configuration issue." 
) TakaheConfig.objects.update_or_create( @@ -109,7 +109,7 @@ class Setup: logger.info("Default relay is disabled, unsubscribing...") Takahe.update_state(relay, "unsubscribing") else: - logger.info(f"Default relay is disabled.") + logger.info("Default relay is disabled.") else: if relay: logger.debug(f"Default relay is enabled, state: {relay.state}") diff --git a/common/templatetags/highlight.py b/common/templatetags/highlight.py index 45ab84ae..fe0c8709 100644 --- a/common/templatetags/highlight.py +++ b/common/templatetags/highlight.py @@ -20,14 +20,14 @@ def _cc(text): @stringfilter def highlight(text, search): otext = _cc(text.lower()) - l = len(text) - if l != len(otext): + sl = len(text) + if sl != len(otext): return text # in rare cases, the lowered&converted text has a different length rtext = "" words = list(set([w for w in _cc(search.strip().lower()).split(" ") if w])) words.sort(key=len, reverse=True) i = 0 - while i < l: + while i < sl: m = None for w in words: if otext[i : i + len(w)] == w: diff --git a/common/templatetags/thumb.py b/common/templatetags/thumb.py index 43715be9..6b976485 100644 --- a/common/templatetags/thumb.py +++ b/common/templatetags/thumb.py @@ -15,5 +15,5 @@ def thumb(source, alias): return source.url else: return thumbnail_url(source, alias) - except Exception as e: + except Exception: return "" diff --git a/journal/api.py b/journal/api.py index 4c8c2011..8cef0a54 100644 --- a/journal/api.py +++ b/journal/api.py @@ -522,7 +522,7 @@ def update_tag(request, tag_uuid: str, t_in: TagInSchema): tag.title = title tag.visibility = visibility tag.save() - except: + except Exception: return 409, {"message": "Tag with same title exists"} return tag diff --git a/journal/importers/douban.py b/journal/importers/douban.py index 2c1190bd..f22f5109 100644 --- a/journal/importers/douban.py +++ b/journal/importers/douban.py @@ -222,12 +222,12 @@ class DoubanImporter: rating = cells[5] try: rating_grade = int(rating) * 2 if rating else None - 
except: + except Exception: rating_grade = None tags = cells[6] if len(cells) >= 7 else "" try: tags = tags.split(",") if tags else [] - except: + except Exception: tags = [] comment = cells[7] if len(cells) >= 8 else None self.processed += 1 @@ -235,7 +235,7 @@ class DoubanImporter: if type(time) == str: time = datetime.strptime(time, "%Y-%m-%d %H:%M:%S") time = time.replace(tzinfo=_tz_sh) - except: + except Exception: time = None r = self.import_mark(url, shelf_type, comment, rating_grade, tags, time) if r == 1: @@ -385,7 +385,7 @@ class DoubanImporter: Review.objects.update_or_create( owner=self.user.identity, item=item, defaults=params ) - except: + except Exception: logger.warning(f"{prefix} update multiple review {review_url}") r = ( Review.objects.filter(owner=self.user.identity, item=item) diff --git a/journal/importers/letterboxd.py b/journal/importers/letterboxd.py index 16441834..e1e80180 100644 --- a/journal/importers/letterboxd.py +++ b/journal/importers/letterboxd.py @@ -57,7 +57,7 @@ class LetterboxdImporter(Task): try: site.get_resource_ready() return site.get_item() - except: + except Exception: imdb_url = str(iu[0]) # type:ignore logger.warning( f"Fetching {url}: TMDB {site.url} failed, try IMDB {imdb_url}" diff --git a/journal/importers/opml.py b/journal/importers/opml.py index 6c17cc0a..c06cf084 100644 --- a/journal/importers/opml.py +++ b/journal/importers/opml.py @@ -39,7 +39,7 @@ class OPMLImporter: logger.info(f"{self.user} import {feed.url}") try: res = RSS(feed.url).get_resource_ready() - except: + except Exception: res = None if not res or not res.item: logger.warning(f"{self.user} feed error {feed.url}") diff --git a/journal/models/common.py b/journal/models/common.py index e73afd2b..b4a40496 100644 --- a/journal/models/common.py +++ b/journal/models/common.py @@ -174,17 +174,17 @@ class Piece(PolymorphicModel, UserOwnedObjectMixin): b62 = r[0] try: obj = cls.objects.get(uid=uuid.UUID(int=base62.decode(b62))) - except: + except 
Exception: obj = None return obj @classmethod def update_by_ap_object(cls, owner, item, obj, post_id, visibility): - raise NotImplementedError() + raise NotImplementedError("subclass must implement this") @property def ap_object(self): - raise NotImplementedError() + raise NotImplementedError("subclass must implement this") def link_post_id(self, post_id: int): PiecePost.objects.get_or_create(piece=self, post_id=post_id) diff --git a/journal/models/itemlist.py b/journal/models/itemlist.py index 9ee44f7a..bc8f1b14 100644 --- a/journal/models/itemlist.py +++ b/journal/models/itemlist.py @@ -94,7 +94,6 @@ class List(Piece): member.delete() def update_member_order(self, ordered_member_ids): - members = self.ordered_members for m in self.members.all(): try: i = ordered_member_ids.index(m.id) diff --git a/journal/models/mark.py b/journal/models/mark.py index 2eeb6b9c..e13697ef 100644 --- a/journal/models/mark.py +++ b/journal/models/mark.py @@ -224,7 +224,7 @@ class Mark: log_entry.timestamp = created_time try: log_entry.save(update_fields=["timestamp"]) - except: + except Exception: log_entry.delete() shelfmember_changed = True if shelfmember_changed: diff --git a/journal/models/mixins.py b/journal/models/mixins.py index e319b66c..a534500f 100644 --- a/journal/models/mixins.py +++ b/journal/models/mixins.py @@ -21,7 +21,7 @@ class UserOwnedObjectMixin: owner: ForeignKey[APIdentity, Piece] visibility: int - def is_visible_to(self: "Piece | Self", viewing_user: User) -> bool: # type: ignore + def is_visible_to(self: "Piece", viewing_user: User) -> bool: # noqa # type: ignore owner = self.owner if not owner or not owner.is_active: return False diff --git a/journal/models/rating.py b/journal/models/rating.py index 5bbc94fe..bac1a70b 100644 --- a/journal/models/rating.py +++ b/journal/models/rating.py @@ -67,7 +67,7 @@ class Rating(Content): "created_time": datetime.fromisoformat(obj["published"]), "edited_time": datetime.fromisoformat(obj["updated"]), } - p, _ = 
cls.objects.update_or_create(owner=owner, item=item, defaults=d) + p = cls.objects.update_or_create(owner=owner, item=item, defaults=d)[0] p.link_post_id(post_id) return p diff --git a/journal/models/renderers.py b/journal/models/renderers.py index 5e261031..b5b39538 100644 --- a/journal/models/renderers.py +++ b/journal/models/renderers.py @@ -35,12 +35,12 @@ def render_md(s: str) -> str: def _spolier(s: str) -> str: - l = s.split(">!", 1) - if len(l) == 1: + sl = s.split(">!", 1) + if len(sl) == 1: return escape(s) - r = l[1].split("!<", 1) + r = sl[1].split("!<", 1) return ( - escape(l[0]) + escape(sl[0]) + '' + escape(r[0]) + "" diff --git a/journal/templatetags/collection.py b/journal/templatetags/collection.py index daff32fc..8aeebb1c 100644 --- a/journal/templatetags/collection.py +++ b/journal/templatetags/collection.py @@ -12,7 +12,7 @@ register = template.Library() @register.simple_tag(takes_context=True) def user_visibility_of(context, piece: UserOwnedObjectMixin): user = context["request"].user - return piece.is_visible_to(user) + return piece.is_visible_to(user) # type: ignore @register.simple_tag() diff --git a/journal/views/mark.py b/journal/views/mark.py index 3a5d455e..ade62155 100644 --- a/journal/views/mark.py +++ b/journal/views/mark.py @@ -101,7 +101,7 @@ def mark(request: AuthedHttpRequest, item_uuid): share_to_mastodon=share_to_mastodon, created_time=mark_date, ) - except PermissionDenied as e: + except PermissionDenied: _logger.warn(f"post to mastodon error 401 {request.user}") return render_relogin(request) except ValueError as e: @@ -143,13 +143,13 @@ def mark_log(request: AuthedHttpRequest, item_uuid, log_id): @login_required def comment(request: AuthedHttpRequest, item_uuid): item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid)) - if not item.class_name in ["podcastepisode", "tvepisode"]: + if item.class_name not in ["podcastepisode", "tvepisode"]: raise BadRequest("Commenting this type of items is not supported yet.") comment = 
Comment.objects.filter(owner=request.user.identity, item=item).first() if request.method == "GET": return render( request, - f"comment.html", + "comment.html", { "item": item, "comment": comment, @@ -169,7 +169,7 @@ def comment(request: AuthedHttpRequest, item_uuid): try: pos = datetime.strptime(position, "%H:%M:%S") position = pos.hour * 3600 + pos.minute * 60 + pos.second - except: + except Exception: if settings.DEBUG: raise position = None diff --git a/mastodon/jobs.py b/mastodon/jobs.py index efc5fdbd..cb9aa887 100644 --- a/mastodon/jobs.py +++ b/mastodon/jobs.py @@ -29,7 +29,7 @@ class MastodonSiteCheck(BaseJob): api_domain = site.api_domain or site.domain_name domain, api_domain, v = detect_server_info(api_domain) site.last_reachable_date = timezone.now() - except: + except Exception: logger.warning( f"Failed to detect server info for {site.domain_name}/{site.api_domain}" ) diff --git a/pyproject.toml b/pyproject.toml index 2191ecea..a30e1fab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,7 @@ [tool.pyright] exclude = [ "media", ".venv", ".git", "playground", "catalog/*/tests.py", "neodb", "**/migrations", "**/sites/douban_*", "neodb-takahe" ] reportIncompatibleVariableOverride = false +reportUnusedImport = false [tool.djlint] ignore="T002,T003,H006,H019,H020,H021,H023,H030,H031" @@ -20,5 +21,5 @@ plugins = ["mypy_django_plugin.main"] django_settings_module = "boofilsic.settings" [tool.ruff] -ignore = ['E501'] -exclude = [ "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ] +exclude = ["neodb-takahe/*", "media", ".venv", ".git", "playground", "**/tests.py", "neodb", "**/migrations", "**/commands", "**/importers", "**/sites", "legacy" ] +lint.ignore = ["F401", "F403", "F405"] diff --git a/requirements-dev.txt b/requirements-dev.txt index bed61d36..2c672eff 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -3,7 +3,8 @@ coverage django-debug-toolbar 
django-stubs djlint~=1.34.0 -isort~=5.12.0 +isort~=5.13.2 lxml-stubs pre-commit pyright==1.1.350 +ruff diff --git a/takahe/models.py b/takahe/models.py index ce12d4a4..2f12d6ee 100644 --- a/takahe/models.py +++ b/takahe/models.py @@ -1705,7 +1705,7 @@ class Block(models.Model): raise ValueError("You cannot block from a remote Identity") block = cls.maybe_get(source=source, target=target, mute=False) if block is not None: - if not block.state in ["new", "sent", "awaiting_expiry"]: + if block.state not in ["new", "sent", "awaiting_expiry"]: block.state = BlockStates.new # type:ignore block.save() else: @@ -1735,7 +1735,7 @@ class Block(models.Model): raise ValueError("You cannot mute from a remote Identity") block = cls.maybe_get(source=source, target=target, mute=True) if block is not None: - if not block in ["new", "sent", "awaiting_expiry"]: + if block.state not in ["new", "sent", "awaiting_expiry"]: block.state = BlockStates.new # type:ignore if duration: block.expires = timezone.now() + datetime.timedelta(seconds=duration) diff --git a/users/account.py b/users/account.py index ed149af5..160d7312 100644 --- a/users/account.py +++ b/users/account.py @@ -79,7 +79,7 @@ def connect(request): login_email = request.POST.get("email", "") try: EmailValidator()(login_email) - except: + except Exception: return render( request, "common/error.html", @@ -299,11 +299,11 @@ def send_verification_link(user_id, action, email): subject = f'{settings.SITE_INFO["site_name"]} - {_("注册新账号")}' url = settings.SITE_INFO["site_url"] + "/account/register_email?c=" + v msg = f"你好,\n本站没有与{email}关联的账号。你希望注册一个新账号吗?\n" - msg += f"\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n" - msg += f"\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n" + msg += "\n如果你已注册过本站或某个联邦宇宙(长毛象)实例,不必重新注册,只要用联邦宇宙身份登录本站,再关联这个电子邮件地址,即可通过邮件登录。\n" + msg += "\n如果你还没有联邦宇宙身份,可以访问这里选择实例并创建一个: https://joinmastodon.org/zh/servers\n" if settings.ALLOW_EMAIL_ONLY_ACCOUNT: msg += 
f"\n如果你不便使用联邦宇宙身份,也可以点击以下链接使用电子邮件注册一个新账号,以后再关联到联邦宇宙。\n{url}\n" - msg += f"\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。" + msg += "\n如果你没有打算用此电子邮件地址注册或登录本站,请忽略此邮件。" else: raise ValueError("Invalid action") try: @@ -451,7 +451,7 @@ def swap_login(request, token, site, refresh_token): ) else: try: - existing_user = User.objects.get( + User.objects.get( mastodon_username__iexact=username, mastodon_site__iexact=site ) messages.add_message( diff --git a/users/jobs/sync.py b/users/jobs/sync.py index d1794407..66c057a7 100644 --- a/users/jobs/sync.py +++ b/users/jobs/sync.py @@ -40,4 +40,4 @@ class MastodonUserSync(BaseJob): logger.warning(f"Skip {user} detail because of inactivity.") skip_detail = True user.refresh_mastodon_data(skip_detail) - logger.info(f"Mastodon User Sync finished.") + logger.info("Mastodon User Sync finished.") diff --git a/users/models/apidentity.py b/users/models/apidentity.py index 6a630b20..d86b158b 100644 --- a/users/models/apidentity.py +++ b/users/models/apidentity.py @@ -237,14 +237,14 @@ class APIdentity(models.Model): '@id@site' - remote activitypub identity 'id@site' """ s = handler.split("@") - l = len(s) - if l == 1 or (l == 2 and s[0] == ""): + sl = len(s) + if sl == 1 or (sl == 2 and s[0] == ""): return cls.objects.get( - username__iexact=s[0] if l == 1 else s[1], + username__iexact=s[0] if sl == 1 else s[1], local=True, deleted__isnull=True, ) - elif l == 2: + elif sl == 2: if match_linked: return cls.objects.get( user__mastodon_username__iexact=s[0], @@ -256,7 +256,7 @@ class APIdentity(models.Model): if i: return i raise cls.DoesNotExist(f"Identity not found @{handler}") - elif l == 3 and s[0] == "": + elif sl == 3 and s[0] == "": i = cls.get_remote(s[1], s[2]) if i: return i diff --git a/users/models/task.py b/users/models/task.py index 691bf73f..5e4c46a3 100644 --- a/users/models/task.py +++ b/users/models/task.py @@ -78,4 +78,4 @@ class Task(models.Model): msg.error(task.user, f"[{task.type}] {task.message}") def run(self) -> None: - raise 
NotImplemented + raise NotImplementedError("subclass must implement this") diff --git a/users/models/user.py b/users/models/user.py index b55df3bd..e768b21c 100644 --- a/users/models/user.py +++ b/users/models/user.py @@ -409,7 +409,7 @@ class User(AbstractUser): if name.startswith("~"): try: query_kwargs = {"pk": int(name[1:])} - except: + except Exception: return None elif name.startswith("@"): query_kwargs = { diff --git a/users/views.py b/users/views.py index b4c254bc..628aaf00 100644 --- a/users/views.py +++ b/users/views.py @@ -79,7 +79,7 @@ def fetch_refresh(request): try: i = APIdentity.get_by_handle(handle) return HTTPResponseHXRedirect(i.url) - except: + except Exception: retry = int(request.GET.get("retry", 0)) + 1 if retry > 10: return render(request, "users/fetch_identity_failed.html")