improve lint

This commit is contained in:
Your Name 2025-01-20 10:55:12 -05:00 committed by Henri Dickson
parent 1738280a2c
commit 47cffef24b
2 changed files with 14 additions and 15 deletions

View file

@@ -10,7 +10,7 @@ ResourceContent persists as an ExternalResource which may link to an Item
import json
import re
from dataclasses import dataclass, field
from typing import Type, TypeVar
from typing import TYPE_CHECKING, Type, TypeVar
import django_rq
import requests
@@ -20,6 +20,9 @@ from validators import url as url_validate
from .models import ExternalResource, IdealIdTypes, IdType, Item, SiteName
if TYPE_CHECKING:
from ..search.models import ExternalSearchResultItem
@dataclass
class ResourceContent:
@@ -92,12 +95,12 @@ class AbstractSite:
)
return self.resource
# add this method to subclass to enable external search
# @classmethod
# async def search_task(
# cls, query: str, page: int, category: str, page_size:int
# ) -> list[ExternalSearchResultItem]:
# return []
@classmethod
async def search_task(
    cls, q: str, page: int, category: str, page_size: int
) -> "list[ExternalSearchResultItem]":
    """Default external-search hook: yields no results.

    Subclasses override this coroutine to enable external search for the site.
    """
    return []
def scrape(self) -> ResourceContent:
"""subclass should implement this, return ResourceContent object"""
@@ -352,9 +355,7 @@ class SiteManager:
def get_sites_for_search():
    """Return the site classes eligible for external search.

    Honors settings.SEARCH_SITES:
    - ["-"]          -> search disabled, no sites
    - ["*"] or empty -> every registered site
    - otherwise      -> only sites whose SITE_NAME value is listed
    """
    if settings.SEARCH_SITES == ["-"]:
        return []
    # Dead first assignment (hasattr filter) removed: every site now defines
    # search_task on the base class, so all registered sites are candidates.
    sites = SiteManager.get_all_sites()
    if settings.SEARCH_SITES == ["*"] or not settings.SEARCH_SITES:
        return sites
    return [s for s in sites if s.SITE_NAME.value in settings.SEARCH_SITES]

View file

@@ -123,7 +123,7 @@ class Bangumi(AbstractSite):
@classmethod
async def search_task(
cls, query: str, page: int, category: str, page_size: int
cls, q: str, page: int, category: str, page_size: int
) -> list[ExternalSearchResultItem]:
results = []
bgm_type = {
@@ -144,7 +144,7 @@ class Bangumi(AbstractSite):
response = await client.post(
search_url,
headers={"User-Agent": settings.NEODB_USER_AGENT},
json={"keyword": query, "filter": {"type": bgm_type[category]}},
json={"keyword": q, "filter": {"type": bgm_type[category]}},
timeout=2,
)
r = response.json()
@@ -162,9 +162,7 @@ class Bangumi(AbstractSite):
)
)
except Exception as e:
logger.error(
"Bangumi search error", extra={"query": query, "exception": e}
)
logger.error("Bangumi search error", extra={"query": q, "exception": e})
return results
def scrape(self):