search no longer block
parent 2c3f19ce8d
commit 6f58627d9f
6 changed files with 233 additions and 6 deletions
@@ -21,8 +21,8 @@ _logger = logging.getLogger(__name__)
 class ResourceContent:
     lookup_ids: dict = field(default_factory=dict)
     metadata: dict = field(default_factory=dict)
-    cover_image: bytes | None = None
-    cover_image_extention: str | None = None
+    cover_image: bytes = None
+    cover_image_extention: str = None

     def dict(self):
         return {"metadata": self.metadata, "lookup_ids": self.lookup_ids}
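A note on the two changed annotations: the bytes | None spelling uses PEP 604 union syntax, which is evaluated at class-definition time and therefore needs Python 3.10+ (or from __future__ import annotations), while the bare bytes = None spelling leaves the None default outside the declared type. A minimal standalone sketch of the same fields in the version-agnostic Optional form, assuming ResourceContent is a dataclass as the field(default_factory=...) defaults suggest:

# Sketch only, not part of the commit; names mirror the diff above.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class ResourceContentSketch:  # hypothetical stand-in for ResourceContent
    lookup_ids: dict = field(default_factory=dict)
    metadata: dict = field(default_factory=dict)
    cover_image: Optional[bytes] = None
    cover_image_extention: Optional[str] = None  # field name spelled as in the diff


print(ResourceContentSketch())  # ResourceContentSketch(lookup_ids={}, metadata={}, cover_image=None, cover_image_extention=None)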
@@ -122,7 +122,7 @@ class AbstractSite:
         auto_link=True,
         preloaded_content=None,
         ignore_existing_content=False,
-    ) -> ExternalResource | None:
+    ) -> ExternalResource:
         """
         Returns an ExternalResource in scraped state if possible

@@ -196,7 +196,7 @@ class SiteManager:
         return SiteManager.registry[typ]() if typ in SiteManager.registry else None

     @staticmethod
-    def get_site_by_url(url: str) -> AbstractSite | None:
+    def get_site_by_url(url: str) -> AbstractSite:
         if not url:
             return None
         cls = next(
catalog/templates/fetch_failed.html (new file, 3 additions)
@@ -0,0 +1,3 @@
+<p>
+无法加载条目。部分网站可能删除条目或隐藏条目为仅登录可见,欢迎在本站手工添加这些条目。
+</p>
catalog/templates/fetch_pending.html (new file, 82 additions)
@@ -0,0 +1,82 @@
+{% load static %}
+{% load i18n %}
+{% load l10n %}
+{% load humanize %}
+{% load admin_url %}
+{% load mastodon %}
+{% load oauth_token %}
+{% load truncate %}
+{% load highlight %}
+{% load thumb %}
+<!DOCTYPE html>
+<html lang="en">
+
+<head>
+  <meta charset="UTF-8">
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  <title>{{ site_name }} - {% trans '搜索结果' %}</title>
+  <script src="https://cdn.staticfile.org/jquery/3.6.1/jquery.min.js"></script>
+  <script src="https://cdnjs.cloudflare.com/ajax/libs/htmx/1.8.4/htmx.min.js"></script>
+  <script src="{% static 'lib/js/rating-star.js' %}"></script>
+  <script src="{% static 'js/rating-star-readonly.js' %}"></script>
+  <link rel="stylesheet" href="{% static 'css/boofilsic.min.css' %}">
+  <link rel="stylesheet" href="{% static 'lib/css/rating-star.css' %}">
+  <link rel="stylesheet" href="{% static 'lib/css/collection.css' %}">
+</head>
+
+<body>
+  <div id="page-wrapper">
+    <div id="content-wrapper">
+      {% include 'partial/_navbar.html' %}
+
+      <section id="content">
+        <div class="grid">
+          <div class="grid__main">
+            <div class="main-section-wrapper">
+              <div>
+                {% trans '正在连线' %}{{ site.SITE_NAME }}
+                <div hx-get="{% url 'catalog:fetch_refresh' job_id %}" hx-trigger="load delay:2s" hx-swap="outerHTML"></div>
+                <div id="spinner">
+                  <div class="spinner">
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                    <div></div>
+                  </div>
+                </div>
+              </div>
+
+            </div>
+
+          </div>
+
+          <div class="grid__aside">
+            <div class="aside-section-wrapper">
+
+            </div>
+          </div>
+        </div>
+      </section>
+    </div>
+    {% include 'partial/_footer.html' %}
+  </div>
+
+
+
+  <script>
+    document.body.addEventListener('htmx:configRequest', (event) => {
+      event.detail.headers['X-CSRFToken'] = '{{ csrf_token }}';
+    })
+  </script>
+</body>
+
+
+</html>
catalog/templates/fetch_refresh.html (new file, 1 addition)
@@ -0,0 +1 @@
+<div hx-get="{% url 'catalog:fetch_refresh' job_id %}?retry={{ retry }}" hx-trigger="load delay:{{ delay }}s" hx-swap="outerHTML"></div>
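fetch_pending.html above starts the polling loop with a fixed 2 second delay; every subsequent response from the fetch_refresh view (in catalog/views.py below) re-renders this one-line snippet with an incremented retry counter and a delay of retry * 2 seconds, a simple linear back-off that ends at fetch_failed.html. A standalone sketch of the resulting schedule:

# Schedule implied by the delay:2s trigger in fetch_pending.html and the
# retry/delay values produced by the fetch_refresh view below.
initial_delay = 2                                      # first poll, from fetch_pending.html
retry_delays = [retry * 2 for retry in range(1, 11)]   # polls rendered for retry 1..10
print(retry_delays)                                    # [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
print(initial_delay + sum(retry_delays))               # about 112 seconds before fetch_failed.html is shown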
@@ -43,5 +43,7 @@ urlpatterns = [
         mark_list,
         name="mark_list",
     ),
+    path("search2/", search, name="search"),
+    path("fetch_refresh/<str:job_id>", fetch_refresh, name="fetch_refresh"),
     path("api/", api.urls),
 ]
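The fetch_refresh/<str:job_id> route is the one the polling snippets above resolve via {% url 'catalog:fetch_refresh' job_id %}. A small sketch of resolving the same routes from Python, assuming this urlconf is included under the catalog namespace as those template tags imply (the final path prefix depends on the project-level urls.py):

# Hypothetical usage inside the project; requires Django settings and the full
# URL configuration to be loaded.
from django.urls import reverse

refresh_url = reverse("catalog:fetch_refresh", args=["0123456789abcdef"])
search_url = reverse("catalog:search")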
catalog/views.py (143 changed lines)
@@ -1,13 +1,21 @@
+import uuid
 import logging
 from django.shortcuts import render, get_object_or_404, redirect, reverse
 from django.contrib.auth.decorators import login_required, permission_required
 from django.utils.translation import gettext_lazy as _
-from django.http import HttpResponseBadRequest, HttpResponseServerError, HttpResponse
-from django.core.exceptions import ObjectDoesNotExist, PermissionDenied
+from django.http import (
+    HttpResponseBadRequest,
+    HttpResponseServerError,
+    HttpResponse,
+    HttpResponseRedirect,
+)
+from django.core.exceptions import BadRequest, ObjectDoesNotExist, PermissionDenied
 from django.db import IntegrityError, transaction
 from django.db.models import Count
 from django.utils import timezone
 from django.core.paginator import Paginator
+from polymorphic.base import django
+from catalog.common.sites import AbstractSite, SiteManager
 from mastodon import mastodon_request_included
 from mastodon.models import MastodonApplication
 from mastodon.api import share_mark, share_review
@@ -20,6 +28,8 @@ from journal.models import query_visible, query_following
 from common.utils import PageLinksGenerator
 from common.views import PAGE_LINK_NUMBER
 from journal.models import ShelfTypeNames
+import django_rq
+from rq.job import Job

 _logger = logging.getLogger(__name__)

@@ -28,6 +38,14 @@ NUM_REVIEWS_ON_ITEM_PAGE = 5
 NUM_REVIEWS_ON_LIST_PAGE = 20


+class HTTPResponseHXRedirect(HttpResponseRedirect):
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self["HX-Redirect"] = self["Location"]
+
+    status_code = 200
+
+
 def retrieve_by_uuid(request, item_uid):
     item = get_object_or_404(Item, uid=item_uid)
     return redirect(item.url)
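HTTPResponseHXRedirect is how the polling view below escapes the htmx swap cycle: htmx performs a full client-side navigation when a response carries the HX-Redirect header, whereas an actual 302 would be followed transparently by the browser's XHR and its body swapped into the polling div instead, which is why the class pins status_code to 200. A standalone demonstration, not part of the commit, that configures bare-bones Django settings just so a response object can be built:

# Check that the helper keeps a 200 status while carrying both the Location
# and HX-Redirect headers htmx looks for.
from django.conf import settings

if not settings.configured:
    settings.configure()  # minimal settings so an HttpResponse can be constructed

from django.http import HttpResponseRedirect


class HTTPResponseHXRedirect(HttpResponseRedirect):
    status_code = 200  # stay 2xx so htmx, not the browser, handles the redirect

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self["HX-Redirect"] = self["Location"]


resp = HTTPResponseHXRedirect("/book/some-item")
print(resp.status_code, resp["Location"], resp["HX-Redirect"])  # 200 /book/some-item /book/some-item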
@@ -134,3 +152,124 @@ def review_list(request, item_path, item_uuid):
             "item": item,
         },
     )
+
+
+def fetch_task(url):
+    try:
+        site = SiteManager.get_site_by_url(url)
+        site.get_resource_ready()
+        item = site.get_item()
+        return item.url if item else "-"
+    except Exception:
+        return "-"
+
+
+def fetch_refresh(request, job_id):
+    retry = request.GET
+    job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
+    print(job_id)
+    print(job)
+    item_url = job.result if job else "-"  # FIXME job.return_value() in rq 1.12
+    if item_url:
+        if item_url == "-":
+            return render(request, "fetch_failed.html")
+        else:
+            return HTTPResponseHXRedirect(item_url)
+    else:
+        retry = int(request.GET.get("retry", 0)) + 1
+        if retry > 10:
+            return render(request, "fetch_failed.html")
+        else:
+            return render(
+                request,
+                "fetch_refresh.html",
+                {"job_id": job_id, "retry": retry, "delay": retry * 2},
+            )
+
+
+def fetch(request, url, site: AbstractSite = None):
+    if not site:
+        site = SiteManager.get_site_by_url(keywords)
+        if not site:
+            return HttpResponseBadRequest()
+    item = site.get_item()
+    if item:
+        return redirect(item.url)
+    job_id = uuid.uuid4().hex
+    django_rq.get_queue("fetch").enqueue(fetch_task, url, job_id=job_id)
+    return render(
+        request,
+        "fetch_pending.html",
+        {
+            "site": site,
+            "job_id": job_id,
+        },
+    )
+
+
+def search(request):
+    category = request.GET.get("c", default="all").strip().lower()
+    if category == "all":
+        category = None
+    keywords = request.GET.get("q", default="").strip()
+    tag = request.GET.get("tag", default="").strip()
+    p = request.GET.get("page", default="1")
+    page_number = int(p) if p.isdigit() else 1
+    if not (keywords or tag):
+        return render(
+            request,
+            "common/search_result.html",
+            {
+                "items": None,
+            },
+        )
+
+    if request.user.is_authenticated and keywords.find("://") > 0:
+        site = SiteManager.get_site_by_url(keywords)
+        if site:
+            return fetch(request, keywords, site)
+    if settings.SEARCH_BACKEND is None:
+        # return limited results if no SEARCH_BACKEND
+        result = {
+            "items": Items.objects.filter(title__like=f"%{keywords}%")[:10],
+            "num_pages": 1,
+        }
+    else:
+        result = Indexer.search(keywords, page=page_number, category=category, tag=tag)
+    keys = []
+    items = []
+    urls = []
+    for i in result.items:
+        key = (
+            i.isbn
+            if hasattr(i, "isbn")
+            else (i.imdb_code if hasattr(i, "imdb_code") else None)
+        )
+        if key is None:
+            items.append(i)
+        elif key not in keys:
+            keys.append(key)
+            items.append(i)
+            urls.append(i.source_url)
+        i.tag_list = i.all_tag_list[:TAG_NUMBER_ON_LIST]
+
+    if request.path.endswith(".json/"):
+        return JsonResponse(
+            {
+                "num_pages": result.num_pages,
+                "items": list(map(lambda i: i.get_json(), items)),
+            }
+        )
+
+    request.session["search_dedupe_urls"] = urls
+    return render(
+        request,
+        "common/search_result.html",
+        {
+            "items": items,
+            "pagination": PageLinksGenerator(
+                PAGE_LINK_NUMBER, page_number, result.num_pages
+            ),
+            "categories": ["book", "movie", "music", "game"],
+        },
+    )
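fetch() enqueues fetch_task on the "fetch" queue and immediately renders the pending page, while fetch_refresh() later looks the job up by the uuid4 hex id chosen at enqueue time; that hand-off is what stops a slow scrape from blocking the search request. A stripped-down sketch of the same enqueue/poll pattern with plain rq, assuming a Redis instance at the default local address (in the project the connection comes from django_rq's queue configuration instead):

# Sketch only: the django_rq calls in the views map onto this plain-rq pattern.
# Assume this code lives in an importable module (e.g. a tasks module); rq
# refuses to enqueue functions defined in __main__.
from redis import Redis
from rq import Queue
from rq.job import Job


def fetch_task(url: str) -> str:
    # placeholder for SiteManager.get_site_by_url(url) + get_resource_ready()
    return f"/items/created-from/{url}"


redis_conn = Redis()                              # assumption: local Redis
queue = Queue("fetch", connection=redis_conn)

job_id = "0123456789abcdef"                       # the view uses uuid.uuid4().hex
queue.enqueue(fetch_task, "https://example.com/work/1", job_id=job_id)

# ... later, from the polling request:
job = Job.fetch(id=job_id, connection=redis_conn)
print(job.get_status(), job.result)               # result stays None until a worker finishes;
                                                  # rq >= 1.12 prefers job.return_value(), per the FIXME above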
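The loop at the end of search() collapses results that share an isbn or imdb_code, keeps the first hit for each key, and stores the kept keyed items' source_url values in the session under search_dedupe_urls. The same logic pulled out as a small standalone function, with SimpleNamespace standing in for real catalog items:

# Simplified restatement of the de-duplication step in search() above.
from types import SimpleNamespace


def dedupe(results):
    keys, items, urls = [], [], []
    for i in results:
        key = (
            i.isbn
            if hasattr(i, "isbn")
            else (i.imdb_code if hasattr(i, "imdb_code") else None)
        )
        if key is None:
            items.append(i)        # items without an identifier always pass through
        elif key not in keys:
            keys.append(key)       # first item seen for an isbn/imdb_code wins
            items.append(i)
            urls.append(i.source_url)
    return items, urls


results = [
    SimpleNamespace(isbn="9780134685991", source_url="https://a.example/1"),
    SimpleNamespace(isbn="9780134685991", source_url="https://b.example/1"),  # duplicate, dropped
    SimpleNamespace(title="no identifier"),
]
items, urls = dedupe(results)
print(len(items), urls)  # 2 ['https://a.example/1']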