lib.itmens/catalog/search/views.py

177 lines
5.2 KiB
Python
Raw Normal View History

2023-01-05 03:06:13 -05:00
import logging
2023-08-13 18:00:10 -04:00
import re
import django_rq
from django.conf import settings
2023-02-04 00:45:17 -05:00
from django.contrib.auth.decorators import login_required
from django.core.cache import cache
from django.core.exceptions import BadRequest
from django.shortcuts import redirect, render
from django.utils.translation import gettext_lazy as _
from rq.job import Job
2023-07-12 01:11:15 -04:00
from catalog.common.models import ItemCategory, SiteName
2023-01-05 03:06:13 -05:00
from catalog.common.sites import AbstractSite, SiteManager
from common.config import PAGE_LINK_NUMBER
2023-08-13 18:00:10 -04:00
from common.utils import HTTPResponseHXRedirect, PageLinksGenerator
from users.views import query_identity
from ..models import *
2023-01-05 03:06:13 -05:00
from .external import ExternalSources
from .models import enqueue_fetch, get_fetch_lock, query_index
2023-01-05 03:06:13 -05:00
_logger = logging.getLogger(__name__)
def fetch_refresh(request, job_id):
    """Poll a background fetch job and answer an HTMX polling request.

    Returns an HX redirect to the fetched item once the job has produced a
    URL, a failure partial when the job failed (or polling exceeded 10
    retries), or a refresh partial telling the client to poll again with a
    growing delay while the job is still running.
    """
    try:
        job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
        # return value is the item's URL on success, None while still pending
        item_url = job.return_value()
    except Exception:
        # job id unknown / queue backend error: treat the fetch as failed
        item_url = "-"
    if not item_url:
        # job still running; schedule the next client-side poll
        retry = int(request.GET.get("retry", 0)) + 1
        if retry > 10:
            return render(request, "_fetch_failed.html")
        return render(
            request,
            "_fetch_refresh.html",
            {"job_id": job_id, "retry": retry, "delay": retry * 2},
        )
    if item_url == "-":  # sentinel meaning "fetch failed"
        return render(request, "_fetch_failed.html")
    return HTTPResponseHXRedirect(item_url)
2023-05-22 14:52:02 -04:00
def fetch(request, url, is_refetch: bool = False, site: AbstractSite | None = None):
    """Resolve *url* to a catalog item and fetch it in the background.

    If the item already exists and this is not a re-fetch, redirect straight
    to it; otherwise enqueue a fetch job (when allowed) and render a pending
    page that polls for the result. Raises BadRequest when no site adapter
    recognizes the URL.
    """
    resolved = site or SiteManager.get_site_by_url(url)
    if not resolved:
        raise BadRequest()
    existing = resolved.get_item()
    if existing:
        if not is_refetch:
            return redirect(existing.url)
        # record the forced re-fetch in the item's edit log
        existing.log_action(
            {
                "!refetch": [url, None],
            }
        )
    job_id = (
        enqueue_fetch(url, is_refetch, request.user)
        if is_refetch or get_fetch_lock()
        else None
    )
    context = {
        "site": resolved,
        "sites": SiteName.labels,
        "job_id": job_id,
    }
    return render(request, "fetch_pending.html", context)
2023-07-12 01:11:15 -04:00
def visible_categories(request):
    """Return the item categories not hidden by the current user.

    The computed list is cached in the session under "p_categories" so the
    preference lookup only happens once per session.
    """
    cached = request.session.get("p_categories", None)
    if cached is not None:
        return cached
    if request.user.is_authenticated:
        hidden = request.user.preference.hidden_categories
    else:
        hidden = []
    visible = [c for c in item_categories() if c.value not in hidden]
    request.session["p_categories"] = visible
    return visible
2023-01-05 03:06:13 -05:00
def search(request):
    """Render catalog search results for a keyword and/or tag query.

    Special query forms handled before hitting the index:
    - queries starting with "@" (or full-width "＠") search identities;
    - a URL on this site's own domain is redirected to directly;
    - a URL recognized by a site adapter triggers a fetch of that item;
    - any other URL is redirected to when the "r" parameter is present.
    """
    keywords = request.GET.get("q", default="").strip()
    # "@user" / full-width "＠user" looks up an identity, not an item;
    # normalize the full-width sign before passing it on
    if re.match(r"^[@＠]", keywords):
        return query_identity(request, keywords.replace("＠", "@"))
    category = request.GET.get("c", default="all").strip().lower()
    hide_category = False
    if category == "all" or not category:
        category = None
        categories = visible_categories(request)
    elif category == "movietv":
        categories = [ItemCategory.Movie, ItemCategory.TV]
    else:
        try:
            categories = [ItemCategory(category)]
            hide_category = True
        except ValueError:
            # unknown category value: fall back to the user's visible set
            categories = visible_categories(request)
    tag = request.GET.get("tag", default="").strip()
    p = request.GET.get("page", default="1")
    p = int(p) if p.isdigit() else 1
    if not (keywords or tag):
        return render(
            request,
            "search_results.html",
            {
                "items": None,
                "sites": SiteName.labels,
            },
        )
    if keywords.find("://") > 0:
        host = keywords.split("://")[1].split("/")[0]
        if host == settings.SITE_INFO["site_domain"]:
            return redirect(keywords)
        site = SiteManager.get_site_by_url(keywords)
        if site:
            return fetch(request, keywords, False, site)
        if request.GET.get("r"):
            return redirect(keywords)
    items, num_pages, _, dup_items = query_index(keywords, categories, tag, p)
    return render(
        request,
        "search_results.html",
        {
            "items": items,
            "dup_items": dup_items,
            "pagination": PageLinksGenerator(PAGE_LINK_NUMBER, p, num_pages),
            "sites": SiteName.labels,
            "hide_category": hide_category,
        },
    )
@login_required
def external_search(request):
    """Search external sites for items not yet in the local catalog.

    Results whose source URLs were already shown — tracked via a cache entry
    keyed by category and keywords — are filtered out so paginated requests
    do not repeat entries.
    """
    category = request.GET.get("c", default="all").strip().lower()
    if category == "all":
        category = None
    keywords = request.GET.get("q", default="").strip()
    # tolerate a missing or non-numeric page parameter instead of raising
    # ValueError (HTTP 500), matching how search() parses its page number
    page = request.GET.get("page", default="1")
    page_number = int(page) if page.isdigit() else 1
    items = ExternalSources.search(category, keywords, page_number) if keywords else []
    cache_key = f"search_{category}_{keywords}"
    dedupe_urls = cache.get(cache_key, [])
    items = [i for i in items if i.source_url not in dedupe_urls]
    return render(
        request,
        "external_search_results.html",
        {
            "external_items": items,
        },
    )
2023-01-23 16:31:30 -05:00
@login_required
def refetch(request):
    """Force a re-fetch of an item from its source URL.

    Accepts POST only and requires a non-empty "url" form field; anything
    else is rejected with BadRequest.
    """
    source_url = request.POST.get("url") if request.method == "POST" else None
    if not source_url:
        raise BadRequest()
    return fetch(request, source_url, True)