import hashlib
import logging
import uuid

import django_rq
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.cache import cache
from django.core.exceptions import BadRequest
from django.http import HttpResponseRedirect
from django.shortcuts import redirect, render
from django.utils.translation import gettext_lazy as _
from rq.job import Job

from catalog.common.models import ItemCategory, SiteName
from catalog.common.sites import AbstractSite, SiteManager
from common.config import PAGE_LINK_NUMBER
from common.utils import PageLinksGenerator

from ..models import *
from .external import ExternalSources
from .models import enqueue_fetch, get_fetch_lock, query_index

_logger = logging.getLogger(__name__)


class HTTPResponseHXRedirect(HttpResponseRedirect):
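    """An HttpResponseRedirect that HTMX follows on the client side.

    HTMX performs a full-page redirect when it sees the HX-Redirect
    response header; the status stays 200 so the browser does not
    transparently follow a 3xx before HTMX can act on it.
    """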
    status_code = 200

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self["HX-Redirect"] = self["Location"]


@login_required
def fetch_refresh(request, job_id):
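    """Report the status of a fetch job to the polling client.

    Redirects (via HX-Redirect) to the item page once the job returns a
    URL; otherwise re-renders the polling snippet with an increased retry
    counter and delay, giving up after 10 retries.
    """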
    try:
        job = Job.fetch(id=job_id, connection=django_rq.get_connection("fetch"))
        item_url = job.return_value()
    except Exception:
        item_url = "-"  # treat a missing or expired job as a failed fetch
    if item_url:
        if item_url == "-":
            return render(request, "_fetch_failed.html")
        else:
            return HTTPResponseHXRedirect(item_url)
    else:
        # job still running: ask the client to poll again with a growing delay
        retry = int(request.GET.get("retry", 0)) + 1
        if retry > 10:
            return render(request, "_fetch_failed.html")
        else:
            return render(
                request,
                "_fetch_refresh.html",
                {"job_id": job_id, "retry": retry, "delay": retry * 2},
            )


def fetch(request, url, is_refetch: bool = False, site: AbstractSite | None = None):
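    """Fetch an item from a remote URL, showing a pending page while a job runs.

    If the item is already in the catalog and this is not a refetch,
    redirect to it immediately; otherwise enqueue a background fetch job.
    """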
    if not site:
        site = SiteManager.get_site_by_url(url)
        if not site:
            raise BadRequest()
    item = site.get_item()
    if item and not is_refetch:
        return redirect(item.url)
    if item and is_refetch:
        item.log_action(
            {
                "!refetch": [url, None],
            }
        )
    job_id = None
    if is_refetch or get_fetch_lock():
        job_id = enqueue_fetch(url, is_refetch, request.user)
    return render(
        request,
        "fetch_pending.html",
        {
            "site": site,
            "sites": SiteName.labels,
            "job_id": job_id,
        },
    )


def visible_categories(request):
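    """Return item categories not hidden by the user, cached in the session."""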
    vc = request.session.get("p_categories", None)
    if vc is None:
        vc = [
            x
            for x in item_categories()
            if x.value
            not in (
                request.user.preference.hidden_categories
                if request.user.is_authenticated
                else []
            )
        ]
        request.session["p_categories"] = vc
    return vc


def search(request):
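    """Search the catalog index by keywords or tag.

    A keyword that looks like a URL is fetched as an item instead of
    being searched.
    """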
    category = request.GET.get("c", default="all").strip().lower()
    if category == "all" or not category:
        category = None
        categories = visible_categories(request)
    elif category == "movietv":
        categories = [ItemCategory.Movie, ItemCategory.TV]
    else:
        categories = [ItemCategory(category)]
    keywords = request.GET.get("q", default="").strip()
    tag = request.GET.get("tag", default="").strip()
    p = request.GET.get("page", default="1")
    p = int(p) if p.isdigit() else 1
    if not (keywords or tag):
        return render(
            request,
            "search_results.html",
            {
                "items": None,
                "sites": SiteName.labels,
            },
        )
    if keywords.find("://") > 0:
        # a full URL was typed into the search box: fetch it as an item instead
        site = SiteManager.get_site_by_url(keywords)
        if site:
            return fetch(request, keywords, False, site)

    # "__" avoids shadowing the gettext alias "_" imported above
    items, num_pages, __, dup_items = query_index(keywords, categories, tag, p)
    return render(
        request,
        "search_results.html",
        {
            "items": items,
            "dup_items": dup_items,
            "pagination": PageLinksGenerator(PAGE_LINK_NUMBER, p, num_pages),
            "sites": SiteName.labels,
            "hide_category": category is not None and category != "movietv",
        },
    )


@login_required
def external_search(request):
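    """Search external sources, dropping results whose URLs are cached as duplicates for this query."""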
    category = request.GET.get("c", default="all").strip().lower()
    if category == "all":
        category = None
    keywords = request.GET.get("q", default="").strip()
    p = request.GET.get("page", default="1")
    page_number = int(p) if p.isdigit() else 1  # guard against non-numeric page
    items = ExternalSources.search(category, keywords, page_number) if keywords else []
    cache_key = f"search_{category}_{keywords}"
    dedupe_urls = cache.get(cache_key, [])
    items = [i for i in items if i.source_url not in dedupe_urls]

    return render(
        request,
        "external_search_results.html",
        {
            "external_items": items,
        },
    )


@login_required
def refetch(request):
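    """Force re-fetching an item from its source URL; POST only."""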
    if request.method != "POST":
        raise BadRequest()
    url = request.POST.get("url")
    if not url:
        raise BadRequest()
    return fetch(request, url, True)