diff --git a/catalog/apis.py b/catalog/apis.py
index 1c9a7e77..8585a3fc 100644
--- a/catalog/apis.py
+++ b/catalog/apis.py
@@ -266,7 +266,6 @@ def get_book(request, uuid: str, response: HttpResponse):
 @paginate(PageNumberPagination)
 def get_sibling_editions_for_book(request, uuid: str, response: HttpResponse):
     i = _get_item(Edition, uuid, response)
-    print(i)
     if not isinstance(i, Edition):
         return Edition.objects.none()
     return i.sibling_items
diff --git a/catalog/collection/models.py b/catalog/collection/models.py
index b5da4902..7083851a 100644
--- a/catalog/collection/models.py
+++ b/catalog/collection/models.py
@@ -12,7 +12,7 @@ class Collection(Item):
 
     @property
     def url(self):
-        return self.journal_item.url if self.journal_item else super().url
+        return self.journal_item.url if hasattr(self, "journal_item") else super().url
 
     @property
     def owner_id(self):
diff --git a/catalog/common/sites.py b/catalog/common/sites.py
index 5188eb22..7b20efa7 100644
--- a/catalog/common/sites.py
+++ b/catalog/common/sites.py
@@ -261,6 +261,7 @@ class AbstractSite:
         p.save()
         if p.item:
             p.item.merge_data_from_external_resources(ignore_existing_content)
+            p.item.ap_object  # accessing ap_object validates that the merged item serializes
            p.item.save()
         self.scrape_additional_data()
         if auto_link:
diff --git a/catalog/sites/fedi.py b/catalog/sites/fedi.py
index d594dd8a..ee2c8728 100644
--- a/catalog/sites/fedi.py
+++ b/catalog/sites/fedi.py
@@ -1,3 +1,4 @@
+import re
 from urllib.parse import quote_plus, urlparse
 
 import httpx
@@ -68,7 +69,9 @@ class FediverseInstance(AbstractSite):
     @classmethod
     def url_to_id(cls, url: str):
         u = url.split("://", 1)[1].split("?", 1)[0].split("/", 1)
-        return "https://" + u[0].lower() + "/" + u[1]
+        return "https://" + u[0].lower() + "/" + re.sub("^api/", "", u[1])
+        # return "https://" + u[0].lower() + "/" + u[1]
+        # FIXME: re.sub(...) should be removed once all peers in the network have upgraded to 0.11.4.9+
 
     @classmethod
     def validate_url_fallback(cls, url: str):
diff --git a/catalog/views.py b/catalog/views.py
index ebf30cb8..44d7e01c 100644
--- a/catalog/views.py
+++ b/catalog/views.py
@@ -1,7 +1,7 @@
 from django.contrib.auth.decorators import login_required
 from django.core.cache import cache
 from django.db.models import Count
-from django.http import Http404
+from django.http import Http404, JsonResponse
 from django.shortcuts import get_object_or_404, redirect, render
 from django.urls import reverse
 from django.utils import timezone
@@ -82,7 +82,7 @@ def retrieve(request, item_path, item_uuid):
     if not skipcheck and item.is_deleted:
         raise Http404(_("Item no longer exists"))
     if request.headers.get("Accept", "").endswith("json"):
-        return redirect(item.api_url)
+        return JsonResponse(item.ap_object)
     focus_item = None
     if request.GET.get("focus"):
         focus_item = get_object_or_404(
diff --git a/common/views.py b/common/views.py
index c6e91663..3334a293 100644
--- a/common/views.py
+++ b/common/views.py
@@ -81,23 +81,35 @@ def nodeinfo2(request):
     )
 
 
+def _is_json_request(request) -> bool:
+    return request.headers.get("Accept", "").endswith("json")
+
+
 def error_400(request, exception=None):
     if isinstance(exception, DisallowedHost):
         url = settings.SITE_INFO["site_url"] + request.get_full_path()
         return redirect(url, permanent=True)
+    if _is_json_request(request):
+        return JsonResponse({"error": "invalid request"}, status=400)
     return render(request, "400.html", status=400, context={"exception": exception})
 
 
 def error_403(request, exception=None):
+    if _is_json_request(request):
+        return JsonResponse({"error": "forbidden"}, status=403)
     return render(request, "403.html", status=403, context={"exception": exception})
 
 
 def error_404(request, exception=None):
+    if _is_json_request(request):
+        return JsonResponse({"error": "not found"}, status=404)
     request.session.pop("next_url", None)
     return render(request, "404.html", status=404, context={"exception": exception})
 
 
 def error_500(request, exception=None):
+    if _is_json_request(request):
+        return JsonResponse({"error": "something went wrong"}, status=500)
     return render(request, "500.html", status=500, context={"exception": exception})
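
A minimal sketch of how the JSON negotiation added in common/views.py is expected to behave, written against the new error handlers directly so no URLconf wiring is assumed. It is meant to run inside the project's configured Django settings (e.g. the test suite); the request paths are made up and only the Accept header matters.

from django.test import RequestFactory

from common.views import error_404, error_500

factory = RequestFactory()

# A client whose Accept header ends in "json" now gets a structured error body
# instead of the rendered 404.html template.
resp = error_404(factory.get("/no/such/page", HTTP_ACCEPT="application/activity+json"))
assert resp.status_code == 404
assert resp["Content-Type"].startswith("application/json")

# The same negotiation applies to server errors.
resp = error_500(factory.get("/whatever", HTTP_ACCEPT="application/json"))
assert resp.status_code == 500

Sharing the check through _is_json_request() keeps the four handlers consistent with the Accept-based branch already used in catalog/views.py retrieve().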
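
Likewise, a hedged illustration of the url_to_id() change in catalog/sites/fedi.py: the added re.sub strips a leading "api/" path segment so that object URLs announced by peers still on pre-0.11.4.9 releases normalize to the same canonical id as the plain form (per the FIXME, the strip can go once the network has upgraded). The host and uuid below are invented.

from catalog.sites.fedi import FediverseInstance

# query strings are dropped and the host is lowercased, as before
assert (
    FediverseInstance.url_to_id("https://Example.Social/book/3AbCd?x=1")
    == "https://example.social/book/3AbCd"
)

# new: an "api/" prefix from older peers is stripped from the path
assert (
    FediverseInstance.url_to_id("https://example.social/api/book/3AbCd")
    == "https://example.social/book/3AbCd"
)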