verify and recreate client app when logging in
parent 9f3a289d34
commit fcec33636c
10 changed files with 46 additions and 15 deletions
@@ -217,7 +217,7 @@ ALLOWED_HOSTS = ["*"]
 ENABLE_LOCAL_ONLY = env("NEODB_ENABLE_LOCAL_ONLY")

 # Timeout of requests to Mastodon, in seconds
-MASTODON_TIMEOUT = env("NEODB_LOGIN_MASTODON_TIMEOUT", default=10) # type: ignore
+MASTODON_TIMEOUT = env("NEODB_LOGIN_MASTODON_TIMEOUT", default=5) # type: ignore
 TAKAHE_REMOTE_TIMEOUT = MASTODON_TIMEOUT

 NEODB_USER_AGENT = f"NeoDB/{NEODB_VERSION} (+{SITE_INFO.get('site_url', 'undefined')})"
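Deployments that need more headroom than the new 5-second default can still override it through the environment variable named in the hunk; below is a rough plain-os.environ approximation of what the env() lookup does (the helper itself is part of the settings module and not shown in this commit):

    # Rough approximation of the env() lookup above; only the variable name
    # NEODB_LOGIN_MASTODON_TIMEOUT and the default of 5 come from the hunk.
    import os

    # e.g. export NEODB_LOGIN_MASTODON_TIMEOUT=15 before starting the server
    MASTODON_TIMEOUT = int(os.environ.get("NEODB_LOGIN_MASTODON_TIMEOUT", 5))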
@@ -293,7 +293,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
     def history(self):
         # can't use AuditlogHistoryField bc it will only return history with current content type
         return LogEntry.objects.filter(
-            object_id=self.id, content_type_id__in=list(item_content_types().values())
+            object_id=self.pk, content_type_id__in=list(item_content_types().values())
         )

     @cached_property
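This hunk is the first of a series of .id → .pk substitutions in the hunks below; in Django, pk is the alias for whatever field is declared as the primary key, so these lookups keep working even on models with a custom primary key. A minimal sketch, using a hypothetical model that is not part of NeoDB:

    # Minimal sketch; Edition is hypothetical and only illustrates why .pk is safer.
    from django.db import models

    class Edition(models.Model):
        isbn = models.CharField(max_length=13, primary_key=True)

    # e = Edition(isbn="9780156012195")
    # e.pk == e.isbn   -> True, pk follows the declared primary key
    # e.id             -> AttributeError, since no field named "id" exists here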
@@ -310,7 +310,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
         res.save()

     def __str__(self):
-        return f"{self.__class__.__name__}|{self.id}|{self.uuid} {self.primary_lookup_id_type}:{self.primary_lookup_id_value if self.primary_lookup_id_value else ''} ({self.title})"
+        return f"{self.__class__.__name__}|{self.pk}|{self.uuid} {self.primary_lookup_id_type}:{self.primary_lookup_id_value if self.primary_lookup_id_value else ''} ({self.title})"

     @classmethod
     def lookup_id_type_choices(cls):
@@ -188,7 +188,7 @@ def review_list(request, item_path, item_uuid):

 def comments(request, item_path, item_uuid):
     item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-    ids = item.child_item_ids + [item.id] + item.sibling_item_ids
+    ids = item.child_item_ids + [item.pk] + item.sibling_item_ids
     queryset = Comment.objects.filter(item_id__in=ids).order_by("-created_time")
     queryset = queryset.filter(q_piece_visible_to_user(request.user))
     before_time = request.GET.get("last")
@@ -230,7 +230,7 @@ def comments_by_episode(request, item_path, item_uuid):

 def reviews(request, item_path, item_uuid):
     item = get_object_or_404(Item, uid=get_uuid_or_404(item_uuid))
-    ids = item.child_item_ids + [item.id] + item.sibling_item_ids
+    ids = item.child_item_ids + [item.pk] + item.sibling_item_ids
     queryset = Review.objects.filter(item_id__in=ids).order_by("-created_time")
     queryset = queryset.filter(q_piece_visible_to_user(request.user))
     before_time = request.GET.get("last")
@@ -163,4 +163,4 @@ class ListMember(Piece):
         abstract = True

     def __str__(self):
-        return f"{self.__class__.__name__}:{self.id}[{self.parent}]:{self.item}"
+        return f"{self.__class__.__name__}:{self.pk}[{self.parent}]:{self.item}"
@@ -47,7 +47,7 @@ class Mark:

     @property
     def id(self) -> int | None:
-        return self.shelfmember.id if self.shelfmember else None
+        return self.shelfmember.pk if self.shelfmember else None

     @cached_property
     def shelf(self) -> Shelf | None:
@@ -75,7 +75,7 @@ class Rating(Content):
     def get_rating_for_item(item: Item) -> float | None:
         stat = Rating.objects.filter(grade__isnull=False)
         if item.class_name in RATING_INCLUDES_CHILD_ITEMS:
-            stat = stat.filter(item_id__in=item.child_item_ids + [item.id])
+            stat = stat.filter(item_id__in=item.child_item_ids + [item.pk])
         else:
             stat = stat.filter(item=item)
         stat = stat.aggregate(average=Avg("grade"), count=Count("item"))
@@ -85,7 +85,7 @@ class Rating(Content):
     def get_rating_count_for_item(item: Item) -> int:
         stat = Rating.objects.filter(grade__isnull=False)
         if item.class_name in RATING_INCLUDES_CHILD_ITEMS:
-            stat = stat.filter(item_id__in=item.child_item_ids + [item.id])
+            stat = stat.filter(item_id__in=item.child_item_ids + [item.pk])
         else:
             stat = stat.filter(item=item)
         stat = stat.aggregate(count=Count("item"))
@@ -95,7 +95,7 @@ class Rating(Content):
     def get_rating_distribution_for_item(item: Item):
         stat = Rating.objects.filter(grade__isnull=False)
         if item.class_name in RATING_INCLUDES_CHILD_ITEMS:
-            stat = stat.filter(item_id__in=item.child_item_ids + [item.id])
+            stat = stat.filter(item_id__in=item.child_item_ids + [item.pk])
         else:
             stat = stat.filter(item=item)
         stat = stat.values("grade").annotate(count=Count("grade")).order_by("grade")
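The three Rating helpers above share one pattern: optionally widen the queryset to the item's child items, then aggregate. A fragment-style sketch of the aggregation step, reusing names from the hunks; the returned numbers are invented:

    # Fragment reusing Rating, item and Avg/Count from the hunks above; values are invented.
    ids = item.child_item_ids + [item.pk]
    stat = Rating.objects.filter(grade__isnull=False, item_id__in=ids)
    stat.aggregate(average=Avg("grade"), count=Count("item"))
    # -> {"average": 7.4, "count": 123}   ({"average": None, "count": 0} when unrated)
    stat.values("grade").annotate(count=Count("grade")).order_by("grade")
    # -> [{"grade": 1, "count": 2}, ..., {"grade": 10, "count": 40}]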
@@ -44,6 +44,9 @@ def update_journal_for_merged_item(
         logger.error("update_journal_for_merged_item: unable to find item")
         return
     new_item = legacy_item.merged_to_item
+    if not new_item:
+        logger.error("update_journal_for_merged_item: unable to find merged_to_item")
+        return
     delete_q = []
     for cls in list(Content.__subclasses__()) + list(ListMember.__subclasses__()):
         for p in cls.objects.filter(item=legacy_item):
@@ -54,12 +57,12 @@ def update_journal_for_merged_item(
             except IntegrityError:
                 if delete_duplicated:
                     logger.warning(
-                        f"deleted piece {p.id} when merging {cls.__name__}: {legacy_item_uuid} -> {new_item.uuid}"
+                        f"deleted piece {p.pk} when merging {cls.__name__}: {legacy_item_uuid} -> {new_item.uuid}"
                     )
                     delete_q.append(p)
                 else:
                     logger.warning(
-                        f"skip piece {p.id} when merging {cls.__name__}: {legacy_item_uuid} -> {new_item.uuid}"
+                        f"skip piece {p.pk} when merging {cls.__name__}: {legacy_item_uuid} -> {new_item.uuid}"
                     )
     for p in delete_q:
         Debris.create_from_piece(p)
@@ -36,7 +36,7 @@ def add_to_collection(request: AuthedHttpRequest, item_uuid):
            cid = Collection.objects.create(
                owner=request.user.identity,
                title=_("Collection by {0}").format(request.user.display_name),
-            ).id
+            ).pk
         collection = Collection.objects.get(owner=request.user.identity, id=cid)
         collection.append_item(item, note=request.POST.get("note"))
         return HttpResponseRedirect(request.META.get("HTTP_REFERER", "/"))
@@ -50,7 +50,7 @@ def review_edit(request: AuthedHttpRequest, item_uuid, review_uuid=None):
         if review
         else ReviewForm(
             initial={
-                "item": item.id,
+                "item": item.pk,
                 "share_to_mastodon": request.user.preference.mastodon_default_repost,
             }
         )
@@ -358,7 +358,11 @@ def get_or_create_fediverse_application(login_domain):
     if not app:
         app = MastodonApplication.objects.filter(api_domain__iexact=domain).first()
     if app:
-        return app
+        if verify_client(app):
+            return app
+        else:
+            logger.warning(f"Invalid client app for {domain}")
+            app.delete()
     if not settings.MASTODON_ALLOW_ANY_SITE:
         logger.warning(f"Disallowed to create app for {domain}")
         raise Exception("不支持其它实例登录")
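This hunk carries the commit's main behaviour change: a cached MastodonApplication is only reused if verify_client() (added further below) still accepts its credentials; otherwise it is deleted so the code after the hunk registers a fresh app. The raised message 不支持其它实例登录 roughly translates to "logging in from other instances is not supported". A condensed sketch of the resulting flow, with the re-registration step reduced to a hypothetical create_app() placeholder:

    # Condensed post-patch flow; create_app() is a hypothetical stand-in for the
    # registration code that lives outside this hunk.
    if app:
        if verify_client(app):      # credentials still valid: reuse the cached app
            return app
        logger.warning(f"Invalid client app for {domain}")
        app.delete()                # stale app: remove it and fall through
    if not settings.MASTODON_ALLOW_ANY_SITE:
        raise Exception("Logging in from other instances is not supported")
    return create_app(domain)       # recreate the client app for this domain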
@@ -424,6 +428,30 @@ def get_mastodon_login_url(app, login_domain, request):
     )


+def verify_client(mast_app):
+    payload = {
+        "client_id": mast_app.client_id,
+        "client_secret": mast_app.client_secret,
+        "redirect_uri": "urn:ietf:wg:oauth:2.0:oob",
+        "scope": settings.MASTODON_CLIENT_SCOPE,
+        "grant_type": "client_credentials",
+    }
+    headers = {"User-Agent": USER_AGENT}
+    url = "https://" + (mast_app.api_domain or mast_app.domain_name) + API_OBTAIN_TOKEN
+    try:
+        response = post(
+            url, data=payload, headers=headers, timeout=settings.MASTODON_TIMEOUT
+        )
+    except Exception as e:
+        logger.warning(f"Error {url} {e}")
+        return False
+    if response.status_code != 200:
+        logger.warning(f"Error {url} {response.status_code}")
+        return False
+    data = response.json()
+    return data.get("access_token") is not None
+
+
 def obtain_token(site, request, code):
     """Returns token if success else None."""
     mast_app = MastodonApplication.objects.get(domain_name=site)
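As a usage note, the new helper can be exercised by hand; the sketch below assumes a Django shell, an illustrative domain, and that verify_client and get_or_create_fediverse_application live in the same module (the import path is a guess, not confirmed by this commit):

    # Hypothetical manual check from `python manage.py shell`; import path assumed.
    from mastodon.api import get_or_create_fediverse_application, verify_client

    app = get_or_create_fediverse_application("mastodon.social")  # illustrative domain
    # True only if client_id/client_secret can still obtain a client-credentials token
    print(verify_client(app))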