audit log improvement

This commit is contained in:
parent 44164de246
commit 6a6348c2e8

7 changed files with 109 additions and 103 deletions

@@ -377,13 +377,7 @@ class Item(SoftDeleteMixin, PolymorphicModel):
         with connection.cursor() as cursor:
             cursor.execute(f"DELETE FROM {tbl} WHERE item_ptr_id = %s", [self.pk])
         obj = model.objects.get(pk=obj.pk)
-        LogEntry.objects.log_create(
-            obj,
-            action=LogEntry.Action.UPDATE,
-            changes={
-                "!recast": [[old_ct.model, old_ct.id], [ct.model, ct.id]],
-            },
-        )
+        obj.log_action({"!recast": [old_ct.model, ct.model]})
         return obj

     @property

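The new one-line call sites rely on a log_action() helper whose definition is not part of this diff. A minimal sketch of what such a helper presumably wraps, inferred from the removed inline blocks (the method body here is an assumption, not code from the commit):

    from auditlog.models import LogEntry

    def log_action(self, changes: dict):
        # replaces the repeated LogEntry.objects.log_create() boilerplate
        # at every call site with a single line
        LogEntry.objects.log_create(
            self,
            action=LogEntry.Action.UPDATE,
            changes=changes,
        )
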
@@ -536,13 +530,7 @@ class ExternalResource(models.Model):
         return f"{self.pk}:{self.id_type}:{self.id_value or ''} ({self.url})"

     def unlink_from_item(self):
-        LogEntry.objects.log_create(
-            self.item,
-            action=LogEntry.Action.UPDATE,
-            changes={
-                "__unlink__": [str(self), None],
-            },
-        )
+        self.item.log_action({"!unlink": [str(self), None]})
         self.item = None
         self.save()

@@ -138,6 +138,9 @@ class AbstractSite:
             obj["primary_lookup_id_value"] = v
             resource.item = model.objects.create(**obj)
         if previous_item != resource.item:
+            if previous_item:
+                previous_item.log_action({"unmatch": [str(resource), ""]})
+            resource.item.log_action({"!match": ["", str(resource)]})
             resource.save(update_fields=["item"])
         return resource.item

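The changes payloads follow a small convention: keys such as "!recast", "!unlink" and "!match" are not model fields but "!"-prefixed action markers, each mapping to an [old, new] pair in the same shape as auditlog's regular per-field diffs ("unmatch" above is the one key written without the prefix). An illustrative payload, with made-up resource text:

    # hypothetical values for a resource newly matched to an item
    resource.item.log_action(
        {"!match": ["", "douban:12345 (https://book.douban.com/subject/12345/)"]}
    )
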
@@ -149,11 +152,7 @@ class AbstractSite:
         if not p.ready:
             # raise ValueError(f'resource not ready for {self.url}')
             return None
-        last_item = p.item
-        item = self.match_or_create_item_for_resource(p)
-        if last_item != p.item:
-            p.save()
-        return item
+        return self.match_or_create_item_for_resource(p)

     @property
     def ready(self):

@@ -71,7 +71,20 @@ def init_catalog_audit_log():
     for cls in Item.__subclasses__():
         auditlog.register(
             cls,
-            exclude_fields=["metadata", "created_time", "edited_time", "last_editor"],
+            exclude_fields=[
+                "id",
+                "item_ptr",
+                "polymorphic_ctype",
+                "metadata",
+                "created_time",
+                "edited_time",
+                "last_editor",
+                # related fields are not supported in django-auditlog yet
+                "lookup_ids",
+                "external_resources",
+                "merged_from_items",
+                "focused_comments",
+            ],
         )

     auditlog.register(

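With this registration in place, an ordinary save on any Item subclass produces an audit record without extra code; a rough usage sketch (Edition and the title values are illustrative):

    edition = Edition.objects.get(pk=1)
    edition.title = "New Title"
    edition.save()
    # django-auditlog's save signal handler writes a LogEntry whose
    # changes map field names to [old, new] pairs, e.g.
    # {"title": ["Old Title", "New Title"]}; the fields excluded above
    # (metadata, edited_time, ...) never appear in that diff

Excluding id, item_ptr and polymorphic_ctype keeps multi-table-inheritance bookkeeping out of every entry, and the related fields are skipped because, per the inline comment, django-auditlog cannot track them yet.
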
@@ -8,6 +8,7 @@ from rq.job import Job
 from django.core.cache import cache
 import hashlib
 from .typesense import Indexer as TypeSenseIndexer
+from auditlog.context import set_actor

 # from .meilisearch import Indexer as MeiliSearchIndexer

@@ -85,7 +86,7 @@ def query_index(keywords, category=None, tag=None, page=1, prepare_external=True
     return items, result.num_pages, result.count, duplicated_items


-def enqueue_fetch(url, is_refetch):
+def enqueue_fetch(url, is_refetch, user=None):
     job_id = "fetch_" + hashlib.md5(url.encode()).hexdigest()
     in_progress = False
     try:

|
@ -95,24 +96,25 @@ def enqueue_fetch(url, is_refetch):
|
||||||
in_progress = False
|
in_progress = False
|
||||||
if not in_progress:
|
if not in_progress:
|
||||||
django_rq.get_queue("fetch").enqueue(
|
django_rq.get_queue("fetch").enqueue(
|
||||||
_fetch_task, url, is_refetch, job_id=job_id
|
_fetch_task, url, is_refetch, user, job_id=job_id
|
||||||
)
|
)
|
||||||
return job_id
|
return job_id
|
||||||
|
|
||||||
|
|
||||||
def _fetch_task(url, is_refetch):
|
def _fetch_task(url, is_refetch, user):
|
||||||
item_url = "-"
|
item_url = "-"
|
||||||
try:
|
with set_actor(user):
|
||||||
site = SiteManager.get_site_by_url(url)
|
try:
|
||||||
if not site:
|
site = SiteManager.get_site_by_url(url)
|
||||||
return None
|
if not site:
|
||||||
site.get_resource_ready(ignore_existing_content=is_refetch)
|
return None
|
||||||
item = site.get_item()
|
site.get_resource_ready(ignore_existing_content=is_refetch)
|
||||||
if item:
|
item = site.get_item()
|
||||||
_logger.info(f"fetched {url} {item.url} {item}")
|
if item:
|
||||||
item_url = item.url
|
_logger.info(f"fetched {url} {item.url} {item}")
|
||||||
else:
|
item_url = item.url
|
||||||
_logger.error(f"fetch {url} failed")
|
else:
|
||||||
except Exception as e:
|
_logger.error(f"fetch {url} failed")
|
||||||
_logger.error(f"fetch {url} error {e}")
|
except Exception as e:
|
||||||
return item_url
|
_logger.error(f"fetch {url} error {e}")
|
||||||
|
return item_url
|
||||||
|
|
|
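set_actor() is what makes auditing work in background jobs: auditlog's middleware normally resolves the actor from the current request, which does not exist inside an RQ worker, so the view passes request.user through enqueue_fetch() and the task binds it explicitly. The pattern in isolation (the job body is a placeholder):

    from auditlog.context import set_actor

    def background_job(user):
        # every LogEntry created inside this block is attributed to
        # actor=user, even with no request/response cycle around
        with set_actor(user):
            ...  # audited model writes happen here
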
@@ -68,7 +68,7 @@ def fetch(request, url, is_refetch: bool = False, site: AbstractSite | None = No
             "!refetch": [url, None],
         }
     )
-    job_id = enqueue_fetch(url, is_refetch)
+    job_id = enqueue_fetch(url, is_refetch, request.user)
     return render(
         request,
         "fetch_pending.html",

@@ -9,6 +9,7 @@ from user_messages import api as msg
 import django_rq
 from common.utils import GenerateDateUUIDMediaFilePath
 import os
+from auditlog.context import set_actor
 from catalog.common import *
 from catalog.common.downloaders import *
 from catalog.sites.douban import DoubanDownloader

@@ -188,13 +189,14 @@ class DoubanImporter:
         print(f"{self.user} import start")
         msg.info(self.user, f"开始导入豆瓣标记和评论")
         self.update_user_import_status(1)
-        self.load_sheets()
-        print(f"{self.user} sheet loaded, {self.total} lines total")
-        self.update_user_import_status(1)
-        for name, param in self.mark_sheet_config.items():
-            self.import_mark_sheet(self.mark_data[name], param[0], name)
-        for name, param in self.review_sheet_config.items():
-            self.import_review_sheet(self.review_data[name], name)
+        with set_actor(self.user):
+            self.load_sheets()
+            print(f"{self.user} sheet loaded, {self.total} lines total")
+            self.update_user_import_status(1)
+            for name, param in self.mark_sheet_config.items():
+                self.import_mark_sheet(self.mark_data[name], param[0], name)
+            for name, param in self.review_sheet_config.items():
+                self.import_review_sheet(self.review_data[name], name)
         self.update_user_import_status(0)
         msg.success(
             self.user,

@@ -3,6 +3,7 @@ from datetime import datetime
 from user_messages import api as msg
 import django_rq
 from django.utils.timezone import make_aware
+from auditlog.context import set_actor
 from catalog.common import *
 from catalog.models import *
 from journal.models import *

@@ -42,65 +43,66 @@ class GoodreadsImporter:
         match_profile = re.match(re_profile, url)
         total = 0
         visibility = user.preference.default_visibility
-        if match_list or match_shelf:
-            shelf = (
-                cls.parse_shelf(match_shelf[0], user)
-                if match_shelf
-                else cls.parse_list(match_list[0], user)
-            )
-            if shelf["title"] and shelf["books"]:
-                collection = Collection.objects.create(
-                    title=shelf["title"],
-                    brief=shelf["description"]
-                    + "\n\nImported from [Goodreads]("
-                    + url
-                    + ")",
-                    owner=user,
-                )
-                for book in shelf["books"]:
-                    collection.append_item(book["book"], note=book["review"])
-                    total += 1
-                collection.save()
-                msg.success(user, f'成功从Goodreads导入包含{total}本书的收藏单{shelf["title"]}。')
-        elif match_profile:
-            uid = match_profile[1]
-            shelves = {
-                ShelfType.WISHLIST: f"https://www.goodreads.com/review/list/{uid}?shelf=to-read",
-                ShelfType.PROGRESS: f"https://www.goodreads.com/review/list/{uid}?shelf=currently-reading",
-                ShelfType.COMPLETE: f"https://www.goodreads.com/review/list/{uid}?shelf=read",
-            }
-            for shelf_type in shelves:
-                shelf_url = shelves.get(shelf_type)
-                shelf = cls.parse_shelf(shelf_url, user)
-                for book in shelf["books"]:
-                    mark = Mark(user, book["book"])
-                    if (
-                        (
-                            mark.shelf_type == shelf_type
-                            and mark.comment_text == book["review"]
-                        )
-                        or (
-                            mark.shelf_type == ShelfType.COMPLETE
-                            and shelf_type != ShelfType.COMPLETE
-                        )
-                        or (
-                            mark.shelf_type == ShelfType.PROGRESS
-                            and shelf_type == ShelfType.WISHLIST
-                        )
-                    ):
-                        print(
-                            f'Skip {shelf_type}/{book["book"]} bc it was marked {mark.shelf_type}'
-                        )
-                    else:
-                        mark.update(
-                            shelf_type,
-                            book["review"],
-                            book["rating"],
-                            visibility=visibility,
-                            created_time=book["last_updated"] or timezone.now(),
-                        )
-                        total += 1
-            msg.success(user, f"成功从Goodreads用户主页导入{total}个标记。")
+        with set_actor(user):
+            if match_list or match_shelf:
+                shelf = (
+                    cls.parse_shelf(match_shelf[0], user)
+                    if match_shelf
+                    else cls.parse_list(match_list[0], user)
+                )
+                if shelf["title"] and shelf["books"]:
+                    collection = Collection.objects.create(
+                        title=shelf["title"],
+                        brief=shelf["description"]
+                        + "\n\nImported from [Goodreads]("
+                        + url
+                        + ")",
+                        owner=user,
+                    )
+                    for book in shelf["books"]:
+                        collection.append_item(book["book"], note=book["review"])
+                        total += 1
+                    collection.save()
+                    msg.success(user, f'成功从Goodreads导入包含{total}本书的收藏单{shelf["title"]}。')
+            elif match_profile:
+                uid = match_profile[1]
+                shelves = {
+                    ShelfType.WISHLIST: f"https://www.goodreads.com/review/list/{uid}?shelf=to-read",
+                    ShelfType.PROGRESS: f"https://www.goodreads.com/review/list/{uid}?shelf=currently-reading",
+                    ShelfType.COMPLETE: f"https://www.goodreads.com/review/list/{uid}?shelf=read",
+                }
+                for shelf_type in shelves:
+                    shelf_url = shelves.get(shelf_type)
+                    shelf = cls.parse_shelf(shelf_url, user)
+                    for book in shelf["books"]:
+                        mark = Mark(user, book["book"])
+                        if (
+                            (
+                                mark.shelf_type == shelf_type
+                                and mark.comment_text == book["review"]
+                            )
+                            or (
+                                mark.shelf_type == ShelfType.COMPLETE
+                                and shelf_type != ShelfType.COMPLETE
+                            )
+                            or (
+                                mark.shelf_type == ShelfType.PROGRESS
+                                and shelf_type == ShelfType.WISHLIST
+                            )
+                        ):
+                            print(
+                                f'Skip {shelf_type}/{book["book"]} bc it was marked {mark.shelf_type}'
+                            )
+                        else:
+                            mark.update(
+                                shelf_type,
+                                book["review"],
+                                book["rating"],
+                                visibility=visibility,
+                                created_time=book["last_updated"] or timezone.now(),
+                            )
+                            total += 1
+                msg.success(user, f"成功从Goodreads用户主页导入{total}个标记。")

     @classmethod
     def get_book(cls, url, user):