diff --git a/catalog/views.py b/catalog/views.py
index 1c8345f2..a8dd5d5e 100644
--- a/catalog/views.py
+++ b/catalog/views.py
@@ -86,7 +86,7 @@ def retrieve(request, item_path, item_uuid):
     if request.method == "HEAD":
         return HttpResponse()
     if request.headers.get("Accept", "").endswith("json"):
-        return JsonResponse(item.ap_object)
+        return JsonResponse(item.ap_object, content_type="application/activity+json")
     focus_item = None
     if request.GET.get("focus"):
         focus_item = get_object_or_404(
diff --git a/common/views.py b/common/views.py
index 3334a293..e21164e7 100644
--- a/common/views.py
+++ b/common/views.py
@@ -2,7 +2,7 @@ from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.core.cache import cache
 from django.core.exceptions import DisallowedHost
-from django.http import HttpRequest, JsonResponse
+from django.http import HttpRequest, HttpResponse, JsonResponse
 from django.shortcuts import redirect, render
 from django.urls import reverse
 
@@ -81,36 +81,41 @@ def nodeinfo2(request):
     )
 
 
-def _is_json_request(request) -> bool:
-    return request.headers.get("HTTP_ACCEPT", "").endswith("json")
+def _error_response(request, status: int, exception=None, default_message=""):
+    message = str(exception) if exception else default_message
+    if request.headers.get("HTTP_ACCEPT").endswith("json"):
+        return JsonResponse({"error": message}, status=status)
+    if (
+        request.headers.get("HTTP_HX_REQUEST") is not None
+        and request.headers.get("HTTP_HX_BOOSTED") is None
+    ):
+        return HttpResponse(message, status=status)
+    return render(
+        request,
+        f"{status}.html",
+        status=status,
+        context={"message": message, "exception": exception},
+    )
 
 
 def error_400(request, exception=None):
     if isinstance(exception, DisallowedHost):
         url = settings.SITE_INFO["site_url"] + request.get_full_path()
         return redirect(url, permanent=True)
-    if _is_json_request(request):
-        return JsonResponse({"error": "invalid request"}, status=400)
-    return render(request, "400.html", status=400, context={"exception": exception})
+    return _error_response(request, 400, exception, "invalid request")
 
 
 def error_403(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "forbidden"}, status=403)
-    return render(request, "403.html", status=403, context={"exception": exception})
+    return _error_response(request, 403, exception, "forbidden")
 
 
 def error_404(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "not found"}, status=404)
     request.session.pop("next_url", None)
-    return render(request, "404.html", status=404, context={"exception": exception})
+    return _error_response(request, 404, exception, "not found")
 
 
 def error_500(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "something wrong"}, status=500)
-    return render(request, "500.html", status=500, context={"exception": exception})
+    return _error_response(request, 500, exception, "something wrong")
 
 
 def console(request):
diff --git a/journal/exporters/csv.py b/journal/exporters/csv.py
index 5bbd9d78..2cbffb62 100644
--- a/journal/exporters/csv.py
+++ b/journal/exporters/csv.py
@@ -171,5 +171,5 @@ class CsvExporter(Task):
         shutil.make_archive(filename[:-4], "zip", temp_folder_path)
         self.metadata["file"] = filename
         self.metadata["total"] = total
-        self.message = "Export complete."
+        self.message = f"{total} records exported."
         self.save()
diff --git a/journal/exporters/ndjson.py b/journal/exporters/ndjson.py
index 119ee136..cb0457d4 100644
--- a/journal/exporters/ndjson.py
+++ b/journal/exporters/ndjson.py
@@ -215,5 +215,5 @@ class NdjsonExporter(Task):
 
         self.metadata["file"] = filename
         self.metadata["total"] = total
-        self.message = "Export complete."
+        self.message = f"{total} records exported."
         self.save()
diff --git a/journal/importers/__init__.py b/journal/importers/__init__.py
index 67693cc3..07075888 100644
--- a/journal/importers/__init__.py
+++ b/journal/importers/__init__.py
@@ -1,6 +1,3 @@
-import os
-import zipfile
-
 from .csv import CsvImporter
 from .douban import DoubanImporter
 from .goodreads import GoodreadsImporter
@@ -8,25 +5,6 @@ from .letterboxd import LetterboxdImporter
 from .ndjson import NdjsonImporter
 from .opml import OPMLImporter
 
-
-def get_neodb_importer(
-    filename: str,
-) -> type[CsvImporter] | type[NdjsonImporter] | None:
-    if not os.path.exists(filename) or not zipfile.is_zipfile(filename):
-        return None
-    with zipfile.ZipFile(filename, "r") as z:
-        files = z.namelist()
-        if any(f == "journal.ndjson" for f in files):
-            return NdjsonImporter
-        if any(
-            f.endswith("_mark.csv")
-            or f.endswith("_review.csv")
-            or f.endswith("_note.csv")
-            for f in files
-        ):
-            return CsvImporter
-
-
 __all__ = [
     "CsvImporter",
     "NdjsonImporter",
@@ -34,5 +12,4 @@ __all__ = [
     "OPMLImporter",
     "DoubanImporter",
     "GoodreadsImporter",
-    "get_neodb_importer",
 ]
diff --git a/journal/importers/csv.py b/journal/importers/csv.py
index f84bc98a..24401de3 100644
--- a/journal/importers/csv.py
+++ b/journal/importers/csv.py
@@ -5,7 +5,6 @@ import zipfile
 from typing import Dict
 
 from django.utils import timezone
-from django.utils.translation import gettext as _
 from loguru import logger
 
 from catalog.models import ItemCategory
@@ -15,6 +14,9 @@ from .base import BaseImporter
 
 
 class CsvImporter(BaseImporter):
+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel
+
     def import_mark(self, row: Dict[str, str]) -> str:
         """Import a mark from a CSV row.
 
@@ -249,7 +251,7 @@ class CsvImporter(BaseImporter):
 
         # Set the total count in metadata
         self.metadata["total"] = total_rows
-        self.message = f"Found {total_rows} items to import"
+        self.message = f"found {total_rows} records to import"
         self.save(update_fields=["metadata", "message"])
 
         # Now process all files
@@ -257,7 +259,5 @@ class CsvImporter(BaseImporter):
             import_function = getattr(self, f"import_{file_type}")
             self.process_csv_file(file_path, import_function)
 
-        self.message = _("Import complete")
-        if self.metadata.get("failed_items", []):
-            self.message += f": {self.metadata['failed']} items failed ({len(self.metadata['failed_items'])} unique items)"
+        self.message = f"{self.metadata['imported']} items imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
         self.save()
diff --git a/journal/importers/douban.py b/journal/importers/douban.py
index 1157671c..627fd999 100644
--- a/journal/importers/douban.py
+++ b/journal/importers/douban.py
@@ -154,6 +154,8 @@ class DoubanImporter(Task):
     def run(self):
         logger.info(f"{self.user} import start")
         self.load_sheets()
+        self.message = f"豆瓣标记和评论导入开始,共{self.metadata['total']}篇。"
+        self.save(update_fields=["message"])
         logger.info(f"{self.user} sheet loaded, {self.metadata['total']} lines total")
         for name, param in self.mark_sheet_config.items():
             self.import_mark_sheet(self.mark_data[name], param[0], name)
diff --git a/journal/importers/letterboxd.py b/journal/importers/letterboxd.py
index f37241f8..4e76c5b0 100644
--- a/journal/importers/letterboxd.py
+++ b/journal/importers/letterboxd.py
@@ -1,4 +1,5 @@
 import csv
+import os
 import tempfile
 import zipfile
 from datetime import timedelta
@@ -35,6 +36,13 @@ class LetterboxdImporter(Task):
         "file": None,
     }
 
+    @classmethod
+    def validate_file(cls, uploaded_file):
+        try:
+            return zipfile.is_zipfile(uploaded_file)
+        except Exception:
+            return False
+
     def get_item_by_url(self, url):
         try:
             h = BasicDownloader(url).download().html()
@@ -121,7 +129,6 @@
             self.progress(1)
 
     def progress(self, mark_state: int, url=None):
-        self.metadata["total"] += 1
         self.metadata["processed"] += 1
         match mark_state:
             case 1:
@@ -142,49 +149,56 @@
         with tempfile.TemporaryDirectory() as tmpdirname:
             logger.debug(f"Extracting {filename} to {tmpdirname}")
             zipref.extractall(tmpdirname)
-            with open(tmpdirname + "/reviews.csv") as f:
-                reader = csv.DictReader(f, delimiter=",")
-                for row in reader:
-                    uris.add(row["Letterboxd URI"])
-                    self.mark(
-                        row["Letterboxd URI"],
-                        ShelfType.COMPLETE,
-                        row["Watched Date"],
-                        row["Rating"],
-                        row["Review"],
-                        row["Tags"],
-                    )
-            with open(tmpdirname + "/ratings.csv") as f:
-                reader = csv.DictReader(f, delimiter=",")
-                for row in reader:
-                    if row["Letterboxd URI"] in uris:
-                        continue
-                    uris.add(row["Letterboxd URI"])
-                    self.mark(
-                        row["Letterboxd URI"],
-                        ShelfType.COMPLETE,
-                        row["Date"],
-                        row["Rating"],
-                    )
-            with open(tmpdirname + "/watched.csv") as f:
-                reader = csv.DictReader(f, delimiter=",")
-                for row in reader:
-                    if row["Letterboxd URI"] in uris:
-                        continue
-                    uris.add(row["Letterboxd URI"])
-                    self.mark(
-                        row["Letterboxd URI"],
-                        ShelfType.COMPLETE,
-                        row["Date"],
-                    )
-            with open(tmpdirname + "/watchlist.csv") as f:
-                reader = csv.DictReader(f, delimiter=",")
-                for row in reader:
-                    if row["Letterboxd URI"] in uris:
-                        continue
-                    uris.add(row["Letterboxd URI"])
-                    self.mark(
-                        row["Letterboxd URI"],
-                        ShelfType.WISHLIST,
-                        row["Date"],
-                    )
+            if os.path.exists(tmpdirname + "/reviews.csv"):
+                with open(tmpdirname + "/reviews.csv") as f:
+                    reader = csv.DictReader(f, delimiter=",")
+                    for row in reader:
+                        uris.add(row["Letterboxd URI"])
+                        self.mark(
+                            row["Letterboxd URI"],
+                            ShelfType.COMPLETE,
+                            row["Watched Date"],
+                            row["Rating"],
+                            row["Review"],
+                            row["Tags"],
+                        )
+            if os.path.exists(tmpdirname + "/ratings.csv"):
+                with open(tmpdirname + "/ratings.csv") as f:
+                    reader = csv.DictReader(f, delimiter=",")
+                    for row in reader:
+                        if row["Letterboxd URI"] in uris:
+                            continue
+                        uris.add(row["Letterboxd URI"])
+                        self.mark(
+                            row["Letterboxd URI"],
+                            ShelfType.COMPLETE,
+                            row["Date"],
+                            row["Rating"],
+                        )
+            if os.path.exists(tmpdirname + "/watched.csv"):
+                with open(tmpdirname + "/watched.csv") as f:
+                    reader = csv.DictReader(f, delimiter=",")
+                    for row in reader:
+                        if row["Letterboxd URI"] in uris:
+                            continue
+                        uris.add(row["Letterboxd URI"])
+                        self.mark(
+                            row["Letterboxd URI"],
+                            ShelfType.COMPLETE,
+                            row["Date"],
+                        )
+            if os.path.exists(tmpdirname + "/watchlist.csv"):
+                with open(tmpdirname + "/watchlist.csv") as f:
+                    reader = csv.DictReader(f, delimiter=",")
+                    for row in reader:
+                        if row["Letterboxd URI"] in uris:
+                            continue
+                        uris.add(row["Letterboxd URI"])
+                        self.mark(
+                            row["Letterboxd URI"],
+                            ShelfType.WISHLIST,
+                            row["Date"],
+                        )
+        self.metadata["total"] = self.metadata["processed"]
+        self.message = f"{self.metadata['imported']} imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed"
+        self.save(update_fields=["metadata", "message"])
diff --git a/journal/importers/ndjson.py b/journal/importers/ndjson.py
index bb243923..b9f7a291 100644
--- a/journal/importers/ndjson.py
+++ b/journal/importers/ndjson.py
@@ -4,7 +4,6 @@ import tempfile
 import zipfile
 from typing import Any, Dict
 
-from django.utils.translation import gettext as _
 from loguru import logger
 
 from journal.models import (
@@ -26,6 +25,9 @@ from .base import BaseImporter
 class NdjsonImporter(BaseImporter):
     """Importer for NDJSON files exported from NeoDB."""
 
+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.items = {}
@@ -60,8 +62,8 @@
                 metadata = item_entry.get("metadata", {})
                 collection.append_item(item, metadata=metadata)
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing collection: {e}")
+        except Exception:
+            logger.exception("Error importing collection")
             return "failed"
 
     def import_shelf_member(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -86,8 +88,8 @@
                 created_time=published_dt,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing shelf member: {e}")
+        except Exception:
+            logger.exception("Error importing shelf member")
             return "failed"
 
     def import_shelf_log(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -110,8 +112,8 @@
             # return "imported" if created else "skipped"
             # count skip as success otherwise it may confuse user
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing shelf log: {e}")
+        except Exception:
+            logger.exception("Error importing shelf log")
             return "failed"
 
     def import_post(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -152,8 +154,8 @@
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing review: {e}")
+        except Exception:
+            logger.exception("Error importing review")
             return "failed"
 
     def import_note(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -185,8 +187,8 @@
                 metadata=data.get("metadata", {}),
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing note: {e}")
+        except Exception:
+            logger.exception("Error importing note")
             return "failed"
 
     def import_comment(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -218,8 +220,8 @@
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing comment: {e}")
+        except Exception:
+            logger.exception("Error importing comment")
             return "failed"
 
     def import_rating(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -251,8 +253,8 @@
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing rating: {e}")
+        except Exception:
+            logger.exception("Error importing rating")
             return "failed"
 
     def import_tag(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -271,8 +273,8 @@
                 },
             )
             return "imported" if created else "skipped"
-        except Exception as e:
-            logger.error(f"Error importing tag member: {e}")
+        except Exception:
+            logger.exception("Error importing tag member")
             return "failed"
 
     def import_tag_member(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -309,8 +311,8 @@
                 },
             )
             return "imported" if created else "skipped"
-        except Exception as e:
-            logger.error(f"Error importing tag member: {e}")
+        except Exception:
+            logger.exception("Error importing tag member")
             return "failed"
 
     def process_journal(self, file_path: str) -> None:
@@ -348,6 +350,9 @@
                 journal[data_type].append(data)
 
         self.metadata["total"] = sum(len(items) for items in journal.values())
+        self.message = f"found {self.metadata['total']} records to import"
+        self.save(update_fields=["metadata", "message"])
+
         logger.debug(f"Processing {self.metadata['total']} entries")
         if lines_error:
             logger.error(f"Error processing journal.ndjson: {lines_error} lines")
@@ -369,8 +374,8 @@
                 for line in jsonfile:
                     try:
                         i = json.loads(line)
-                    except (json.JSONDecodeError, Exception) as e:
-                        logger.error(f"Error processing catalog item: {e}")
+                    except (json.JSONDecodeError, Exception):
+                        logger.exception("Error processing catalog item")
                         continue
                     u = i.get("id")
                     if not u:
@@ -381,8 +386,8 @@
                     self.items[u] = self.get_item_by_info_and_links("", "", links)
             logger.info(f"Loaded {item_count} items from catalog")
             self.metadata["catalog_processed"] = item_count
-        except Exception as e:
-            logger.error(f"Error parsing catalog file: {e}")
+        except Exception:
+            logger.exception("Error parsing catalog file")
 
     def parse_header(self, file_path: str) -> Dict[str, Any]:
         try:
@@ -392,8 +397,8 @@
             header = json.loads(first_line)
             if header.get("server"):
                 return header
-        except (json.JSONDecodeError, IOError) as e:
-            logger.error(f"Error parsing NDJSON header: {e}")
+        except (json.JSONDecodeError, IOError):
+            logger.exception("Error parsing header")
         return {}
 
     def run(self) -> None:
@@ -424,24 +429,5 @@
         logger.debug(f"Importing journal.ndjson with {header}")
         self.process_journal(journal_path)
 
-        source_info = self.metadata.get("journal_header", {})
-        source_summary = f" from {source_info.get('username', 'unknown')}@{source_info.get('server', 'unknown')} ver:{source_info.get('neodb_version', 'unknown')}."
-        self.message = _("Import complete") + source_summary
-
-        metadata_stats = self.metadata.get("metadata_stats", {})
-        partial_updates = metadata_stats.get("partial_updates", 0)
-        if partial_updates > 0:
-            self.message += f", {partial_updates} items with partial metadata updates"
-
-        ratings = metadata_stats.get("ratings_updated", 0)
-        comments = metadata_stats.get("comments_updated", 0)
-        tags = metadata_stats.get("tags_updated", 0)
-
-        if ratings > 0 or comments > 0 or tags > 0:
-            self.message += (
-                f" ({ratings} ratings, {comments} comments, {tags} tag sets)"
-            )
-
-        if self.metadata.get("failed_items", []):
-            self.message += f": {self.metadata['failed']} items failed ({len(self.metadata['failed_items'])} unique items)"
+        self.message = f"{self.metadata['imported']} items imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
         self.save()
diff --git a/journal/importers/opml.py b/journal/importers/opml.py
index 184b8151..d1685d61 100644
--- a/journal/importers/opml.py
+++ b/journal/importers/opml.py
@@ -1,43 +1,54 @@
-import django_rq
 import listparser
-from auditlog.context import set_actor
 from django.utils.translation import gettext as _
 from loguru import logger
-from user_messages import api as msg
 
 from catalog.common import *
 from catalog.common.downloaders import *
 from catalog.sites.rss import RSS
 from journal.models import *
+from users.models.task import Task
 
 
-class OPMLImporter:
-    def __init__(self, user, visibility, mode):
-        self.user = user
-        self.visibility = visibility
-        self.mode = mode
+class OPMLImporter(Task):
+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel
 
-    def parse_file(self, uploaded_file):
-        return listparser.parse(uploaded_file.read()).feeds
+    TaskQueue = "import"
+    DefaultMetadata = {
+        "total": 0,
+        "mode": 0,
+        "processed": 0,
+        "skipped": 0,
+        "imported": 0,
+        "failed": 0,
+        "visibility": 0,
+        "failed_urls": [],
+        "file": None,
+    }
 
-    def import_from_file(self, uploaded_file):
-        feeds = self.parse_file(uploaded_file)
-        if not feeds:
+    @classmethod
+    def validate_file(cls, f):
+        try:
+            return bool(listparser.parse(f.read()).feeds)
+        except Exception:
             return False
-        django_rq.get_queue("import").enqueue(self.import_from_file_task, feeds)
-        return True
 
-    def import_from_file_task(self, feeds):
-        logger.info(f"{self.user} import opml start")
-        skip = 0
-        collection = None
-        with set_actor(self.user):
-            if self.mode == 1:
+    def run(self):
+        with open(self.metadata["file"], "r") as f:
+            feeds = listparser.parse(f.read()).feeds
+        self.metadata["total"] = len(feeds)
+        self.message = f"Processing {self.metadata['total']} feeds."
+        self.save(update_fields=["metadata", "message"])
+
+        collection = None
+        if self.metadata["mode"] == 1:
             title = _("{username}'s podcast subscriptions").format(
                 username=self.user.display_name
             )
             collection = Collection.objects.create(
-                owner=self.user.identity, title=title
+                owner=self.user.identity,
+                title=title,
+                visibility=self.metadata["visibility"],
             )
         for feed in feeds:
             logger.info(f"{self.user} import {feed.url}")
@@ -47,21 +58,26 @@
                 res = None
             if not res or not res.item:
                 logger.warning(f"{self.user} feed error {feed.url}")
+                self.metadata["failed"] += 1
                 continue
             item = res.item
-            if self.mode == 0:
+            if self.metadata["mode"] == 0:
                 mark = Mark(self.user.identity, item)
                 if mark.shelfmember:
                     logger.info(f"{self.user} marked, skip {feed.url}")
-                    skip += 1
+                    self.metadata["skipped"] += 1
                 else:
+                    self.metadata["imported"] += 1
                     mark.update(
-                        ShelfType.PROGRESS, None, None, visibility=self.visibility
+                        ShelfType.PROGRESS,
+                        None,
+                        None,
+                        visibility=self.metadata["visibility"],
                     )
-            elif self.mode == 1 and collection:
+            elif self.metadata["mode"] == 1 and collection:
+                self.metadata["imported"] += 1
                 collection.append_item(item)
-        logger.info(f"{self.user} import opml end")
-        msg.success(
-            self.user,
-            f"OPML import complete, {len(feeds)} feeds processed, {skip} exisiting feeds skipped.",
-        )
+            self.metadata["processed"] += 1
+            self.save(update_fields=["metadata"])
+        self.message = f"{self.metadata['imported']} feeds imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
+        self.save(update_fields=["message"])
diff --git a/journal/migrations/0006_csvimporter.py b/journal/migrations/0006_csvimporter.py
index 7b6f45c6..ceaa90b9 100644
--- a/journal/migrations/0006_csvimporter.py
+++ b/journal/migrations/0006_csvimporter.py
@@ -10,6 +10,16 @@
     ]
 
     operations = [
+        migrations.CreateModel(
+            name="BaseImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("users.task",),
+        ),
         migrations.CreateModel(
             name="CsvImporter",
             fields=[],
@@ -20,4 +30,24 @@
             },
             bases=("users.task",),
         ),
+        migrations.CreateModel(
+            name="OPMLImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("users.task",),
+        ),
+        migrations.CreateModel(
+            name="NdjsonImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("journal.baseimporter",),
+        ),
     ]
diff --git a/journal/tests/csv.py b/journal/tests/csv.py
index 22f3ace7..d8fb45bd 100644
--- a/journal/tests/csv.py
+++ b/journal/tests/csv.py
@@ -9,7 +9,7 @@ from loguru import logger
 
 from catalog.models import Edition, IdType, Movie, TVEpisode, TVSeason, TVShow
 from journal.exporters import CsvExporter
-from journal.importers import CsvImporter, get_neodb_importer
+from journal.importers import CsvImporter
 from users.models import User
 
 from ..models import *
@@ -219,10 +219,9 @@
                     f"Expected file {filename} with {expected_data_count} data rows, but file not found"
                 )
 
-        self.assertEqual(get_neodb_importer(export_path), CsvImporter)
         importer = CsvImporter.create(user=self.user2, file=export_path, visibility=2)
         importer.run()
-        self.assertEqual(importer.message, "Import complete")
+        self.assertEqual(importer.message, "11 items imported, 0 skipped, 0 failed.")
 
         # Verify imported data
diff --git a/journal/tests/ndjson.py b/journal/tests/ndjson.py
index f882e6d0..cb236a68 100644
--- a/journal/tests/ndjson.py
+++ b/journal/tests/ndjson.py
@@ -18,7 +18,7 @@ from catalog.models import (
     TVShow,
 )
 from journal.exporters import NdjsonExporter
-from journal.importers import NdjsonImporter, get_neodb_importer
+from journal.importers import NdjsonImporter
 from users.models import User
 
 from ..models import *
@@ -363,12 +363,11 @@
         self.assertEqual(type_counts["ShelfLog"], logs.count())
 
         # Now import the export file into a different user account
-        self.assertEqual(get_neodb_importer(export_path), NdjsonImporter)
         importer = NdjsonImporter.create(
             user=self.user2, file=export_path, visibility=2
         )
         importer.run()
-        self.assertIn("Import complete", importer.message)
+        self.assertIn("61 items imported, 0 skipped, 0 failed.", importer.message)
 
         # Verify imported data
diff --git a/users/migrations/0008_alter_task_type.py b/users/migrations/0008_alter_task_type.py
index aba30336..5b120d5d 100644
--- a/users/migrations/0008_alter_task_type.py
+++ b/users/migrations/0008_alter_task_type.py
@@ -14,6 +14,7 @@
             name="type",
             field=models.CharField(
                 choices=[
+                    ("journal.baseimporter", "base importer"),
                     ("journal.csvexporter", "csv exporter"),
                     ("journal.csvimporter", "csv importer"),
                     ("journal.doubanimporter", "douban importer"),
@@ -21,6 +22,8 @@
                     ("journal.goodreadsimporter", "goodreads importer"),
                     ("journal.letterboxdimporter", "letterboxd importer"),
                     ("journal.ndjsonexporter", "ndjson exporter"),
+                    ("journal.ndjsonimporter", "ndjson importer"),
+                    ("journal.opmlimporter", "opml importer"),
                 ],
                 db_index=True,
                 max_length=255,
diff --git a/users/models/task.py b/users/models/task.py
index 85fab411..71f9cf26 100644
--- a/users/models/task.py
+++ b/users/models/task.py
@@ -82,7 +82,6 @@ class Task(TypedModel):
             task.refresh_from_db()
             task.state = cls.States.complete if ok else cls.States.failed
             task.save()
-            task.notify()
 
     def enqueue(self):
         return django_rq.get_queue(self.TaskQueue).enqueue(
diff --git a/users/templates/users/data.html b/users/templates/users/data.html
index 631a1fe9..dc530535 100644
--- a/users/templates/users/data.html
+++ b/users/templates/users/data.html
@@ -10,6 +10,13 @@