ndjson: auto refresh progress for all import and export
This commit is contained in: parent d343d6e71e, commit d6d360025f
19 changed files with 575 additions and 586 deletions
@@ -86,7 +86,7 @@ def retrieve(request, item_path, item_uuid):
     if request.method == "HEAD":
         return HttpResponse()
     if request.headers.get("Accept", "").endswith("json"):
-        return JsonResponse(item.ap_object)
+        return JsonResponse(item.ap_object, content_type="application/activity+json")
     focus_item = None
     if request.GET.get("focus"):
         focus_item = get_object_or_404(
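Note on the hunk above: the only change is the explicit media type. A minimal sketch (not NeoDB code; the view name and payload are hypothetical) of why this matters:

    from django.http import JsonResponse

    def ap_object_view(request):
        # JsonResponse defaults to Content-Type "application/json";
        # ActivityPub consumers generally expect "application/activity+json",
        # hence the explicit override in the hunk above.
        doc = {"type": "Note", "content": "hello"}  # hypothetical AP document
        return JsonResponse(doc, content_type="application/activity+json")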
@@ -2,7 +2,7 @@ from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.core.cache import cache
 from django.core.exceptions import DisallowedHost
-from django.http import HttpRequest, JsonResponse
+from django.http import HttpRequest, HttpResponse, JsonResponse
 from django.shortcuts import redirect, render
 from django.urls import reverse
@@ -81,36 +81,41 @@ def nodeinfo2(request):
     )


-def _is_json_request(request) -> bool:
-    return request.headers.get("HTTP_ACCEPT", "").endswith("json")
+def _error_response(request, status: int, exception=None, default_message=""):
+    message = str(exception) if exception else default_message
+    if request.headers.get("HTTP_ACCEPT").endswith("json"):
+        return JsonResponse({"error": message}, status=status)
+    if (
+        request.headers.get("HTTP_HX_REQUEST") is not None
+        and request.headers.get("HTTP_HX_BOOSTED") is None
+    ):
+        return HttpResponse(message, status=status)
+    return render(
+        request,
+        f"{status}.html",
+        status=status,
+        context={"message": message, "exception": exception},
+    )


 def error_400(request, exception=None):
     if isinstance(exception, DisallowedHost):
         url = settings.SITE_INFO["site_url"] + request.get_full_path()
         return redirect(url, permanent=True)
-    if _is_json_request(request):
-        return JsonResponse({"error": "invalid request"}, status=400)
-    return render(request, "400.html", status=400, context={"exception": exception})
+    return _error_response(request, 400, exception, "invalid request")


 def error_403(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "forbidden"}, status=403)
-    return render(request, "403.html", status=403, context={"exception": exception})
+    return _error_response(request, 403, exception, "forbidden")


 def error_404(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "not found"}, status=404)
     request.session.pop("next_url", None)
-    return render(request, "404.html", status=404, context={"exception": exception})
+    return _error_response(request, 404, exception, "not found")


 def error_500(request, exception=None):
-    if _is_json_request(request):
-        return JsonResponse({"error": "something wrong"}, status=500)
-    return render(request, "500.html", status=500, context={"exception": exception})
+    return _error_response(request, 500, exception, "something wrong")


 def console(request):
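Note: _error_response centralizes content negotiation for all four error views. A toy restatement of its branch order (assumption: a plain dict of canonical header names stands in for Django's request.headers, which the committed code reads via META-style keys such as "HTTP_ACCEPT"):

    def choose_error_format(headers: dict) -> str:
        # mirrors _error_response: JSON first, bare text for htmx partial
        # swaps (HX-Request present, HX-Boosted absent), full page otherwise
        if headers.get("Accept", "").endswith("json"):
            return "json"
        if "HX-Request" in headers and "HX-Boosted" not in headers:
            return "text"
        return "html"

    assert choose_error_format({"Accept": "application/json"}) == "json"
    assert choose_error_format({"HX-Request": "true"}) == "text"
    assert choose_error_format({}) == "html"

The bare-text branch is what lets the htmx:responseError alert added to data.html later in this diff show a readable message instead of a full HTML error page.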
@@ -171,5 +171,5 @@ class CsvExporter(Task):
         shutil.make_archive(filename[:-4], "zip", temp_folder_path)
         self.metadata["file"] = filename
         self.metadata["total"] = total
-        self.message = "Export complete."
+        self.message = f"{total} records exported."
         self.save()
@@ -215,5 +215,5 @@ class NdjsonExporter(Task):

         self.metadata["file"] = filename
         self.metadata["total"] = total
-        self.message = "Export complete."
+        self.message = f"{total} records exported."
         self.save()
@@ -1,6 +1,3 @@
-import os
-import zipfile
-
 from .csv import CsvImporter
 from .douban import DoubanImporter
 from .goodreads import GoodreadsImporter
@@ -8,25 +5,6 @@ from .letterboxd import LetterboxdImporter
 from .ndjson import NdjsonImporter
 from .opml import OPMLImporter


-def get_neodb_importer(
-    filename: str,
-) -> type[CsvImporter] | type[NdjsonImporter] | None:
-    if not os.path.exists(filename) or not zipfile.is_zipfile(filename):
-        return None
-    with zipfile.ZipFile(filename, "r") as z:
-        files = z.namelist()
-        if any(f == "journal.ndjson" for f in files):
-            return NdjsonImporter
-        if any(
-            f.endswith("_mark.csv")
-            or f.endswith("_review.csv")
-            or f.endswith("_note.csv")
-            for f in files
-        ):
-            return CsvImporter
-
-
 __all__ = [
     "CsvImporter",
     "NdjsonImporter",
@@ -34,5 +12,4 @@ __all__ = [
     "OPMLImporter",
     "DoubanImporter",
     "GoodreadsImporter",
-    "get_neodb_importer",
 ]
@@ -5,7 +5,6 @@ import zipfile
 from typing import Dict

 from django.utils import timezone
-from django.utils.translation import gettext as _
 from loguru import logger

 from catalog.models import ItemCategory
@@ -15,6 +14,9 @@ from .base import BaseImporter


 class CsvImporter(BaseImporter):
+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel
+
     def import_mark(self, row: Dict[str, str]) -> str:
         """Import a mark from a CSV row.

@@ -249,7 +251,7 @@ class CsvImporter(BaseImporter):

         # Set the total count in metadata
         self.metadata["total"] = total_rows
-        self.message = f"Found {total_rows} items to import"
+        self.message = f"found {total_rows} records to import"
         self.save(update_fields=["metadata", "message"])

         # Now process all files
@@ -257,7 +259,5 @@ class CsvImporter(BaseImporter):
             import_function = getattr(self, f"import_{file_type}")
             self.process_csv_file(file_path, import_function)

-        self.message = _("Import complete")
-        if self.metadata.get("failed_items", []):
-            self.message += f": {self.metadata['failed']} items failed ({len(self.metadata['failed_items'])} unique items)"
+        self.message = f"{self.metadata['imported']} items imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
         self.save()
@@ -154,6 +154,8 @@ class DoubanImporter(Task):
     def run(self):
         logger.info(f"{self.user} import start")
         self.load_sheets()
+        self.message = f"豆瓣标记和评论导入开始,共{self.metadata['total']}篇。"
+        self.save(update_fields=["message"])
         logger.info(f"{self.user} sheet loaded, {self.metadata['total']} lines total")
         for name, param in self.mark_sheet_config.items():
             self.import_mark_sheet(self.mark_data[name], param[0], name)
@@ -1,4 +1,5 @@
 import csv
 import os
 import tempfile
+import zipfile
 from datetime import timedelta
@@ -35,6 +36,13 @@ class LetterboxdImporter(Task):
         "file": None,
     }

+    @classmethod
+    def validate_file(cls, uploaded_file):
+        try:
+            return zipfile.is_zipfile(uploaded_file)
+        except Exception:
+            return False
+
     def get_item_by_url(self, url):
         try:
             h = BasicDownloader(url).download().html()
@@ -121,7 +129,6 @@ class LetterboxdImporter(Task):
                     self.progress(1)

     def progress(self, mark_state: int, url=None):
-        self.metadata["total"] += 1
         self.metadata["processed"] += 1
         match mark_state:
             case 1:
@@ -142,6 +149,7 @@ class LetterboxdImporter(Task):
         with tempfile.TemporaryDirectory() as tmpdirname:
             logger.debug(f"Extracting {filename} to {tmpdirname}")
             zipref.extractall(tmpdirname)
             if os.path.exists(tmpdirname + "/reviews.csv"):
                 with open(tmpdirname + "/reviews.csv") as f:
                     reader = csv.DictReader(f, delimiter=",")
                     for row in reader:
@@ -154,6 +162,7 @@ class LetterboxdImporter(Task):
                             row["Review"],
                             row["Tags"],
                         )
             if os.path.exists(tmpdirname + "/ratings.csv"):
                 with open(tmpdirname + "/ratings.csv") as f:
                     reader = csv.DictReader(f, delimiter=",")
                     for row in reader:
@@ -166,6 +175,7 @@ class LetterboxdImporter(Task):
                             row["Date"],
                             row["Rating"],
                         )
             if os.path.exists(tmpdirname + "/watched.csv"):
                 with open(tmpdirname + "/watched.csv") as f:
                     reader = csv.DictReader(f, delimiter=",")
                     for row in reader:
@@ -177,6 +187,7 @@ class LetterboxdImporter(Task):
                             ShelfType.COMPLETE,
                             row["Date"],
                         )
             if os.path.exists(tmpdirname + "/watchlist.csv"):
                 with open(tmpdirname + "/watchlist.csv") as f:
                     reader = csv.DictReader(f, delimiter=",")
                     for row in reader:
@@ -188,3 +199,6 @@ class LetterboxdImporter(Task):
                             ShelfType.WISHLIST,
                             row["Date"],
                         )
+        self.metadata["total"] = self.metadata["processed"]
+        self.message = f"{self.metadata['imported']} imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed"
+        self.save(update_fields=["metadata", "message"])
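Note: the Letterboxd changes drop the per-row "total" increment and instead finalize "total" after every CSV has been walked, so the progress fragment can render processed/total without the denominator drifting. A minimal sketch of the counting pattern (the mark_state-to-counter mapping is an assumption for illustration; the processed/total handling follows the hunks above):

    class ProgressSketch:
        def __init__(self):
            self.metadata = {"total": 0, "processed": 0, "imported": 0,
                             "skipped": 0, "failed": 0}
            self.message = ""

        def progress(self, mark_state: int):
            self.metadata["processed"] += 1
            bucket = {1: "imported", 0: "skipped"}.get(mark_state, "failed")
            self.metadata[bucket] += 1  # assumed mapping, for illustration

        def finish(self):
            self.metadata["total"] = self.metadata["processed"]
            self.message = (
                f"{self.metadata['imported']} imported, "
                f"{self.metadata['skipped']} skipped, "
                f"{self.metadata['failed']} failed"
            )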
@@ -4,7 +4,6 @@ import tempfile
 import zipfile
 from typing import Any, Dict

-from django.utils.translation import gettext as _
 from loguru import logger

 from journal.models import (
@@ -26,6 +25,9 @@ from .base import BaseImporter
 class NdjsonImporter(BaseImporter):
     """Importer for NDJSON files exported from NeoDB."""

+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel
+
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.items = {}
@@ -60,8 +62,8 @@ class NdjsonImporter(BaseImporter):
             metadata = item_entry.get("metadata", {})
             collection.append_item(item, metadata=metadata)
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing collection: {e}")
+        except Exception:
+            logger.exception("Error importing collection")
             return "failed"

     def import_shelf_member(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -86,8 +88,8 @@ class NdjsonImporter(BaseImporter):
                 created_time=published_dt,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing shelf member: {e}")
+        except Exception:
+            logger.exception("Error importing shelf member")
             return "failed"

     def import_shelf_log(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -110,8 +112,8 @@ class NdjsonImporter(BaseImporter):
             # return "imported" if created else "skipped"
             # count skip as success otherwise it may confuse user
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing shelf log: {e}")
+        except Exception:
+            logger.exception("Error importing shelf log")
             return "failed"

     def import_post(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -152,8 +154,8 @@ class NdjsonImporter(BaseImporter):
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing review: {e}")
+        except Exception:
+            logger.exception("Error importing review")
             return "failed"

     def import_note(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -185,8 +187,8 @@ class NdjsonImporter(BaseImporter):
                 metadata=data.get("metadata", {}),
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing note: {e}")
+        except Exception:
+            logger.exception("Error importing note")
             return "failed"

     def import_comment(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -218,8 +220,8 @@ class NdjsonImporter(BaseImporter):
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing comment: {e}")
+        except Exception:
+            logger.exception("Error importing comment")
             return "failed"

     def import_rating(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -251,8 +253,8 @@ class NdjsonImporter(BaseImporter):
                 metadata=metadata,
             )
             return "imported"
-        except Exception as e:
-            logger.error(f"Error importing rating: {e}")
+        except Exception:
+            logger.exception("Error importing rating")
             return "failed"

     def import_tag(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -271,8 +273,8 @@ class NdjsonImporter(BaseImporter):
                 },
             )
             return "imported" if created else "skipped"
-        except Exception as e:
-            logger.error(f"Error importing tag member: {e}")
+        except Exception:
+            logger.exception("Error importing tag member")
             return "failed"

     def import_tag_member(self, data: Dict[str, Any]) -> BaseImporter.ImportResult:
@@ -309,8 +311,8 @@ class NdjsonImporter(BaseImporter):
                 },
             )
             return "imported" if created else "skipped"
-        except Exception as e:
-            logger.error(f"Error importing tag member: {e}")
+        except Exception:
+            logger.exception("Error importing tag member")
             return "failed"

     def process_journal(self, file_path: str) -> None:
@@ -348,6 +350,9 @@ class NdjsonImporter(BaseImporter):
                 journal[data_type].append(data)

         self.metadata["total"] = sum(len(items) for items in journal.values())
+        self.message = f"found {self.metadata['total']} records to import"
+        self.save(update_fields=["metadata", "message"])
+
         logger.debug(f"Processing {self.metadata['total']} entries")
         if lines_error:
             logger.error(f"Error processing journal.ndjson: {lines_error} lines")
@@ -369,8 +374,8 @@ class NdjsonImporter(BaseImporter):
             for line in jsonfile:
                 try:
                     i = json.loads(line)
-                except (json.JSONDecodeError, Exception) as e:
-                    logger.error(f"Error processing catalog item: {e}")
+                except (json.JSONDecodeError, Exception):
+                    logger.exception("Error processing catalog item")
                     continue
                 u = i.get("id")
                 if not u:
@@ -381,8 +386,8 @@ class NdjsonImporter(BaseImporter):
                 self.items[u] = self.get_item_by_info_and_links("", "", links)
             logger.info(f"Loaded {item_count} items from catalog")
             self.metadata["catalog_processed"] = item_count
-        except Exception as e:
-            logger.error(f"Error parsing catalog file: {e}")
+        except Exception:
+            logger.exception("Error parsing catalog file")

     def parse_header(self, file_path: str) -> Dict[str, Any]:
         try:
@@ -392,8 +397,8 @@ class NdjsonImporter(BaseImporter):
                 header = json.loads(first_line)
                 if header.get("server"):
                     return header
-        except (json.JSONDecodeError, IOError) as e:
-            logger.error(f"Error parsing NDJSON header: {e}")
+        except (json.JSONDecodeError, IOError):
+            logger.exception("Error parsing header")
         return {}

     def run(self) -> None:
@@ -424,24 +429,5 @@ class NdjsonImporter(BaseImporter):
         logger.debug(f"Importing journal.ndjson with {header}")
         self.process_journal(journal_path)

-        source_info = self.metadata.get("journal_header", {})
-        source_summary = f" from {source_info.get('username', 'unknown')}@{source_info.get('server', 'unknown')} ver:{source_info.get('neodb_version', 'unknown')}."
-        self.message = _("Import complete") + source_summary
-
-        metadata_stats = self.metadata.get("metadata_stats", {})
-        partial_updates = metadata_stats.get("partial_updates", 0)
-        if partial_updates > 0:
-            self.message += f", {partial_updates} items with partial metadata updates"
-
-        ratings = metadata_stats.get("ratings_updated", 0)
-        comments = metadata_stats.get("comments_updated", 0)
-        tags = metadata_stats.get("tags_updated", 0)
-
-        if ratings > 0 or comments > 0 or tags > 0:
-            self.message += (
-                f" ({ratings} ratings, {comments} comments, {tags} tag sets)"
-            )
-
-        if self.metadata.get("failed_items", []):
-            self.message += f": {self.metadata['failed']} items failed ({len(self.metadata['failed_items'])} unique items)"
+        self.message = f"{self.metadata['imported']} items imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
         self.save()
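Note: the repeated logger.error → logger.exception swaps above are all the same one-line fix: inside an except block, loguru's logger.exception records the message at ERROR level and attaches the active traceback automatically, so embedding {e} in the message becomes redundant. A self-contained example:

    from loguru import logger

    try:
        {}["missing"]  # raises KeyError
    except Exception:
        # logs "Error importing rating" plus the full traceback
        logger.exception("Error importing rating")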
@@ -1,43 +1,54 @@
-import django_rq
 import listparser
 from auditlog.context import set_actor
 from django.utils.translation import gettext as _
 from loguru import logger
-from user_messages import api as msg

 from catalog.common import *
 from catalog.common.downloaders import *
 from catalog.sites.rss import RSS
 from journal.models import *
+from users.models.task import Task


-class OPMLImporter:
-    def __init__(self, user, visibility, mode):
-        self.user = user
-        self.visibility = visibility
-        self.mode = mode
+class OPMLImporter(Task):
+    class Meta:
+        app_label = "journal"  # workaround bug in TypedModel

-    def parse_file(self, uploaded_file):
-        return listparser.parse(uploaded_file.read()).feeds
+    TaskQueue = "import"
+    DefaultMetadata = {
+        "total": 0,
+        "mode": 0,
+        "processed": 0,
+        "skipped": 0,
+        "imported": 0,
+        "failed": 0,
+        "visibility": 0,
+        "failed_urls": [],
+        "file": None,
+    }

-    def import_from_file(self, uploaded_file):
-        feeds = self.parse_file(uploaded_file)
-        if not feeds:
+    @classmethod
+    def validate_file(cls, f):
+        try:
+            return bool(listparser.parse(f.read()).feeds)
+        except Exception:
             return False
-        django_rq.get_queue("import").enqueue(self.import_from_file_task, feeds)
-        return True

-    def import_from_file_task(self, feeds):
-        logger.info(f"{self.user} import opml start")
-        skip = 0
+    def run(self):
+        with open(self.metadata["file"], "r") as f:
+            feeds = listparser.parse(f.read()).feeds
+        self.metadata["total"] = len(feeds)
+        self.message = f"Processing {self.metadata['total']} feeds."
+        self.save(update_fields=["metadata", "message"])
+
         collection = None
         with set_actor(self.user):
-            if self.mode == 1:
+            if self.metadata["mode"] == 1:
                 title = _("{username}'s podcast subscriptions").format(
                     username=self.user.display_name
                 )
                 collection = Collection.objects.create(
-                    owner=self.user.identity, title=title
+                    owner=self.user.identity,
+                    title=title,
+                    visibility=self.metadata["visibility"],
                 )
             for feed in feeds:
                 logger.info(f"{self.user} import {feed.url}")
@@ -47,21 +58,26 @@ class OPMLImporter:
                     res = None
                 if not res or not res.item:
                     logger.warning(f"{self.user} feed error {feed.url}")
+                    self.metadata["failed"] += 1
                     continue
                 item = res.item
-                if self.mode == 0:
+                if self.metadata["mode"] == 0:
                     mark = Mark(self.user.identity, item)
                     if mark.shelfmember:
                         logger.info(f"{self.user} marked, skip {feed.url}")
-                        skip += 1
+                        self.metadata["skipped"] += 1
                     else:
+                        self.metadata["imported"] += 1
                         mark.update(
-                            ShelfType.PROGRESS, None, None, visibility=self.visibility
+                            ShelfType.PROGRESS,
+                            None,
+                            None,
+                            visibility=self.metadata["visibility"],
                         )
-                elif self.mode == 1 and collection:
+                elif self.metadata["mode"] == 1 and collection:
+                    self.metadata["imported"] += 1
                     collection.append_item(item)
-        logger.info(f"{self.user} import opml end")
-        msg.success(
-            self.user,
-            f"OPML import complete, {len(feeds)} feeds processed, {skip} exisiting feeds skipped.",
-        )
+                self.metadata["processed"] += 1
+                self.save(update_fields=["metadata"])
+        self.message = f"{self.metadata['imported']} feeds imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed."
+        self.save(update_fields=["message"])
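Note: OPMLImporter now has the same Task shape as the other importers, so the view layer can treat them all uniformly. A sketch of the call sequence the views later in this diff use (argument values hypothetical; `user` is an existing authenticated user):

    task = OPMLImporter.create(
        user,                       # task owner
        visibility=0,               # stored into task.metadata
        mode=1,                     # 0 = mark as listening, 1 = new collection
        file="/path/to/upload.opml",
    )
    task.enqueue()                  # run() executes on the "import" rq queue
    # progress then surfaces through task.metadata / task.message,
    # polled by users/user_task_status.html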
@@ -10,6 +10,16 @@ class Migration(migrations.Migration):
     ]

     operations = [
+        migrations.CreateModel(
+            name="BaseImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("users.task",),
+        ),
         migrations.CreateModel(
             name="CsvImporter",
             fields=[],
@@ -20,4 +30,24 @@ class Migration(migrations.Migration):
             },
             bases=("users.task",),
         ),
+        migrations.CreateModel(
+            name="OPMLImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("users.task",),
+        ),
+        migrations.CreateModel(
+            name="NdjsonImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("journal.baseimporter",),
+        ),
     ]
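Note: every CreateModel here is a proxy, so this migration touches no tables. A brief sketch of the pattern (an illustration, not the repo's exact definitions; assumption: BaseImporter subclasses users.models.task.Task, a TypedModel, matching the bases declared above):

    from users.models.task import Task

    class BaseImporter(Task):            # shares the users_task table
        class Meta:
            app_label = "journal"        # workaround bug in TypedModel

    class NdjsonImporter(BaseImporter):
        class Meta:
            app_label = "journal"
        # rows are discriminated by the "type" column, e.g.
        # "journal.ndjsonimporter", per the users migration later in this diff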
@@ -9,7 +9,7 @@ from loguru import logger

 from catalog.models import Edition, IdType, Movie, TVEpisode, TVSeason, TVShow
 from journal.exporters import CsvExporter
-from journal.importers import CsvImporter, get_neodb_importer
+from journal.importers import CsvImporter
 from users.models import User

 from ..models import *
@@ -219,10 +219,9 @@ class CsvExportImportTest(TestCase):
             f"Expected file {filename} with {expected_data_count} data rows, but file not found"
         )

-        self.assertEqual(get_neodb_importer(export_path), CsvImporter)
         importer = CsvImporter.create(user=self.user2, file=export_path, visibility=2)
         importer.run()
-        self.assertEqual(importer.message, "Import complete")
+        self.assertEqual(importer.message, "11 items imported, 0 skipped, 0 failed.")

         # Verify imported data
@@ -18,7 +18,7 @@ from catalog.models import (
     TVShow,
 )
 from journal.exporters import NdjsonExporter
-from journal.importers import NdjsonImporter, get_neodb_importer
+from journal.importers import NdjsonImporter
 from users.models import User

 from ..models import *
@@ -363,12 +363,11 @@ class NdjsonExportImportTest(TestCase):
         self.assertEqual(type_counts["ShelfLog"], logs.count())

         # Now import the export file into a different user account
-        self.assertEqual(get_neodb_importer(export_path), NdjsonImporter)
         importer = NdjsonImporter.create(
             user=self.user2, file=export_path, visibility=2
         )
         importer.run()
-        self.assertIn("Import complete", importer.message)
+        self.assertIn("61 items imported, 0 skipped, 0 failed.", importer.message)

         # Verify imported data
@@ -14,6 +14,7 @@ class Migration(migrations.Migration):
             name="type",
             field=models.CharField(
                 choices=[
+                    ("journal.baseimporter", "base importer"),
                     ("journal.csvexporter", "csv exporter"),
                     ("journal.csvimporter", "csv importer"),
                     ("journal.doubanimporter", "douban importer"),
@@ -21,6 +22,8 @@ class Migration(migrations.Migration):
                     ("journal.goodreadsimporter", "goodreads importer"),
                     ("journal.letterboxdimporter", "letterboxd importer"),
                     ("journal.ndjsonexporter", "ndjson exporter"),
+                    ("journal.ndjsonimporter", "ndjson importer"),
+                    ("journal.opmlimporter", "opml importer"),
                 ],
                 db_index=True,
                 max_length=255,
@@ -82,7 +82,6 @@ class Task(TypedModel):
         task.refresh_from_db()
         task.state = cls.States.complete if ok else cls.States.failed
         task.save()
         task.notify()

     def enqueue(self):
         return django_rq.get_queue(self.TaskQueue).enqueue(
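Note: this wrapper is what turns every importer and exporter into a uniformly pollable job. A condensed sketch of the lifecycle as the rest of this diff drives it (create/enqueue/latest_task appear in the view code below; the worker-side wrapping is the hunk above):

    task = LetterboxdImporter.create(user, visibility=0, file="/tmp/upload.zip")
    task.enqueue()          # hands run() to the django_rq queue named in TaskQueue

    # worker side: run() mutates task.metadata / task.message as it progresses,
    # then the wrapper above marks state complete/failed and calls notify()

    latest = LetterboxdImporter.latest_task(user)   # what the UI polls
    print(latest.state, latest.metadata.get("processed"), latest.message)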
@@ -10,6 +10,13 @@
     <meta name="viewport" content="width=device-width, initial-scale=1.0">
     <title>{{ site_name }} - {% trans 'Data Management' %}</title>
     {% include "common_libs.html" %}
+    <script>
+      document.addEventListener('htmx:responseError', (event) => {
+        let response = event.detail.xhr.response;
+        let body = response ? response : `Request error: ${event.detail.xhr.statusText}`;
+        alert(body);
+      });
+    </script>
   </head>
   <body>
     {% include "_header.html" %}
@@ -17,80 +24,217 @@ (old and new lines interleaved; add/remove coloring not recoverable from this page)
  <div class="grid__main">
    <article>
      <details>
        <summary>{% trans 'Export Data' %}</summary>
        <form action="{% url 'users:export_csv' %}"
        <summary>{% trans 'Import Marks and Reviews from Douban' %}</summary>
        <form action="{% url 'users:import_douban' %}"
              method="post"
              enctype="multipart/form-data">
          {% csrf_token %}
          {% blocktrans %}Select <code>.xlsx</code> exported from <a href="https://doufen.org" target="_blank" rel="noopener">Doufen</a>{% endblocktrans %}
          <input type="file" name="file" id="excel" required accept=".xlsx">
          <fieldset>
            <legend>{% trans "Import Method" %}</legend>
            <label for="import_mode_0">
              <input id="import_mode_0" type="radio" name="import_mode" value="0" checked>
              {% trans "Merge: only update when status changes from wishlist to in-progress, or from in-progress to complete." %}
            </label>
            <label for="import_mode_1">
              <input id="import_mode_1" type="radio" name="import_mode" value="1">
              {% trans "Overwrite: update all imported status." %}
            </label>
          </fieldset>
          <p>
            {% trans 'Visibility' %}:
            <br>
            <label for="id_visibility_0">
              <input type="radio"
                     name="visibility"
                     value="0"
                     required=""
                     id="id_visibility_0"
                     checked>
              {% trans 'Public' %}
            </label>
            <label for="id_visibility_1">
              <input type="radio"
                     name="visibility"
                     value="1"
                     required=""
                     id="id_visibility_1">
              {% trans 'Followers Only' %}
            </label>
            <label for="id_visibility_2">
              <input type="radio"
                     name="visibility"
                     value="2"
                     required=""
                     id="id_visibility_2">
              {% trans 'Mentioned Only' %}
            </label>
          </p>
          <input type="submit"
                 value="{% trans 'Export marks, reviews and notes in CSV' %}" />
          {% if csv_export_task %}
            <br>
            {% trans 'Last export' %}: {{ csv_export_task.created_time }}
            {% trans 'Status' %}: {{ csv_export_task.get_state_display }}
            <br>
            {{ csv_export_task.message }}
            {% if csv_export_task.metadata.file %}
              <a href="{% url 'users:export_csv' %}" download>{% trans 'Download' %}</a>
            {% endif %}
          {% endif %}
        </form>
        <hr>
        <form action="{% url 'users:export_ndjson' %}"
              method="post"
              enctype="multipart/form-data">
          {% csrf_token %}
          <input type="submit" value="{% trans 'Export everything in NDJSON' %}" />
          {% if ndjson_export_task %}
            <br>
            {% trans 'Last export' %}: {{ ndjson_export_task.created_time }}
            {% trans 'Status' %}: {{ ndjson_export_task.get_state_display }}
            <br>
            {{ ndjson_export_task.message }}
            {% if ndjson_export_task.metadata.file %}
              <a href="{% url 'users:export_ndjson' %}" download><i class="fa fa-file-code"></i> {% trans 'Download' %}</a>
            {% endif %}
          {% endif %}
        </form>
        <hr>
        <form action="{% url 'users:export_marks' %}"
              method="post"
              enctype="multipart/form-data">
          {% csrf_token %}
          <input type="submit"
                 class="secondary"
                 value="{% trans 'Export marks and reviews in XLSX (Doufen format)' %}" />
          <small>exporting to this format will be deprecated soon.</small>
          {% if export_task %}
            <br>
            {% trans 'Last export' %}: {{ export_task.created_time }}
            {% trans 'Status' %}: {{ export_task.get_state_display }}
            <br>
            {{ export_task.message }}
            {% if export_task.metadata.file %}
              <a href="{% url 'users:export_marks' %}" download>{% trans 'Download' %}</a>
            {% endif %}
          {% endif %}
          {% if import_task.status == "pending" %} onclick="return confirm('{% trans "Another import is in progress, starting a new import may cause issues, sure to import?" %}')" value="{% trans "Import in progress, please wait" %}" {% else %} value="{% trans 'Import' %}" {% endif %} />
        </form>
      </details>
    </article>
    <article>
      <details>
        <summary>{% trans 'Import Data' %}</summary>
        <form action="{% url 'users:import_neodb' %}"
              method="post"
        <summary>{% trans 'Import Shelf or List from Goodreads' %}</summary>
        <form hx-post="{% url 'users:import_goodreads' %}">
          {% csrf_token %}
          <div>
            {% trans 'Link to Goodreads Profile / Shelf / List' %}
            <ul>
              <li>
                Profile <code>https://www.goodreads.com/user/show/12345-janedoe</code>
                <br>
                {% trans 'want-to-read / currently-reading / read books and their reviews will be imported.' %}
              </li>
              <li>
                Shelf <code>https://www.goodreads.com/review/list/12345-janedoe?shelf=name</code>
                <br>
                {% trans 'Shelf will be imported as a new collection.' %}
              </li>
              <li>
                List <code>https://www.goodreads.com/list/show/155086.Popular_Highlights</code>
                <br>
                {% trans 'List will be imported as a new collection.' %}
              </li>
              <li>
                <mark>Who Can View My Profile</mark> must be set as <mark>anyone</mark> prior to import.
              </li>
            </ul>
            <input type="url"
                   name="url"
                   value=""
                   placeholder="https://www.goodreads.com/user/show/12345-janedoe"
                   required>
            <input type="submit" value="{% trans 'Import' %}" />
          </div>
          {% include "users/user_task_status.html" with task=goodreads_task %}
        </form>
      </details>
    </article>
    <article>
      <details>
        <summary>{% trans 'Import from Letterboxd' %}</summary>
        <form hx-post="{% url 'users:import_letterboxd' %}"
              enctype="multipart/form-data">
          {% csrf_token %}
          <ul>
            <li>
              In letterboxd.com,
              <a href="https://letterboxd.com/settings/data/"
                 target="_blank"
                 rel="noopener">click DATA in Settings</a>;
              or in its app, tap Advanced Settings in Settings, tap EXPORT YOUR DATA
            </li>
            <li>
              download file with name like <code>letterboxd-username-2018-03-11-07-52-utc.zip</code>, do not unzip.
            </li>
          </ul>
          <br>
          <input type="file" name="file" required accept=".zip">
          <p>
            {% trans 'Visibility' %}:
            <br>
            <label for="l_visibility_0">
              <input type="radio"
                     name="visibility"
                     value="0"
                     required=""
                     id="l_visibility_0"
                     checked>
              {% trans 'Public' %}
            </label>
            <label for="l_visibility_1">
              <input type="radio"
                     name="visibility"
                     value="1"
                     required=""
                     id="l_visibility_1">
              {% trans 'Followers Only' %}
            </label>
            <label for="l_visibility_2">
              <input type="radio"
                     name="visibility"
                     value="2"
                     required=""
                     id="l_visibility_2">
              {% trans 'Mentioned Only' %}
            </label>
          </p>
          <input type="submit" value="{% trans 'Import' %}" />
          <small>{% trans 'Only forward changes(none->to-watch->watched) will be imported.' %}</small>
          {% include "users/user_task_status.html" with task=letterboxd_task %}
        </form>
      </details>
    </article>
    <article>
      <details>
        <summary>{% trans 'Import Podcast Subscriptions' %}</summary>
        <form hx-post="{% url 'users:import_opml' %}" enctype="multipart/form-data">
          {% csrf_token %}
          <div>
            {% trans 'Import Method' %}:
            <label for="opml_import_mode_0">
              <input id="opml_import_mode_0"
                     type="radio"
                     name="import_mode"
                     value="0"
                     checked>
              {% trans 'Mark as listening' %}
            </label>
            <label for="opml_import_mode_1">
              <input id="opml_import_mode_1" type="radio" name="import_mode" value="1">
              {% trans 'Import as a new collection' %}
            </label>
            {% trans 'Visibility' %}:
            <label for="opml_visibility_0">
              <input type="radio"
                     name="visibility"
                     value="0"
                     required=""
                     id="opml_visibility_0"
                     checked>
              {% trans 'Public' %}
            </label>
            <label for="opml_visibility_1">
              <input type="radio"
                     name="visibility"
                     value="1"
                     required=""
                     id="opml_visibility_1">
              {% trans 'Followers Only' %}
            </label>
            <label for="opml_visibility_2">
              <input type="radio"
                     name="visibility"
                     value="2"
                     required=""
                     id="opml_visibility_2">
              {% trans 'Mentioned Only' %}
            </label>
            <br>
            {% trans 'Select OPML file' %}
            <input type="file" name="file" required accept=".opml,.xml">
            <input type="submit" value="{% trans 'Import' %}" />
          </div>
          {% include "users/user_task_status.html" with task=opml_import_task %}
        </form>
      </details>
    </article>
    <article>
      <details>
        <summary>{% trans 'Import NeoDB Archive' %}</summary>
        <form hx-post="{% url 'users:import_neodb' %}"
              enctype="multipart/form-data">
          {% csrf_token %}
          <ul>
            <li>
              {% trans 'Upload a <code>.zip</code> file containing <code>.csv</code> or <code>.ndjson</code> files exported from NeoDB.' %}
            </li>
            <li>{% trans 'Existing marks and reviews with newer dates will be preserved.' %}</li>
            <li>
              {% trans 'Both CSV and NDJSON formats exported from NeoDB are supported. NDJSON format includes more data, like collections.' %}
            </li>
            <li>{% trans 'Existing data may be overwritten.' %}</li>
          </ul>
          <br>
          <input type="file" name="file" id="neodb_import_file" required accept=".zip">
          <div id="detected_format_info"
               style="display: none;
@@ -132,8 +276,8 @@
             </label>
           </p>
         </div>
-        <input type="hidden" name="format_type" id="format_type" value="">
-        <input type="submit" value="{% trans 'Import' %}" />
+        <input type="hidden" name="format_type" id="format_type" value="" required>
+        <input type="submit" value="{% trans 'Import' %}" id="import_submit" />
         <script src="{{ cdn_url }}/npm/jszip@3.10.1/dist/jszip.min.js"></script>
         <script>
           document.addEventListener('DOMContentLoaded', function() {
@@ -203,8 +347,7 @@
             document.getElementById('detected_format').innerHTML = formatIcon + format;
             document.getElementById('format_type').value = formatValue;

-            // Show visibility settings only for NDJSON format
-            if (formatValue === 'ndjson') {
+            if (formatValue === 'csv') {
               document.getElementById('visibility_settings').style.display = 'block';
             } else {
               document.getElementById('visibility_settings').style.display = 'none';
@@ -222,210 +365,53 @@ (old and new lines interleaved; add/remove coloring not recoverable from this page)
            // Hide visibility settings on error
            document.getElementById('visibility_settings').style.display = 'none';
          }
          if (document.getElementById('format_type').value == '') {
            document.getElementById('import_submit').setAttribute('disabled', '')
          } else {
            document.getElementById('import_submit').removeAttribute('disabled', '')
          }
        });
      });
    </script>
    {% if neodb_import_task %}
      {% include "users/user_task_status.html" with task=neodb_import_task %}
    {% endif %}
  </form>
</details>
</article>
<article>
  <details>
    <summary>{% trans 'Import Marks and Reviews from Douban' %}</summary>
    <form action="{% url 'users:import_douban' %}"
          method="post"
          enctype="multipart/form-data">
    <summary>{% trans 'Export NeoDB Archive' %}</summary>
    <form hx-post="{% url 'users:export_csv' %}" enctype="multipart/form-data">
      {% csrf_token %}
      {% blocktrans %}Select <code>.xlsx</code> exported from <a href="https://doufen.org" target="_blank" rel="noopener">Doufen</a>{% endblocktrans %}
      <input type="file" name="file" id="excel" required accept=".xlsx">
      <fieldset>
        <legend>{% trans "Import Method" %}</legend>
        <label for="import_mode_0">
          <input id="import_mode_0" type="radio" name="import_mode" value="0" checked>
          {% trans "Merge: only update when status changes from wishlist to in-progress, or from in-progress to complete." %}
        </label>
        <label for="import_mode_1">
          <input id="import_mode_1" type="radio" name="import_mode" value="1">
          {% trans "Overwrite: update all imported status." %}
        </label>
      </fieldset>
      <p>
        {% trans 'Visibility' %}:
        <br>
        <label for="id_visibility_0">
          <input type="radio"
                 name="visibility"
                 value="0"
                 required=""
                 id="id_visibility_0"
                 checked>
          {% trans 'Public' %}
        </label>
        <label for="id_visibility_1">
          <input type="radio"
                 name="visibility"
                 value="1"
                 required=""
                 id="id_visibility_1">
          {% trans 'Followers Only' %}
        </label>
        <label for="id_visibility_2">
          <input type="radio"
                 name="visibility"
                 value="2"
                 required=""
                 id="id_visibility_2">
          {% trans 'Mentioned Only' %}
        </label>
      </p>
      <input type="submit"
             {% if import_task.status == "pending" %} onclick="return confirm('{% trans "Another import is in progress, starting a new import may cause issues, sure to import?" %}')" value="{% trans "Import in progress, please wait" %}" {% else %} value="{% trans 'Import' %}" {% endif %} />
             value="{% trans 'Export marks, reviews and notes in CSV' %}" />
      {% include "users/user_task_status.html" with task=csv_export_task %}
    </form>
    <div hx-get="{% url 'users:import_status' %}"
         hx-trigger="load delay:1s"
         hx-swap="outerHTML"></div>
  </details>
</article>
<article>
  <details>
    <summary>{% trans 'Import Shelf or List from Goodreads' %}</summary>
    <form action="{% url 'users:import_goodreads' %}" method="post">
    <hr>
    <form hx-post="{% url 'users:export_ndjson' %}"
          enctype="multipart/form-data">
      {% csrf_token %}
      <div>
        {% trans 'Link to Goodreads Profile / Shelf / List' %}
        <input type="url"
               name="url"
               value=""
               placeholder="https://www.goodreads.com/user/show/12345-janedoe"
               required>
        <input type="submit" value="{% trans 'Import' %}" />
        <small>
          {% if goodreads_task %}
            <br>
            {% trans 'Last import started' %}: {{ goodreads_task.created_time }}
            {% trans 'Status' %}: {{ goodreads_task.get_state_display }}。
            <br>
            {{ goodreads_task.message }}
          {% endif %}
        </small>
      </div>
      <ul>
        <li>
          Profile <code>https://www.goodreads.com/user/show/12345-janedoe</code>
          {% trans 'want-to-read / currently-reading / read books and their reviews will be imported.' %}
        </li>
        <li>
          Shelf <code>https://www.goodreads.com/review/list/12345-janedoe?shelf=name</code>
          {% trans 'Shelf will be imported as a new collection.' %}
        </li>
        <li>
          List <code>https://www.goodreads.com/list/show/155086.Popular_Highlights</code>
          {% trans 'List will be imported as a new collection.' %}
        </li>
        <li>
          <mark>Who Can View My Profile</mark> must be set as <mark>anyone</mark> prior to import.
        </li>
      </ul>
      <input type="submit" value="{% trans 'Export everything in NDJSON' %}" />
      {% include "users/user_task_status.html" with task=ndjson_export_task %}
    </form>
  </details>
</article>
<article>
  <details>
    <summary>{% trans 'Import from Letterboxd' %}</summary>
    <form action="{% url 'users:import_letterboxd' %}"
    <hr>
    <form action="{% url 'users:export_marks' %}"
          method="post"
          enctype="multipart/form-data">
      {% csrf_token %}
      <ul>
        <li>
          In letterboxd.com,
          <a href="https://letterboxd.com/settings/data/"
             target="_blank"
             rel="noopener">click DATA in Settings</a>;
          or in its app, tap Advanced Settings in Settings, tap EXPORT YOUR DATA
        </li>
        <li>
          download file with name like <code>letterboxd-username-2018-03-11-07-52-utc.zip</code>, do not unzip.
        </li>
      </ul>
      <b>exporting to this format will be deprecated soon, please use csv or ndjson format.</b>
      <input type="submit"
             class="secondary"
             value="{% trans 'Export marks and reviews in XLSX (Doufen format)' %}" />
      {% if export_task %}
        <br>
        <input type="file" name="file" required accept=".zip">
        <p>
          {% trans 'Visibility' %}:
          {% trans 'Last export' %}: {{ export_task.created_time }}
          {% trans 'Status' %}: {{ export_task.get_state_display }}
          <br>
          <label for="l_visibility_0">
            <input type="radio"
                   name="visibility"
                   value="0"
                   required=""
                   id="l_visibility_0"
                   checked>
            {% trans 'Public' %}
          </label>
          <label for="l_visibility_1">
            <input type="radio"
                   name="visibility"
                   value="1"
                   required=""
                   id="l_visibility_1">
            {% trans 'Followers Only' %}
          </label>
          <label for="l_visibility_2">
            <input type="radio"
                   name="visibility"
                   value="2"
                   required=""
                   id="l_visibility_2">
            {% trans 'Mentioned Only' %}
          </label>
        </p>
        <input type="submit" value="{% trans 'Import' %}" />
        <small>
          {% trans 'Only forward changes(none->to-watch->watched) will be imported.' %}
          {% if letterboxd_task %}
            <br>
            {% trans 'Last import started' %}: {{ letterboxd_task.created_time }}
            {% trans 'Status' %}: {{ letterboxd_task.get_state_display }}。
            <br>
            {{ letterboxd_task.message }}
            {% if letterboxd_task.metadata.failed_urls %}
              {% trans 'Failed links, likely due to Letterboxd error, you may have to mark them manually' %}:
              <br>
              <textarea readonly>{% for url in letterboxd_task.metadata.failed_urls %}{{url}} {% endfor %}</textarea>
            {{ export_task.message }}
            {% if export_task.metadata.file %}
              <a href="{% url 'users:export_marks' %}" download>{% trans 'Download' %}</a>
            {% endif %}
          {% endif %}
        </small>
    </form>
  </details>
</article>
<article>
  <details>
    <summary>{% trans 'Import Podcast Subscriptions' %}</summary>
    <form action="{% url 'users:import_opml' %}"
          method="post"
          enctype="multipart/form-data">
      {% csrf_token %}
      <div>
        {% trans 'Import Method' %}:
        <label for="opml_import_mode_0">
          <input id="opml_import_mode_0"
                 type="radio"
                 name="import_mode"
                 value="0"
                 checked>
          {% trans 'Mark as listening' %}
        </label>
        <label for="opml_import_mode_1">
          <input id="opml_import_mode_1" type="radio" name="import_mode" value="1">
          {% trans 'Import as a new collection' %}
        </label>
        <br>
        {% trans 'Select OPML file' %}
        <input type="file" name="file" id="excel" required accept=".opml,.xml">
        <input type="submit" value="{% trans 'Import' %}" />
      </div>
    </form>
  </details>
</article>
@@ -441,25 +427,6 @@
         </div>
       </details>
     </article>
-    {% comment %}
-    <article>
-      <details>
-        <summary>{% trans 'Reset visibility for all marks' %}</summary>
-        <form action="{% url 'users:reset_visibility' %}" method="post">
-          {% csrf_token %}
-          <input type="submit" value="{% trans 'Reset' %}" />
-          <div>
-            <input type="radio" name="visibility" id="visPublic" value="0" checked>
-            <label for="visPublic">{% trans 'Public' %}</label>
-            <input type="radio" name="visibility" id="visFollower" value="1">
-            <label for="visFollower">{% trans 'Followers Only' %}</label>
-            <input type="radio" name="visibility" id="visSelf" value="2">
-            <label for="visSelf">{% trans 'Mentioned Only' %}</label>
-          </div>
-        </form>
-      </details>
-    </article>
-    {% endcomment %}
   </div>
   {% include "_sidebar.html" with show_profile=1 identity=request.user.identity %}
 </main>
@@ -1,10 +1,23 @@
 {% load i18n %}
-<div hx-target="this"
-     {% if task.state == 0 or task.state == 1 %} hx-get="{% url 'users:user_task_status' task.type %}" hx-trigger="intersect once, every 30s"{% endif %}
+{% if task %}
+  <div hx-target="this"
+       {% if task.state == 0 or task.state == 1 %} hx-get="{% url 'users:user_task_status' task.type %}" hx-trigger="every 30s"{% endif %}
       hx-swap="outerHTML">
-  {% trans 'Requested' %}: {{ task.created_time }}
-  ({{ task.get_state_display }})
+    <div>
+      {% if task.state == 0 %}
+        <i class="fa-solid fa-spinner fa-spin"></i>
+      {% elif task.state == 1 %}
+        <i class="fa-solid fa-gear fa-spin"></i>
+      {% elif task.state == 3 %}
+        <i class="fa-solid fa-triangle-exclamation"></i>
+      {% elif 'exporter' in task.type %}
+        <a href="{% url 'users:user_task_download' task.type %}" download><i class="fa fa-download"></i></a>
+      {% else %}
+        <i class="fa-solid fa-check"></i>
+      {% endif %}
+      {{ task.created_time }}
+      {{ task.message }}
+    </div>
     {% if task.state == 0 or task.state == 1 %}
       {% if task.metadata.total and task.metadata.processed %}
         <div>
@@ -14,7 +27,7 @@
       {% endif %}
       {% if task.metadata.failed_items %}
         {% trans 'Failed items' %}:
         <br>
         <textarea readonly>{% for item in task.metadata.failed_items %}{{item}} {% endfor %}</textarea>
       {% endif %}
     </div>
   </div>
+{% endif %}
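Note: while a task is in state 0 or 1, this fragment re-requests itself every 30s via hx-get and swaps itself out — the "auto refresh progress" of the commit title. The numeric checks imply an integer state enum on Task; a sketch consistent with the icons above (0/1 animate, 3 warns, otherwise done) — the exact enum lives in the Task model and is inferred here, not quoted:

    from django.db import models

    class States(models.IntegerChoices):
        pending = 0    # spinner icon, keep polling
        started = 1    # gear icon, keep polling
        complete = 2   # check icon, or download link for exporters
        failed = 3     # warning icon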
@@ -11,6 +11,9 @@ urlpatterns = [
     path("info", account_info, name="info"),
     path("profile", account_profile, name="profile"),
+    path("task/<str:task_type>/status", user_task_status, name="user_task_status"),
+    path(
+        "task/<str:task_type>/download", user_task_download, name="user_task_download"
+    ),
     path("data/import/status", data_import_status, name="import_status"),
     path("data/import/goodreads", import_goodreads, name="import_goodreads"),
     path("data/import/douban", import_douban, name="import_douban"),
@@ -4,6 +4,7 @@ import os
 from django.conf import settings
 from django.contrib import messages
 from django.contrib.auth.decorators import login_required
+from django.core.exceptions import BadRequest
 from django.db.models import Min
 from django.http import HttpResponse
 from django.shortcuts import redirect, render
@@ -20,7 +21,6 @@ from journal.importers import (
     LetterboxdImporter,
     NdjsonImporter,
     OPMLImporter,
-    get_neodb_importer,
 )
 from journal.models import ShelfType
 from takahe.utils import Takahe
@@ -97,7 +97,6 @@ def data(request):
     # Import tasks - check for both CSV and NDJSON importers
     csv_import_task = CsvImporter.latest_task(request.user)
     ndjson_import_task = NdjsonImporter.latest_task(request.user)

     # Use the most recent import task for display
     if ndjson_import_task and (
         not csv_import_task
@@ -119,6 +118,7 @@ def data(request):
             "ndjson_export_task": NdjsonExporter.latest_task(request.user),
             "letterboxd_task": LetterboxdImporter.latest_task(request.user),
             "goodreads_task": GoodreadsImporter.latest_task(request.user),
+            # "opml_task": OPMLImporter.latest_task(request.user),
             "years": years,
         },
     )
@@ -150,6 +150,8 @@ def user_task_status(request, task_type: str):
         case "journal.letterboxdimporter":
             task_cls = LetterboxdImporter
         case "journal.goodreadsimporter":
             task_cls = GoodreadsImporter
+        case "journal.opmlimporter":
+            task_cls = OPMLImporter
         case "journal.doubanimporter":
             task_cls = DoubanImporter
         case _:
@@ -158,6 +160,28 @@ def user_task_status(request, task_type: str):
     return render(request, "users/user_task_status.html", {"task": task})


+@login_required
+def user_task_download(request, task_type: str):
+    match task_type:
+        case "journal.csvexporter":
+            task_cls = CsvExporter
+        case "journal.ndjsonexporter":
+            task_cls = NdjsonExporter
+        case _:
+            return redirect(reverse("users:data"))
+    task = task_cls.latest_task(request.user)
+    if not task or task.state != Task.States.complete or not task.metadata.get("file"):
+        messages.add_message(request, messages.ERROR, _("Export file not available."))
+        return redirect(reverse("users:data"))
+    response = HttpResponse()
+    response["X-Accel-Redirect"] = (
+        settings.MEDIA_URL + task.metadata["file"][len(settings.MEDIA_ROOT) :]
+    )
+    response["Content-Type"] = "application/zip"
+    response["Content-Disposition"] = f'attachment; filename="{task.filename}.zip"'
+    return response
+
+
 @login_required
 def export_reviews(request):
     if request.method != "POST":
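Note: user_task_download never streams the zip through Django; it answers with an empty body plus an X-Accel-Redirect header, and the front-end server (assumption: nginx with an internal location that maps MEDIA_URL onto MEDIA_ROOT) sends the file itself. A toy illustration of the path rewrite above (paths hypothetical):

    MEDIA_ROOT = "/app/media"
    MEDIA_URL = "/media/"
    stored_file = "/app/media/export/user42.zip"    # task.metadata["file"]

    internal_uri = MEDIA_URL + stored_file[len(MEDIA_ROOT):]
    print(internal_uri)   # /media//export/user42.zip
    # nginx merges consecutive slashes by default (merge_slashes on),
    # so the internal location still resolves the file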
@@ -167,6 +191,7 @@ def export_reviews(request):

 @login_required
 def export_marks(request):
+    # TODO: deprecated
     if request.method == "POST":
         DoufenExporter.create(request.user).enqueue()
         messages.add_message(request, messages.INFO, _("Generating exports."))
@@ -206,22 +231,10 @@ def export_csv(request):
             )
             return redirect(reverse("users:data"))
         CsvExporter.create(request.user).enqueue()
         messages.add_message(request, messages.INFO, _("Generating exports."))
-        return redirect(reverse("users:data"))
-    else:
-        task = CsvExporter.latest_task(request.user)
-        if not task or task.state != Task.States.complete:
-            messages.add_message(
-                request, messages.ERROR, _("Export file not available.")
+        return redirect(
+            reverse("users:user_task_status", args=("journal.csvexporter",))
         )
-            return redirect(reverse("users:data"))
-        response = HttpResponse()
-        response["X-Accel-Redirect"] = (
-            settings.MEDIA_URL + task.metadata["file"][len(settings.MEDIA_ROOT) :]
-        )
-        response["Content-Type"] = "application/zip"
-        response["Content-Disposition"] = f'attachment; filename="{task.filename}.zip"'
-        return response
@@ -238,22 +251,10 @@ def export_ndjson(request):
             )
             return redirect(reverse("users:data"))
         NdjsonExporter.create(request.user).enqueue()
         messages.add_message(request, messages.INFO, _("Generating exports."))
-        return redirect(reverse("users:data"))
-    else:
-        task = NdjsonExporter.latest_task(request.user)
-        if not task or task.state != Task.States.complete:
-            messages.add_message(
-                request, messages.ERROR, _("Export file not available.")
+        return redirect(
+            reverse("users:user_task_status", args=("journal.ndjsonexporter",))
         )
-            return redirect(reverse("users:data"))
-        response = HttpResponse()
-        response["X-Accel-Redirect"] = (
-            settings.MEDIA_URL + task.metadata["file"][len(settings.MEDIA_ROOT) :]
-        )
-        response["Content-Type"] = "application/zip"
-        response["Content-Disposition"] = f'attachment; filename="{task.filename}.zip"'
-        return response
@@ -280,24 +281,26 @@ def sync_mastodon_preference(request):

 @login_required
 def import_goodreads(request):
-    if request.method == "POST":
+    if request.method != "POST":
+        return redirect(reverse("users:data"))
     raw_url = request.POST.get("url")
-        if GoodreadsImporter.validate_url(raw_url):
-            GoodreadsImporter.create(
+    if not GoodreadsImporter.validate_url(raw_url):
+        raise BadRequest(_("Invalid URL."))
+    task = GoodreadsImporter.create(
         request.user,
         visibility=int(request.POST.get("visibility", 0)),
         url=raw_url,
-            ).enqueue()
-            messages.add_message(request, messages.INFO, _("Import in progress."))
-        else:
-            messages.add_message(request, messages.ERROR, _("Invalid URL."))
-    return redirect(reverse("users:data"))
+    )
+    task.enqueue()
+    return redirect(reverse("users:user_task_status", args=(task.type,)))


 @login_required
 def import_douban(request):
     if request.method != "POST":
         return redirect(reverse("users:data"))
+    if not DoubanImporter.validate_file(request.FILES["file"]):
+        raise BadRequest(_("Invalid file."))
     f = (
         settings.MEDIA_ROOT
         + "/"
@@ -307,24 +310,22 @@ def import_douban(request):
     with open(f, "wb+") as destination:
         for chunk in request.FILES["file"].chunks():
             destination.write(chunk)
-    if not DoubanImporter.validate_file(request.FILES["file"]):
-        messages.add_message(request, messages.ERROR, _("Invalid file."))
-        return redirect(reverse("users:data"))
-    DoubanImporter.create(
+    task = DoubanImporter.create(
         request.user,
         visibility=int(request.POST.get("visibility", 0)),
         mode=int(request.POST.get("import_mode", 0)),
         file=f,
-    ).enqueue()
-    messages.add_message(
-        request, messages.INFO, _("File is uploaded and will be imported soon.")
     )
-    return redirect(reverse("users:data"))
+    task.enqueue()
+    return redirect(reverse("users:user_task_status", args=(task.type,)))


 @login_required
 def import_letterboxd(request):
-    if request.method == "POST":
+    if request.method != "POST":
+        return redirect(reverse("users:data"))
+    if not LetterboxdImporter.validate_file(request.FILES["file"]):
+        raise BadRequest(_("Invalid file."))
     f = (
         settings.MEDIA_ROOT
         + "/"
@@ -334,37 +335,50 @@ def import_letterboxd(request):
     with open(f, "wb+") as destination:
         for chunk in request.FILES["file"].chunks():
             destination.write(chunk)
-    LetterboxdImporter.create(
+    task = LetterboxdImporter.create(
         request.user,
         visibility=int(request.POST.get("visibility", 0)),
         file=f,
-    ).enqueue()
-    messages.add_message(
-        request, messages.INFO, _("File is uploaded and will be imported soon.")
     )
-    return redirect(reverse("users:data"))
+    task.enqueue()
+    return redirect(reverse("users:user_task_status", args=(task.type,)))


 @login_required
 def import_opml(request):
-    if request.method == "POST":
-        importer = OPMLImporter(
-            request.user,
-            int(request.POST.get("visibility", 0)),
-            int(request.POST.get("import_mode", 0)),
-        )
-        if importer.import_from_file(request.FILES["file"]):
-            messages.add_message(
-                request, messages.INFO, _("File is uploaded and will be imported soon.")
-            )
-        else:
-            messages.add_message(request, messages.ERROR, _("Invalid file."))
+    if request.method != "POST":
         return redirect(reverse("users:data"))
+    if not OPMLImporter.validate_file(request.FILES["file"]):
+        raise BadRequest(_("Invalid file."))
+    f = (
+        settings.MEDIA_ROOT
+        + "/"
+        + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
+    )
+    os.makedirs(os.path.dirname(f), exist_ok=True)
+    with open(f, "wb+") as destination:
+        for chunk in request.FILES["file"].chunks():
+            destination.write(chunk)
+    task = OPMLImporter.create(
+        request.user,
+        visibility=int(request.POST.get("visibility", 0)),
+        mode=int(request.POST.get("import_mode", 0)),
+        file=f,
+    )
+    task.enqueue()
+    return redirect(reverse("users:user_task_status", args=(task.type,)))


 @login_required
 def import_neodb(request):
     if request.method == "POST":
+        format_type_hint = request.POST.get("format_type", "").lower()
+        if format_type_hint == "csv":
+            importer = CsvImporter
+        elif format_type_hint == "ndjson":
+            importer = NdjsonImporter
+        else:
+            raise BadRequest("Invalid file.")
         f = (
             settings.MEDIA_ROOT
             + "/"
@@ -374,49 +388,11 @@ def import_neodb(request):
         with open(f, "wb+") as destination:
             for chunk in request.FILES["file"].chunks():
                 destination.write(chunk)
-
-        # Get format type hint from frontend, if provided
-        format_type_hint = request.POST.get("format_type", "").lower()
-
-        # Import appropriate class based on format type or auto-detect
-        from journal.importers import CsvImporter, NdjsonImporter
-
-        if format_type_hint == "csv":
-            importer = CsvImporter
-            format_type = "CSV"
-        elif format_type_hint == "ndjson":
-            importer = NdjsonImporter
-            format_type = "NDJSON"
-        else:
-            # Fall back to auto-detection if no hint provided
-            importer = get_neodb_importer(f)
-            if importer == CsvImporter:
-                format_type = "CSV"
-            elif importer == NdjsonImporter:
-                format_type = "NDJSON"
-            else:
-                format_type = ""
-                importer = None  # Make sure importer is None if auto-detection fails
-
-        if not importer:
-            messages.add_message(
-                request,
-                messages.ERROR,
-                _(
-                    "Invalid file. Expected a ZIP containing either CSV or NDJSON files exported from NeoDB."
-                ),
-            )
-            return redirect(reverse("users:data"))
-
-        importer.create(
+        task = importer.create(
             request.user,
             visibility=int(request.POST.get("visibility", 0)),
             file=f,
-        ).enqueue()
-
-        messages.add_message(
-            request,
-            messages.INFO,
-            _(f"{format_type} file is uploaded and will be imported soon."),
         )
+        task.enqueue()
+        return redirect(reverse("users:user_task_status", args=(task.type,)))
     return redirect(reverse("users:data"))