2024-05-15 03:18:27 -04:00
|
|
|
import datetime
|
2024-01-10 22:20:57 -05:00
|
|
|
import os
|
|
|
|
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.conf import settings
|
|
|
|
from django.contrib import messages
|
|
|
|
from django.contrib.auth.decorators import login_required
|
2025-03-07 15:10:42 -05:00
|
|
|
from django.core.exceptions import BadRequest
|
2024-05-15 03:18:27 -04:00
|
|
|
from django.db.models import Min
|
2023-08-10 11:27:31 -04:00
|
|
|
from django.http import HttpResponse
|
2023-05-20 11:01:18 -04:00
|
|
|
from django.shortcuts import redirect, render
|
|
|
|
from django.urls import reverse
|
2025-01-27 02:44:37 -05:00
|
|
|
from django.utils import timezone, translation
|
2025-01-28 21:52:14 -05:00
|
|
|
from django.utils.translation import gettext as _
|
2022-12-31 00:20:20 -05:00
|
|
|
|
2024-07-03 00:07:07 -04:00
|
|
|
from common.utils import GenerateDateUUIDMediaFilePath
|
2025-01-28 21:52:14 -05:00
|
|
|
from journal.exporters import CsvExporter, DoufenExporter, NdjsonExporter
|
2024-12-26 01:51:24 -05:00
|
|
|
from journal.importers import (
|
2025-03-03 16:36:18 -05:00
|
|
|
CsvImporter,
|
2024-12-26 01:51:24 -05:00
|
|
|
DoubanImporter,
|
|
|
|
GoodreadsImporter,
|
|
|
|
LetterboxdImporter,
|
2025-03-06 11:21:01 -05:00
|
|
|
NdjsonImporter,
|
2024-12-26 01:51:24 -05:00
|
|
|
OPMLImporter,
|
|
|
|
)
|
2025-02-22 15:49:41 -05:00
|
|
|
from journal.models import ShelfType
|
2025-01-28 21:52:14 -05:00
|
|
|
from takahe.utils import Takahe
|
2024-12-26 01:51:24 -05:00
|
|
|
from users.models import Task
|
2022-05-07 17:00:52 -04:00
|
|
|
|
2025-01-28 21:52:14 -05:00
|
|
|
from .account import clear_preference_cache
|
2023-08-10 11:27:31 -04:00
|
|
|
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
@login_required
def preferences(request):
    """Display and update the current user's preference settings.

    GET renders the preferences page. POST persists identity visibility,
    posting defaults, display options and UI language, then re-renders
    the same page.
    """
    if not request.user.registration_complete:
        return redirect(reverse("users:register"))
    preference = request.user.preference
    identity = request.user.identity
    if request.method == "POST":
        # Anonymous visibility is stored both on the local identity and on
        # the Takahe identity (as search-engine indexability); read the
        # checkbox once and apply it to both.
        anonymous_viewable = bool(request.POST.get("anonymous_viewable"))
        identity.anonymous_viewable = anonymous_viewable
        identity.save(update_fields=["anonymous_viewable"])
        tidentity = Takahe.get_identity(identity.pk)
        tidentity.indexable = anonymous_viewable
        tidentity.save(update_fields=["indexable"])

        # Default the int fields to 0 when absent so a partial form submit
        # does not crash with int(None); matches the defaults already used
        # for mastodon_default_repost / mastodon_repost_mode below.
        preference.default_visibility = int(
            request.POST.get("default_visibility", 0)
        )
        preference.mastodon_default_repost = (
            int(request.POST.get("mastodon_default_repost", 0)) == 1
        )
        preference.classic_homepage = int(request.POST.get("classic_homepage", 0))
        preference.hidden_categories = request.POST.getlist("hidden_categories")
        preference.post_public_mode = int(request.POST.get("post_public_mode", 0))
        preference.show_last_edit = bool(request.POST.get("show_last_edit"))
        preference.mastodon_repost_mode = int(
            request.POST.get("mastodon_repost_mode", 0)
        )
        preference.mastodon_append_tag = request.POST.get(
            "mastodon_append_tag", ""
        ).strip()
        preference.save(
            update_fields=[
                "default_visibility",
                "post_public_mode",
                "classic_homepage",
                "mastodon_append_tag",
                "mastodon_repost_mode",
                "mastodon_default_repost",
                "show_last_edit",
                "hidden_categories",
            ]
        )
        # Switch UI language immediately for this request when a supported,
        # different language was chosen.
        lang = request.POST.get("language")
        if lang in dict(settings.LANGUAGES).keys() and lang != request.user.language:
            request.user.language = lang
            translation.activate(lang)
            request.LANGUAGE_CODE = translation.get_language()
            request.user.save(update_fields=["language"])
        clear_preference_cache(request)
    return render(
        request,
        "users/preferences.html",
        {"enable_local_only": settings.ENABLE_LOCAL_ONLY},
    )
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def data(request):
    """Render the data import/export dashboard for the current user."""
    if not request.user.registration_complete:
        return redirect(reverse("users:register"))
    this_year = datetime.date.today().year
    completed = request.user.identity.shelf_manager.get_shelf(
        ShelfType.COMPLETE
    ).members.all()
    earliest = completed.aggregate(Min("created_time"))["created_time__min"]
    first_year = earliest.year if earliest else this_year
    years = reversed(range(first_year, this_year + 1))

    # Either the CSV or the NDJSON importer may have produced the latest
    # "NeoDB" import task; surface whichever one ran most recently.
    csv_task = CsvImporter.latest_task(request.user)
    ndjson_task = NdjsonImporter.latest_task(request.user)
    neodb_import_task = csv_task
    if ndjson_task and (
        not csv_task or ndjson_task.created_time > csv_task.created_time
    ):
        neodb_import_task = ndjson_task

    context = {
        "allow_any_site": settings.MASTODON_ALLOW_ANY_SITE,
        "import_task": DoubanImporter.latest_task(request.user),
        "export_task": DoufenExporter.latest_task(request.user),
        "csv_export_task": CsvExporter.latest_task(request.user),
        "neodb_import_task": neodb_import_task,  # most recent of the two above
        "ndjson_export_task": NdjsonExporter.latest_task(request.user),
        "letterboxd_task": LetterboxdImporter.latest_task(request.user),
        "goodreads_task": GoodreadsImporter.latest_task(request.user),
        # "opml_task": OPMLImporter.latest_task(request.user),
        "years": years,
    }
    return render(request, "users/data.html", context)
|
|
|
|
|
|
|
|
|
|
|
|
@login_required
def data_import_status(request):
    """Render the status page for the latest Douban import task."""
    context = {"import_task": DoubanImporter.latest_task(request.user)}
    return render(request, "users/data_import_status.html", context)
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
2025-03-04 16:51:40 -05:00
|
|
|
@login_required
def user_task_status(request, task_type: str):
    """Show the progress/result page for the latest task of the given type."""
    # URL-supplied task type -> task class; unknown types bounce back to the
    # data dashboard.
    task_classes = {
        "journal.csvimporter": CsvImporter,
        "journal.ndjsonimporter": NdjsonImporter,
        "journal.csvexporter": CsvExporter,
        "journal.ndjsonexporter": NdjsonExporter,
        "journal.letterboxdimporter": LetterboxdImporter,
        "journal.goodreadsimporter": GoodreadsImporter,
        "journal.opmlimporter": OPMLImporter,
        "journal.doubanimporter": DoubanImporter,
    }
    task_cls = task_classes.get(task_type)
    if task_cls is None:
        return redirect(reverse("users:data"))
    task = task_cls.latest_task(request.user)
    return render(request, "users/user_task_status.html", {"task": task})
|
|
|
|
|
|
|
|
|
2025-03-07 15:10:42 -05:00
|
|
|
@login_required
def user_task_download(request, task_type: str):
    """Serve the archive produced by the latest export task of the given type.

    The actual file transfer is delegated to the web server via the
    X-Accel-Redirect header.
    """
    exporters = {
        "journal.csvexporter": CsvExporter,
        "journal.ndjsonexporter": NdjsonExporter,
    }
    task_cls = exporters.get(task_type)
    if task_cls is None:
        return redirect(reverse("users:data"))
    task = task_cls.latest_task(request.user)
    if not task or task.state != Task.States.complete or not task.metadata.get("file"):
        messages.add_message(request, messages.ERROR, _("Export file not available."))
        return redirect(reverse("users:data"))
    # Translate the on-disk path into its MEDIA_URL-relative form for nginx.
    file_path = task.metadata["file"]
    response = HttpResponse()
    response["X-Accel-Redirect"] = (
        settings.MEDIA_URL + file_path[len(settings.MEDIA_ROOT) :]
    )
    response["Content-Type"] = "application/zip"
    response["Content-Disposition"] = f'attachment; filename="{task.filename}.zip"'
    return response
|
|
|
|
|
|
|
|
|
2022-05-07 17:00:52 -04:00
|
|
|
@login_required
def export_reviews(request):
    """Review-export endpoint; POST renders the data page, anything else redirects."""
    if request.method == "POST":
        return render(request, "users/data.html")
    return redirect(reverse("users:data"))
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def export_marks(request):
    """Legacy Doufen (.xlsx) export: POST enqueues a job, GET downloads the result."""
    # TODO: deprecated
    if request.method == "POST":
        DoufenExporter.create(request.user).enqueue()
        messages.add_message(request, messages.INFO, _("Generating exports."))
        return redirect(reverse("users:data"))
    task = DoufenExporter.latest_task(request.user)
    if not task or task.state != Task.States.complete:
        messages.add_message(request, messages.ERROR, _("Export file not available."))
        return redirect(reverse("users:data"))
    try:
        with open(task.metadata["file"], "rb") as fh:
            response = HttpResponse(
                fh.read(), content_type="application/vnd.ms-excel"
            )
            response["Content-Disposition"] = 'attachment;filename="marks.xlsx"'
            return response
    except Exception:
        # The generated file may have been cleaned up since the task finished.
        messages.add_message(
            request, messages.ERROR, _("Export file expired. Please export again.")
        )
        return redirect(reverse("users:data"))
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
2025-01-27 02:44:37 -05:00
|
|
|
@login_required
def export_csv(request):
    """Enqueue a CSV export unless a recent one is still running."""
    if request.method != "POST":
        return redirect(reverse("users:data"))
    task = CsvExporter.latest_task(request.user)
    # Throttle: a pending export started within the last hour blocks a new one.
    recent_in_progress = (
        task
        and task.state not in [Task.States.complete, Task.States.failed]
        and task.created_time > (timezone.now() - datetime.timedelta(hours=1))
    )
    if recent_in_progress:
        messages.add_message(
            request, messages.INFO, _("Recent export still in progress.")
        )
        return redirect(reverse("users:data"))
    CsvExporter.create(request.user).enqueue()
    return redirect(reverse("users:user_task_status", args=("journal.csvexporter",)))
|
2025-01-27 02:44:37 -05:00
|
|
|
|
|
|
|
|
2025-01-28 21:38:02 -05:00
|
|
|
@login_required
def export_ndjson(request):
    """Enqueue an NDJSON export unless a recent one is still running."""
    if request.method != "POST":
        return redirect(reverse("users:data"))
    task = NdjsonExporter.latest_task(request.user)
    # Throttle: a pending export started within the last hour blocks a new one.
    recent_in_progress = (
        task
        and task.state not in [Task.States.complete, Task.States.failed]
        and task.created_time > (timezone.now() - datetime.timedelta(hours=1))
    )
    if recent_in_progress:
        messages.add_message(
            request, messages.INFO, _("Recent export still in progress.")
        )
        return redirect(reverse("users:data"))
    NdjsonExporter.create(request.user).enqueue()
    return redirect(
        reverse("users:user_task_status", args=("journal.ndjsonexporter",))
    )
|
2025-01-28 21:38:02 -05:00
|
|
|
|
|
|
|
|
2022-05-07 17:00:52 -04:00
|
|
|
@login_required
def sync_mastodon(request):
    """Queue a background sync of the user's linked accounts (POST only)."""
    if request.method != "POST":
        return redirect(reverse("users:info"))
    request.user.sync_accounts_later()
    messages.add_message(request, messages.INFO, _("Sync in progress."))
    return redirect(reverse("users:info"))
|
2023-11-08 09:54:36 -05:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def sync_mastodon_preference(request):
    """Persist which parts of Mastodon data are skipped during account sync."""
    if request.method == "POST":
        prefs = request.user.preference
        # The form sends a non-empty value when a sync checkbox is ticked,
        # so an empty/missing value means "skip".
        prefs.mastodon_skip_userinfo = (
            request.POST.get("mastodon_sync_userinfo", "") == ""
        )
        prefs.mastodon_skip_relationship = (
            request.POST.get("mastodon_sync_relationship", "") == ""
        )
        prefs.save()
        messages.add_message(request, messages.INFO, _("Settings saved."))
    return redirect(reverse("users:info"))
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def import_goodreads(request):
    """Start a Goodreads import from a user-supplied profile/shelf URL."""
    if request.method != "POST":
        return redirect(reverse("users:data"))
    url = request.POST.get("url")
    if not GoodreadsImporter.validate_url(url):
        raise BadRequest(_("Invalid URL."))
    visibility = int(request.POST.get("visibility", 0))
    task = GoodreadsImporter.create(request.user, visibility=visibility, url=url)
    task.enqueue()
    return redirect(reverse("users:user_task_status", args=(task.type,)))
|
2022-05-07 17:00:52 -04:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def import_douban(request):
    """Start a Douban import from an uploaded archive file.

    Saves the upload under MEDIA_ROOT, creates a DoubanImporter task and
    redirects to its status page. Raises BadRequest for a missing or
    invalid file.
    """
    if request.method != "POST":
        return redirect(reverse("users:data"))
    # Use .get(): request.FILES["file"] raises MultiValueDictKeyError (HTTP
    # 500) when no file was uploaded; a missing file is a client error.
    upload = request.FILES.get("file")
    if not upload or not DoubanImporter.validate_file(upload):
        raise BadRequest(_("Invalid file."))
    f = (
        settings.MEDIA_ROOT
        + "/"
        + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
    )
    os.makedirs(os.path.dirname(f), exist_ok=True)
    with open(f, "wb+") as destination:
        for chunk in upload.chunks():
            destination.write(chunk)
    task = DoubanImporter.create(
        request.user,
        visibility=int(request.POST.get("visibility", 0)),
        mode=int(request.POST.get("import_mode", 0)),
        file=f,
    )
    task.enqueue()
    return redirect(reverse("users:user_task_status", args=(task.type,)))
|
2023-01-29 20:05:30 -05:00
|
|
|
|
|
|
|
|
2024-01-10 22:20:57 -05:00
|
|
|
@login_required
def import_letterboxd(request):
    """Start a Letterboxd import from an uploaded archive file.

    Saves the upload under MEDIA_ROOT, creates a LetterboxdImporter task
    and redirects to its status page. Raises BadRequest for a missing or
    invalid file.
    """
    if request.method != "POST":
        return redirect(reverse("users:data"))
    # Use .get(): request.FILES["file"] raises MultiValueDictKeyError (HTTP
    # 500) when no file was uploaded; a missing file is a client error.
    upload = request.FILES.get("file")
    if not upload or not LetterboxdImporter.validate_file(upload):
        raise BadRequest(_("Invalid file."))
    f = (
        settings.MEDIA_ROOT
        + "/"
        + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
    )
    os.makedirs(os.path.dirname(f), exist_ok=True)
    with open(f, "wb+") as destination:
        for chunk in upload.chunks():
            destination.write(chunk)
    task = LetterboxdImporter.create(
        request.user,
        visibility=int(request.POST.get("visibility", 0)),
        file=f,
    )
    task.enqueue()
    return redirect(reverse("users:user_task_status", args=(task.type,)))
|
2024-01-10 22:20:57 -05:00
|
|
|
|
|
|
|
|
2023-01-29 20:05:30 -05:00
|
|
|
@login_required
def import_opml(request):
    """Start an OPML (podcast subscriptions) import from an uploaded file.

    Saves the upload under MEDIA_ROOT, creates an OPMLImporter task and
    redirects to its status page. Raises BadRequest for a missing or
    invalid file.
    """
    if request.method != "POST":
        return redirect(reverse("users:data"))
    # Use .get(): request.FILES["file"] raises MultiValueDictKeyError (HTTP
    # 500) when no file was uploaded; a missing file is a client error.
    upload = request.FILES.get("file")
    if not upload or not OPMLImporter.validate_file(upload):
        raise BadRequest(_("Invalid file."))
    f = (
        settings.MEDIA_ROOT
        + "/"
        + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
    )
    os.makedirs(os.path.dirname(f), exist_ok=True)
    with open(f, "wb+") as destination:
        for chunk in upload.chunks():
            destination.write(chunk)
    task = OPMLImporter.create(
        request.user,
        visibility=int(request.POST.get("visibility", 0)),
        mode=int(request.POST.get("import_mode", 0)),
        file=f,
    )
    task.enqueue()
    return redirect(reverse("users:user_task_status", args=(task.type,)))
|
2025-03-03 16:36:18 -05:00
|
|
|
|
|
|
|
|
|
|
|
@login_required
def import_neodb(request):
    """Start a NeoDB import from an uploaded CSV or NDJSON archive.

    The client declares the archive format via the "format_type" field and
    the matching importer task is enqueued. Raises BadRequest on an
    unknown format or a missing upload.
    """
    if request.method != "POST":
        return redirect(reverse("users:data"))
    importers = {"csv": CsvImporter, "ndjson": NdjsonImporter}
    importer = importers.get(request.POST.get("format_type", "").lower())
    if importer is None:
        # Wrapped in _() for translation, consistent with the other import views.
        raise BadRequest(_("Invalid file."))
    # Use .get(): request.FILES["file"] raises MultiValueDictKeyError (HTTP
    # 500) when no file was uploaded; a missing file is a client error.
    upload = request.FILES.get("file")
    if not upload:
        raise BadRequest(_("Invalid file."))
    f = (
        settings.MEDIA_ROOT
        + "/"
        + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
    )
    os.makedirs(os.path.dirname(f), exist_ok=True)
    with open(f, "wb+") as destination:
        for chunk in upload.chunks():
            destination.write(chunk)
    task = importer.create(
        request.user,
        visibility=int(request.POST.get("visibility", 0)),
        file=f,
    )
    task.enqueue()
    return redirect(reverse("users:user_task_status", args=(task.type,)))
|