letterboxd import ui

parent ae9fb6f3c6
commit 376357ec90

12 changed files with 159 additions and 46 deletions
@@ -51,6 +51,7 @@ class Migration(migrations.Migration):
                     ("spotify_artist", "Spotify艺术家"),
                     ("tmdb_person", "TMDB影人"),
                     ("igdb", "IGDB游戏"),
+                    ("bgg", "BGG桌游"),
                     ("steam", "Steam游戏"),
                     ("bangumi", "Bangumi"),
                     ("apple_podcast", "苹果播客"),
@@ -104,6 +105,7 @@ class Migration(migrations.Migration):
                     ("spotify_artist", "Spotify艺术家"),
                     ("tmdb_person", "TMDB影人"),
                     ("igdb", "IGDB游戏"),
+                    ("bgg", "BGG桌游"),
                     ("steam", "Steam游戏"),
                     ("bangumi", "Bangumi"),
                     ("apple_podcast", "苹果播客"),
@@ -84,7 +84,7 @@ class DoubanMusic(AbstractSite):
             "genre": genre,
             "release_date": release_date,
             "duration": None,
-            "company": [company],
+            "company": [company] if company else [],
             "track_list": track_list,
             "brief": brief,
             "cover_image_url": img_url,
@@ -151,7 +151,7 @@ class PageLinksGenerator:
         # assert self.has_prev is not None and self.has_next is not None


-def GenerateDateUUIDMediaFilePath(instance, filename, path_root):
+def GenerateDateUUIDMediaFilePath(filename, path_root):
     ext = filename.split(".")[-1]
     filename = "%s.%s" % (uuid.uuid4(), ext)
     root = ""
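The unused `instance` argument is dropped from the helper; the call sites in the hunks below change accordingly. A minimal, self-contained sketch of what a helper with this signature does — the body beyond the lines shown above is assumed, not taken from the repo:

    import uuid
    from datetime import datetime


    def generate_date_uuid_path(filename: str, path_root: str) -> str:
        # keep the original extension, replace the name with a UUID under a dated folder
        ext = filename.split(".")[-1]
        new_name = "%s.%s" % (uuid.uuid4(), ext)
        root = path_root if path_root.endswith("/") else path_root + "/"
        return root + datetime.now().strftime("%Y/%m/%d/") + new_name


    print(generate_date_uuid_path("f.xlsx", "media/export"))  # media/export/<date>/<uuid>.xlsx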
@@ -33,7 +33,7 @@ def export_marks_task(user):
     user.preference.export_status["marks_pending"] = True
     user.preference.save(update_fields=["export_status"])
     filename = GenerateDateUUIDMediaFilePath(
-        None, "f.xlsx", settings.MEDIA_ROOT + "/" + settings.EXPORT_FILE_PATH_ROOT
+        "f.xlsx", settings.MEDIA_ROOT + "/" + settings.EXPORT_FILE_PATH_ROOT
     )
     if not os.path.exists(os.path.dirname(filename)):
         os.makedirs(os.path.dirname(filename))
@@ -28,9 +28,7 @@ def _fetch_remote_image(url):
         imgdl = ProxiedImageDownloader(url)
         raw_img = imgdl.download().content
         ext = imgdl.extention
-        f = GenerateDateUUIDMediaFilePath(
-            None, f"x.{ext}", settings.MARKDOWNX_MEDIA_PATH
-        )
+        f = GenerateDateUUIDMediaFilePath(f"x.{ext}", settings.MARKDOWNX_MEDIA_PATH)
         file = settings.MEDIA_ROOT + "/" + f
         local_url = settings.MEDIA_URL + f
         os.makedirs(os.path.dirname(file), exist_ok=True)
@@ -96,9 +94,7 @@ class DoubanImporter:
         file = (
             settings.MEDIA_ROOT
             + "/"
-            + GenerateDateUUIDMediaFilePath(
-                None, "x.xlsx", settings.SYNC_FILE_PATH_ROOT
-            )
+            + GenerateDateUUIDMediaFilePath("x.xlsx", settings.SYNC_FILE_PATH_ROOT)
         )
         os.makedirs(os.path.dirname(file), exist_ok=True)
         with open(file, "wb") as destination:
@@ -1,14 +1,10 @@
 import csv
-import re
 import tempfile
 import zipfile
-from datetime import datetime

-from django.utils import timezone
+import pytz
 from django.utils.dateparse import parse_datetime
-from django.utils.timezone import make_aware
 from loguru import logger
-from user_messages import api as msg

 from catalog.common import *
 from catalog.common.downloaders import *
@@ -16,6 +12,8 @@ from catalog.models import *
 from journal.models import *
 from users.models import *

+_tz_sh = pytz.timezone("Asia/Shanghai")
+

 class LetterboxdImporter(Task):
     TaskQueue = "import"
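The importer switches from `make_aware` to a fixed Asia/Shanghai zone; a small hedged illustration of the pattern used later in this file (the date string is a placeholder):

    # Mirrors the construction in this diff: Letterboxd exports only carry a date,
    # so a per-shelf time-of-day suffix is appended before parsing, then the naive
    # result is tagged with the Shanghai zone.
    import pytz
    from django.utils.dateparse import parse_datetime

    _tz_sh = pytz.timezone("Asia/Shanghai")
    dt = parse_datetime("2024-01-11" + " 22:00:00")  # date + ShelfType.COMPLETE offset
    if dt:
        dt = dt.replace(tzinfo=_tz_sh)  # aware datetime, later passed as created_time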
@@ -30,13 +28,20 @@ class LetterboxdImporter(Task):
         "file": None,
     }

+    class Meta:
+        proxy = True
+
     def get_item_by_url(self, url):
         try:
-            h = BasicDownloader(url).html()  # type:ignore
-            tt = h.xpath("//body/@data-tmdb-type")[0].strip()
-            ti = h.xpath("//body/@data-tmdb-type")[0].strip()
-            if tt != "movie":
-                logger.error(f"Unknown TMDB type {tt} / {ti}")
+            h = BasicDownloader(url).download().html()
+            if not h.xpath("//body/@data-tmdb-type"):
+                i = h.xpath('//span[@class="film-title-wrapper"]/a/@href')
+                u2 = "https://letterboxd.com" + i[0]  # type:ignore
+                h = BasicDownloader(u2).download().html()
+            tt = h.xpath("//body/@data-tmdb-type")[0].strip()  # type:ignore
+            ti = str(h.xpath("//body/@data-tmdb-id")[0].strip())  # type:ignore
+            if tt != "movie" or not ti:
+                logger.error(f"Unknown TMDB ({tt}/{ti}) for {url}")
                 return None
             site = SiteManager.get_site_by_id(IdType.TMDB_Movie, ti)
             if not site:
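The rewritten lookup reads the TMDB type and id from data attributes on the Letterboxd page body, first following the canonical film link when the attributes are missing. A hedged, offline illustration of the XPath step (the HTML and id values are made-up stand-ins for a film page, and lxml is only assumed to be the parser behind the downloader's .html()):

    from lxml import html

    page = html.fromstring(
        '<html><body data-tmdb-type="movie" data-tmdb-id="872585">'
        '<span class="film-title-wrapper"><a href="/film/oppenheimer/">Oppenheimer</a></span>'
        "</body></html>"
    )
    tt = page.xpath("//body/@data-tmdb-type")[0].strip()
    ti = str(page.xpath("//body/@data-tmdb-id")[0].strip())
    assert (tt, ti) == ("movie", "872585")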
@@ -50,6 +55,7 @@ class LetterboxdImporter(Task):
         item = self.get_item_by_url(url)
         if not item:
             logger.error(f"Unable to get item for {url}")
+            self.progress(-1)
             return
         owner = self.user.identity
         mark = Mark(owner, item)
@@ -61,7 +67,8 @@ class LetterboxdImporter(Task):
                 and shelf_type == ShelfType.WISHLIST
             )
         ):
-            return
+            self.progress(0)
+            return 0
         visibility = self.metadata["visibility"]
         shelf_time_offset = {
             ShelfType.WISHLIST: " 20:00:00",
@@ -69,16 +76,32 @@ class LetterboxdImporter(Task):
             ShelfType.COMPLETE: " 22:00:00",
         }
         dt = parse_datetime(date + shelf_time_offset[shelf_type])
+        if dt:
+            dt = dt.replace(tzinfo=_tz_sh)
         mark.update(
             shelf_type,
             comment_text=review or None,
-            rating_grade=round(rating * 2) if rating else None,
+            rating_grade=round(float(rating) * 2) if rating else None,
             visibility=visibility,
             created_time=dt,
         )
         if tags:
             tag_titles = [s.strip() for s in tags.split(",")]
             TagManager.tag_item(item, owner, tag_titles, visibility)
+        self.progress(1)
+
+    def progress(self, mark_state: int):
+        self.metadata["total"] += 1
+        self.metadata["processed"] += 1
+        match mark_state:
+            case 1:
+                self.metadata["imported"] += 1
+            case 0:
+                self.metadata["skipped"] += 1
+            case _:
+                self.metadata["failed"] += 1
+        self.message = f"{self.metadata['imported']} imported, {self.metadata['skipped']} skipped, {self.metadata['failed']} failed"
+        self.save(update_fields=["metadata", "message"])

     def run(self):
         uris = set()
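The new `progress()` helper centralizes per-item bookkeeping; a standalone sketch of the same tallying, runnable outside the Task model (1 counts as imported, 0 as skipped, anything else as failed):

    from collections import defaultdict

    metadata = defaultdict(int)


    def progress(mark_state: int) -> str:
        metadata["total"] += 1
        metadata["processed"] += 1
        match mark_state:
            case 1:
                metadata["imported"] += 1
            case 0:
                metadata["skipped"] += 1
            case _:
                metadata["failed"] += 1
        return f"{metadata['imported']} imported, {metadata['skipped']} skipped, {metadata['failed']} failed"


    for state in (1, 1, 0, -1):
        message = progress(state)
    print(message)  # 2 imported, 1 skipped, 1 failed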
@@ -95,8 +118,8 @@ class LetterboxdImporter(Task):
                         row["Letterboxd URI"],
                         ShelfType.COMPLETE,
                         row["Watched Date"],
-                        row["Review"],
                         row["Rating"],
+                        row["Review"],
                         row["Tags"],
                     )
             with open(tmpdirname + "/ratings.csv") as f:
journal/migrations/0022_letterboxdimporter.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+# Generated by Django 4.2.9 on 2024-01-11 01:47
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ("users", "0019_task"),
+        ("journal", "0021_pieceinteraction_pieceinteraction_unique_interaction"),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name="LetterboxdImporter",
+            fields=[],
+            options={
+                "proxy": True,
+                "indexes": [],
+                "constraints": [],
+            },
+            bases=("users.task",),
+        ),
+    ]
@@ -1,3 +1,5 @@
+import os
+
 import django_rq
 from django.conf import settings
 from django.contrib import messages
@@ -8,14 +10,15 @@ from django.urls import reverse
 from django.utils.translation import gettext_lazy as _

 from common.config import *
+from common.utils import GenerateDateUUIDMediaFilePath
 from journal.exporters.doufen import export_marks_task
 from journal.importers.douban import DoubanImporter
 from journal.importers.goodreads import GoodreadsImporter
+from journal.importers.letterboxd import LetterboxdImporter
 from journal.importers.opml import OPMLImporter
 from journal.models import reset_journal_visibility_for_user
 from mastodon.api import *
 from social.models import reset_social_visibility_for_user
-from takahe.models import Identity

 from .account import *
 from .tasks import *
@@ -71,6 +74,7 @@ def data(request):
             "allow_any_site": settings.MASTODON_ALLOW_ANY_SITE,
             "import_status": request.user.preference.import_status,
             "export_status": request.user.preference.export_status,
+            "letterboxd_task": LetterboxdImporter.latest_task(request.user),
         },
     )

@@ -175,6 +179,27 @@ def import_douban(request):
     return redirect(reverse("users:data"))


+@login_required
+def import_letterboxd(request):
+    if request.method == "POST":
+        f = (
+            settings.MEDIA_ROOT
+            + "/"
+            + GenerateDateUUIDMediaFilePath("x.zip", settings.SYNC_FILE_PATH_ROOT)
+        )
+        os.makedirs(os.path.dirname(f), exist_ok=True)
+        with open(f, "wb+") as destination:
+            for chunk in request.FILES["file"].chunks():
+                destination.write(chunk)
+        LetterboxdImporter.enqueue(
+            request.user,
+            visibility=int(request.POST.get("visibility", 0)),
+            file=f,
+        )
+        messages.add_message(request, messages.INFO, _("文件上传成功,等待后台导入。"))
+    return redirect(reverse("users:data"))
+
+
 @login_required
 def import_opml(request):
     if request.method == "POST":
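A hedged example of exercising the new endpoint with Django's test client; the URL name and form fields come from this commit, while the zip filename and `user` fixture are placeholders:

    # Assumes a configured Django test environment with the users URLconf mounted
    # and an existing `user`; the file name stands in for a real Letterboxd export.
    from django.test import Client
    from django.urls import reverse

    client = Client()
    client.force_login(user)
    with open("letterboxd-export.zip", "rb") as export_zip:
        response = client.post(
            reverse("users:import_letterboxd"),
            {"file": export_zip, "visibility": 0},
        )
    assert response.status_code == 302  # the view redirects back to users:data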
@@ -10,9 +10,7 @@ from .user import User


 def report_image_path(instance, filename):
-    return GenerateDateUUIDMediaFilePath(
-        instance, filename, settings.REPORT_MEDIA_PATH_ROOT
-    )
+    return GenerateDateUUIDMediaFilePath(filename, settings.REPORT_MEDIA_PATH_ROOT)


 class Report(models.Model):
@@ -1,27 +1,9 @@
-import hashlib
-import re
-from functools import cached_property
-from operator import index
+import django_rq

 from auditlog.context import set_actor
-from django.conf import settings
-from django.contrib.auth.models import AbstractUser
-from django.core import validators
-from django.core.exceptions import ValidationError
-from django.core.serializers.json import DjangoJSONEncoder
 from django.db import models
-from django.db.models import F, Q, Value
-from django.db.models.functions import Concat, Lower
-from django.templatetags.static import static
-from django.urls import reverse
-from django.utils import timezone
-from django.utils.deconstruct import deconstructible
 from django.utils.translation import gettext_lazy as _
 from loguru import logger
-
-from management.models import Announcement
-from mastodon.api import *
-from takahe.utils import Takahe
+from user_messages import api as msg

 from .user import User

@@ -57,6 +39,14 @@ class Task(models.Model):
     def __str__(self):
         return self.job_id

+    @classmethod
+    def latest_task(cls, user: User):
+        return (
+            cls.objects.filter(user=user, type=cls.TaskType)
+            .order_by("-created_time")
+            .first()
+        )
+
     @classmethod
     def enqueue(cls, user: User, **kwargs) -> "Task":
         d = cls.DefaultMetadata.copy()
@@ -80,6 +70,12 @@ class Task(models.Model):
             task.message = "Error occured."
             task.state = cls.States.failed
             task.save(update_fields=["state", "message"])
+        task = cls.objects.get(pk=task_id)
+        if task.message:
+            if task.state == cls.States.complete:
+                msg.success(task.user, f"[{task.type}] {task.message}")
+            else:
+                msg.error(task.user, f"[{task.type}] {task.message}")

     def run(self) -> None:
         raise NotImplemented
@@ -104,6 +104,54 @@
         </form>
       </details>
     </article>
+    <article>
+      <details>
+        <summary>{% trans '导入Letterboxd标记' %}</summary>
+        <form action="{% url 'users:import_letterboxd' %}"
+              method="post"
+              enctype="multipart/form-data">
+          {% csrf_token %}
+          在Letterboxd网站的<a href="https://letterboxd.com/settings/data/"
+             target="_blank"
+             rel="noopener">Settings页面中选择DATA</a>,或在其app的Settings菜单中选择Advanced Settings,点击 EXPORT YOUR DATA,即可下载导出名称类似<code>letterboxd-username-2018-03-11-07-52-utc.zip</code>的文件,勿需解压。
+          <br>
+          <input type="file" name="file" required accept=".zip">
+          <p>
+            可见性:
+            <br>
+            <label for="l_visibility_0">
+              <input type="radio"
+                     name="visibility"
+                     value="0"
+                     required=""
+                     id="l_visibility_0"
+                     checked>
+              公开
+            </label>
+            <label for="l_visibility_1">
+              <input type="radio"
+                     name="visibility"
+                     value="1"
+                     required=""
+                     id="l_visibility_1">
+              仅关注者
+            </label>
+            <label for="l_visibility_2">
+              <input type="radio"
+                     name="visibility"
+                     value="2"
+                     required=""
+                     id="l_visibility_2">
+              仅自己
+            </label>
+          </p>
+          <input type="submit" value="{% trans '导入' %}" />
+          {% if letterboxd_task %}
+            最近导入于{{ letterboxd_task.created_time }},状态:{{ letterboxd_task.get_state_display }}。 {{ letterboxd_task.message }}
+          {% endif %}
+        </form>
+      </details>
+    </article>
     <article>
       <details>
         <summary>{% trans '导入播客订阅列表' %}</summary>
@@ -19,6 +19,7 @@ urlpatterns = [
     path("data/import/status", data_import_status, name="import_status"),
     path("data/import/goodreads", import_goodreads, name="import_goodreads"),
    path("data/import/douban", import_douban, name="import_douban"),
+    path("data/import/letterboxd", import_letterboxd, name="import_letterboxd"),
     path("data/import/opml", import_opml, name="import_opml"),
     path("data/export/reviews", export_reviews, name="export_reviews"),
     path("data/export/marks", export_marks, name="export_marks"),