use tmdb_tvseason as main key for tvseason class

Your Name 2023-01-07 00:35:30 -05:00
parent 57ae96873e
commit 4447de4943
19 changed files with 315 additions and 113 deletions
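The headline change: for TVSeason items the TMDB season id now takes precedence over the IMDb id when choosing the primary lookup key. A hedged illustration (the id values are made up; only the reordered best_id_types list in the first diff below is assumed):

lookup_ids = {
    IdType.IMDB: "tt0903747",         # illustrative IMDb id
    IdType.TMDB_TVSeason: "1396-1",   # illustrative "show-season" id
}
# with IdType.TMDB_TVSeason listed before IdType.IMDB in best_id_types,
# the first match returned is now (IdType.TMDB_TVSeason, "1396-1") instead of the IMDb pair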

View file

@ -1,5 +1,6 @@
from polymorphic.models import PolymorphicModel
from django.db import models
import logging
from catalog.common import jsondata
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
@ -13,6 +14,8 @@ from .mixins import SoftDeleteMixin
from django.conf import settings
from users.models import User
_logger = logging.getLogger(__name__)
class SiteName(models.TextChoices):
Douban = "douban", _("豆瓣")
@ -241,23 +244,31 @@ class Item(SoftDeleteMixin, PolymorphicModel):
IdType.ISRC,
IdType.MusicBrainz,
IdType.Feed,
IdType.IMDB,
IdType.TMDB_TVSeason,
IdType.IMDB,
]
for t in best_id_types:
if lookup_ids.get(t):
return t, lookup_ids[t]
return list(lookup_ids.items())[0]
def merge(self, to_item):
def merge_to(self, to_item):
if to_item is None:
raise (ValueError("cannot merge to an empty item"))
raise ValueError("cannot merge to an empty item")
elif to_item.merged_to_item is not None:
raise (ValueError("cannot merge with an item aleady merged"))
elif to_item.__class__ != self.__class__:
raise (ValueError("cannot merge with an item in different class"))
else:
raise ValueError("cannot merge with an item aleady merged")
if to_item.__class__ != self.__class__:
_logger.warn(f"merging item across class from {self} to {to_item}")
self.merged_to_item = to_item
self.save()
for res in self.external_resources.all():
res.item = to_item
res.save()
def switch_class_to(self, cls):
_logger.warn(f"switch item across class from {self} to {cls}")
# TODO
pass
@property
def uuid(self):
@ -379,7 +390,11 @@ class ExternalResource(models.Model):
unique_together = [["id_type", "id_value"]]
def __str__(self):
return f"{self.id}:{self.id_type}:{self.id_value if self.id_value else ''} ({self.url})"
return f"{self.pk}:{self.id_type}:{self.id_value if self.id_value else ''} ({self.url})"
def get_site(self):
"""place holder only, this will be injected from SiteManager"""
pass
@property
def site_name(self):
@ -408,7 +423,7 @@ class ExternalResource(models.Model):
d = {k: v for k, v in d.items() if bool(v)}
return d
def get_preferred_model(self):
def get_preferred_model(self) -> type[Item] | None:
model = self.metadata.get("preferred_model")
if model:
m = ContentType.objects.filter(

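For context, a minimal sketch of the new merge flow; the model choice, primary keys and variable names are illustrative and not part of this commit:

from catalog.tv.models import TVSeason
from journal.models import update_journal_for_merged_item

dup = TVSeason.objects.get(pk=123)        # hypothetical duplicate season
canonical = TVSeason.objects.get(pk=456)  # hypothetical canonical season
dup.merge_to(canonical)                   # marks dup as merged and repoints its ExternalResources
update_journal_for_merged_item(dup.uuid)  # then moves marks/reviews/comments over, as the merge view does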
View file

@ -12,6 +12,7 @@ from .models import ExternalResource
from dataclasses import dataclass, field
import logging
import json
import django_rq
_logger = logging.getLogger(__name__)
@ -21,8 +22,8 @@ _logger = logging.getLogger(__name__)
class ResourceContent:
lookup_ids: dict = field(default_factory=dict)
metadata: dict = field(default_factory=dict)
cover_image: bytes = None
cover_image_extention: str = None
cover_image: bytes | None = None
cover_image_extention: str | None = None
def dict(self):
return {"metadata": self.metadata, "lookup_ids": self.lookup_ids}
@ -122,7 +123,7 @@ class AbstractSite:
auto_link=True,
preloaded_content=None,
ignore_existing_content=False,
) -> ExternalResource:
) -> ExternalResource | None:
"""
Returns an ExternalResource in scraped state if possible
@ -158,7 +159,7 @@ class AbstractSite:
if not p.ready:
_logger.error(f"unable to get resource {self.url} ready")
return None
if auto_create and p.item is None:
if auto_create: # and p.item is None:
self.get_item()
if auto_save:
p.save()
@ -175,6 +176,7 @@ class AbstractSite:
)
else:
_logger.error(f'unable to get site for {linked_resource["url"]}')
django_rq.get_queue("crawl").enqueue(crawl_related_resources_task, p.pk)
p.item.update_linked_items_from_external_resource(p)
p.item.save()
return p
@ -196,7 +198,7 @@ class SiteManager:
return SiteManager.registry[typ]() if typ in SiteManager.registry else None
@staticmethod
def get_site_by_url(url: str) -> AbstractSite:
def get_site_by_url(url: str) -> AbstractSite | None:
if not url:
return None
cls = next(
@ -229,4 +231,21 @@ class SiteManager:
ExternalResource.get_site = lambda resource: SiteManager.get_site_by_id_type(
resource.id_type
)
# ExternalResource.get_site = SiteManager.get_site_by_resource
def crawl_related_resources_task(resource_pk):
resource = ExternalResource.objects.get(pk=resource_pk)
links = resource.related_resources
for w in links:
try:
item = None
site = SiteManager.get_site_by_url(w["url"])
if site:
site.get_resource_ready(ignore_existing_content=False, auto_link=True)
item = site.get_item()
if item:
_logger.info(f"crawled {w['url']} {item}")
else:
_logger.warn(f"crawl {w['url']} failed")
except Exception as e:
_logger.warn(f"crawl {w['url']} error {e}")

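The new "crawl" queue needs a queue definition and a worker for crawl_related_resources_task to actually run. A hedged sketch of the django_rq settings this assumes; the Redis connection details are placeholders and the project may already configure this elsewhere:

RQ_QUEUES = {
    "crawl": {"HOST": "localhost", "PORT": 6379, "DB": 0},
}

A worker can then be started with the stock django_rq management command, for example: python manage.py rqworker crawl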
View file

@ -9,7 +9,9 @@ from .performance.models import Performance
from .collection.models import Collection as CatalogCollection
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
import logging
_logger = logging.getLogger(__name__)
if settings.SEARCH_BACKEND == "MEILISEARCH":
from .search.meilisearch import Indexer
@ -71,6 +73,9 @@ def all_categories():
def init_catalog_search_models():
if settings.DISABLE_MODEL_SIGNAL:
_logger.warn(
"Catalog models are not being indexed with DISABLE_MODEL_SIGNAL configuration"
)
return
Indexer.update_model_indexable(Edition)
Indexer.update_model_indexable(Work)

View file

@ -194,6 +194,8 @@ class Indexer:
@classmethod
def replace_item(cls, obj):
if obj.is_deleted or obj.merged_to_item_id:
return cls.delete_item(obj)
try:
cls.instance().collections[INDEX_NAME].documents.upsert(
cls.obj_to_dict(obj), {"dirty_values": "coerce_or_drop"}
@ -212,7 +214,7 @@ class Indexer:
@classmethod
def delete_item(cls, obj):
pk = f"{obj.__class__.__name__}-{obj.id}"
pk = obj.uuid
try:
cls.instance().collections[INDEX_NAME].documents[pk].delete()
except Exception as e:
@ -259,6 +261,5 @@ class Indexer:
try:
return Item.get_by_url(item["id"])
except Exception as e:
print(e)
logger.error(f"unable to load search result item from db:\n{item}")
logger.error(f"unable to load search result item from db:{item}\n{e}")
return None

View file

@ -107,7 +107,7 @@ def search(request):
if request.user.is_authenticated and keywords.find("://") > 0:
site = SiteManager.get_site_by_url(keywords)
if site:
return fetch(request, keywords, site)
return fetch(request, keywords, False, site)
if settings.SEARCH_BACKEND is None:
# return limited results if no SEARCH_BACKEND
result = {
@ -191,5 +191,6 @@ def fetch_task(url, is_refetch):
if item:
_logger.info(f"fetched {url} {item.url} {item}")
item_url = item.url
finally:
except Exception as e:
_logger.info(f"fetch error {e}")
return item_url

View file

@ -5,7 +5,7 @@ from catalog.tv.models import *
import logging
from django.db import models
from django.utils.translation import gettext_lazy as _
from .tmdb import TMDB_TV, search_tmdb_by_imdb_id, query_tmdb_tv_episode
from .tmdb import TMDB_TV, TMDB_TVSeason, search_tmdb_by_imdb_id, query_tmdb_tv_episode
_logger = logging.getLogger(__name__)
@ -23,7 +23,7 @@ class DoubanMovie(AbstractSite):
# no DEFAULT_MODEL as it may be either TV Season or Movie
@classmethod
def id_to_url(self, id_value):
def id_to_url(cls, id_value):
return "https://movie.douban.com/subject/" + id_value + "/"
def scrape(self):
@ -218,51 +218,77 @@ class DoubanMovie(AbstractSite):
pd.metadata["preferred_model"] = (
("TVSeason" if season else "TVShow") if is_series else "Movie"
)
tmdb_season_id = None
if imdb_code:
res_data = search_tmdb_by_imdb_id(imdb_code)
tmdb_show_id = None
if "movie_results" in res_data and len(res_data["movie_results"]) > 0:
pd.metadata["preferred_model"] = "Movie"
elif "tv_results" in res_data and len(res_data["tv_results"]) > 0:
if pd.metadata["preferred_model"] == "TVSeason":
"""
determine if this Douban Movie item should map to
a single season tv show, or
first season of multi-season show
"""
tmdb_show_id = res_data["tv_results"][0]["id"]
tmdb_season_id = f"{tmdb_show_id}-1"
site = TMDB_TVSeason(TMDB_TVSeason.id_to_url(tmdb_season_id))
tmdb_tvseason = site.get_resource_ready().item
tmdb_tv = tmdb_tvseason.show
if tmdb_tv.season_count == 1:
pd.metadata["preferred_model"] = "TVShow"
# else:
# pd.metadata["preferred_model"] = "TVSeason"
# resp = query_tmdb_tv_episode(tmdb_show_id, 1, 1)
# imdb_code = resp["external_ids"]["imdb_id"]
# _logger.warning(
# f"Douban Movie {self.url} re-mapped to imdb episode {imdb_code}"
# )
elif (
"tv_season_results" in res_data
and len(res_data["tv_season_results"]) > 0
):
pd.metadata["preferred_model"] = "TVSeason"
tmdb_show_id = res_data["tv_season_results"][0]["show_id"]
tmdb_season_id = f"{tmdb_show_id}-{season}"
elif (
"tv_episode_results" in res_data
and len(res_data["tv_episode_results"]) > 0
):
pd.metadata["preferred_model"] = "TVSeason"
tmdb_show_id = res_data["tv_episode_results"][0]["show_id"]
if res_data["tv_episode_results"][0]["episode_number"] != 1:
_logger.warning(
f"Douban Movie {self.url} mapping to unexpected imdb episode {imdb_code}"
)
resp = query_tmdb_tv_episode(
tmdb_show_id,
res_data["tv_episode_results"][0]["season_number"],
1,
)
imdb_code = resp["external_ids"]["imdb_id"]
_logger.warning(
f"Douban Movie {self.url} re-mapped to imdb episode {imdb_code}"
)
tmdb_season_id = f"{tmdb_show_id}-{season}"
# if res_data["tv_episode_results"][0]["episode_number"] != 1:
# _logger.warning(
# f"Douban Movie {self.url} mapping to unexpected imdb episode {imdb_code}"
# )
# resp = query_tmdb_tv_episode(
# tmdb_show_id,
# res_data["tv_episode_results"][0]["season_number"],
# 1,
# )
# imdb_code = resp["external_ids"]["imdb_id"]
# _logger.warning(
# f"Douban Movie {self.url} re-mapped to imdb episode {imdb_code}"
# )
pd.lookup_ids[IdType.IMDB] = imdb_code
if tmdb_show_id:
pd.metadata["required_resources"] = [
{
"model": "TVShow",
"id_type": IdType.TMDB_TV,
"id_value": tmdb_show_id,
"title": title,
"url": TMDB_TV.id_to_url(tmdb_show_id),
}
]
if pd.metadata["preferred_model"] == "TVSeason":
pd.lookup_ids[IdType.TMDB_TVSeason] = tmdb_season_id
elif pd.metadata["preferred_model"] == "TVShow":
pd.lookup_ids[IdType.TMDB_TV] = tmdb_show_id
# if tmdb_show_id:
# pd.metadata["required_resources"] = [
# {
# "model": "TVShow",
# "id_type": IdType.TMDB_TV,
# "id_value": tmdb_show_id,
# "title": title,
# "url": TMDB_TV.id_to_url(tmdb_show_id),
# }
# ]
# TODO parse sister seasons
# pd.metadata['related_resources'] = []
if pd.metadata["cover_image_url"]:

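A hedged illustration of the season-id convention relied on above (the show id is arbitrary):

tmdb_show_id = 1396
season = 2
tmdb_season_id = f"{tmdb_show_id}-{season}"   # "1396-2", stored under IdType.TMDB_TVSeason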
View file

@ -58,11 +58,9 @@
</div>
<div class="grid__aside">
<div class="aside-section-wrapper">
</div>
</div>
{% include "search_sidebar.html" %}
</div>
</section>
</div>

View file

@ -103,45 +103,7 @@
</div>
<div class="grid__aside">
<div class="aside-section-wrapper">
<div class="add-entity-entries">
<div class="add-entity-entries__entry">
<div class="add-entity-entries__label">
{% trans '没有想要的结果?' %}
</div>
<p>
如果在
{% for site in sites %}
{{ site }}
{% if not forloop.last %}/{% endif %}
{% endfor %}
找到了条目,可以在搜索栏中输入完整链接提交。
</p>
<p>
当然也可以手工创建条目。
</p>
<a href="{% url 'catalog:create' 'Edition' %}">
<button class="add-entity-entries__button">{% trans '添加书' %}</button>
</a>
<a href="{% url 'catalog:create' 'Movie' %}">
<button class="add-entity-entries__button">{% trans '添加电影' %}</button>
</a>
<a href="{% url 'catalog:create' 'TVShow' %}">
<button class="add-entity-entries__button">{% trans '添加剧集' %}</button>
</a>
<a href="{% url 'catalog:create' 'Album' %}">
<button class="add-entity-entries__button">{% trans '添加专辑' %}</button>
</a>
<a href="{% url 'catalog:create' 'Game' %}">
<button class="add-entity-entries__button">{% trans '添加游戏' %}</button>
</a>
</div>
</div>
</div>
</div>
{% include "search_sidebar.html" %}
</div>
</section>
</div>

View file

@ -0,0 +1,43 @@
{% load static %}
{% load i18n %}
{% load l10n %}
<div class="grid__aside">
<div class="aside-section-wrapper">
<div class="add-entity-entries">
<div class="add-entity-entries__entry">
<div class="add-entity-entries__label">
{% trans '没有想要的结果?' %}
</div>
<p>
如果在
{% for site in sites %}
{{ site }}
{% if not forloop.last %}/{% endif %}
{% endfor %}
找到了条目,可以在搜索栏中输入完整链接提交。
</p>
<p>
当然也可以手工创建条目。
</p>
<a href="{% url 'catalog:create' 'Edition' %}">
<button class="add-entity-entries__button">{% trans '添加书' %}</button>
</a>
<a href="{% url 'catalog:create' 'Movie' %}">
<button class="add-entity-entries__button">{% trans '添加电影' %}</button>
</a>
<a href="{% url 'catalog:create' 'TVShow' %}">
<button class="add-entity-entries__button">{% trans '添加剧集' %}</button>
</a>
<a href="{% url 'catalog:create' 'Album' %}">
<button class="add-entity-entries__button">{% trans '添加专辑' %}</button>
</a>
<a href="{% url 'catalog:create' 'Game' %}">
<button class="add-entity-entries__button">{% trans '添加游戏' %}</button>
</a>
</div>
</div>
</div>
</div>

View file

@ -13,7 +13,7 @@
{% block title %}
<h5 class="entity-detail__title">
{% if item.season_number %}
{{ item.title }} {% trans '第' %}{{ item.season_number|apnumber }}{% trans '季' %} {{ item.orig_title }} Season {{ item.season_number }}
{{ item.title }} {{ item.orig_title }} Season {{ item.season_number }}
<span class="entity-detail__title entity-detail__title--secondary">
{% if item.year %}({{ item.year }}){% endif %}
</span>
@ -125,10 +125,24 @@
</div>
<div class="entity-detail__fields">
<div>{% if item.duration %}{% trans '片长:' %}{{ item.duration }}{% endif %}</div>
<div>{% if item.season_count %}{% trans '季数:' %}{{ item.season_count }}{% endif %}</div>
<div>{% if item.episode_count %}{% trans '集数:' %}{{ item.episode_count }}{% endif %}</div>
<div>{% if item.season_number %}{% trans '本季序号:' %}{{ item.season_number }}{% endif %}</div>
<div>{% if item.episode_count %}{% trans '本季集数:' %}{{ item.episode_count }}{% endif %}</div>
<div> </div>
<div>{% if item.show %}{% trans '所属剧集:' %}<a href="{{ item.show.url }}">{{ item.show.title }}</a>{% endif %}</div>
<div>{% if item.season_count %}{% trans '总季数:' %}{{ item.season_count }}{% endif %}</div>
<div>{% if item.single_episode_length %}{% trans '单集长度:' %}{{ item.single_episode_length }}{% endif %}</div>
{% with item.all_seasons as seasons %}
{% if seasons %}
<div>
{% trans '本剧所有季:' %}
{% for s in seasons %}
<span>
<a href="{{ s.url }}">{{ s.season_number }}</a>
</span>
{% endfor %}
</div>
{% endif %}
{% endwith %}
<div>{% if item.showtime %}{% trans '上映时间:' %}
{% for showtime in item.showtime %}
@ -160,7 +174,7 @@
{% endif %}
<div>
<a href="{% url 'catalog:edit' item.url_path item.uuid %}">{% trans '编辑这部剧集' %}</a>
<a href="{% url 'catalog:edit' item.url_path item.uuid %}">{% trans '编辑' %}{{ item.demonstrative }}</a>
{% if user.is_staff %}
/<a href="{% url 'catalog:delete' item.url_path item.uuid %}"> {% trans '删除' %}</a>
{% endif %}

View file

@ -125,12 +125,24 @@
</div>
<div class="entity-detail__fields">
<div>{% if item.duration %}{% trans '片长:' %}{{ item.duration }}{% endif %}</div>
<div>{% if item.season_count %}{% trans '季数:' %}{{ item.season_count }}{% endif %}</div>
<div>{% if item.episode_count %}{% trans '集数:' %}{{ item.episode_count }}{% endif %}</div>
<div>{% if item.single_episode_length %}{% trans '单集长度:' %}{{ item.single_episode_length }}{% endif %}</div>
<div>{% if item.showtime %}{% trans '上映时间:' %}
{% with item.all_seasons as seasons %}
{% if seasons %}
<div>
{% trans '本剧所有季:' %}
{% for s in seasons %}
<span>
<a href="{{ s.url }}">{{ s.season_number }}</a>
</span>
{% endfor %}
</div>
{% endif %}
{% endwith %}
<div>{% if item.showtime %}{% trans '播出时间:' %}
{% for showtime in item.showtime %}
{% for time, region in showtime.items %}
<span>{{ time }}{% if region != '' %}({{ region }}){% endif %}</span>

View file

@ -24,9 +24,11 @@ tv specials are shown as movies
For now, we follow Douban convention, but keep an eye on it in case it breaks its own rules...
"""
from functools import cached_property
from catalog.common import *
from django.db import models
from django.utils.translation import gettext_lazy as _
import re
class TVShow(Item):
@ -150,6 +152,10 @@ class TVShow(Item):
]
return [(i.value, i.label) for i in id_types]
@cached_property
def all_seasons(self):
return self.seasons.all().order_by("season_number")
class TVSeason(Item):
category = ItemCategory.TV
@ -269,6 +275,15 @@ class TVSeason(Item):
]
return [(i.value, i.label) for i in id_types]
def is_partial_title(self):
return re.match("^(第.+季|特别篇)$", self.title) is not None
def get_full_title(self):
if self.is_partial_title() and self.show:
return f"{self.show.title} {self.title}"
else:
return self.title
def update_linked_items_from_external_resource(self, resource):
"""add Work from resource.metadata['work'] if not yet"""
links = resource.required_resources + resource.related_resources
@ -277,8 +292,12 @@ class TVSeason(Item):
p = ExternalResource.objects.filter(
id_type=w["id_type"], id_value=w["id_value"]
).first()
if p and p.item and self.show != p.item:
if p and p.item and w in resource.required_resources:
self.show = p.item
self.title = self.get_full_title()
def all_seasons(self):
return self.show.all_seasons if self.show else []
class TVEpisode(Item):

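A hedged illustration of the new partial-title handling; the show title is hypothetical:

import re

title = "第二季"                                               # season title as scraped
is_partial = re.match("^(第.+季|特别篇)$", title) is not None   # True for bare season titles
full_title = f"星际牛仔 {title}" if is_partial else title       # e.g. "星际牛仔 第二季"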
View file

@ -44,6 +44,13 @@ urlpatterns = [
delete,
name="delete",
),
re_path(
r"^(?P<item_path>"
+ _get_all_url_paths()
+ ")/(?P<item_uuid>[A-Za-z0-9]{21,22})/merge$",
merge,
name="merge",
),
re_path(
r"^(?P<item_path>"
+ _get_all_url_paths()

View file

@ -25,7 +25,11 @@ from .models import *
from django.conf import settings
from django.utils.baseconv import base62
from journal.models import Mark, ShelfMember, Review
from journal.models import query_visible, query_following
from journal.models import (
query_visible,
query_following,
update_journal_for_merged_item,
)
from common.utils import PageLinksGenerator
from common.config import PAGE_LINK_NUMBER
from journal.models import ShelfTypeNames
@ -169,7 +173,26 @@ def edit(request, item_path, item_uuid):
@login_required
def delete(request, item_path, item_uuid):
if request.method != "POST":
return HttpResponseBadRequest()
if not request.user.is_staff:
raise PermissionDenied()
return HttpResponseBadRequest()
@login_required
def merge(request, item_path, item_uuid):
if request.method != "POST":
return HttpResponseBadRequest()
if not request.user.is_staff:
raise PermissionDenied()
item = get_object_or_404(Item, uid=base62.decode(item_uuid))
new_item = Item.get_by_url(request.POST.get("new_item_url"))
if not new_item or new_item.is_deleted or new_item.merged_to_item_id:
return HttpResponseBadRequest(b"invalid new item")
item.merge_to(new_item)
update_journal_for_merged_item(item_uuid)
return redirect(new_item.url)
@login_required

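A hedged sketch of exercising the new merge endpoint with Django's test client; the URL path, uuid and the staff_user variable are placeholders:

from django.test import Client

c = Client()
c.force_login(staff_user)   # the merge view is restricted to staff accounts
c.post(
    f"/tv/season/{duplicate_uuid}/merge",
    {"new_item_url": canonical_item.url},
)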
View file

@ -17,6 +17,7 @@
<option value="all" {% if request.GET.c and request.GET.c == 'all' or not request.GET.c %}selected{% endif %}>{% trans '任意' %}</option>
<option value="book" {% if request.GET.c and request.GET.c == 'book' or '/books/' in request.path %}selected{% endif %}>{% trans '书籍' %}</option>
<option value="movie" {% if request.GET.c and request.GET.c == 'movie' or '/movies/' in request.path %}selected{% endif %}>{% trans '电影' %}</option>
<option value="tv" {% if request.GET.c and request.GET.c == 'tv' or '/tv/' in request.path %}selected{% endif %}>{% trans '剧集' %}</option>
<option value="music" {% if request.GET.c and request.GET.c == 'music' or '/music/' in request.path %}selected{% endif %}>{% trans '音乐' %}</option>
<option value="game" {% if request.GET.c and request.GET.c == 'game' or '/games/' in request.path %}selected{% endif %}>{% trans '游戏' %}</option>
</select>

View file

@ -4,7 +4,6 @@ from users.models import User
from catalog.common.models import Item, ItemCategory
from .mixins import UserOwnedObjectMixin
from catalog.collection.models import Collection as CatalogCollection
from enum import Enum
from markdownx.models import MarkdownxField
from django.utils import timezone
from django.conf import settings
@ -15,7 +14,6 @@ from functools import cached_property
from django.db.models import Count, Avg
from django.contrib.contenttypes.models import ContentType
import django.dispatch
import math
import uuid
import re
from catalog.common.utils import DEFAULT_ITEM_COVER, item_cover_path
@ -27,6 +25,8 @@ from django.contrib.contenttypes.models import ContentType
from markdown import markdown
from catalog.common import jsondata
_logger = logging.getLogger(__name__)
class VisibilityType(models.IntegerChoices):
Public = 0, _("公开")
@ -248,6 +248,9 @@ class Review(Content):
class Rating(Content):
class Meta:
unique_together = [["owner", "item"]]
grade = models.PositiveSmallIntegerField(
default=0, validators=[MaxValueValidator(10), MinValueValidator(1)], null=True
)
@ -932,3 +935,20 @@ def remove_data_by_user(user: User):
Comment.objects.filter(owner=user).delete()
Rating.objects.filter(owner=user).delete()
Review.objects.filter(owner=user).delete()
def update_journal_for_merged_item(legacy_item_uuid):
legacy_item = Item.get_by_url(legacy_item_uuid)
if not legacy_item:
_logger.error("update_journal_for_merged_item: unable to find item")
return
new_item = legacy_item.merged_to_item
for cls in Content.__subclasses__() + ListMember.__subclasses__():
_logger.info(f"update {cls.__name__}: {legacy_item} -> {new_item}")
for p in cls.objects.filter(item=legacy_item):
try:
p.item = new_item
p.save(update_fields=["item_id"])
except Exception:
_logger.info(f"delete duplicated piece {p}")
p.delete()

View file

@ -50,7 +50,7 @@ def _book_convert(entity):
if t:
content.lookup_ids[t] = v
if entity.other_info and entity.other_info.get("统一书号"):
content.lookup_ids[IdType.CUBN] = entity.other_info.get("统一书号")
content.lookup_ids[IdType.CUBN] = entity.other_info.get("统一书号").strip()
return content
@ -77,11 +77,12 @@ def _album_convert(entity):
else None,
}
)
if entity.other_info and entity.other_info.get("ISRC"):
if entity.other_info:
if entity.other_info.get("ISRC"):
content.lookup_ids[IdType.ISRC] = entity.other_info.get("ISRC")
if entity.other_info and entity.other_info.get("条形码"):
if entity.other_info.get("条形码") and entity.other_info.get("条形码") != "none":
content.lookup_ids[IdType.GTIN] = entity.other_info.get("条形码")
if entity.other_info and entity.other_info.get("UPC"):
if entity.other_info.get("UPC") and entity.other_info.get("UPC") != "none":
content.lookup_ids[IdType.GTIN] = entity.other_info.get("UPC")
return content

View file

@ -0,0 +1,31 @@
from catalog.common import *
from catalog.models import *
from catalog.sites import *
from django.core.management.base import BaseCommand
from django.core.paginator import Paginator
import pprint
from tqdm import tqdm
import logging
_logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Refetch TMDB TV Shows"
def add_arguments(self, parser):
parser.add_argument("--minid", help="min id to start")
def handle(self, *args, **options):
qs = ExternalResource.objects.all().filter(id_type="tmdb_tv").order_by("id")
if options["minid"]:
qs = qs.filter(id__gte=int(options["minid"]))
for res in tqdm(qs):
if res:
try:
site = SiteManager.get_site_by_url(res.url)
site.get_resource_ready(ignore_existing_content=True)
_logger.info(f"fetch {res.url} success {site.get_item().title}")
except Exception as e:
_logger.error(f"fetch {res.url} error {e}")

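A hedged usage note: the management command name is derived from the file name, which this diff does not show; assuming it is saved as catalog/management/commands/refetch_tmdb_tv.py, it would be invoked as python manage.py refetch_tmdb_tv --minid 1000.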
View file

@ -91,7 +91,11 @@ class DataSignalManager:
@staticmethod
def add_handler_for_model(model):
if not settings.DISABLE_MODEL_SIGNAL:
if settings.DISABLE_MODEL_SIGNAL:
_logger.warn(
f"{model.__name__} are not being indexed with DISABLE_MODEL_SIGNAL configuration"
)
return
post_save.connect(DataSignalManager.save_handler, sender=model)
pre_delete.connect(DataSignalManager.delete_handler, sender=model)