Soft merge without removing duplicates

This commit is contained in:
Your Name 2023-06-06 22:20:50 -04:00 committed by Henri Dickson
parent d155523d83
commit 31bd89d565
2 changed files with 15 additions and 7 deletions

View file

@@ -231,9 +231,12 @@ class DoubanMovie(AbstractSite):
and len(res_data["tv_episode_results"]) > 0
)
if pd.metadata["preferred_model"] == "TVSeason" and has_tv:
if pd.metadata.get("season") and pd.metadata.get("season") != 1:
if (
pd.metadata.get("season_number")
and pd.metadata.get("season_number") != 1
):
_logger.warn(f"{imdb_code} matched imdb tv show, force season 1")
pd.metadata["season"] = 1
pd.metadata["season_number"] = 1
elif pd.metadata["preferred_model"] == "TVSeason" and has_episode:
if res_data["tv_episode_results"][0]["episode_number"] != 1:
_logger.warning(

View file

@@ -1264,7 +1264,7 @@ def remove_data_by_user(user: User):
FeaturedCollection.objects.filter(owner=user).delete()
def update_journal_for_merged_item(legacy_item_uuid):
def update_journal_for_merged_item(legacy_item_uuid, delete_duplicated=False):
legacy_item = Item.get_by_url(legacy_item_uuid)
if not legacy_item:
_logger.error("update_journal_for_merged_item: unable to find item")
@@ -1276,10 +1276,15 @@ def update_journal_for_merged_item(legacy_item_uuid):
p.item = new_item
p.save(update_fields=["item_id"])
except:
_logger.warn(
f"deleted piece {p} when merging {cls.__name__}: {legacy_item} -> {new_item}"
)
p.delete()
if delete_duplicated:
_logger.warn(
f"deleted piece {p} when merging {cls.__name__}: {legacy_item} -> {new_item}"
)
p.delete()
else:
_logger.warn(
f"skip piece {p} when merging {cls.__name__}: {legacy_item} -> {new_item}"
)
def journal_exists_for_item(item):