From de34bb0c706734f31c70b0679ddbfd33520cc440 Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Fri, 24 Jan 2025 05:36:03 +0000 Subject: [PATCH 1/7] WIP: Series pt1 --- catalog/book/models.py | 101 +++++++++++++++++++++++++++++++--- catalog/common/models.py | 1 + catalog/sites/bangumi.py | 34 ++++++++---- catalog/sites/goodreads.py | 53 +++++++++++++++++- catalog/templates/series.html | 27 +++++++++ 5 files changed, 196 insertions(+), 20 deletions(-) create mode 100644 catalog/templates/series.html diff --git a/catalog/book/models.py b/catalog/book/models.py index c204e626d..893842b11 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -20,6 +20,8 @@ from functools import cached_property from typing import TYPE_CHECKING +from auditlog.models import QuerySet + from django.core.validators import MaxValueValidator, MinValueValidator from django.db import models from django.utils.translation import gettext_lazy as _ @@ -109,7 +111,6 @@ class EditionSchema(EditionInSchema, BaseSchema): class Edition(Item): if TYPE_CHECKING: works: "models.ManyToManyField[Work, Edition]" - class BookFormat(models.TextChoices): PAPERBACK = "paperback", _("Paperback") HARDCOVER = "hardcover", _("Hardcover") @@ -294,6 +295,35 @@ def update_linked_items_from_external_resource(self, resource): ).first() if work and work not in self.works.all(): self.works.add(work) + if w.get("model") == "Series": + series_res = ExternalResource.objects.filter( + id_type=w["id_type"], id_value=w["id_value"] + ).first() + if series_res: + series = series_res.item + if not series: + logger.warning(f"Unable to find series for {series_res}") + else: + logger.warning( + f"Unable to find resource for {w['id_type']}:{w['id_value']}" + ) + series = Series.objects.filter( + primary_lookup_id_type=w["id_type"], + primary_lookup_id_value=w["id_value"], + ).first() + if series: + if self.works.all().exists(): + logger.debug("WORK IS EXIST") + for work in self.works.all(): + if series in work.series.all(): + continue + work.series.add(series) + else: + logger.debug("WORK NOT EXIST") + work = Work.objects.create(localized_title=self.localized_title) + work.editions.add(self) + work.series.add(series) + work.save() def merge_data_from_external_resource( self, p: "ExternalResource", ignore_existing_content: bool = False @@ -371,9 +401,9 @@ def unlink_from_all_works(self): def has_works(self): return self.works.all().exists() - - class Work(Item): + if TYPE_CHECKING: + series: "models.ManyToManyField[Series, Work]" category = ItemCategory.Book url_path = "book/work" douban_work = PrimaryLookupIdDescriptor(IdType.DoubanBook_Work) @@ -460,12 +490,67 @@ def update_linked_items_from_external_resource(self, resource): if edition and edition not in self.editions.all(): self.editions.add(edition) - class Series(Item): + works = models.ManyToManyField(Work, related_name="series") category = ItemCategory.Book url_path = "book/series" - # douban_serie = LookupIdDescriptor(IdType.DoubanBook_Serie) - # goodreads_serie = LookupIdDescriptor(IdType.Goodreads_Serie) + METADATA_COPY_LIST = [ + "localized_title", + "localized_description", + ] + goodreads_serie = PrimaryLookupIdDescriptor(IdType.Goodreads_Series) + + @classmethod + def lookup_id_type_choices(cls): + id_types = [ + IdType.Goodreads_Series, + ] + return [(i.value, i.label) for i in id_types] + + @cached_property + def all_works(self): + return ( + self.works.all() + .filter(is_deleted=False, merged_to_item=None) + ) - class Meta: - proxy = True + @property + def cover_image_url(self): + url = 
super().cover_image_url + if url: + return url + e = next(filter(lambda e: e.cover_image_url, self.works.all()), None) + return e.cover_image_url if e else None + + def update_linked_items_from_external_resource(self, resource): + """add Work from resource.metadata['required_resources'] if not yet""" + links = resource.required_resources + resource.related_resources + for e in links: + if e.get("model") == "Edition": + edition_res = ExternalResource.objects.filter( + id_type=e["id_type"], id_value=e["id_value"] + ).first() + if edition_res: + edition = edition_res.item + if not edition: + logger.warning(f"Unable to find edition for {edition_res}") + else: + logger.warning( + f"Unable to find resource for {e['id_type']}:{e['id_value']}" + ) + edition = Edition.objects.filter( + primary_lookup_id_type=e["id_type"], + primary_lookup_id_value=e["id_value"], + ).first() + if not edition: + return + if edition.works.all().exists(): + for work in edition.works.all(): + if work not in self.works.all(): + self.works.add(work) + self.works.add(work) + else: + work = Work.objects.create(localized_title=edition.localized_title) + work.editions.add(edition) + work.save() + self.works.add(work) diff --git a/catalog/common/models.py b/catalog/common/models.py index 981800341..67348d738 100644 --- a/catalog/common/models.py +++ b/catalog/common/models.py @@ -74,6 +74,7 @@ class IdType(models.TextChoices): TMDB_Movie = "tmdb_movie", _("TMDB Movie") # type:ignore[reportCallIssue] Goodreads = "goodreads", _("Goodreads") # type:ignore[reportCallIssue] Goodreads_Work = "goodreads_work", _("Goodreads Work") # type:ignore[reportCallIssue] + Goodreads_Series = "goodreads_series", _("Goodreads Series") # type:ignore[reportCallIssue] GoogleBooks = "googlebooks", _("Google Books") # type:ignore[reportCallIssue] DoubanBook = "doubanbook", _("Douban Book") # type:ignore[reportCallIssue] DoubanBook_Work = "doubanbook_work", _("Douban Book Work") # type:ignore[reportCallIssue] diff --git a/catalog/sites/bangumi.py b/catalog/sites/bangumi.py index 648bdbbe6..556a19462 100644 --- a/catalog/sites/bangumi.py +++ b/catalog/sites/bangumi.py @@ -38,13 +38,13 @@ def get_category( release_type = None showtime = None year = None + required_resources = [] related_resources = [] match o["type"]: case 1: - model = "Edition" category = ItemCategory.Book - if o["series"] and fetch_resources: - # model = "Series" TODO + model = "Series" if o["series"] else "Edition" + if fetch_resources: res = ( BasicDownloader( f"https://api.bgm.tv/v0/subjects/{o['id']}/subjects", @@ -57,13 +57,26 @@ def get_category( ) for s in res: - if s["relation"] != "单行本": - continue - related_resources.append( - { - "url": cls.id_to_url(s["id"]), - } - ) + match s["relation"]: + case "系列": + required_resources.append( + { + "model": "Series", + "id_type": IdType.Bangumi, + "id_value": s["id"], + "url": cls.id_to_url(s["id"]), + } + ) + case "单行本": + related_resources.append( + { + "model": "Edition", + "id_type": IdType.Bangumi, + "id_value": s["id"], + "url": cls.id_to_url(s["id"]), + } + ) + if dt: d = dt.split("-") pub_year = d[0] @@ -110,6 +123,7 @@ def get_category( ) return category, { "preferred_model": model, + "required_resources": required_resources, "related_resources": related_resources, "pub_year": pub_year, "pub_month": pub_month, diff --git a/catalog/sites/goodreads.py b/catalog/sites/goodreads.py index 4efbac272..122d91cdf 100644 --- a/catalog/sites/goodreads.py +++ b/catalog/sites/goodreads.py @@ -9,7 +9,7 @@ from catalog.book.utils import 
binding_to_format, detect_isbn_asin from catalog.common import * -from catalog.models import Edition, ExternalSearchResultItem, Work +from catalog.models import Edition, ExternalSearchResultItem, Work,Series from common.models import detect_language from journal.models.renderers import html_to_text @@ -115,7 +115,14 @@ def scrape(self, response=None): "title": w["details"]["originalTitle"], "url": w["editions"]["webUrl"], } - ] + ]+[ + {"model":"Series","id_type":IdType.Goodreads_Series, + "id_value":Goodreads_Series.url_to_id(s["webUrl"]), + "title":s["title"], + "url":s["webUrl"], + } + for s in o["Series"] + ] pd = ResourceContent(metadata=data) pd.lookup_ids[IdType.ISBN] = ids.get(IdType.ISBN) pd.lookup_ids[IdType.ASIN] = ids.get(IdType.ASIN) @@ -228,3 +235,45 @@ def scrape(self, response=None): } ) return pd + +@SiteManager.register +class Goodreads_Series(AbstractSite): + SITE_NAME = SiteName.Goodreads + ID_TYPE = IdType.Goodreads_Series + WIKI_PROPERTY_ID = "" + DEFAULT_MODEL = Series + URL_PATTERNS = [r".+goodreads\.com/series/(\d+)"] + + @classmethod + def id_to_url(cls, id_value): + return "https://www.goodreads.com/series/" + id_value + + def scrape(self, response=None): + content = BasicDownloader(self.url).download().html() + title = self.query_str(content, "//h1/text()") + if not title: + raise ParseError(self, "title") + try: + description =self.query_str(content, '//*[@class="u-paddingBottomSmall"]/text()') + except IndexError: + description = None + logger.debug(f"Title: {description}") + related_editions = [] + for work in self.query_list(content, '//*[@class="listWithDividers__item"]'): + t = self.query_str(work, './/div/div/div[2]/a/span/text()') + url = self.query_str(work, './/div/div/div[1]/div/a/@href') + related_editions.append({ + "model": "Edition", + "id_type": IdType.Goodreads, + "id_value":Goodreads.url_to_id(url), + "title": t, + "url": f"https://www.goodreads.com{url}", + }) + pd = ResourceContent( + metadata={ + "localized_title": [{"lang": detect_language(title), "text": title}], + "localized_description": [{"lang": detect_language(description), "text": description}] if description else [], + "related_resources": related_editions, + } + ) + return pd diff --git a/catalog/templates/series.html b/catalog/templates/series.html new file mode 100644 index 000000000..3ea73a2d1 --- /dev/null +++ b/catalog/templates/series.html @@ -0,0 +1,27 @@ +{% extends "item_base.html" %} +{% load static %} +{% load i18n %} +{% load l10n %} +{% load humanize %} +{% load mastodon %} +{% load strip_scheme %} +{% load thumb %} + +{% block details %} +
{% include '_people.html' with people=item.additional_title role='' max=99 %}
+
{% include '_people.html' with people=item.author role='author' max=5 %}
+{% endblock %} +{% block left_sidebar %} +
+
+ {% trans 'Series' %}
+ {% for w in item.works.all %}
+
+ {{ w.display_title }} +
+ {% endfor %} +
+
+{% endblock %} + +{% block sidebar %}{% endblock %} From ccc85ecfaeaf6692f3bd7733611dd571aef5b900 Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Mon, 27 Jan 2025 08:16:35 +0000 Subject: [PATCH 2/7] migrate work-edition relation to ForeignKey --- catalog/book/models.py | 114 ++++++++------ ...alter_externalresource_id_type_and_more.py | 148 ++++++++++++++++++ ...ve_work_editions_edition_works_and_more.py | 92 +++++++++++ catalog/models.py | 1 + catalog/search/models.py | 6 +- catalog/sites/bangumi.py | 8 +- catalog/sites/goodreads.py | 43 +++-- .../templates/_item_card_metadata_series.html | 25 +++ catalog/templates/_sidebar_edit.html | 8 +- 9 files changed, 367 insertions(+), 78 deletions(-) create mode 100644 catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py create mode 100644 catalog/migrations/0014_remove_work_editions_edition_works_and_more.py create mode 100644 catalog/templates/_item_card_metadata_series.html diff --git a/catalog/book/models.py b/catalog/book/models.py index 893842b11..17f815543 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -109,8 +109,10 @@ class EditionSchema(EditionInSchema, BaseSchema): class Edition(Item): - if TYPE_CHECKING: - works: "models.ManyToManyField[Work, Edition]" + works = models.ForeignKey( + "Work", null=True, on_delete=models.SET_NULL, related_name="editions" + ) + class BookFormat(models.TextChoices): PAPERBACK = "paperback", _("Paperback") HARDCOVER = "hardcover", _("Hardcover") @@ -264,13 +266,21 @@ def merge_to(self, to_item: "Edition | None"): # type: ignore[reportIncompatibl if to_item: if self.merge_title(): self.save() - for work in self.works.all(): - to_item.works.add(work) - self.works.clear() + if not self.works: + return + if to_item.works: + for edition in self.works.editions.exclude(pk=self.pk).all(): + edition.works = to_item.works + edition.save() + else: + to_item.works = self.works + to_item.save() + self.works = None + self.save() def delete(self, using=None, keep_parents=False, soft=True, *args, **kwargs): if soft: - self.works.clear() + self.works = None return super().delete(using, soft, keep_parents, *args, **kwargs) def update_linked_items_from_external_resource(self, resource): @@ -293,8 +303,13 @@ def update_linked_items_from_external_resource(self, resource): primary_lookup_id_type=w["id_type"], primary_lookup_id_value=w["id_value"], ).first() - if work and work not in self.works.all(): - self.works.add(work) + if not work: + return + if not self.works: + self.works = work + self.save() + elif work.pk != self.works.pk: + work.merge_to(self.works) if w.get("model") == "Series": series_res = ExternalResource.objects.filter( id_type=w["id_type"], id_value=w["id_value"] @@ -312,16 +327,17 @@ def update_linked_items_from_external_resource(self, resource): primary_lookup_id_value=w["id_value"], ).first() if series: - if self.works.all().exists(): + if self.works: logger.debug("WORK IS EXIST") - for work in self.works.all(): - if series in work.series.all(): - continue - work.series.add(series) + if series in self.works.series.all(): + continue + self.works.series.add(series) + self.works.save() else: logger.debug("WORK NOT EXIST") work = Work.objects.create(localized_title=self.localized_title) - work.editions.add(self) + self.works = work + self.save() work.series.add(series) work.save() @@ -347,9 +363,8 @@ def merge_title(self) -> bool: @property def sibling_items(self): - works = list(self.works.all()) return ( - Edition.objects.filter(works__in=works) + 
Edition.objects.filter(works__in=[self.works]) .exclude(pk=self.pk) .exclude(is_deleted=True) .exclude(merged_to_item__isnull=False) @@ -369,46 +384,48 @@ def title_deco(self): return f"({' '.join(a)})" if a else "" def has_related_books(self): - works = list(self.works.all()) - if not works: + if not self.works: return False - return Edition.objects.filter(works__in=works).exclude(pk=self.pk).exists() + return ( + Edition.objects.filter(works__in=[self.works]).exclude(pk=self.pk).exists() + ) def link_to_related_book(self, target: "Edition") -> bool: if target == self or target.is_deleted or target.merged_to_item: return False - if target.works.all().exists(): - for work in target.works.all(): - self.works.add(work) - work.localized_title = uniq(work.localized_title + self.localized_title) - work.save() - elif self.works.all().exists(): - for work in self.works.all(): - target.works.add(work) - work.localized_title = uniq( - work.localized_title + target.localized_title - ) - work.save() + if target.works: + self.works = target.works + target.works.localized_title = uniq( + target.works.localized_title + self.localized_title + ) + target.works.save() else: work = Work.objects.create(localized_title=self.localized_title) - work.editions.add(self, target) + self.works = work + target.works = work # work.localized_title = self.localized_title # work.save() return True + @property + def parent_item(self): # type:ignore + return self.works.series.first() if self.works else None + def unlink_from_all_works(self): - self.works.clear() + self.works = None def has_works(self): - return self.works.all().exists() + return self.works is not None + + class Work(Item): if TYPE_CHECKING: - series: "models.ManyToManyField[Series, Work]" + series: models.ManyToManyField["Series", "Work"] + editions: QuerySet[Edition] category = ItemCategory.Book url_path = "book/work" douban_work = PrimaryLookupIdDescriptor(IdType.DoubanBook_Work) goodreads_work = PrimaryLookupIdDescriptor(IdType.Goodreads_Work) - editions = models.ManyToManyField(Edition, related_name="works") language = LanguageListField() author = jsondata.ArrayField( verbose_name=_("author"), @@ -448,15 +465,16 @@ def merge_to(self, to_item: "Work | None"): # type: ignore[reportIncompatibleMe if not to_item: return for edition in self.editions.all(): - to_item.editions.add(edition) - self.editions.clear() + edition.works = to_item + edition.save() to_item.language = uniq(to_item.language + self.language) # type: ignore to_item.localized_title = uniq(to_item.localized_title + self.localized_title) to_item.save() def delete(self, using=None, keep_parents=False, soft=True, *args, **kwargs): if soft: - self.editions.clear() + for edition in self.editions.all(): + edition.works = None return super().delete(using, keep_parents, soft, *args, **kwargs) @property @@ -488,7 +506,8 @@ def update_linked_items_from_external_resource(self, resource): primary_lookup_id_value=e["id_value"], ).first() if edition and edition not in self.editions.all(): - self.editions.add(edition) + edition.works = self + class Series(Item): works = models.ManyToManyField(Work, related_name="series") @@ -509,10 +528,7 @@ def lookup_id_type_choices(cls): @cached_property def all_works(self): - return ( - self.works.all() - .filter(is_deleted=False, merged_to_item=None) - ) + return [self.works] @property def cover_image_url(self): @@ -544,13 +560,11 @@ def update_linked_items_from_external_resource(self, resource): ).first() if not edition: return - if edition.works.all().exists(): - 
for work in edition.works.all(): - if work not in self.works.all(): - self.works.add(work) - self.works.add(work) + if edition.works: + if edition.works not in self.works.all(): + self.works.add(edition.works) else: work = Work.objects.create(localized_title=edition.localized_title) - work.editions.add(edition) + edition.works = work work.save() self.works.add(work) diff --git a/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py b/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py new file mode 100644 index 000000000..80fd254dd --- /dev/null +++ b/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py @@ -0,0 +1,148 @@ +# Generated by Django 4.2.18 on 2025-01-27 02:19 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + dependencies = [ + ("catalog", "0012_alter_model_i18n"), + ] + + operations = [ + migrations.DeleteModel( + name="Series", + ), + migrations.AlterField( + model_name="externalresource", + name="id_type", + field=models.CharField( + choices=[ + ("wikidata", "WikiData"), + ("isbn10", "ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "CUBN"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB TV Serie"), + ("tmdb_tvseason", "TMDB TV Season"), + ("tmdb_tvepisode", "TMDB TV Episode"), + ("tmdb_movie", "TMDB Movie"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads Work"), + ("goodreads_series", "Goodreads Series"), + ("googlebooks", "Google Books"), + ("doubanbook", "Douban Book"), + ("doubanbook_work", "Douban Book Work"), + ("doubanmovie", "Douban Movie"), + ("doubanmusic", "Douban Music"), + ("doubangame", "Douban Game"), + ("doubandrama", "Douban Drama"), + ("doubandrama_version", "Douban Drama Version"), + ("bookstw", "BooksTW Book"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify Album"), + ("spotify_show", "Spotify Podcast"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("igdb", "IGDB Game"), + ("bgg", "BGG Boardgame"), + ("steam", "Steam Game"), + ("bangumi", "Bangumi"), + ("apple_podcast", "Apple Podcast"), + ("apple_music", "Apple Music"), + ("fedi", "Fediverse"), + ("qidian", "Qidian"), + ("ypshuo", "Ypshuo"), + ("ao3", "Archive of Our Own"), + ("jjwxc", "JinJiang"), + ], + max_length=50, + verbose_name="IdType of the source site", + ), + ), + migrations.AlterField( + model_name="itemlookupid", + name="id_type", + field=models.CharField( + blank=True, + choices=[ + ("wikidata", "WikiData"), + ("isbn10", "ISBN10"), + ("isbn", "ISBN"), + ("asin", "ASIN"), + ("issn", "ISSN"), + ("cubn", "CUBN"), + ("isrc", "ISRC"), + ("gtin", "GTIN UPC EAN"), + ("rss", "RSS Feed URL"), + ("imdb", "IMDb"), + ("tmdb_tv", "TMDB TV Serie"), + ("tmdb_tvseason", "TMDB TV Season"), + ("tmdb_tvepisode", "TMDB TV Episode"), + ("tmdb_movie", "TMDB Movie"), + ("goodreads", "Goodreads"), + ("goodreads_work", "Goodreads Work"), + ("goodreads_series", "Goodreads Series"), + ("googlebooks", "Google Books"), + ("doubanbook", "Douban Book"), + ("doubanbook_work", "Douban Book Work"), + ("doubanmovie", "Douban Movie"), + ("doubanmusic", "Douban Music"), + ("doubangame", "Douban Game"), + ("doubandrama", "Douban Drama"), + ("doubandrama_version", "Douban Drama Version"), + ("bookstw", "BooksTW Book"), + ("bandcamp", "Bandcamp"), + ("spotify_album", "Spotify 
Album"), + ("spotify_show", "Spotify Podcast"), + ("discogs_release", "Discogs Release"), + ("discogs_master", "Discogs Master"), + ("musicbrainz", "MusicBrainz ID"), + ("igdb", "IGDB Game"), + ("bgg", "BGG Boardgame"), + ("steam", "Steam Game"), + ("bangumi", "Bangumi"), + ("apple_podcast", "Apple Podcast"), + ("apple_music", "Apple Music"), + ("fedi", "Fediverse"), + ("qidian", "Qidian"), + ("ypshuo", "Ypshuo"), + ("ao3", "Archive of Our Own"), + ("jjwxc", "JinJiang"), + ], + max_length=50, + verbose_name="source site", + ), + ), + migrations.CreateModel( + name="Series", + fields=[ + ( + "item_ptr", + models.OneToOneField( + auto_created=True, + on_delete=django.db.models.deletion.CASCADE, + parent_link=True, + primary_key=True, + serialize=False, + to="catalog.item", + ), + ), + ( + "works", + models.ManyToManyField(related_name="series", to="catalog.work"), + ), + ], + options={ + "abstract": False, + "base_manager_name": "objects", + }, + bases=("catalog.item",), + ), + ] diff --git a/catalog/migrations/0014_remove_work_editions_edition_works_and_more.py b/catalog/migrations/0014_remove_work_editions_edition_works_and_more.py new file mode 100644 index 000000000..c8a7201a3 --- /dev/null +++ b/catalog/migrations/0014_remove_work_editions_edition_works_and_more.py @@ -0,0 +1,92 @@ +# Generated by Django 4.2.18 on 2025-01-27 05:56 + +from django.db import migrations, models +import django.db.models.deletion + + +def rename_legacy_editions(apps, schema_editor): + Edition = apps.get_model("catalog", "Edition") + Work = apps.get_model("catalog", "Work") + for edition in Edition.objects.all(): + works = edition.works.all() + if works.exists(): + for work in works: + edition.legacy_works.add(work) + edition.save() + + +def merge_to(self, to_item: "Item | None"): + if to_item is None: + if self.merged_to_item is not None: + self.merged_to_item = None + self.save() + return + if to_item.pk == self.pk: + raise ValueError("cannot merge to self") + if to_item.merged_to_item is not None: + raise ValueError("cannot merge to item which is merged to another item") + if not isinstance(to_item, self.__class__): + raise ValueError("cannot merge to item in a different model") + self.merged_to_item = to_item + self.save() + for res in self.external_resources.all(): + res.item = to_item + res.save() + for edition in self.legacy_editions.all(): + edition.works = to_item + edition.save() + to_item.save() + + +def merge_works(apps, schema_editor): + Edition = apps.get_model("catalog", "Edition") + Work = apps.get_model("catalog", "Work") + for edition in Edition.objects.all(): + works = edition.legacy_works.all() + if works.exists(): + edition.works = works.first() + for work in works[1:]: + merge_to(work, edition.works) + edition.save() + + +def process_pending_trigger_events(apps, schema_editor): + # Ensure all pending trigger events are processed + schema_editor.connection.cursor().execute("SET CONSTRAINTS ALL IMMEDIATE") + + +class Migration(migrations.Migration): + dependencies = [ + ("catalog", "0013_delete_series_alter_externalresource_id_type_and_more"), + ] + + operations = [ + migrations.AddField( + model_name="work", + name="legacy_editions", + field=models.ManyToManyField( + related_name="legacy_works", to="catalog.edition" + ), + ), + migrations.RunPython(rename_legacy_editions), + migrations.RemoveField( + model_name="work", + name="editions", + ), + migrations.AddField( + model_name="edition", + name="works", + field=models.ForeignKey( + null=True, + 
on_delete=django.db.models.deletion.SET_NULL, + related_name="editions", + to="catalog.work", + ), + ), + migrations.RunPython(process_pending_trigger_events), + migrations.RunPython(merge_works), + migrations.RemoveField( + model_name="work", + name="legacy_editions", + ), + ] diff --git a/catalog/models.py b/catalog/models.py index d482d05a8..4cf984c43 100644 --- a/catalog/models.py +++ b/catalog/models.py @@ -62,6 +62,7 @@ def init_catalog_search_models(): # skip indexing if the item type should never show up in search Indexer.update_model_indexable(Edition) # Indexer.update_model_indexable(Work) + Indexer.update_model_indexable(Series) Indexer.update_model_indexable(Movie) Indexer.update_model_indexable(TVShow) Indexer.update_model_indexable(TVSeason) diff --git a/catalog/search/models.py b/catalog/search/models.py index 433754b7f..82fdba940 100644 --- a/catalog/search/models.py +++ b/catalog/search/models.py @@ -126,8 +126,8 @@ def query_index(keywords, categories=None, tag=None, page=1, prepare_external=Tr if hasattr(i, "isbn") else ([i.imdb_code] if hasattr(i, "imdb_code") else []) ) - if hasattr(i, "works"): - my_key += [i[0] for i in i.works.all().values_list("id")] + if hasattr(i, "works") and i.class_name == "edition": + my_key += [i.works.id] if len(my_key): sl = len(keys) + len(my_key) keys.update(my_key) @@ -208,5 +208,5 @@ def _fetch_task(url, is_refetch, user): if e.response_type != RESPONSE_CENSORSHIP: logger.error(f"fetch {url} error", extra={"exception": e}) except Exception as e: - logger.error(f"parse {url} error", extra={"exception": e}) + logger.error(f"parse {url} error {e}", extra={"exception": e}) return item_url diff --git a/catalog/sites/bangumi.py b/catalog/sites/bangumi.py index 556a19462..fa9227bb9 100644 --- a/catalog/sites/bangumi.py +++ b/catalog/sites/bangumi.py @@ -62,8 +62,8 @@ def get_category( required_resources.append( { "model": "Series", - "id_type": IdType.Bangumi, - "id_value": s["id"], + "id_type": IdType.Bangumi, + "id_value": s["id"], "url": cls.id_to_url(s["id"]), } ) @@ -71,8 +71,8 @@ def get_category( related_resources.append( { "model": "Edition", - "id_type": IdType.Bangumi, - "id_value": s["id"], + "id_type": IdType.Bangumi, + "id_value": s["id"], "url": cls.id_to_url(s["id"]), } ) diff --git a/catalog/sites/goodreads.py b/catalog/sites/goodreads.py index 122d91cdf..16bbf1d0a 100644 --- a/catalog/sites/goodreads.py +++ b/catalog/sites/goodreads.py @@ -9,7 +9,7 @@ from catalog.book.utils import binding_to_format, detect_isbn_asin from catalog.common import * -from catalog.models import Edition, ExternalSearchResultItem, Work,Series +from catalog.models import Edition, ExternalSearchResultItem, Work, Series from common.models import detect_language from journal.models.renderers import html_to_text @@ -115,14 +115,16 @@ def scrape(self, response=None): "title": w["details"]["originalTitle"], "url": w["editions"]["webUrl"], } - ]+[ - {"model":"Series","id_type":IdType.Goodreads_Series, - "id_value":Goodreads_Series.url_to_id(s["webUrl"]), - "title":s["title"], - "url":s["webUrl"], - } - for s in o["Series"] - ] + ] + [ + { + "model": "Series", + "id_type": IdType.Goodreads_Series, + "id_value": Goodreads_Series.url_to_id(s["webUrl"]), + "title": s["title"], + "url": s["webUrl"], + } + for s in o["Series"] + ] pd = ResourceContent(metadata=data) pd.lookup_ids[IdType.ISBN] = ids.get(IdType.ISBN) pd.lookup_ids[IdType.ASIN] = ids.get(IdType.ASIN) @@ -236,6 +238,7 @@ def scrape(self, response=None): ) return pd + @SiteManager.register class 
Goodreads_Series(AbstractSite): SITE_NAME = SiteName.Goodreads @@ -254,25 +257,33 @@ def scrape(self, response=None): if not title: raise ParseError(self, "title") try: - description =self.query_str(content, '//*[@class="u-paddingBottomSmall"]/text()') + description = self.query_str( + content, '//*[@class="u-paddingBottomSmall"]/text()' + ) except IndexError: description = None logger.debug(f"Title: {description}") related_editions = [] for work in self.query_list(content, '//*[@class="listWithDividers__item"]'): - t = self.query_str(work, './/div/div/div[2]/a/span/text()') - url = self.query_str(work, './/div/div/div[1]/div/a/@href') - related_editions.append({ + t = self.query_str(work, ".//div/div/div[2]/a/span/text()") + url = self.query_str(work, ".//div/div/div[1]/div/a/@href") + related_editions.append( + { "model": "Edition", "id_type": IdType.Goodreads, - "id_value":Goodreads.url_to_id(url), + "id_value": Goodreads.url_to_id(url), "title": t, "url": f"https://www.goodreads.com{url}", - }) + } + ) pd = ResourceContent( metadata={ "localized_title": [{"lang": detect_language(title), "text": title}], - "localized_description": [{"lang": detect_language(description), "text": description}] if description else [], + "localized_description": [ + {"lang": detect_language(description), "text": description} + ] + if description + else [], "related_resources": related_editions, } ) diff --git a/catalog/templates/_item_card_metadata_series.html b/catalog/templates/_item_card_metadata_series.html new file mode 100644 index 000000000..1b3d28418 --- /dev/null +++ b/catalog/templates/_item_card_metadata_series.html @@ -0,0 +1,25 @@ +{% extends "_item_card_metadata_base.html" %} +{% load humanize %} +{% load i18n %} +{% block brief %} +
+ {% if item.rating %}
+ {{ item.rating | floatformat:1 }} ({{ item.rating_count }} {% trans "ratings" %})
+ {% endif %}
+ {% include '_people.html' with people=item.author role='author' max=2 %}
+ {% include '_people.html' with people=item.translator role='translator' max=2 %}
+ {% if item.pub_house %}{{ item.pub_house }}{% endif %}
+ {% if item.pub_year %}
+
+ {{ item.pub_year }}
+ {% if item.pub_month %}- {{ item.pub_month }}{% endif %}
+
+ {% endif %}
+ {% include '_people.html' with people=item.additional_title role='other title' max=2 %}
+
+{% endblock brief %} +{% block full %} +
+ {% if not hide_brief %}{{ item.display_description | linebreaksbr }}{% endif %} +
+
+{% endblock full %}
diff --git a/catalog/templates/_sidebar_edit.html b/catalog/templates/_sidebar_edit.html
index 6dc452c05..44ac02019 100644
--- a/catalog/templates/_sidebar_edit.html
+++ b/catalog/templates/_sidebar_edit.html
@@ -217,11 +217,9 @@
{% trans "Edit Options" %}
{% trans 'Work' %}
{% trans "This edition belongs to the following work" %}
- {% for i in item.works.all %}
-
  • - {{ i.display_title }} -
  • - {% endfor %} +
  • + {{ item.works.display_title }} +
  • From b91df20e300b25ce0c0d5bcf413c00f29fbd85ac Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Wed, 29 Jan 2025 11:52:23 +0000 Subject: [PATCH 3/7] no series --- catalog/book/models.py | 165 +++++------------- catalog/common/models.py | 1 - ...alter_externalresource_id_type_and_more.py | 148 ---------------- ...works_and_more.py => 0013_migrate_work.py} | 14 +- catalog/models.py | 1 - catalog/search/models.py | 4 +- catalog/sites/bangumi.py | 34 ++-- catalog/sites/goodreads.py | 62 +------ .../templates/_item_card_metadata_series.html | 25 --- catalog/templates/_sidebar_edit.html | 2 +- catalog/templates/series.html | 27 --- 11 files changed, 58 insertions(+), 425 deletions(-) delete mode 100644 catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py rename catalog/migrations/{0014_remove_work_editions_edition_works_and_more.py => 0013_migrate_work.py} (85%) delete mode 100644 catalog/templates/_item_card_metadata_series.html delete mode 100644 catalog/templates/series.html diff --git a/catalog/book/models.py b/catalog/book/models.py index 17f815543..8f8e86231 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -109,8 +109,10 @@ class EditionSchema(EditionInSchema, BaseSchema): class Edition(Item): - works = models.ForeignKey( - "Work", null=True, on_delete=models.SET_NULL, related_name="editions" + if TYPE_CHECKING: + works: "models.ManyToManyField[Work, Edition]" + related_work = models.ForeignKey( + "Work", null=True, on_delete=models.SET_NULL, related_name="related_editions" ) class BookFormat(models.TextChoices): @@ -266,21 +268,21 @@ def merge_to(self, to_item: "Edition | None"): # type: ignore[reportIncompatibl if to_item: if self.merge_title(): self.save() - if not self.works: + if not self.related_work: return - if to_item.works: - for edition in self.works.editions.exclude(pk=self.pk).all(): - edition.works = to_item.works + if to_item.related_work: + for edition in self.related_work.related_editions.exclude(pk=self.pk).all(): + edition.related_work = to_item.related_work edition.save() else: - to_item.works = self.works + to_item.related_work = self.related_work to_item.save() - self.works = None + self.related_work = None self.save() def delete(self, using=None, keep_parents=False, soft=True, *args, **kwargs): if soft: - self.works = None + self.related_work = None return super().delete(using, soft, keep_parents, *args, **kwargs) def update_linked_items_from_external_resource(self, resource): @@ -305,41 +307,11 @@ def update_linked_items_from_external_resource(self, resource): ).first() if not work: return - if not self.works: - self.works = work + if not self.related_work: + self.related_work = work self.save() - elif work.pk != self.works.pk: - work.merge_to(self.works) - if w.get("model") == "Series": - series_res = ExternalResource.objects.filter( - id_type=w["id_type"], id_value=w["id_value"] - ).first() - if series_res: - series = series_res.item - if not series: - logger.warning(f"Unable to find series for {series_res}") - else: - logger.warning( - f"Unable to find resource for {w['id_type']}:{w['id_value']}" - ) - series = Series.objects.filter( - primary_lookup_id_type=w["id_type"], - primary_lookup_id_value=w["id_value"], - ).first() - if series: - if self.works: - logger.debug("WORK IS EXIST") - if series in self.works.series.all(): - continue - self.works.series.add(series) - self.works.save() - else: - logger.debug("WORK NOT EXIST") - work = Work.objects.create(localized_title=self.localized_title) - self.works = work - 
self.save() - work.series.add(series) - work.save() + elif work.pk != self.related_work.pk: + work.merge_to(self.related_work) def merge_data_from_external_resource( self, p: "ExternalResource", ignore_existing_content: bool = False @@ -364,7 +336,7 @@ def merge_title(self) -> bool: @property def sibling_items(self): return ( - Edition.objects.filter(works__in=[self.works]) + Edition.objects.filter(works__in=[self.related_work]) .exclude(pk=self.pk) .exclude(is_deleted=True) .exclude(merged_to_item__isnull=False) @@ -384,48 +356,47 @@ def title_deco(self): return f"({' '.join(a)})" if a else "" def has_related_books(self): - if not self.works: + if not self.related_work: return False return ( - Edition.objects.filter(works__in=[self.works]).exclude(pk=self.pk).exists() + Edition.objects.filter(works__in=[self.related_work]).exclude(pk=self.pk).exists() ) def link_to_related_book(self, target: "Edition") -> bool: if target == self or target.is_deleted or target.merged_to_item: return False - if target.works: - self.works = target.works - target.works.localized_title = uniq( - target.works.localized_title + self.localized_title + if target.related_work: + self.related_work = target.related_work + self.save() + target.related_work.localized_title = uniq( + target.related_work.localized_title + self.localized_title ) - target.works.save() + target.related_work.save() else: work = Work.objects.create(localized_title=self.localized_title) - self.works = work - target.works = work + self.related_work = work + self.save() + target.related_work = work + target.save() # work.localized_title = self.localized_title # work.save() return True - @property - def parent_item(self): # type:ignore - return self.works.series.first() if self.works else None - def unlink_from_all_works(self): - self.works = None + self.related_work = None def has_works(self): - return self.works is not None + return self.related_work is not None class Work(Item): if TYPE_CHECKING: - series: models.ManyToManyField["Series", "Work"] - editions: QuerySet[Edition] + related_editions: QuerySet[Edition] category = ItemCategory.Book url_path = "book/work" douban_work = PrimaryLookupIdDescriptor(IdType.DoubanBook_Work) goodreads_work = PrimaryLookupIdDescriptor(IdType.Goodreads_Work) + editions = models.ManyToManyField(Edition, related_name="works") language = LanguageListField() author = jsondata.ArrayField( verbose_name=_("author"), @@ -464,8 +435,8 @@ def merge_to(self, to_item: "Work | None"): # type: ignore[reportIncompatibleMe super().merge_to(to_item) if not to_item: return - for edition in self.editions.all(): - edition.works = to_item + for edition in self.related_editions.all(): + edition.related_work = to_item edition.save() to_item.language = uniq(to_item.language + self.language) # type: ignore to_item.localized_title = uniq(to_item.localized_title + self.localized_title) @@ -473,8 +444,8 @@ def merge_to(self, to_item: "Work | None"): # type: ignore[reportIncompatibleMe def delete(self, using=None, keep_parents=False, soft=True, *args, **kwargs): if soft: - for edition in self.editions.all(): - edition.works = None + for edition in self.related_editions.all(): + edition.related_work = None return super().delete(using, keep_parents, soft, *args, **kwargs) @property @@ -505,66 +476,16 @@ def update_linked_items_from_external_resource(self, resource): primary_lookup_id_type=e["id_type"], primary_lookup_id_value=e["id_value"], ).first() - if edition and edition not in self.editions.all(): - edition.works = self + if edition 
and edition not in self.related_editions.all(): + edition.related_work = self + edition.save() class Series(Item): - works = models.ManyToManyField(Work, related_name="series") category = ItemCategory.Book url_path = "book/series" - METADATA_COPY_LIST = [ - "localized_title", - "localized_description", - ] - goodreads_serie = PrimaryLookupIdDescriptor(IdType.Goodreads_Series) + # douban_serie = LookupIdDescriptor(IdType.DoubanBook_Serie) + # goodreads_serie = LookupIdDescriptor(IdType.Goodreads_Serie) - @classmethod - def lookup_id_type_choices(cls): - id_types = [ - IdType.Goodreads_Series, - ] - return [(i.value, i.label) for i in id_types] - - @cached_property - def all_works(self): - return [self.works] - - @property - def cover_image_url(self): - url = super().cover_image_url - if url: - return url - e = next(filter(lambda e: e.cover_image_url, self.works.all()), None) - return e.cover_image_url if e else None - - def update_linked_items_from_external_resource(self, resource): - """add Work from resource.metadata['required_resources'] if not yet""" - links = resource.required_resources + resource.related_resources - for e in links: - if e.get("model") == "Edition": - edition_res = ExternalResource.objects.filter( - id_type=e["id_type"], id_value=e["id_value"] - ).first() - if edition_res: - edition = edition_res.item - if not edition: - logger.warning(f"Unable to find edition for {edition_res}") - else: - logger.warning( - f"Unable to find resource for {e['id_type']}:{e['id_value']}" - ) - edition = Edition.objects.filter( - primary_lookup_id_type=e["id_type"], - primary_lookup_id_value=e["id_value"], - ).first() - if not edition: - return - if edition.works: - if edition.works not in self.works.all(): - self.works.add(edition.works) - else: - work = Work.objects.create(localized_title=edition.localized_title) - edition.works = work - work.save() - self.works.add(work) + class Meta: + proxy = True diff --git a/catalog/common/models.py b/catalog/common/models.py index 67348d738..981800341 100644 --- a/catalog/common/models.py +++ b/catalog/common/models.py @@ -74,7 +74,6 @@ class IdType(models.TextChoices): TMDB_Movie = "tmdb_movie", _("TMDB Movie") # type:ignore[reportCallIssue] Goodreads = "goodreads", _("Goodreads") # type:ignore[reportCallIssue] Goodreads_Work = "goodreads_work", _("Goodreads Work") # type:ignore[reportCallIssue] - Goodreads_Series = "goodreads_series", _("Goodreads Series") # type:ignore[reportCallIssue] GoogleBooks = "googlebooks", _("Google Books") # type:ignore[reportCallIssue] DoubanBook = "doubanbook", _("Douban Book") # type:ignore[reportCallIssue] DoubanBook_Work = "doubanbook_work", _("Douban Book Work") # type:ignore[reportCallIssue] diff --git a/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py b/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py deleted file mode 100644 index 80fd254dd..000000000 --- a/catalog/migrations/0013_delete_series_alter_externalresource_id_type_and_more.py +++ /dev/null @@ -1,148 +0,0 @@ -# Generated by Django 4.2.18 on 2025-01-27 02:19 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - dependencies = [ - ("catalog", "0012_alter_model_i18n"), - ] - - operations = [ - migrations.DeleteModel( - name="Series", - ), - migrations.AlterField( - model_name="externalresource", - name="id_type", - field=models.CharField( - choices=[ - ("wikidata", "WikiData"), - ("isbn10", "ISBN10"), - ("isbn", "ISBN"), - 
("asin", "ASIN"), - ("issn", "ISSN"), - ("cubn", "CUBN"), - ("isrc", "ISRC"), - ("gtin", "GTIN UPC EAN"), - ("rss", "RSS Feed URL"), - ("imdb", "IMDb"), - ("tmdb_tv", "TMDB TV Serie"), - ("tmdb_tvseason", "TMDB TV Season"), - ("tmdb_tvepisode", "TMDB TV Episode"), - ("tmdb_movie", "TMDB Movie"), - ("goodreads", "Goodreads"), - ("goodreads_work", "Goodreads Work"), - ("goodreads_series", "Goodreads Series"), - ("googlebooks", "Google Books"), - ("doubanbook", "Douban Book"), - ("doubanbook_work", "Douban Book Work"), - ("doubanmovie", "Douban Movie"), - ("doubanmusic", "Douban Music"), - ("doubangame", "Douban Game"), - ("doubandrama", "Douban Drama"), - ("doubandrama_version", "Douban Drama Version"), - ("bookstw", "BooksTW Book"), - ("bandcamp", "Bandcamp"), - ("spotify_album", "Spotify Album"), - ("spotify_show", "Spotify Podcast"), - ("discogs_release", "Discogs Release"), - ("discogs_master", "Discogs Master"), - ("musicbrainz", "MusicBrainz ID"), - ("igdb", "IGDB Game"), - ("bgg", "BGG Boardgame"), - ("steam", "Steam Game"), - ("bangumi", "Bangumi"), - ("apple_podcast", "Apple Podcast"), - ("apple_music", "Apple Music"), - ("fedi", "Fediverse"), - ("qidian", "Qidian"), - ("ypshuo", "Ypshuo"), - ("ao3", "Archive of Our Own"), - ("jjwxc", "JinJiang"), - ], - max_length=50, - verbose_name="IdType of the source site", - ), - ), - migrations.AlterField( - model_name="itemlookupid", - name="id_type", - field=models.CharField( - blank=True, - choices=[ - ("wikidata", "WikiData"), - ("isbn10", "ISBN10"), - ("isbn", "ISBN"), - ("asin", "ASIN"), - ("issn", "ISSN"), - ("cubn", "CUBN"), - ("isrc", "ISRC"), - ("gtin", "GTIN UPC EAN"), - ("rss", "RSS Feed URL"), - ("imdb", "IMDb"), - ("tmdb_tv", "TMDB TV Serie"), - ("tmdb_tvseason", "TMDB TV Season"), - ("tmdb_tvepisode", "TMDB TV Episode"), - ("tmdb_movie", "TMDB Movie"), - ("goodreads", "Goodreads"), - ("goodreads_work", "Goodreads Work"), - ("goodreads_series", "Goodreads Series"), - ("googlebooks", "Google Books"), - ("doubanbook", "Douban Book"), - ("doubanbook_work", "Douban Book Work"), - ("doubanmovie", "Douban Movie"), - ("doubanmusic", "Douban Music"), - ("doubangame", "Douban Game"), - ("doubandrama", "Douban Drama"), - ("doubandrama_version", "Douban Drama Version"), - ("bookstw", "BooksTW Book"), - ("bandcamp", "Bandcamp"), - ("spotify_album", "Spotify Album"), - ("spotify_show", "Spotify Podcast"), - ("discogs_release", "Discogs Release"), - ("discogs_master", "Discogs Master"), - ("musicbrainz", "MusicBrainz ID"), - ("igdb", "IGDB Game"), - ("bgg", "BGG Boardgame"), - ("steam", "Steam Game"), - ("bangumi", "Bangumi"), - ("apple_podcast", "Apple Podcast"), - ("apple_music", "Apple Music"), - ("fedi", "Fediverse"), - ("qidian", "Qidian"), - ("ypshuo", "Ypshuo"), - ("ao3", "Archive of Our Own"), - ("jjwxc", "JinJiang"), - ], - max_length=50, - verbose_name="source site", - ), - ), - migrations.CreateModel( - name="Series", - fields=[ - ( - "item_ptr", - models.OneToOneField( - auto_created=True, - on_delete=django.db.models.deletion.CASCADE, - parent_link=True, - primary_key=True, - serialize=False, - to="catalog.item", - ), - ), - ( - "works", - models.ManyToManyField(related_name="series", to="catalog.work"), - ), - ], - options={ - "abstract": False, - "base_manager_name": "objects", - }, - bases=("catalog.item",), - ), - ] diff --git a/catalog/migrations/0014_remove_work_editions_edition_works_and_more.py b/catalog/migrations/0013_migrate_work.py similarity index 85% rename from 
catalog/migrations/0014_remove_work_editions_edition_works_and_more.py rename to catalog/migrations/0013_migrate_work.py index c8a7201a3..a2c477473 100644 --- a/catalog/migrations/0014_remove_work_editions_edition_works_and_more.py +++ b/catalog/migrations/0013_migrate_work.py @@ -3,18 +3,6 @@ from django.db import migrations, models import django.db.models.deletion - -def rename_legacy_editions(apps, schema_editor): - Edition = apps.get_model("catalog", "Edition") - Work = apps.get_model("catalog", "Work") - for edition in Edition.objects.all(): - works = edition.works.all() - if works.exists(): - for work in works: - edition.legacy_works.add(work) - edition.save() - - def merge_to(self, to_item: "Item | None"): if to_item is None: if self.merged_to_item is not None: @@ -57,7 +45,7 @@ def process_pending_trigger_events(apps, schema_editor): class Migration(migrations.Migration): dependencies = [ - ("catalog", "0013_delete_series_alter_externalresource_id_type_and_more"), + ("catalog", "0012_alter_model_i18n"), ] operations = [ diff --git a/catalog/models.py b/catalog/models.py index 4cf984c43..d482d05a8 100644 --- a/catalog/models.py +++ b/catalog/models.py @@ -62,7 +62,6 @@ def init_catalog_search_models(): # skip indexing if the item type should never show up in search Indexer.update_model_indexable(Edition) # Indexer.update_model_indexable(Work) - Indexer.update_model_indexable(Series) Indexer.update_model_indexable(Movie) Indexer.update_model_indexable(TVShow) Indexer.update_model_indexable(TVSeason) diff --git a/catalog/search/models.py b/catalog/search/models.py index 82fdba940..1bf6ae2c7 100644 --- a/catalog/search/models.py +++ b/catalog/search/models.py @@ -126,8 +126,8 @@ def query_index(keywords, categories=None, tag=None, page=1, prepare_external=Tr if hasattr(i, "isbn") else ([i.imdb_code] if hasattr(i, "imdb_code") else []) ) - if hasattr(i, "works") and i.class_name == "edition": - my_key += [i.works.id] + if hasattr(i, "related_works"): + my_key += [i.related_work.id] if len(my_key): sl = len(keys) + len(my_key) keys.update(my_key) diff --git a/catalog/sites/bangumi.py b/catalog/sites/bangumi.py index fa9227bb9..648bdbbe6 100644 --- a/catalog/sites/bangumi.py +++ b/catalog/sites/bangumi.py @@ -38,13 +38,13 @@ def get_category( release_type = None showtime = None year = None - required_resources = [] related_resources = [] match o["type"]: case 1: + model = "Edition" category = ItemCategory.Book - model = "Series" if o["series"] else "Edition" - if fetch_resources: + if o["series"] and fetch_resources: + # model = "Series" TODO res = ( BasicDownloader( f"https://api.bgm.tv/v0/subjects/{o['id']}/subjects", @@ -57,26 +57,13 @@ def get_category( ) for s in res: - match s["relation"]: - case "系列": - required_resources.append( - { - "model": "Series", - "id_type": IdType.Bangumi, - "id_value": s["id"], - "url": cls.id_to_url(s["id"]), - } - ) - case "单行本": - related_resources.append( - { - "model": "Edition", - "id_type": IdType.Bangumi, - "id_value": s["id"], - "url": cls.id_to_url(s["id"]), - } - ) - + if s["relation"] != "单行本": + continue + related_resources.append( + { + "url": cls.id_to_url(s["id"]), + } + ) if dt: d = dt.split("-") pub_year = d[0] @@ -123,7 +110,6 @@ def get_category( ) return category, { "preferred_model": model, - "required_resources": required_resources, "related_resources": related_resources, "pub_year": pub_year, "pub_month": pub_month, diff --git a/catalog/sites/goodreads.py b/catalog/sites/goodreads.py index 16bbf1d0a..4efbac272 100644 --- 
a/catalog/sites/goodreads.py +++ b/catalog/sites/goodreads.py @@ -9,7 +9,7 @@ from catalog.book.utils import binding_to_format, detect_isbn_asin from catalog.common import * -from catalog.models import Edition, ExternalSearchResultItem, Work, Series +from catalog.models import Edition, ExternalSearchResultItem, Work from common.models import detect_language from journal.models.renderers import html_to_text @@ -115,15 +115,6 @@ def scrape(self, response=None): "title": w["details"]["originalTitle"], "url": w["editions"]["webUrl"], } - ] + [ - { - "model": "Series", - "id_type": IdType.Goodreads_Series, - "id_value": Goodreads_Series.url_to_id(s["webUrl"]), - "title": s["title"], - "url": s["webUrl"], - } - for s in o["Series"] ] pd = ResourceContent(metadata=data) pd.lookup_ids[IdType.ISBN] = ids.get(IdType.ISBN) @@ -237,54 +228,3 @@ def scrape(self, response=None): } ) return pd - - -@SiteManager.register -class Goodreads_Series(AbstractSite): - SITE_NAME = SiteName.Goodreads - ID_TYPE = IdType.Goodreads_Series - WIKI_PROPERTY_ID = "" - DEFAULT_MODEL = Series - URL_PATTERNS = [r".+goodreads\.com/series/(\d+)"] - - @classmethod - def id_to_url(cls, id_value): - return "https://www.goodreads.com/series/" + id_value - - def scrape(self, response=None): - content = BasicDownloader(self.url).download().html() - title = self.query_str(content, "//h1/text()") - if not title: - raise ParseError(self, "title") - try: - description = self.query_str( - content, '//*[@class="u-paddingBottomSmall"]/text()' - ) - except IndexError: - description = None - logger.debug(f"Title: {description}") - related_editions = [] - for work in self.query_list(content, '//*[@class="listWithDividers__item"]'): - t = self.query_str(work, ".//div/div/div[2]/a/span/text()") - url = self.query_str(work, ".//div/div/div[1]/div/a/@href") - related_editions.append( - { - "model": "Edition", - "id_type": IdType.Goodreads, - "id_value": Goodreads.url_to_id(url), - "title": t, - "url": f"https://www.goodreads.com{url}", - } - ) - pd = ResourceContent( - metadata={ - "localized_title": [{"lang": detect_language(title), "text": title}], - "localized_description": [ - {"lang": detect_language(description), "text": description} - ] - if description - else [], - "related_resources": related_editions, - } - ) - return pd diff --git a/catalog/templates/_item_card_metadata_series.html b/catalog/templates/_item_card_metadata_series.html deleted file mode 100644 index 1b3d28418..000000000 --- a/catalog/templates/_item_card_metadata_series.html +++ /dev/null @@ -1,25 +0,0 @@ -{% extends "_item_card_metadata_base.html" %} -{% load humanize %} -{% load i18n %} -{% block brief %} -
- {% if item.rating %}
- {{ item.rating | floatformat:1 }} ({{ item.rating_count }} {% trans "ratings" %})
- {% endif %}
- {% include '_people.html' with people=item.author role='author' max=2 %}
- {% include '_people.html' with people=item.translator role='translator' max=2 %}
- {% if item.pub_house %}{{ item.pub_house }}{% endif %}
- {% if item.pub_year %}
-
- {{ item.pub_year }}
- {% if item.pub_month %}- {{ item.pub_month }}{% endif %}
-
- {% endif %}
- {% include '_people.html' with people=item.additional_title role='other title' max=2 %}
-
    -{% endblock brief %} -{% block full %} -
    - {% if not hide_brief %}{{ item.display_description | linebreaksbr }}{% endif %} -
-
-{% endblock full %}
diff --git a/catalog/templates/_sidebar_edit.html b/catalog/templates/_sidebar_edit.html
index 44ac02019..245c46b47 100644
--- a/catalog/templates/_sidebar_edit.html
+++ b/catalog/templates/_sidebar_edit.html
@@ -218,7 +218,7 @@
    {% trans "Edit Options" %}
    {% trans 'Work' %} {% trans "This edition belongs to the following work" %}
  • - {{ item.works.display_title }} + {{ item.related_work.display_title }}
  • -{% block details %} -
    {% include '_people.html' with people=item.additional_title role='' max=99 %}
    -
    {% include '_people.html' with people=item.author role='author' max=5 %}
-{% endblock %}
-{% block left_sidebar %}
-
    -
- {% trans 'Series' %}
- {% for w in item.works.all %}
-
- {{ w.display_title }}
-
- {% endfor %}
-
    -
    -{% endblock %} - -{% block sidebar %}{% endblock %} From 1aef7d1a414ae60c13c59e437cd6bbb0742e5ba9 Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Wed, 29 Jan 2025 12:14:39 +0000 Subject: [PATCH 4/7] make migration configable --- boofilsic/settings.py | 3 ++ catalog/migrations/0013_migrate_work.py | 42 +++++++------------------ 2 files changed, 15 insertions(+), 30 deletions(-) diff --git a/boofilsic/settings.py b/boofilsic/settings.py index 5a1948a04..464b60390 100644 --- a/boofilsic/settings.py +++ b/boofilsic/settings.py @@ -121,6 +121,7 @@ NEODB_SENTRY_SAMPLE_RATE=(float, 0), NEODB_FANOUT_LIMIT_DAYS=(int, 9), INDEX_ALIASES=(dict, {}), + SKIP_WORK_MIGRATION=(bool, False), ) # ====== End of user configuration variables ====== @@ -629,3 +630,5 @@ def _init_language_settings(preferred_lanugages_env): send_default_pii=True, traces_sample_rate=env("NEODB_SENTRY_SAMPLE_RATE"), ) + +SKIP_WORK_MIGRATION = env("SKIP_WORK_MIGRATION") diff --git a/catalog/migrations/0013_migrate_work.py b/catalog/migrations/0013_migrate_work.py index a2c477473..b90f7912f 100644 --- a/catalog/migrations/0013_migrate_work.py +++ b/catalog/migrations/0013_migrate_work.py @@ -1,5 +1,6 @@ # Generated by Django 4.2.18 on 2025-01-27 05:56 +from django.conf import settings from django.db import migrations, models import django.db.models.deletion @@ -10,8 +11,10 @@ def merge_to(self, to_item: "Item | None"): self.save() return if to_item.pk == self.pk: + return raise ValueError("cannot merge to self") if to_item.merged_to_item is not None: + return raise ValueError("cannot merge to item which is merged to another item") if not isinstance(to_item, self.__class__): raise ValueError("cannot merge to item in a different model") @@ -20,61 +23,40 @@ def merge_to(self, to_item: "Item | None"): for res in self.external_resources.all(): res.item = to_item res.save() - for edition in self.legacy_editions.all(): - edition.works = to_item + for edition in self.editions.all(): + edition.related_work = to_item edition.save() to_item.save() def merge_works(apps, schema_editor): + if getattr(settings, 'SKIP_WORK_MIGRATION', False): + return Edition = apps.get_model("catalog", "Edition") Work = apps.get_model("catalog", "Work") for edition in Edition.objects.all(): - works = edition.legacy_works.all() + works = edition.works.all() if works.exists(): - edition.works = works.first() + edition.related_work = works.first() for work in works[1:]: - merge_to(work, edition.works) + merge_to(work, edition.related_work) edition.save() - -def process_pending_trigger_events(apps, schema_editor): - # Ensure all pending trigger events are processed - schema_editor.connection.cursor().execute("SET CONSTRAINTS ALL IMMEDIATE") - - class Migration(migrations.Migration): dependencies = [ ("catalog", "0012_alter_model_i18n"), ] operations = [ - migrations.AddField( - model_name="work", - name="legacy_editions", - field=models.ManyToManyField( - related_name="legacy_works", to="catalog.edition" - ), - ), - migrations.RunPython(rename_legacy_editions), - migrations.RemoveField( - model_name="work", - name="editions", - ), migrations.AddField( model_name="edition", - name="works", + name="related_work", field=models.ForeignKey( null=True, on_delete=django.db.models.deletion.SET_NULL, - related_name="editions", + related_name="related_editions", to="catalog.work", ), ), - migrations.RunPython(process_pending_trigger_events), migrations.RunPython(merge_works), - migrations.RemoveField( - model_name="work", - name="legacy_editions", - ), ] From 
82f0344149dd73108533b86a971d68fa539fc842 Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Wed, 29 Jan 2025 12:54:22 +0000 Subject: [PATCH 5/7] fix test --- catalog/book/models.py | 5 ++-- catalog/book/tests.py | 63 ++++++++++++++++++++---------------------- 2 files changed, 33 insertions(+), 35 deletions(-) diff --git a/catalog/book/models.py b/catalog/book/models.py index 8f8e86231..5685ee247 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -336,7 +336,7 @@ def merge_title(self) -> bool: @property def sibling_items(self): return ( - Edition.objects.filter(works__in=[self.related_work]) + Edition.objects.filter(related_work__in=[self.related_work]) .exclude(pk=self.pk) .exclude(is_deleted=True) .exclude(merged_to_item__isnull=False) @@ -359,7 +359,7 @@ def has_related_books(self): if not self.related_work: return False return ( - Edition.objects.filter(works__in=[self.related_work]).exclude(pk=self.pk).exists() + Edition.objects.filter(related_work__in=[self.related_work]).exclude(pk=self.pk).exists() ) def link_to_related_book(self, target: "Edition") -> bool: @@ -384,6 +384,7 @@ def link_to_related_book(self, target: "Edition") -> bool: def unlink_from_all_works(self): self.related_work = None + self.save() def has_works(self): return self.related_work is not None diff --git a/catalog/book/tests.py b/catalog/book/tests.py index 3eec12505..fd270dd16 100644 --- a/catalog/book/tests.py +++ b/catalog/book/tests.py @@ -105,7 +105,7 @@ def setUp(self): def test_work(self): self.assertFalse(self.hyperion_print.has_related_books()) - self.hyperion.editions.add(self.hyperion_print) + self.hyperion.related_editions.add(self.hyperion_print) self.assertFalse(self.hyperion_print.has_related_books()) def test_merge(self): @@ -122,7 +122,7 @@ def test_link(self): self.assertTrue(self.hyperion_ebook.has_related_books()) self.assertTrue(self.hyperion_print.has_works()) self.assertEqual( - self.hyperion_print.works.first().display_title, + self.hyperion_print.related_work.display_title, self.hyperion_print.display_title, ) self.hyperion_print.unlink_from_all_works() @@ -140,9 +140,8 @@ def test_link3(self): self.hyperion_ebook.link_to_related_book(self.hyperion_hardcover) self.hyperion_print.link_to_related_book(self.hyperion_hardcover) self.assertTrue(self.hyperion_print.has_works()) - self.assertEqual(self.hyperion_print.works.all().count(), 1) self.assertEqual( - self.hyperion_ebook.works.all().first().editions.all().count(), 3 + self.hyperion_ebook.related_work.related_editions.all().count(), 3 ) @@ -371,12 +370,12 @@ def test_work(self): url2 = "https://book.douban.com/subject/2037260/" p1 = SiteManager.get_site_by_url(url1).get_resource_ready() p2 = SiteManager.get_site_by_url(url2).get_resource_ready() - w1 = p1.item.works.all().first() - w2 = p2.item.works.all().first() + w1 = p1.item.related_work + w2 = p2.item.related_work self.assertEqual(w1.display_title, "黄金时代") self.assertEqual(w2.display_title, "黄金时代") self.assertEqual(w1, w2) - editions = sorted(list(w1.editions.all()), key=lambda e: e.display_title) + editions = sorted(list(w1.related_editions.all()), key=lambda e: e.display_title) self.assertEqual(len(editions), 2) self.assertEqual(editions[0].display_title, "Wang in Love and Bondage") self.assertEqual(editions[1].display_title, "黄金时代") @@ -523,26 +522,24 @@ def test_works(self): p1 = SiteManager.get_site_by_url( url1 ).get_resource_ready() # lxml bug may break this - w1 = p1.item.works.all().first() + w1 = p1.item.related_work p2 = 
SiteManager.get_site_by_url(url2).get_resource_ready() - w2 = p2.item.works.all().first() + w2 = p2.item.related_work self.assertEqual(w1, w2) - self.assertEqual(p1.item.works.all().count(), 1) + self.assertNotEqual(w1, None) p3 = SiteManager.get_site_by_url(url3).get_resource_ready() - w3 = p3.item.works.all().first() + w3 = p3.item.related_work self.assertNotEqual(w3, w2) p4 = SiteManager.get_site_by_url(url4).get_resource_ready() self.assertEqual(p4.item.id, p1.item.id) - self.assertEqual(p4.item.works.all().count(), 2) - self.assertEqual(p1.item.works.all().count(), 2) - w2e = sorted(list(w2.editions.all()), key=lambda e: e.display_title) - self.assertEqual(len(w2e), 2) - self.assertEqual(w2e[0].display_title, "Wang in Love and Bondage") - self.assertEqual(w2e[1].display_title, "黄金时代") - w3e = sorted(list(w3.editions.all()), key=lambda e: e.display_title) - self.assertEqual(len(w3e), 2) - self.assertEqual(w3e[0].display_title, "Golden Age: A Novel") - self.assertEqual(w3e[1].display_title, "黄金时代") + self.assertEqual(p4.item.related_work, p1.item.related_work) + w2e = sorted(list(w2.related_editions.all()), key=lambda e: e.display_title) + self.assertEqual(len(w2e), 3) + self.assertEqual(w2e[0].display_title, "Golden Age: A Novel") + self.assertEqual(w2e[1].display_title, "Wang in Love and Bondage") + self.assertEqual(w2e[2].display_title, "黄金时代") + w3e = sorted(list(w3.related_editions.all()), key=lambda e: e.display_title) + self.assertEqual(len(w3e), 0) # w3 is merged to w2 e = Edition.objects.get(primary_lookup_id_value=9781662601217) self.assertEqual(e.display_title, "Golden Age: A Novel") @@ -556,35 +553,35 @@ def test_works_merge(self): p1 = SiteManager.get_site_by_url( url1 ).get_resource_ready() # lxml bug may break this - w1 = p1.item.works.all().first() + w1 = p1.item.related_work p2 = SiteManager.get_site_by_url(url2).get_resource_ready() - w2 = p2.item.works.all().first() + w2 = p2.item.related_work self.assertEqual(w1, w2) - self.assertEqual(p1.item.works.all().count(), 1) + self.assertNotEqual(w1, None) p3 = SiteManager.get_site_by_url(url3).get_resource_ready() - w3 = p3.item.works.all().first() + w3 = p3.item.related_work self.assertNotEqual(w3, w2) self.assertEqual(w2.external_resources.all().count(), 1) self.assertEqual(w3.external_resources.all().count(), 1) w3.merge_to(w2) self.assertEqual(w2.external_resources.all().count(), 2) self.assertEqual(w3.external_resources.all().count(), 0) - self.assertEqual(w2.editions.all().count(), 3) - self.assertEqual(w3.editions.all().count(), 0) + self.assertEqual(w2.related_editions.all().count(), 3) + self.assertEqual(w3.related_editions.all().count(), 0) p4 = SiteManager.get_site_by_url(url4).get_resource_ready() self.assertEqual(p4.item.id, p1.item.id) - self.assertEqual(p4.item.works.all().count(), 1) - self.assertEqual(p1.item.works.all().count(), 1) - w2e = sorted(list(w2.editions.all()), key=lambda e: e.display_title) + self.assertNotEqual(p1.item.related_work, None) + self.assertNotEqual(p4.item.related_work, None) + w2e = sorted(list(w2.related_editions.all()), key=lambda e: e.display_title) self.assertEqual(len(w2e), 3) self.assertEqual(w2e[0].display_title, "Golden Age: A Novel") self.assertEqual(w2e[1].display_title, "Wang in Love and Bondage") self.assertEqual(w2e[2].display_title, "黄金时代") - w3e = w3.editions.all().order_by("title") + w3e = w3.related_editions.all().order_by("title") self.assertEqual(w3e.count(), 0) e = Edition.objects.get(primary_lookup_id_value=9781662601217) self.assertEqual(e.display_title, 
"Golden Age: A Novel") w2e[1].delete() - self.assertEqual(w2.editions.all().count(), 2) - w2.editions.all().delete() + self.assertEqual(w2.related_editions.all().count(), 2) + w2.related_editions.all().delete() self.assertEqual(p1.item.works.all().count(), 0) From 568e695e260bb6265ae8161433058a7d24cd5793 Mon Sep 17 00:00:00 2001 From: Jigsaw Date: Wed, 29 Jan 2025 13:23:39 +0000 Subject: [PATCH 6/7] fix --- catalog/book/models.py | 2 +- catalog/migrations/0013_migrate_work.py | 17 ++++++++++++++--- catalog/templates/work.html | 2 +- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/catalog/book/models.py b/catalog/book/models.py index 5685ee247..2cb5ef858 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -454,7 +454,7 @@ def cover_image_url(self): url = super().cover_image_url if url: return url - e = next(filter(lambda e: e.cover_image_url, self.editions.all()), None) + e = next(filter(lambda e: e.cover_image_url, self.related_editions.all()), None) return e.cover_image_url if e else None def update_linked_items_from_external_resource(self, resource): diff --git a/catalog/migrations/0013_migrate_work.py b/catalog/migrations/0013_migrate_work.py index b90f7912f..d6376d3ea 100644 --- a/catalog/migrations/0013_migrate_work.py +++ b/catalog/migrations/0013_migrate_work.py @@ -37,10 +37,20 @@ def merge_works(apps, schema_editor): for edition in Edition.objects.all(): works = edition.works.all() if works.exists(): - edition.related_work = works.first() - for work in works[1:]: + if edition.related_work is None: + related_work = works.first() + if related_work.merged_to_item is not None: + related_work = related_work.merged_to_item + edition.related_work = related_work + edition.save() + for work in works: + if work.pk == edition.related_work.pk: + continue merge_to(work, edition.related_work) - edition.save() + +def process_pending_trigger_events(apps, schema_editor): + # Ensure all pending trigger events are processed + schema_editor.connection.cursor().execute("SET CONSTRAINTS ALL IMMEDIATE") class Migration(migrations.Migration): dependencies = [ @@ -58,5 +68,6 @@ class Migration(migrations.Migration): to="catalog.work", ), ), + migrations.RunPython(process_pending_trigger_events), migrations.RunPython(merge_works), ] diff --git a/catalog/templates/work.html b/catalog/templates/work.html index a4cd49ba3..5decfc4e3 100644 --- a/catalog/templates/work.html +++ b/catalog/templates/work.html @@ -15,7 +15,7 @@
    {% trans 'Editions' %} - {% for b in item.editions.all %} + {% for b in item.related_editions.all %}
    {{ b.display_title }} ({{ b.pub_house | default:'' }} {{ b.pub_year | default:'' }}) From 49caa14334fde009f0ba922e87fb0b58311c01a9 Mon Sep 17 00:00:00 2001 From: jigsaw Date: Tue, 4 Feb 2025 14:48:26 +0800 Subject: [PATCH 7/7] ignore error when migrate --- catalog/book/models.py | 10 +++++--- catalog/book/tests.py | 6 +++-- catalog/migrations/0013_migrate_work.py | 34 +++++++++++++++++++------ 3 files changed, 37 insertions(+), 13 deletions(-) diff --git a/catalog/book/models.py b/catalog/book/models.py index 2cb5ef858..5dfca857c 100644 --- a/catalog/book/models.py +++ b/catalog/book/models.py @@ -271,7 +271,9 @@ def merge_to(self, to_item: "Edition | None"): # type: ignore[reportIncompatibl if not self.related_work: return if to_item.related_work: - for edition in self.related_work.related_editions.exclude(pk=self.pk).all(): + for edition in self.related_work.related_editions.exclude( + pk=self.pk + ).all(): edition.related_work = to_item.related_work edition.save() else: @@ -359,7 +361,9 @@ def has_related_books(self): if not self.related_work: return False return ( - Edition.objects.filter(related_work__in=[self.related_work]).exclude(pk=self.pk).exists() + Edition.objects.filter(related_work__in=[self.related_work]) + .exclude(pk=self.pk) + .exists() ) def link_to_related_book(self, target: "Edition") -> bool: @@ -489,4 +493,4 @@ class Series(Item): # goodreads_serie = LookupIdDescriptor(IdType.Goodreads_Serie) class Meta: - proxy = True + proxy = True diff --git a/catalog/book/tests.py b/catalog/book/tests.py index fd270dd16..c8daaa252 100644 --- a/catalog/book/tests.py +++ b/catalog/book/tests.py @@ -375,7 +375,9 @@ def test_work(self): self.assertEqual(w1.display_title, "黄金时代") self.assertEqual(w2.display_title, "黄金时代") self.assertEqual(w1, w2) - editions = sorted(list(w1.related_editions.all()), key=lambda e: e.display_title) + editions = sorted( + list(w1.related_editions.all()), key=lambda e: e.display_title + ) self.assertEqual(len(editions), 2) self.assertEqual(editions[0].display_title, "Wang in Love and Bondage") self.assertEqual(editions[1].display_title, "黄金时代") @@ -539,7 +541,7 @@ def test_works(self): self.assertEqual(w2e[1].display_title, "Wang in Love and Bondage") self.assertEqual(w2e[2].display_title, "黄金时代") w3e = sorted(list(w3.related_editions.all()), key=lambda e: e.display_title) - self.assertEqual(len(w3e), 0) # w3 is merged to w2 + self.assertEqual(len(w3e), 0) # w3 is merged to w2 e = Edition.objects.get(primary_lookup_id_value=9781662601217) self.assertEqual(e.display_title, "Golden Age: A Novel") diff --git a/catalog/migrations/0013_migrate_work.py b/catalog/migrations/0013_migrate_work.py index d6376d3ea..d76914767 100644 --- a/catalog/migrations/0013_migrate_work.py +++ b/catalog/migrations/0013_migrate_work.py @@ -1,9 +1,14 @@ # Generated by Django 4.2.18 on 2025-01-27 05:56 from django.conf import settings -from django.db import migrations, models +from django.db import migrations, models, transaction import django.db.models.deletion +import logging + +logger = logging.getLogger(__name__) + + def merge_to(self, to_item: "Item | None"): if to_item is None: if self.merged_to_item is not None: @@ -30,28 +35,41 @@ def merge_to(self, to_item: "Item | None"): def merge_works(apps, schema_editor): - if getattr(settings, 'SKIP_WORK_MIGRATION', False): + if getattr(settings, "SKIP_WORK_MIGRATION", False): return Edition = apps.get_model("catalog", "Edition") Work = apps.get_model("catalog", "Work") - for edition in Edition.objects.all(): - works = 
edition.works.all() - if works.exists(): + with transaction.atomic(): + for edition in Edition.objects.all(): + works = edition.works.all() + if not works.exists(): + continue if edition.related_work is None: related_work = works.first() - if related_work.merged_to_item is not None: + while related_work.merged_to_item is not None: related_work = related_work.merged_to_item - edition.related_work = related_work - edition.save() + try: + edition.related_work = related_work + edition.save() + except Exception as e: + # do not know why, some work's class will be Item and cause error + logger.warning( + f"Error setting related_work for {edition} to {related_work}: {e}" + ) + continue for work in works: if work.pk == edition.related_work.pk: continue + while work.merged_to_item is not None: + work = work.merged_to_item merge_to(work, edition.related_work) + def process_pending_trigger_events(apps, schema_editor): # Ensure all pending trigger events are processed schema_editor.connection.cursor().execute("SET CONSTRAINTS ALL IMMEDIATE") + class Migration(migrations.Migration): dependencies = [ ("catalog", "0012_alter_model_i18n"),
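
Note on the merge logic in the last hunk above: the final patch walks each work's merged_to_item chain before assigning Edition.related_work, and re-resolves the chain again for every work it merges away. The following is only an illustrative sketch of that chain resolution, written against the fields that actually appear in these patches (merged_to_item, pk, related_work); resolve_merge_chain is a hypothetical helper name, not code from the patch series, and the cycle/hop guard is an extra defensive assumption that the migration itself does not make.

def resolve_merge_chain(work, max_hops=20):
    # Follow merged_to_item pointers until reaching a work that has not been
    # merged into another one. The seen-set and hop limit are defensive
    # additions; the migration assumes chains are finite and acyclic.
    seen = set()
    while work.merged_to_item is not None:
        if work.pk in seen or len(seen) >= max_hops:
            break
        seen.add(work.pk)
        work = work.merged_to_item
    return work

# Hypothetical usage inside merge_works, mirroring the patched loop:
#   related_work = resolve_merge_chain(works.first())
#   edition.related_work = related_work
#   edition.save()

Factoring the chain walk out like this would let the same resolution be applied both when picking edition.related_work and when deciding which works to merge_to, which is what the duplicated while-loops in the patched migration do inline.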