From f8c7d9d8113a0ec5f41c9be5c19e3a6e5fd853fb Mon Sep 17 00:00:00 2001
From: Kevin Stadler
Date: Mon, 2 Dec 2024 17:04:58 +0100
Subject: [PATCH] feat: display "earlier editions" on detail page

---
 app/[locale]/publications/[id]/page.tsx | 27 +++++++++++++++++++++++++--
 lib/model.ts                            |  1 +
 messages/de.json                        |  1 +
 messages/en.json                        |  1 +
 scripts/3_to_typesense.py               | 39 +++++++++++++++++++++++++++++----------
 5 files changed, 54 insertions(+), 15 deletions(-)

diff --git a/app/[locale]/publications/[id]/page.tsx b/app/[locale]/publications/[id]/page.tsx
index e19dacb..0852c25 100644
--- a/app/[locale]/publications/[id]/page.tsx
+++ b/app/[locale]/publications/[id]/page.tsx
@@ -44,6 +44,11 @@ export default async function PublicationPage(props: PublicationPageProps) {
 		return notFound();
 	}
 
+	// array of (Publication) promises
+	const earlier = pub.parents?.map((id) => {
+		return getPublication(id);
+	});
+
 	// array of (Publication) promises
 	const later = pub.later?.map((id) => {
 		return getPublication(id);
 	});
@@ -72,7 +77,7 @@ export default async function PublicationPage(props: PublicationPageProps) {
 					})
 					.join(" / ")}

-
+
@@ -128,9 +133,25 @@ export default async function PublicationPage(props: PublicationPageProps) {
+			{earlier ? (
+				<>
+					{t("earlier_editions")}
+
+					{earlier.map(async (pp) => {
+						const p = await pp;
+						return (
+
+
+
+						);
+					})}
+
+
+			) : null}
+
 			{later ? (
 				<>
-					{t("later_editions")}
+					{t("later_editions")}
 					{later.map(async (pp) => {
 						const p = await pp;
@@ -145,7 +166,7 @@ export default async function PublicationPage(props: PublicationPageProps) {
 			) : null}
-
+
 				{t("more_in")} {pub.language}
diff --git a/lib/model.ts b/lib/model.ts
index ba9cef7..6c3234d 100755
--- a/lib/model.ts
+++ b/lib/model.ts
@@ -18,6 +18,7 @@ export interface Publication {
 	// ids of publications which contain re-prints of some of the translations first published in this
 	// publication. this field is inferred from the 'eltern' column in openrefine.
+	parents?: Array;
 	later?: Array;
 	year: number;
 	year_display?: string;
diff --git a/messages/de.json b/messages/de.json
index 5ee5582..b2838e3 100644
--- a/messages/de.json
+++ b/messages/de.json
@@ -103,6 +103,7 @@
 		"more_in": "mehr auf",
 		"publisher": "Verlag / Publikation",
 		"translated_by": "übersetzt von",
+		"earlier_editions": "frühere Editionen",
 		"later_editions": "spätere Editionen",
 		"year": "Veröffentlichungsjahr"
 	},
diff --git a/messages/en.json b/messages/en.json
index 0d4bb10..6ba7c3c 100644
--- a/messages/en.json
+++ b/messages/en.json
@@ -103,6 +103,7 @@
 		"more_in": "more in",
 		"publisher": "published by",
 		"translated_by": "contains translations by",
+		"earlier_editions": "earlier editions",
 		"later_editions": "later editions",
 		"year": "publication year"
 	},
diff --git a/scripts/3_to_typesense.py b/scripts/3_to_typesense.py
index cdacb7a..0a9b92d 100755
--- a/scripts/3_to_typesense.py
+++ b/scripts/3_to_typesense.py
@@ -68,23 +68,35 @@ def load_json(dirname, filename):
 def merge_changes(orig, changed, field_names):
     for e1, e2 in zip(orig, changed):
         for f in field_names:
-            if isinstance(e2[f], dict):
-                e2[f] = e2[f]["value"]
-            elif isinstance(e1[f], int):
-                e2[f] = int(e2[f])
-
-            if e1[f] != e2[f]:  # TODO check types and force original numbers
-                logging.info(
-                    f"integrating manual change to field {f}: '{e1[f]}' > '{e2[f]}'"
-                )
-                e1[f] = e2[f]
+            try:
+                if isinstance(e2[f], dict):
+                    e2[f] = e2[f]["value"]
+                elif isinstance(e1[f], int):
+                    e2[f] = int(e2[f])
+
+                if e1[f] != e2[f]:  # TODO check types and force original numbers
+                    logging.info(
+                        f"integrating manual change to field {f}: '{e1[f]}' > '{e2[f]}'"
+                    )
+                    e1[f] = e2[f]
+            except KeyError:
+                if e2[f]:
+                    logging.info(f"adding new field {f} with value '{e2[f]}'")
+                    e1[f] = e2[f]
 
 
 # for publication: title, year, year_display, publisher, publication_details, exemplar_...
 merge_changes(
     publications,
     publication_changes,
-    ["title", "year", "year_display", "publisher", "publication_details"],
+    [
+        "title",
+        "short_title",
+        "year",
+        "year_display",
+        "publisher",
+        "publication_details",
+    ],
 )
 merge_changes(translations, translation_changes, ["title", "work_display_title"])
 merge_changes(works, work_changes, ["title", "short_title", "year", "category", "gnd"])
@@ -137,6 +149,9 @@ def del_empty_strings(o, field_names):
 for i, pub in enumerate(publications):
     pub["id"] = str(i + 1)
 
+    if not pub["short_title"]:
+        pub["short_title"] = pub["title"]
+
     pub["contains"] = [translations[t_id - 1] for t_id in pub["contains"]]
 
     pub["images"] = (
@@ -152,7 +167,7 @@
         else:
             publications[pid - 1]["later"] = [i + 1]
 
-    del_empty_strings(pub, ["publication_details"])
+    del_empty_strings(pub, ["parents", "publication_details"])
 
     # trim data a little
     del pub["exemplar_suhrkamp_berlin"]