Move on_updated method
SDESK-7442
eos87 committed Dec 9, 2024
1 parent 3e84f80 commit 1ac8ebe
Showing 4 changed files with 91 additions and 3 deletions.
1 change: 1 addition & 0 deletions server/planning/autosave.py
@@ -27,6 +27,7 @@ def on_create(self, docs):

def on_delete(self, doc):
if doc.get(ITEM_TYPE) == "event":
# TODO-ASYNC: replace with equivalent in `EventsAsyncService`
get_resource_service("events").delete_event_files(None, doc)

@staticmethod
3 changes: 3 additions & 0 deletions server/planning/common.py
@@ -349,6 +349,9 @@ def post_required(updates, original):

def update_post_item(updates, original):
"""Method to update(re-post) a posted item after the item is updated"""
# TODO-ASYNC: update once `events_post` & `planning_post` are async
# also to use pydantic models instead of dicts

pub_status = None
# Save&Post or Save&Unpost
if updates.get("pubstatus"):
64 changes: 63 additions & 1 deletion server/planning/events/events_service.py
@@ -31,11 +31,13 @@
from planning.types.event import EmbeddedPlanning
from planning.common import (
WorkflowStates,
format_address,
get_event_max_multi_day_duration,
get_max_recurrent_events,
remove_lock_information,
set_ingested_event_state,
post_required,
update_post_item,
)
from planning.planning import PlanningAsyncService
from planning.core.service import BasePlanningAsyncService
@@ -46,7 +48,11 @@
)

from .events_sync import sync_event_metadata_with_planning_items
from .events_utils import (
generate_recurring_dates,
get_events_embedded_planning,
get_recurring_timeline,
)


class EventsAsyncService(BasePlanningAsyncService[EventResourceModel]):
@@ -292,6 +298,49 @@ async def update(self, event_id: str | ObjectId, updates: dict[str, Any], etag:
# Process ``embedded_planning`` field, and sync Event metadata with associated Planning/Coverages
sync_event_metadata_with_planning_items(original_event.to_dict(), updates, embedded_planning)

async def on_updated(self, updates: dict[str, Any], original: EventResourceModel, from_ingest: bool = False):
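"""Handle post-update side effects: recurrence propagation, re-posting, unlock notification, orphaned file cleanup and item enhancement."""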
# if this Event was converted to a recurring series
# then update all associated Planning items with the recurrence_id
if updates.get("recurrence_id") and not original.recurrence_id:
await PlanningAsyncService().on_event_converted_to_recurring(updates, original)

if not updates.get("duplicate_to"):
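# re-post a previously posted item so the latest changes are published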
posted = update_post_item(updates, original.to_dict())
if posted:
new_event = await self.find_by_id(original.id)
assert new_event is not None
updates["_etag"] = new_event.etag
updates["state_reason"] = new_event.state_reason

if original.lock_user and "lock_user" in updates and updates.get("lock_user") is None:
# when the event is unlocked by the patch.
push_notification(
"events:unlock",
item=str(original.id),
user=str(get_user_id()),
lock_session=str(get_auth().get("_id")),
etag=updates["_etag"],
recurrence_id=original.recurrence_id or None,
from_ingest=from_ingest,
)

await self.delete_event_files(updates, original.files)

if "location" not in updates and original.location:
updates["location"] = original.location

updates[ID_FIELD] = original.id
self._enhance_event_item(updates)

async def delete_event_files(self, updates: dict[str, Any], event_files: list[ObjectId]):
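"""Delete files that are no longer referenced by this event, keeping any file still used by another event."""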
files = [f for f in event_files if f not in (updates or {}).get("files", [])]
files_service = get_resource_service("events_files")

for file in files:
events_using_file = await self.find({"files": file})
if (await events_using_file.count()) == 0:
files_service.delete_action(lookup={"_id": file})

async def on_deleted(self, doc: EventResourceModel):
push_notification(
"events:delete",
@@ -755,3 +804,16 @@ async def _link_to_planning(event: EventResourceModel):
await planning_service.system_update(event.planning_item, updates)

await signals.planning_update.send(updates, planning_item)

def _enhance_event_item(self, doc: dict[str, Any]):
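"""Add related planning ids, format location addresses and drop an empty ``original_creator``."""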
plannings = get_related_planning_for_events([doc[ID_FIELD]])

if len(plannings):
doc["planning_ids"] = [planning.get("_id") for planning in plannings]

for location in doc.get("location") or []:
format_address(location)

# fix existing events that have ``original_creator`` stored as an empty string
if not doc.get("original_creator"):
doc.pop("original_creator", None)
26 changes: 24 additions & 2 deletions server/planning/planning/service.py
@@ -1,10 +1,14 @@
from datetime import datetime
from typing import AsyncGenerator, Any

from superdesk.core.utils import date_to_str
from superdesk.resource_fields import ID_FIELD

from planning.common import WORKFLOW_STATE
from planning.types import PlanningResourceModel
from planning.types.event import EventResourceModel
from planning.core.service import BasePlanningAsyncService
from planning.utils import get_related_event_links_for_planning, get_related_planning_for_events


class PlanningAsyncService(BasePlanningAsyncService[PlanningResourceModel]):
Expand Down Expand Up @@ -82,3 +86,21 @@ async def get_expired_items(

# Yield the results for iteration by the callee
yield items

async def on_event_converted_to_recurring(self, updates: dict[str, Any], original: EventResourceModel):
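"""Propagate the new ``recurrence_id`` to all Planning items linked to the event that was converted to a recurring series."""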
for item in get_related_planning_for_events([original.id]):
related_events = get_related_event_links_for_planning(item)

# Set the ``recurrence_id`` in the ``planning.related_events`` field
for event in related_events:
if event["_id"] == original.id:
event["recurrence_id"] = updates["recurrence_id"]
break

await self.update(
item[ID_FIELD],
{
"recurrence_id": updates["recurrence_id"],
"related_events": related_events,
},
)
