From 03c517b066fce832fd224d228d7ad9268e0c248f Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Tue, 18 Apr 2023 03:26:41 -1000
Subject: [PATCH 01/16] Add a guard against selecting all entities in `state_changes_during_period` (#91585)

Add a guard against selecting all entities in state_changes_during_period

This cannot happen in `dev` because we require entity ids
---
 homeassistant/components/recorder/history/modern.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/homeassistant/components/recorder/history/modern.py b/homeassistant/components/recorder/history/modern.py
index f7d08c6bba80a5..44950b8fe71527 100644
--- a/homeassistant/components/recorder/history/modern.py
+++ b/homeassistant/components/recorder/history/modern.py
@@ -374,6 +374,8 @@ def state_changes_during_period(
     if entity_id:
         instance = recorder.get_instance(hass)
         metadata_id = instance.states_meta_manager.get(entity_id, session, False)
+        if metadata_id is None:
+            return {}
         entity_id_to_metadata_id = {entity_id: metadata_id}
         stmt = _state_changed_during_period_stmt(
             start_time,
@@ -394,7 +396,7 @@ def state_changes_during_period(
                 states,
                 start_time,
                 entity_ids,
-                entity_id_to_metadata_id,
+                entity_id_to_metadata_id,  # type: ignore[arg-type]
                 include_start_time_state=include_start_time_state,
             ),
         )

From 71f0f53ddc53457ebb5eb32b4b1a35db3dafbc97 Mon Sep 17 00:00:00 2001
From: Aaron Godfrey
Date: Thu, 13 Apr 2023 21:12:58 -0700
Subject: [PATCH 02/16] Fix tasks with no due date not triggering the `on` calendar state (#91196)

Fix tasks with no due date.

Prior to this change we were setting the start date/time to UTC rather
than the user's timezone.
---
 homeassistant/components/todoist/calendar.py |  2 +-
 tests/components/todoist/test_calendar.py    | 54 ++++++++++++++++++++
 2 files changed, 55 insertions(+), 1 deletion(-)

diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py
index c3e8f61fcc89dd..ea5aab15344e2a 100644
--- a/homeassistant/components/todoist/calendar.py
+++ b/homeassistant/components/todoist/calendar.py
@@ -446,7 +446,7 @@ def create_todoist_task(self, data: Task):
             LABELS: [],
             OVERDUE: False,
             PRIORITY: data.priority,
-            START: dt.utcnow(),
+            START: dt.now(),
             SUMMARY: data.content,
         }
 
diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py
index 4f792b3cc01b94..d967c3dc0352f4 100644
--- a/tests/components/todoist/test_calendar.py
+++ b/tests/components/todoist/test_calendar.py
@@ -25,6 +25,14 @@
 from tests.typing import ClientSessionGenerator
 
 
+@pytest.fixture(autouse=True)
+def set_time_zone(hass: HomeAssistant):
+    """Set the time zone for the tests."""
+    # Set our timezone to CST/Regina so we can check calculations
+    # This keeps UTC-6 all year round
+    hass.config.set_time_zone("America/Regina")
+
+
 @pytest.fixture(name="task")
 def mock_task() -> Task:
     """Mock a todoist Task instance."""
@@ -132,6 +140,52 @@ async def test_update_entity_for_custom_project_with_labels_on(
     assert state.state == "on"
 
 
+@patch("homeassistant.components.todoist.calendar.TodoistAPIAsync")
+async def test_update_entity_for_custom_project_no_due_date_on(
+    todoist_api, hass: HomeAssistant, api
+) -> None:
+    """Test that a task without an explicit due date is considered to be in an on state."""
+    task_wo_due_date = Task(
+        assignee_id=None,
+        assigner_id=None,
+        comment_count=0,
+        is_completed=False,
+        content="No due date task",
+        created_at="2023-04-11T00:25:25.589971Z",
+        creator_id="1",
+        description="",
+        due=None,
+        id="123",
+        labels=["Label1"],
+        order=10,
+        parent_id=None,
+        priority=1,
+        project_id="12345",
+        section_id=None,
+        url="https://todoist.com/showTask?id=123",
+        sync_id=None,
+    )
+    api.get_tasks.return_value = [task_wo_due_date]
+    todoist_api.return_value = api
+
+    assert await setup.async_setup_component(
+        hass,
+        "calendar",
+        {
+            "calendar": {
+                "platform": DOMAIN,
+                CONF_TOKEN: "token",
+                "custom_projects": [{"name": "All projects", "labels": ["Label1"]}],
+            }
+        },
+    )
+    await hass.async_block_till_done()
+
+    await async_update_entity(hass, "calendar.all_projects")
+    state = hass.states.get("calendar.all_projects")
+    assert state.state == "on"
+
+
 @patch("homeassistant.components.todoist.calendar.TodoistAPIAsync")
 async def test_failed_coordinator_update(todoist_api, hass: HomeAssistant, api) -> None:
     """Test a failed data coordinator update is handled correctly."""

From c544da74260627c1ce6eed44cf39fd77e32a718b Mon Sep 17 00:00:00 2001
From: Tom Harris
Date: Wed, 19 Apr 2023 08:41:32 -0400
Subject: [PATCH 03/16] Fix Insteon thermostat issue (#91568)

* Bump pyinsteon

* Bump pyinsteon

* Bump pyinsteon
---
 homeassistant/components/insteon/manifest.json | 2 +-
 requirements_all.txt                           | 2 +-
 requirements_test_all.txt                      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/insteon/manifest.json b/homeassistant/components/insteon/manifest.json
index d9c2380de0f2de..08adce918c1e92 100644
--- a/homeassistant/components/insteon/manifest.json
+++ b/homeassistant/components/insteon/manifest.json
@@ -17,7 +17,7 @@
   "iot_class": "local_push",
   "loggers": ["pyinsteon", "pypubsub"],
   "requirements": [
-    "pyinsteon==1.4.1",
+    "pyinsteon==1.4.2",
     "insteon-frontend-home-assistant==0.3.4"
   ],
   "usb": [
diff --git a/requirements_all.txt b/requirements_all.txt
index 2421e7303c9738..8af07350c5da9a 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -1684,7 +1684,7 @@ pyialarm==2.2.0
 pyicloud==1.0.0
 
 # homeassistant.components.insteon
-pyinsteon==1.4.1
+pyinsteon==1.4.2
 
 # homeassistant.components.intesishome
 pyintesishome==1.8.0
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index dd116a13753e70..900861f5172313 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1218,7 +1218,7 @@ pyialarm==2.2.0
 pyicloud==1.0.0
 
 # homeassistant.components.insteon
-pyinsteon==1.4.1
+pyinsteon==1.4.2
 
 # homeassistant.components.ipma
 pyipma==3.0.6

From 6b02892c285704874742f6bcebb28204a66addf4 Mon Sep 17 00:00:00 2001
From: Duco Sebel <74970928+DCSBL@users.noreply.github.com>
Date: Wed, 19 Apr 2023 17:14:28 +0200
Subject: [PATCH 04/16] Handle UnsupportedError in HomeWizard (#91608)

* Handle UnsupportedError

* Make error message more clear

* Remove debug line, whoops
---
 .../components/homewizard/coordinator.py   | 25 +++++++++---
 tests/components/homewizard/test_switch.py | 38 ++++++++++++++++++-
 2 files changed, 56 insertions(+), 7 deletions(-)

diff --git a/homeassistant/components/homewizard/coordinator.py b/homeassistant/components/homewizard/coordinator.py
index 533af445c84249..fb89989b2a5805 100644
--- a/homeassistant/components/homewizard/coordinator.py
+++ b/homeassistant/components/homewizard/coordinator.py
@@ -5,7 +5,7 @@
 
 from homewizard_energy import HomeWizardEnergy
 from homewizard_energy.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE, SUPPORTS_SYSTEM
-from homewizard_energy.errors import DisabledError, RequestError
+from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError
 from homewizard_energy.models import Device
 
 from homeassistant.config_entries import ConfigEntry
@@ -24,6 +24,8 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]
     api: HomeWizardEnergy
     api_disabled: bool = False
 
+    _unsupported_error: bool = False
+
    def __init__(
        self,
        hass: HomeAssistant,
@@ -43,11 +45,22 @@ async def _async_update_data(self) -> DeviceResponseEntry:
                 data=await self.api.data(),
             )
 
-        if self.supports_state(data.device):
-            data.state = await self.api.state()
-
-        if self.supports_system(data.device):
-            data.system = await self.api.system()
+        try:
+            if self.supports_state(data.device):
+                data.state = await self.api.state()
+
+            if self.supports_system(data.device):
+                data.system = await self.api.system()
+
+        except UnsupportedError as ex:
+            # Old firmware, ignore
+            if not self._unsupported_error:
+                self._unsupported_error = True
+                _LOGGER.warning(
+                    "%s is running an outdated firmware version (%s). Contact HomeWizard support to update your device",
+                    self.entry.title,
+                    ex,
+                )
 
         except RequestError as ex:
             raise UpdateFailed(ex) from ex
diff --git a/tests/components/homewizard/test_switch.py b/tests/components/homewizard/test_switch.py
index f55550ee825783..6a2623e964f181 100644
--- a/tests/components/homewizard/test_switch.py
+++ b/tests/components/homewizard/test_switch.py
@@ -1,7 +1,7 @@
 """Test the update coordinator for HomeWizard."""
 from unittest.mock import AsyncMock, patch
 
-from homewizard_energy.errors import DisabledError, RequestError
+from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError
 from homewizard_energy.models import State, System
 import pytest
 
@@ -507,3 +507,39 @@ async def test_switch_handles_disablederror(
         {"entity_id": "switch.product_name_aabbccddeeff_cloud_connection"},
         blocking=True,
     )
+
+
+async def test_switch_handles_unsupportederror(
+    hass: HomeAssistant, mock_config_entry_data, mock_config_entry
+) -> None:
+    """Test switch entities become unavailable when UnsupportedError is raised."""
+
+    api = get_mock_device(product_type="HWE-SKT", firmware_version="3.02")
+    api.state = AsyncMock(side_effect=UnsupportedError())
+    api.system = AsyncMock(side_effect=UnsupportedError())
+
+    with patch(
+        "homeassistant.components.homewizard.coordinator.HomeWizardEnergy",
+        return_value=api,
+    ):
+        entry = mock_config_entry
+        entry.data = mock_config_entry_data
+        entry.add_to_hass(hass)
+
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff_cloud_connection").state
+        == STATE_UNAVAILABLE
+    )
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff_switch_lock").state
+        == STATE_UNAVAILABLE
+    )
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff").state
+        == STATE_UNAVAILABLE
+    )

From 38de9765dfcad2125c236d9b2da6418102ba1f82 Mon Sep 17 00:00:00 2001
From: epenet <6771947+epenet@users.noreply.github.com>
Date: Tue, 18 Apr 2023 19:33:09 +0200
Subject: [PATCH 05/16] Bump renault-api to 0.1.13 (#91609)

---
 homeassistant/components/renault/manifest.json | 2 +-
 requirements_all.txt                           | 2 +-
 requirements_test_all.txt                      | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json
index 9fade49b4b4702..5f2670fb17014e 100644
--- a/homeassistant/components/renault/manifest.json
+++ b/homeassistant/components/renault/manifest.json
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["renault_api"],
   "quality_scale": "platinum",
-  "requirements": ["renault-api==0.1.12"]
+  "requirements": ["renault-api==0.1.13"]
 }
diff --git a/requirements_all.txt b/requirements_all.txt
index 8af07350c5da9a..e61c2e922d5a8e 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -2228,7 +2228,7 @@ raspyrfm-client==1.2.8
 regenmaschine==2022.11.0
 
 # homeassistant.components.renault
-renault-api==0.1.12
+renault-api==0.1.13
 
 # homeassistant.components.reolink
 reolink-aio==0.5.10
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 900861f5172313..2434743e22265b 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -1591,7 +1591,7 @@ radiotherm==2.1.0
 regenmaschine==2022.11.0
 
 # homeassistant.components.renault
-renault-api==0.1.12
+renault-api==0.1.13
 
 # homeassistant.components.reolink
 reolink-aio==0.5.10

From 36d2accb5bead9d0a826b390f3ad91df543e2130 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 19 Apr 2023 02:40:04 -1000
Subject: [PATCH 06/16] Handle long format context UUIDs during migration (#91657)

In https://github.com/home-assistant/core/issues/91514 it was discovered
that these exist in older versions
---
 .../components/recorder/migration.py          |   3 +-
 .../recorder/test_migration_from_schema_32.py | 775 ++++++++++++++++++
 2 files changed, 776 insertions(+), 2 deletions(-)
 create mode 100644 tests/components/recorder/test_migration_from_schema_32.py

diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py
index 4b0244038e81d8..8b50b419f1b330 100644
--- a/homeassistant/components/recorder/migration.py
+++ b/homeassistant/components/recorder/migration.py
@@ -1364,10 +1364,9 @@ def _context_id_to_bytes(context_id: str | None) -> bytes | None:
     # ULIDs that filled the column to the max length
     # so we need to catch the ValueError and return
     # None if it happens
-    if len(context_id) == 32:
-        return UUID(context_id).bytes
     if len(context_id) == 26:
         return ulid_to_bytes(context_id)
+    return UUID(context_id).bytes
     return None
 
 
diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py
new file mode 100644
index 00000000000000..01c086e119a803
--- /dev/null
+++ b/tests/components/recorder/test_migration_from_schema_32.py
@@ -0,0 +1,775 @@
+"""The tests for the recorder migration from schema 32."""
+# pylint: disable=invalid-name
+import importlib
+import sys
+from unittest.mock import patch
+import uuid
+
+import pytest
+from sqlalchemy import create_engine, inspect
+from sqlalchemy.orm import Session
+
+from homeassistant.components import recorder
+from homeassistant.components.recorder import core, migration, statistics
+from homeassistant.components.recorder.db_schema import (
+    Events,
+    EventTypes,
+    States,
+    StatesMeta,
+)
+from homeassistant.components.recorder.queries import select_event_type_ids
+from homeassistant.components.recorder.tasks import (
+    EntityIDMigrationTask,
+    EntityIDPostMigrationTask,
+    EventsContextIDMigrationTask,
+    EventTypeIDMigrationTask,
+    StatesContextIDMigrationTask,
+)
+from homeassistant.components.recorder.util import session_scope
+from homeassistant.core import HomeAssistant
+import homeassistant.util.dt as dt_util
+from homeassistant.util.ulid import bytes_to_ulid
+
+from .common import async_recorder_block_till_done, async_wait_recording_done
+
+from tests.typing import RecorderInstanceGenerator
+
+CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
+SCHEMA_MODULE = 
"tests.components.recorder.db_schema_32" +ORIG_TZ = dt_util.DEFAULT_TIME_ZONE + + +def _create_engine_test(*args, **kwargs): + """Test version of create_engine that initializes with old schema. + + This simulates an existing db with the old schema. + """ + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + engine = create_engine(*args, **kwargs) + old_db_schema.Base.metadata.create_all(engine) + with Session(engine) as session: + session.add( + recorder.db_schema.StatisticsRuns(start=statistics.get_start_time()) + ) + session.add( + recorder.db_schema.SchemaChanges( + schema_version=old_db_schema.SCHEMA_VERSION + ) + ) + session.commit() + return engine + + +@pytest.fixture(autouse=True) +def db_schema_32(): + """Fixture to initialize the db with the old schema.""" + importlib.import_module(SCHEMA_MODULE) + old_db_schema = sys.modules[SCHEMA_MODULE] + + with patch.object(recorder, "db_schema", old_db_schema), patch.object( + recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION + ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object( + core, "EventTypes", old_db_schema.EventTypes + ), patch.object( + core, "EventData", old_db_schema.EventData + ), patch.object( + core, "States", old_db_schema.States + ), patch.object( + core, "Events", old_db_schema.Events + ), patch.object( + core, "StateAttributes", old_db_schema.StateAttributes + ), patch.object( + core, "EntityIDMigrationTask", core.RecorderTask + ), patch( + CREATE_ENGINE_TARGET, new=_create_engine_test + ): + yield + + +@pytest.fixture(name="legacy_recorder_mock") +async def legacy_recorder_mock_fixture(recorder_mock): + """Fixture for legacy recorder mock.""" + with patch.object(recorder_mock.states_meta_manager, "active", False): + yield recorder_mock + + +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +async def test_migrate_events_context_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate old uuid context ids and ulid context ids to binary format.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + test_uuid = uuid.uuid4() + uuid_hex = test_uuid.hex + uuid_bin = test_uuid.bytes + + def _insert_events(): + with session_scope(hass=hass) as session: + session.add_all( + ( + Events( + event_type="old_uuid_context_id_event", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=1677721632.452529, + context_id=uuid_hex, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + Events( + event_type="empty_context_id_event", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=1677721632.552529, + context_id=None, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + Events( + event_type="ulid_context_id_event", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=1677721632.552529, + context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV", + context_id_bin=None, + context_user_id="9400facee45711eaa9308bfd3d19e474", + context_user_id_bin=None, + context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2", + context_parent_id_bin=None, + ), + Events( + event_type="invalid_context_id_event", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=1677721632.552529, + context_id="invalid", + context_id_bin=None, + context_user_id=None, + 
context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + Events( + event_type="garbage_context_id_event", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=1677721632.552529, + context_id="adapt_lgt:b'5Cf*':interval:b'0R'", + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + ) + ) + + await instance.async_add_executor_job(_insert_events) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EventsContextIDMigrationTask()) + await async_recorder_block_till_done(hass) + + def _object_as_dict(obj): + return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} + + def _fetch_migrated_events(): + with session_scope(hass=hass) as session: + events = ( + session.query(Events) + .filter( + Events.event_type.in_( + [ + "old_uuid_context_id_event", + "empty_context_id_event", + "ulid_context_id_event", + "invalid_context_id_event", + "garbage_context_id_event", + ] + ) + ) + .all() + ) + assert len(events) == 5 + return {event.event_type: _object_as_dict(event) for event in events} + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + + old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"] + assert old_uuid_context_id_event["context_id"] is None + assert old_uuid_context_id_event["context_user_id"] is None + assert old_uuid_context_id_event["context_parent_id"] is None + assert old_uuid_context_id_event["context_id_bin"] == uuid_bin + assert old_uuid_context_id_event["context_user_id_bin"] is None + assert old_uuid_context_id_event["context_parent_id_bin"] is None + + empty_context_id_event = events_by_type["empty_context_id_event"] + assert empty_context_id_event["context_id"] is None + assert empty_context_id_event["context_user_id"] is None + assert empty_context_id_event["context_parent_id"] is None + assert empty_context_id_event["context_id_bin"] == b"\x00" * 16 + assert empty_context_id_event["context_user_id_bin"] is None + assert empty_context_id_event["context_parent_id_bin"] is None + + ulid_context_id_event = events_by_type["ulid_context_id_event"] + assert ulid_context_id_event["context_id"] is None + assert ulid_context_id_event["context_user_id"] is None + assert ulid_context_id_event["context_parent_id"] is None + assert ( + bytes_to_ulid(ulid_context_id_event["context_id_bin"]) + == "01ARZ3NDEKTSV4RRFFQ69G5FAV" + ) + assert ( + ulid_context_id_event["context_user_id_bin"] + == b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t" + ) + assert ( + bytes_to_ulid(ulid_context_id_event["context_parent_id_bin"]) + == "01ARZ3NDEKTSV4RRFFQ69G5FA2" + ) + + invalid_context_id_event = events_by_type["invalid_context_id_event"] + assert invalid_context_id_event["context_id"] is None + assert invalid_context_id_event["context_user_id"] is None + assert invalid_context_id_event["context_parent_id"] is None + assert invalid_context_id_event["context_id_bin"] == b"\x00" * 16 + assert invalid_context_id_event["context_user_id_bin"] is None + assert invalid_context_id_event["context_parent_id_bin"] is None + + garbage_context_id_event = events_by_type["garbage_context_id_event"] + assert garbage_context_id_event["context_id"] is None + assert garbage_context_id_event["context_user_id"] is None + assert garbage_context_id_event["context_parent_id"] is None + assert garbage_context_id_event["context_id_bin"] == b"\x00" * 16 + 
assert garbage_context_id_event["context_user_id_bin"] is None + assert garbage_context_id_event["context_parent_id_bin"] is None + + +@pytest.mark.parametrize("enable_migrate_context_ids", [True]) +async def test_migrate_states_context_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate old uuid context ids and ulid context ids to binary format.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + test_uuid = uuid.uuid4() + uuid_hex = test_uuid.hex + uuid_bin = test_uuid.bytes + + def _insert_events(): + with session_scope(hass=hass) as session: + session.add_all( + ( + States( + entity_id="state.old_uuid_context_id", + last_updated_ts=1677721632.452529, + context_id=uuid_hex, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + States( + entity_id="state.empty_context_id", + last_updated_ts=1677721632.552529, + context_id=None, + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + States( + entity_id="state.ulid_context_id", + last_updated_ts=1677721632.552529, + context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV", + context_id_bin=None, + context_user_id="9400facee45711eaa9308bfd3d19e474", + context_user_id_bin=None, + context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2", + context_parent_id_bin=None, + ), + States( + entity_id="state.invalid_context_id", + last_updated_ts=1677721632.552529, + context_id="invalid", + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + States( + entity_id="state.garbage_context_id", + last_updated_ts=1677721632.552529, + context_id="adapt_lgt:b'5Cf*':interval:b'0R'", + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + States( + entity_id="state.human_readable_uuid_context_id", + last_updated_ts=1677721632.552529, + context_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65", + context_id_bin=None, + context_user_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65", + context_user_id_bin=None, + context_parent_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65", + context_parent_id_bin=None, + ), + ) + ) + + await instance.async_add_executor_job(_insert_events) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + instance.queue_task(StatesContextIDMigrationTask()) + await async_recorder_block_till_done(hass) + + def _object_as_dict(obj): + return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} + + def _fetch_migrated_states(): + with session_scope(hass=hass) as session: + events = ( + session.query(States) + .filter( + States.entity_id.in_( + [ + "state.old_uuid_context_id", + "state.empty_context_id", + "state.ulid_context_id", + "state.invalid_context_id", + "state.garbage_context_id", + "state.human_readable_uuid_context_id", + ] + ) + ) + .all() + ) + assert len(events) == 6 + return {state.entity_id: _object_as_dict(state) for state in events} + + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + + old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"] + assert old_uuid_context_id["context_id"] is None + assert old_uuid_context_id["context_user_id"] is None + assert old_uuid_context_id["context_parent_id"] is None + assert 
old_uuid_context_id["context_id_bin"] == uuid_bin + assert old_uuid_context_id["context_user_id_bin"] is None + assert old_uuid_context_id["context_parent_id_bin"] is None + + empty_context_id = states_by_entity_id["state.empty_context_id"] + assert empty_context_id["context_id"] is None + assert empty_context_id["context_user_id"] is None + assert empty_context_id["context_parent_id"] is None + assert empty_context_id["context_id_bin"] == b"\x00" * 16 + assert empty_context_id["context_user_id_bin"] is None + assert empty_context_id["context_parent_id_bin"] is None + + ulid_context_id = states_by_entity_id["state.ulid_context_id"] + assert ulid_context_id["context_id"] is None + assert ulid_context_id["context_user_id"] is None + assert ulid_context_id["context_parent_id"] is None + assert ( + bytes_to_ulid(ulid_context_id["context_id_bin"]) == "01ARZ3NDEKTSV4RRFFQ69G5FAV" + ) + assert ( + ulid_context_id["context_user_id_bin"] + == b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t" + ) + assert ( + bytes_to_ulid(ulid_context_id["context_parent_id_bin"]) + == "01ARZ3NDEKTSV4RRFFQ69G5FA2" + ) + + invalid_context_id = states_by_entity_id["state.invalid_context_id"] + assert invalid_context_id["context_id"] is None + assert invalid_context_id["context_user_id"] is None + assert invalid_context_id["context_parent_id"] is None + assert invalid_context_id["context_id_bin"] == b"\x00" * 16 + assert invalid_context_id["context_user_id_bin"] is None + assert invalid_context_id["context_parent_id_bin"] is None + + garbage_context_id = states_by_entity_id["state.garbage_context_id"] + assert garbage_context_id["context_id"] is None + assert garbage_context_id["context_user_id"] is None + assert garbage_context_id["context_parent_id"] is None + assert garbage_context_id["context_id_bin"] == b"\x00" * 16 + assert garbage_context_id["context_user_id_bin"] is None + assert garbage_context_id["context_parent_id_bin"] is None + + human_readable_uuid_context_id = states_by_entity_id[ + "state.human_readable_uuid_context_id" + ] + assert human_readable_uuid_context_id["context_id"] is None + assert human_readable_uuid_context_id["context_user_id"] is None + assert human_readable_uuid_context_id["context_parent_id"] is None + assert ( + human_readable_uuid_context_id["context_id_bin"] + == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" + ) + assert ( + human_readable_uuid_context_id["context_user_id_bin"] + == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" + ) + assert ( + human_readable_uuid_context_id["context_parent_id_bin"] + == b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee" + ) + + +@pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +async def test_migrate_event_type_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate event_types to the EventTypes table.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + def _insert_events(): + with session_scope(hass=hass) as session: + session.add_all( + ( + Events( + event_type="event_type_one", + origin_idx=0, + time_fired_ts=1677721632.452529, + ), + Events( + event_type="event_type_one", + origin_idx=0, + time_fired_ts=1677721632.552529, + ), + Events( + event_type="event_type_two", + origin_idx=0, + time_fired_ts=1677721632.552529, + ), + ) + ) + + await instance.async_add_executor_job(_insert_events) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + 
instance.queue_task(EventTypeIDMigrationTask()) + await async_recorder_block_till_done(hass) + + def _fetch_migrated_events(): + with session_scope(hass=hass, read_only=True) as session: + events = ( + session.query(Events.event_id, Events.time_fired, EventTypes.event_type) + .filter( + Events.event_type_id.in_( + select_event_type_ids( + ( + "event_type_one", + "event_type_two", + ) + ) + ) + ) + .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id) + .all() + ) + assert len(events) == 3 + result = {} + for event in events: + result.setdefault(event.event_type, []).append( + { + "event_id": event.event_id, + "time_fired": event.time_fired, + "event_type": event.event_type, + } + ) + return result + + events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) + assert len(events_by_type["event_type_one"]) == 2 + assert len(events_by_type["event_type_two"]) == 1 + + +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +async def test_migrate_entity_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + def _insert_states(): + with session_scope(hass=hass) as session: + session.add_all( + ( + States( + entity_id="sensor.one", + state="one_1", + last_updated_ts=1.452529, + ), + States( + entity_id="sensor.two", + state="two_2", + last_updated_ts=2.252529, + ), + States( + entity_id="sensor.two", + state="two_1", + last_updated_ts=3.152529, + ), + ) + ) + + await instance.async_add_executor_job(_insert_states) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EntityIDMigrationTask()) + await async_recorder_block_till_done(hass) + + def _fetch_migrated_states(): + with session_scope(hass=hass, read_only=True) as session: + states = ( + session.query( + States.state, + States.metadata_id, + States.last_updated_ts, + StatesMeta.entity_id, + ) + .outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id) + .all() + ) + assert len(states) == 3 + result = {} + for state in states: + result.setdefault(state.entity_id, []).append( + { + "state_id": state.entity_id, + "last_updated_ts": state.last_updated_ts, + "state": state.state, + } + ) + return result + + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + assert len(states_by_entity_id["sensor.two"]) == 2 + assert len(states_by_entity_id["sensor.one"]) == 1 + + +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +async def test_post_migrate_entity_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + def _insert_events(): + with session_scope(hass=hass) as session: + session.add_all( + ( + States( + entity_id="sensor.one", + state="one_1", + last_updated_ts=1.452529, + ), + States( + entity_id="sensor.two", + state="two_2", + last_updated_ts=2.252529, + ), + States( + entity_id="sensor.two", + state="two_1", + last_updated_ts=3.152529, + ), + ) + ) + + await instance.async_add_executor_job(_insert_events) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EntityIDPostMigrationTask()) + await 
async_recorder_block_till_done(hass) + + def _fetch_migrated_states(): + with session_scope(hass=hass, read_only=True) as session: + states = session.query( + States.state, + States.entity_id, + ).all() + assert len(states) == 3 + return {state.state: state.entity_id for state in states} + + states_by_state = await instance.async_add_executor_job(_fetch_migrated_states) + assert states_by_state["one_1"] is None + assert states_by_state["two_2"] is None + assert states_by_state["two_1"] is None + + +@pytest.mark.parametrize("enable_migrate_entity_ids", [True]) +async def test_migrate_null_entity_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate entity_ids to the StatesMeta table.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + def _insert_states(): + with session_scope(hass=hass) as session: + session.add( + States( + entity_id="sensor.one", + state="one_1", + last_updated_ts=1.452529, + ), + ) + session.add_all( + States( + entity_id=None, + state="empty", + last_updated_ts=time + 1.452529, + ) + for time in range(1000) + ) + session.add( + States( + entity_id="sensor.one", + state="one_1", + last_updated_ts=2.452529, + ), + ) + + await instance.async_add_executor_job(_insert_states) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EntityIDMigrationTask()) + await async_recorder_block_till_done(hass) + await async_recorder_block_till_done(hass) + + def _fetch_migrated_states(): + with session_scope(hass=hass, read_only=True) as session: + states = ( + session.query( + States.state, + States.metadata_id, + States.last_updated_ts, + StatesMeta.entity_id, + ) + .outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id) + .all() + ) + assert len(states) == 1002 + result = {} + for state in states: + result.setdefault(state.entity_id, []).append( + { + "state_id": state.entity_id, + "last_updated_ts": state.last_updated_ts, + "state": state.state, + } + ) + return result + + states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states) + assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000 + assert len(states_by_entity_id["sensor.one"]) == 2 + + +@pytest.mark.parametrize("enable_migrate_event_type_ids", [True]) +async def test_migrate_null_event_type_ids( + async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant +) -> None: + """Test we can migrate event_types to the EventTypes table when the event_type is NULL.""" + instance = await async_setup_recorder_instance(hass) + await async_wait_recording_done(hass) + + def _insert_events(): + with session_scope(hass=hass) as session: + session.add( + Events( + event_type="event_type_one", + origin_idx=0, + time_fired_ts=1.452529, + ), + ) + session.add_all( + Events( + event_type=None, + origin_idx=0, + time_fired_ts=time + 1.452529, + ) + for time in range(1000) + ) + session.add( + Events( + event_type="event_type_one", + origin_idx=0, + time_fired_ts=2.452529, + ), + ) + + await instance.async_add_executor_job(_insert_events) + + await async_wait_recording_done(hass) + # This is a threadsafe way to add a task to the recorder + + instance.queue_task(EventTypeIDMigrationTask()) + await async_recorder_block_till_done(hass) + await async_recorder_block_till_done(hass) + + def _fetch_migrated_events(): + with session_scope(hass=hass, read_only=True) as session: + events = ( + 
session.query(Events.event_id, Events.time_fired, EventTypes.event_type)
+                .filter(
+                    Events.event_type_id.in_(
+                        select_event_type_ids(
+                            (
+                                "event_type_one",
+                                migration._EMPTY_EVENT_TYPE,
+                            )
+                        )
+                    )
+                )
+                .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
+                .all()
+            )
+            assert len(events) == 1002
+            result = {}
+            for event in events:
+                result.setdefault(event.event_type, []).append(
+                    {
+                        "event_id": event.event_id,
+                        "time_fired": event.time_fired,
+                        "event_type": event.event_type,
+                    }
+                )
+            return result
+
+    events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
+    assert len(events_by_type["event_type_one"]) == 2
+    assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000

From fc4e8e5e7b66dfddc757f6024b9a8253c6196d43 Mon Sep 17 00:00:00 2001
From: Shay Levy
Date: Wed, 19 Apr 2023 20:26:45 +0300
Subject: [PATCH 07/16] Bump aioshelly to 5.3.2 (#91679)

---
 homeassistant/components/shelly/manifest.json | 2 +-
 requirements_all.txt                          | 2 +-
 requirements_test_all.txt                     | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json
index de085925549b9b..39a1427346dac7 100644
--- a/homeassistant/components/shelly/manifest.json
+++ b/homeassistant/components/shelly/manifest.json
@@ -9,7 +9,7 @@
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
   "quality_scale": "platinum",
-  "requirements": ["aioshelly==5.3.1"],
+  "requirements": ["aioshelly==5.3.2"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
diff --git a/requirements_all.txt b/requirements_all.txt
index e61c2e922d5a8e..05dc2280c153fa 100644
--- a/requirements_all.txt
+++ b/requirements_all.txt
@@ -267,7 +267,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0
 
 # homeassistant.components.shelly
-aioshelly==5.3.1
+aioshelly==5.3.2
 
 # homeassistant.components.skybell
 aioskybell==22.7.0
diff --git a/requirements_test_all.txt b/requirements_test_all.txt
index 2434743e22265b..7cdc39abba9154 100644
--- a/requirements_test_all.txt
+++ b/requirements_test_all.txt
@@ -248,7 +248,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0
 
 # homeassistant.components.shelly
-aioshelly==5.3.1
+aioshelly==5.3.2
 
 # homeassistant.components.skybell
 aioskybell==22.7.0

From 591ffe23400b6620efc4615fe682a4d0fd6d8568 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Wed, 19 Apr 2023 15:56:07 -1000
Subject: [PATCH 08/16] Fall back to generating a new ULID on migration if context is missing or invalid (#91704)

* Fall back to generating a new ULID on migration if context is missing
  or invalid

It was discovered that PostgreSQL will do a full scan if there is a low
cardinality on the index because of missing context ids.
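To illustrate the idea (a minimal sketch, not part of the patch itself, using
the `ulid_at_time` and `ulid_to_bytes` helpers from
`homeassistant.util.ulid` that the diff below imports), a time-based ULID
keeps its leading six bytes tied to the row timestamp, so rows that
previously shared an all-zero or invalid context id now get distinct,
well-distributed index values:

    from time import time

    from homeassistant.util.ulid import ulid_at_time, ulid_to_bytes

    # Build a 16-byte ULID whose first 48 bits encode the millisecond timestamp
    ulid_bytes = ulid_to_bytes(ulid_at_time(time()))
    assert len(ulid_bytes) == 16  # 6 timestamp bytes + 10 random bytes
    # The timestamp can be recovered from the leading bytes
    timestamp_ms = int.from_bytes(ulid_bytes[:6], "big")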
We will now generate a ULID for the timestamp of the row if the context data is missing or invalid fixes #91514 * tests * tweak * tweak * preen --- .../components/recorder/migration.py | 19 ++-- homeassistant/components/recorder/queries.py | 2 + .../recorder/test_migration_from_schema_32.py | 93 ++++++++++++++----- 3 files changed, 83 insertions(+), 31 deletions(-) diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 8b50b419f1b330..c487f0b70d79bb 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -6,6 +6,7 @@ from dataclasses import dataclass, replace as dataclass_replace from datetime import timedelta import logging +from time import time from typing import TYPE_CHECKING, cast from uuid import UUID @@ -26,7 +27,7 @@ from homeassistant.core import HomeAssistant from homeassistant.util.enum import try_parse_enum -from homeassistant.util.ulid import ulid_to_bytes +from homeassistant.util.ulid import ulid_at_time, ulid_to_bytes from .auto_repairs.events.schema import ( correct_db_schema as events_correct_db_schema, @@ -92,7 +93,6 @@ from . import Recorder LIVE_MIGRATION_MIN_SCHEMA_VERSION = 0 -_EMPTY_CONTEXT_ID = b"\x00" * 16 _EMPTY_ENTITY_ID = "missing.entity_id" _EMPTY_EVENT_TYPE = "missing_event_type" @@ -1370,6 +1370,11 @@ def _context_id_to_bytes(context_id: str | None) -> bytes | None: return None +def _generate_ulid_bytes_at_time(timestamp: float | None) -> bytes: + """Generate a ulid with a specific timestamp.""" + return ulid_to_bytes(ulid_at_time(timestamp or time())) + + @retryable_database_job("migrate states context_ids to binary format") def migrate_states_context_ids(instance: Recorder) -> bool: """Migrate states context_ids to use binary format.""" @@ -1384,13 +1389,14 @@ def migrate_states_context_ids(instance: Recorder) -> bool: { "state_id": state_id, "context_id": None, - "context_id_bin": _to_bytes(context_id) or _EMPTY_CONTEXT_ID, + "context_id_bin": _to_bytes(context_id) + or _generate_ulid_bytes_at_time(last_updated_ts), "context_user_id": None, "context_user_id_bin": _to_bytes(context_user_id), "context_parent_id": None, "context_parent_id_bin": _to_bytes(context_parent_id), } - for state_id, context_id, context_user_id, context_parent_id in states + for state_id, last_updated_ts, context_id, context_user_id, context_parent_id in states ], ) # If there is more work to do return False @@ -1418,13 +1424,14 @@ def migrate_events_context_ids(instance: Recorder) -> bool: { "event_id": event_id, "context_id": None, - "context_id_bin": _to_bytes(context_id) or _EMPTY_CONTEXT_ID, + "context_id_bin": _to_bytes(context_id) + or _generate_ulid_bytes_at_time(time_fired_ts), "context_user_id": None, "context_user_id_bin": _to_bytes(context_user_id), "context_parent_id": None, "context_parent_id_bin": _to_bytes(context_parent_id), } - for event_id, context_id, context_user_id, context_parent_id in events + for event_id, time_fired_ts, context_id, context_user_id, context_parent_id in events ], ) # If there is more work to do return False diff --git a/homeassistant/components/recorder/queries.py b/homeassistant/components/recorder/queries.py index 454c71f6dc574e..f8a1b769d87b88 100644 --- a/homeassistant/components/recorder/queries.py +++ b/homeassistant/components/recorder/queries.py @@ -690,6 +690,7 @@ def find_events_context_ids_to_migrate() -> StatementLambdaElement: return lambda_stmt( lambda: select( Events.event_id, + Events.time_fired_ts, 
Events.context_id, Events.context_user_id, Events.context_parent_id, @@ -788,6 +789,7 @@ def find_states_context_ids_to_migrate() -> StatementLambdaElement: return lambda_stmt( lambda: select( States.state_id, + States.last_updated_ts, States.context_id, States.context_user_id, States.context_parent_id, diff --git a/tests/components/recorder/test_migration_from_schema_32.py b/tests/components/recorder/test_migration_from_schema_32.py index 01c086e119a803..f76cf3180087ca 100644 --- a/tests/components/recorder/test_migration_from_schema_32.py +++ b/tests/components/recorder/test_migration_from_schema_32.py @@ -5,6 +5,7 @@ from unittest.mock import patch import uuid +from freezegun import freeze_time import pytest from sqlalchemy import create_engine, inspect from sqlalchemy.orm import Session @@ -28,7 +29,7 @@ from homeassistant.components.recorder.util import session_scope from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from homeassistant.util.ulid import bytes_to_ulid +from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes from .common import async_recorder_block_till_done, async_wait_recording_done @@ -115,7 +116,7 @@ def _insert_events(): event_data=None, origin_idx=0, time_fired=None, - time_fired_ts=1677721632.452529, + time_fired_ts=1877721632.452529, context_id=uuid_hex, context_id_bin=None, context_user_id=None, @@ -128,7 +129,7 @@ def _insert_events(): event_data=None, origin_idx=0, time_fired=None, - time_fired_ts=1677721632.552529, + time_fired_ts=1877721632.552529, context_id=None, context_id_bin=None, context_user_id=None, @@ -141,7 +142,7 @@ def _insert_events(): event_data=None, origin_idx=0, time_fired=None, - time_fired_ts=1677721632.552529, + time_fired_ts=1877721632.552529, context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV", context_id_bin=None, context_user_id="9400facee45711eaa9308bfd3d19e474", @@ -154,7 +155,7 @@ def _insert_events(): event_data=None, origin_idx=0, time_fired=None, - time_fired_ts=1677721632.552529, + time_fired_ts=1877721632.552529, context_id="invalid", context_id_bin=None, context_user_id=None, @@ -167,7 +168,20 @@ def _insert_events(): event_data=None, origin_idx=0, time_fired=None, - time_fired_ts=1677721632.552529, + time_fired_ts=1277721632.552529, + context_id="adapt_lgt:b'5Cf*':interval:b'0R'", + context_id_bin=None, + context_user_id=None, + context_user_id_bin=None, + context_parent_id=None, + context_parent_id_bin=None, + ), + Events( + event_type="event_with_garbage_context_id_no_time_fired_ts", + event_data=None, + origin_idx=0, + time_fired=None, + time_fired_ts=None, context_id="adapt_lgt:b'5Cf*':interval:b'0R'", context_id_bin=None, context_user_id=None, @@ -181,9 +195,12 @@ def _insert_events(): await instance.async_add_executor_job(_insert_events) await async_wait_recording_done(hass) - # This is a threadsafe way to add a task to the recorder - instance.queue_task(EventsContextIDMigrationTask()) - await async_recorder_block_till_done(hass) + now = dt_util.utcnow() + expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6] + with freeze_time(now): + # This is a threadsafe way to add a task to the recorder + instance.queue_task(EventsContextIDMigrationTask()) + await async_recorder_block_till_done(hass) def _object_as_dict(obj): return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs} @@ -200,12 +217,13 @@ def _fetch_migrated_events(): "ulid_context_id_event", "invalid_context_id_event", "garbage_context_id_event", + 
"event_with_garbage_context_id_no_time_fired_ts", ] ) ) .all() ) - assert len(events) == 5 + assert len(events) == 6 return {event.event_type: _object_as_dict(event) for event in events} events_by_type = await instance.async_add_executor_job(_fetch_migrated_events) @@ -222,7 +240,9 @@ def _fetch_migrated_events(): assert empty_context_id_event["context_id"] is None assert empty_context_id_event["context_user_id"] is None assert empty_context_id_event["context_parent_id"] is None - assert empty_context_id_event["context_id_bin"] == b"\x00" * 16 + assert empty_context_id_event["context_id_bin"].startswith( + b"\x01\xb50\xeeO(" + ) # 6 bytes of timestamp + random assert empty_context_id_event["context_user_id_bin"] is None assert empty_context_id_event["context_parent_id_bin"] is None @@ -247,7 +267,9 @@ def _fetch_migrated_events(): assert invalid_context_id_event["context_id"] is None assert invalid_context_id_event["context_user_id"] is None assert invalid_context_id_event["context_parent_id"] is None - assert invalid_context_id_event["context_id_bin"] == b"\x00" * 16 + assert invalid_context_id_event["context_id_bin"].startswith( + b"\x01\xb50\xeeO(" + ) # 6 bytes of timestamp + random assert invalid_context_id_event["context_user_id_bin"] is None assert invalid_context_id_event["context_parent_id_bin"] is None @@ -255,10 +277,26 @@ def _fetch_migrated_events(): assert garbage_context_id_event["context_id"] is None assert garbage_context_id_event["context_user_id"] is None assert garbage_context_id_event["context_parent_id"] is None - assert garbage_context_id_event["context_id_bin"] == b"\x00" * 16 + assert garbage_context_id_event["context_id_bin"].startswith( + b"\x01)~$\xdf(" + ) # 6 bytes of timestamp + random assert garbage_context_id_event["context_user_id_bin"] is None assert garbage_context_id_event["context_parent_id_bin"] is None + event_with_garbage_context_id_no_time_fired_ts = events_by_type[ + "event_with_garbage_context_id_no_time_fired_ts" + ] + assert event_with_garbage_context_id_no_time_fired_ts["context_id"] is None + assert event_with_garbage_context_id_no_time_fired_ts["context_user_id"] is None + assert event_with_garbage_context_id_no_time_fired_ts["context_parent_id"] is None + assert event_with_garbage_context_id_no_time_fired_ts["context_id_bin"].startswith( + expected_ulid_fallback_start + ) # 6 bytes of timestamp + random + assert event_with_garbage_context_id_no_time_fired_ts["context_user_id_bin"] is None + assert ( + event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None + ) + @pytest.mark.parametrize("enable_migrate_context_ids", [True]) async def test_migrate_states_context_ids( @@ -272,13 +310,13 @@ async def test_migrate_states_context_ids( uuid_hex = test_uuid.hex uuid_bin = test_uuid.bytes - def _insert_events(): + def _insert_states(): with session_scope(hass=hass) as session: session.add_all( ( States( entity_id="state.old_uuid_context_id", - last_updated_ts=1677721632.452529, + last_updated_ts=1477721632.452529, context_id=uuid_hex, context_id_bin=None, context_user_id=None, @@ -288,7 +326,7 @@ def _insert_events(): ), States( entity_id="state.empty_context_id", - last_updated_ts=1677721632.552529, + last_updated_ts=1477721632.552529, context_id=None, context_id_bin=None, context_user_id=None, @@ -298,7 +336,7 @@ def _insert_events(): ), States( entity_id="state.ulid_context_id", - last_updated_ts=1677721632.552529, + last_updated_ts=1477721632.552529, context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV", context_id_bin=None, 
context_user_id="9400facee45711eaa9308bfd3d19e474", @@ -308,7 +346,7 @@ def _insert_events(): ), States( entity_id="state.invalid_context_id", - last_updated_ts=1677721632.552529, + last_updated_ts=1477721632.552529, context_id="invalid", context_id_bin=None, context_user_id=None, @@ -318,7 +356,7 @@ def _insert_events(): ), States( entity_id="state.garbage_context_id", - last_updated_ts=1677721632.552529, + last_updated_ts=1477721632.552529, context_id="adapt_lgt:b'5Cf*':interval:b'0R'", context_id_bin=None, context_user_id=None, @@ -328,7 +366,7 @@ def _insert_events(): ), States( entity_id="state.human_readable_uuid_context_id", - last_updated_ts=1677721632.552529, + last_updated_ts=1477721632.552529, context_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65", context_id_bin=None, context_user_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65", @@ -339,10 +377,9 @@ def _insert_events(): ) ) - await instance.async_add_executor_job(_insert_events) + await instance.async_add_executor_job(_insert_states) await async_wait_recording_done(hass) - # This is a threadsafe way to add a task to the recorder instance.queue_task(StatesContextIDMigrationTask()) await async_recorder_block_till_done(hass) @@ -384,7 +421,9 @@ def _fetch_migrated_states(): assert empty_context_id["context_id"] is None assert empty_context_id["context_user_id"] is None assert empty_context_id["context_parent_id"] is None - assert empty_context_id["context_id_bin"] == b"\x00" * 16 + assert empty_context_id["context_id_bin"].startswith( + b"\x01X\x0f\x12\xaf(" + ) # 6 bytes of timestamp + random assert empty_context_id["context_user_id_bin"] is None assert empty_context_id["context_parent_id_bin"] is None @@ -408,7 +447,9 @@ def _fetch_migrated_states(): assert invalid_context_id["context_id"] is None assert invalid_context_id["context_user_id"] is None assert invalid_context_id["context_parent_id"] is None - assert invalid_context_id["context_id_bin"] == b"\x00" * 16 + assert invalid_context_id["context_id_bin"].startswith( + b"\x01X\x0f\x12\xaf(" + ) # 6 bytes of timestamp + random assert invalid_context_id["context_user_id_bin"] is None assert invalid_context_id["context_parent_id_bin"] is None @@ -416,7 +457,9 @@ def _fetch_migrated_states(): assert garbage_context_id["context_id"] is None assert garbage_context_id["context_user_id"] is None assert garbage_context_id["context_parent_id"] is None - assert garbage_context_id["context_id_bin"] == b"\x00" * 16 + assert garbage_context_id["context_id_bin"].startswith( + b"\x01X\x0f\x12\xaf(" + ) # 6 bytes of timestamp + random assert garbage_context_id["context_user_id_bin"] is None assert garbage_context_id["context_parent_id_bin"] is None From e19279fda5de396a56aa13129b8beac3087ad807 Mon Sep 17 00:00:00 2001 From: Teemu R Date: Thu, 20 Apr 2023 20:57:45 +0200 Subject: [PATCH 09/16] Bump python-songpal dependency (#91708) --- homeassistant/components/songpal/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/songpal/manifest.json b/homeassistant/components/songpal/manifest.json index d41cc2ad5875d3..aa1157e8d0baa2 100644 --- a/homeassistant/components/songpal/manifest.json +++ b/homeassistant/components/songpal/manifest.json @@ -7,7 +7,7 @@ "iot_class": "local_push", "loggers": ["songpal"], "quality_scale": "gold", - "requirements": ["python-songpal==0.15.1"], + "requirements": ["python-songpal==0.15.2"], "ssdp": [ { "st": "urn:schemas-sony-com:service:ScalarWebAPI:1", diff 
--git a/requirements_all.txt b/requirements_all.txt index 05dc2280c153fa..40b76ffa0dce62 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -2106,7 +2106,7 @@ python-ripple-api==0.0.3 python-smarttub==0.0.33 # homeassistant.components.songpal -python-songpal==0.15.1 +python-songpal==0.15.2 # homeassistant.components.tado python-tado==0.12.0 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 7cdc39abba9154..bb01f69d5019c6 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1508,7 +1508,7 @@ python-picnic-api==1.1.0 python-smarttub==0.0.33 # homeassistant.components.songpal -python-songpal==0.15.1 +python-songpal==0.15.2 # homeassistant.components.tado python-tado==0.12.0 From 8363183943accd8f437292e9b5af48ce1f163d91 Mon Sep 17 00:00:00 2001 From: Jan Bouwhuis Date: Fri, 21 Apr 2023 09:00:48 +0200 Subject: [PATCH 10/16] Do not wait for mqtt at startup mqtt_statestream (#91721) --- homeassistant/components/mqtt_statestream/__init__.py | 9 --------- tests/components/mqtt_statestream/test_init.py | 11 +++++++++-- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/homeassistant/components/mqtt_statestream/__init__.py b/homeassistant/components/mqtt_statestream/__init__.py index 01425737543056..aa4c2c628b4c2d 100644 --- a/homeassistant/components/mqtt_statestream/__init__.py +++ b/homeassistant/components/mqtt_statestream/__init__.py @@ -41,15 +41,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the MQTT state feed.""" - # Make sure MQTT is available and the entry is loaded - if not hass.config_entries.async_entries( - mqtt.DOMAIN - ) or not await hass.config_entries.async_wait_component( - hass.config_entries.async_entries(mqtt.DOMAIN)[0] - ): - _LOGGER.error("MQTT integration is not available") - return False - conf: ConfigType = config[DOMAIN] publish_filter = convert_include_exclude_filter(conf) base_topic: str = conf[CONF_BASE_TOPIC] diff --git a/tests/components/mqtt_statestream/test_init.py b/tests/components/mqtt_statestream/test_init.py index 130d874cc509a8..c5c91a97eeab0b 100644 --- a/tests/components/mqtt_statestream/test_init.py +++ b/tests/components/mqtt_statestream/test_init.py @@ -96,12 +96,19 @@ async def test_setup_and_stop_waits_for_ha( mqtt_mock.async_publish.assert_not_called() +@pytest.mark.xfail() async def test_startup_no_mqtt( hass: HomeAssistant, caplog: pytest.LogCaptureFixture ) -> None: """Test startup without MQTT support.""" - assert not await add_statestream(hass, base_topic="pub") - assert "MQTT integration is not available" in caplog.text + e_id = "fake.entity" + + assert await add_statestream(hass, base_topic="pub") + # Set a state of an entity + mock_state_change_event(hass, State(e_id, "on")) + await hass.async_block_till_done() + await hass.async_block_till_done() + assert "MQTT is not enabled" in caplog.text async def test_setup_succeeds_with_attributes( From 64f8059f0027941dc0bc3c545c65e82aec4fea47 Mon Sep 17 00:00:00 2001 From: Nathan Spencer Date: Thu, 20 Apr 2023 12:29:35 -0600 Subject: [PATCH 11/16] Bump pylitterbot to 2023.4.0 (#91759) --- homeassistant/components/litterrobot/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- tests/components/litterrobot/test_sensor.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/homeassistant/components/litterrobot/manifest.json b/homeassistant/components/litterrobot/manifest.json index 0b162ee2e56df0..d3dcf77f3243eb 100644 --- 
a/homeassistant/components/litterrobot/manifest.json +++ b/homeassistant/components/litterrobot/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["pylitterbot"], - "requirements": ["pylitterbot==2023.1.2"] + "requirements": ["pylitterbot==2023.4.0"] } diff --git a/requirements_all.txt b/requirements_all.txt index 40b76ffa0dce62..423219243c65ba 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1753,7 +1753,7 @@ pylibrespot-java==0.1.1 pylitejet==0.5.0 # homeassistant.components.litterrobot -pylitterbot==2023.1.2 +pylitterbot==2023.4.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.18.1 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index bb01f69d5019c6..2f9d3163cbf95a 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1269,7 +1269,7 @@ pylibrespot-java==0.1.1 pylitejet==0.5.0 # homeassistant.components.litterrobot -pylitterbot==2023.1.2 +pylitterbot==2023.4.0 # homeassistant.components.lutron_caseta pylutron-caseta==0.18.1 diff --git a/tests/components/litterrobot/test_sensor.py b/tests/components/litterrobot/test_sensor.py index 9586e7cdbfcb44..adb44d59bff2ae 100644 --- a/tests/components/litterrobot/test_sensor.py +++ b/tests/components/litterrobot/test_sensor.py @@ -101,5 +101,5 @@ async def test_feeder_robot_sensor( """Tests Feeder-Robot sensors.""" await setup_integration(hass, mock_account_with_feederrobot, PLATFORM_DOMAIN) sensor = hass.states.get("sensor.test_food_level") - assert sensor.state == "20" + assert sensor.state == "10" assert sensor.attributes["unit_of_measurement"] == PERCENTAGE From 26b28001c5a1ccff30ea5610613605a803c2acf3 Mon Sep 17 00:00:00 2001 From: Stephan Uhle Date: Fri, 21 Apr 2023 08:16:32 +0200 Subject: [PATCH 12/16] Bump pysml to 0.0.10 (#91773) --- homeassistant/components/edl21/manifest.json | 2 +- requirements_all.txt | 2 +- requirements_test_all.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/homeassistant/components/edl21/manifest.json b/homeassistant/components/edl21/manifest.json index f6363473def3d5..034d3fcae2e102 100644 --- a/homeassistant/components/edl21/manifest.json +++ b/homeassistant/components/edl21/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["sml"], - "requirements": ["pysml==0.0.9"] + "requirements": ["pysml==0.0.10"] } diff --git a/requirements_all.txt b/requirements_all.txt index 423219243c65ba..8d23f37514b672 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -1976,7 +1976,7 @@ pysmartthings==0.7.6 pysmarty==0.8 # homeassistant.components.edl21 -pysml==0.0.9 +pysml==0.0.10 # homeassistant.components.snmp pysnmplib==5.0.21 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 2f9d3163cbf95a..04292652fc5ecd 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -1438,7 +1438,7 @@ pysmartapp==0.3.3 pysmartthings==0.7.6 # homeassistant.components.edl21 -pysml==0.0.9 +pysml==0.0.10 # homeassistant.components.snmp pysnmplib==5.0.21 From 30da629285928421ecc6ef9494696b5121ddea44 Mon Sep 17 00:00:00 2001 From: Allen Porter Date: Fri, 21 Apr 2023 07:25:52 -0700 Subject: [PATCH 13/16] Relax the constraint that events must have a consistent timezone for start/end (#91788) --- homeassistant/components/calendar/__init__.py | 1 - tests/components/google/test_calendar.py | 34 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/homeassistant/components/calendar/__init__.py 
index 2445c054c6d1d2..aedfafbf368610 100644
--- a/homeassistant/components/calendar/__init__.py
+++ b/homeassistant/components/calendar/__init__.py
@@ -244,7 +244,6 @@ def _validate_rrule(value: Any) -> str:
         },
         _has_same_type("start", "end"),
         _has_timezone("start", "end"),
-        _has_consistent_timezone("start", "end"),
         _as_local_timezone("start", "end"),
         _has_min_duration("start", "end", MIN_EVENT_DURATION),
     ),
diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py
index 7d59d80687ead3..d6431700fca8bc 100644
--- a/tests/components/google/test_calendar.py
+++ b/tests/components/google/test_calendar.py
@@ -1295,3 +1295,37 @@ async def test_event_without_duration(
     assert state.attributes.get("start_time") == one_hour_from_now.strftime(
         DATE_STR_FORMAT
     )
+
+
+async def test_event_differs_timezone(
+    hass: HomeAssistant, mock_events_list_items, component_setup
+) -> None:
+    """Test a case where the event has a different start/end timezone."""
+    one_hour_from_now = dt_util.now() + datetime.timedelta(minutes=30)
+    end_event = one_hour_from_now + datetime.timedelta(hours=8)
+    event = {
+        **TEST_EVENT,
+        "start": {
+            "dateTime": one_hour_from_now.isoformat(),
+            "timeZone": "America/Regina",
+        },
+        "end": {"dateTime": end_event.isoformat(), "timeZone": "UTC"},
+    }
+    mock_events_list_items([event])
+
+    assert await component_setup()
+
+    state = hass.states.get(TEST_ENTITY)
+    assert state.name == TEST_ENTITY_NAME
+    assert state.state == STATE_OFF
+    assert dict(state.attributes) == {
+        "friendly_name": TEST_ENTITY_NAME,
+        "message": event["summary"],
+        "all_day": False,
+        "offset_reached": False,
+        "start_time": one_hour_from_now.strftime(DATE_STR_FORMAT),
+        "end_time": end_event.strftime(DATE_STR_FORMAT),
+        "location": event["location"],
+        "description": event["description"],
+        "supported_features": 3,
+    }
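With `_has_consistent_timezone` gone, an event may carry a different timezone on each endpoint, as the new test exercises; the remaining `_as_local_timezone` validator still folds both into one zone. A standalone sketch of why that is safe for aware datetimes (plain `zoneinfo`, not Home Assistant code, and the instants are made up):

```python
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

# Mirrors the test above: start in America/Regina, end expressed in UTC.
start = datetime(2023, 4, 21, 9, 30, tzinfo=ZoneInfo("America/Regina"))
end = (start + timedelta(hours=8)).astimezone(ZoneInfo("UTC"))

# Aware datetimes denote unambiguous instants, so mixed zones still compare
# and subtract correctly; normalizing both to one local zone (what the
# remaining validator does) changes the representation, not the instant.
local = ZoneInfo("America/Regina")
assert end.astimezone(local) - start.astimezone(local) == timedelta(hours=8)
assert start.astimezone(local) < end
```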
From deb55a74da85538b07e7116ab0a1d9391c00f8cd Mon Sep 17 00:00:00 2001
From: Paulus Schoutsen
Date: Fri, 21 Apr 2023 10:21:20 -0400
Subject: [PATCH 14/16] Disallow uploading files to bypass the media dirs (#91817)

---
 homeassistant/components/media_source/local_source.py | 8 +++++++-
 tests/components/media_source/test_local_source.py    | 8 +++++++-
 2 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/homeassistant/components/media_source/local_source.py b/homeassistant/components/media_source/local_source.py
index d211b878b99a85..c29794ae8d7b97 100644
--- a/homeassistant/components/media_source/local_source.py
+++ b/homeassistant/components/media_source/local_source.py
@@ -48,7 +48,10 @@ def __init__(self, hass: HomeAssistant) -> None:
     @callback
     def async_full_path(self, source_dir_id: str, location: str) -> Path:
         """Return full path."""
-        return Path(self.hass.config.media_dirs[source_dir_id], location)
+        base_path = self.hass.config.media_dirs[source_dir_id]
+        full_path = Path(base_path, location)
+        full_path.relative_to(base_path)
+        return full_path
@@ def async_parse_identifier(self, item: MediaSourceItem) -> tuple[str, str]:
         except ValueError as err:
             raise Unresolvable("Invalid path.") from err
 
+        if Path(location).is_absolute():
+            raise Unresolvable("Invalid path.")
+
         return source_dir_id, location
 
     async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
diff --git a/tests/components/media_source/test_local_source.py b/tests/components/media_source/test_local_source.py
index 585f92c7a0f1b3..cf50e9675582f5 100644
--- a/tests/components/media_source/test_local_source.py
+++ b/tests/components/media_source/test_local_source.py
@@ -132,9 +132,13 @@ async def test_upload_view(
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
     temp_dir,
+    tmpdir,
     hass_admin_user: MockUser,
 ) -> None:
     """Allow uploading media."""
+    # We need a temp dir that's not under tempdir fixture
+    extra_media_dir = tmpdir
+
     hass.config.media_dirs["another_path"] = temp_dir
 
     img = (Path(__file__).parent.parent / "image_upload/logo.png").read_bytes()
@@ -167,6 +171,8 @@ def get_file(name):
         "media-source://media_source/test_dir/..",
         # Domain != media_source
         "media-source://nest/test_dir/.",
+        # Other directory
+        f"media-source://media_source/another_path///{extra_media_dir}/",
         # Completely something else
         "http://bla",
     ):
@@ -178,7 +184,7 @@
             },
         )
 
-        assert res.status == 400
+        assert res.status == 400, bad_id
         assert not (Path(temp_dir) / "bad-source-id.png").is_file()
 
     # Test invalid POST data
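The two new checks close the same hole from both directions: `Path()` silently discards the media base directory when a later component is absolute, and `relative_to()` rejects a result that falls outside it. A standalone sketch of the `pathlib` behavior being relied on (the paths are illustrative):

```python
from pathlib import PurePosixPath

base = PurePosixPath("/media")

# pathlib drops every component before an absolute one, which is exactly
# the upload bypass this patch closes.
assert PurePosixPath(base, "/etc/passwd") == PurePosixPath("/etc/passwd")

# relative_to() is a lexical containment check: a location under the base
# passes, anything outside raises ValueError, which the component maps to
# an "Invalid path." error.
assert PurePosixPath(base, "recordings/clip.mp4").relative_to(base) == PurePosixPath(
    "recordings/clip.mp4"
)
try:
    PurePosixPath(base, "/etc/passwd").relative_to(base)
except ValueError:
    print("rejected: outside the media dir")
```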
From f1c4605fba7229d7b75f70bf9f1ab0601bfe3a1c Mon Sep 17 00:00:00 2001
From: Paulus Schoutsen
Date: Fri, 21 Apr 2023 14:58:46 -0400
Subject: [PATCH 15/16] Bumped version to 2023.4.6

---
 homeassistant/const.py | 2 +-
 pyproject.toml         | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/homeassistant/const.py b/homeassistant/const.py
index 712e8ff0eb6c99..2fc41b74376753 100644
--- a/homeassistant/const.py
+++ b/homeassistant/const.py
@@ -8,7 +8,7 @@
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "5"
+PATCH_VERSION: Final = "6"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
diff --git a/pyproject.toml b/pyproject.toml
index b2bcdb011b5eab..26b90151610b3c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2023.4.5"
+version = "2023.4.6"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"

From d58f62cb5e59ad6a22b14b143a06370f28ad38d1 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Fri, 21 Apr 2023 18:12:21 -0500
Subject: [PATCH 16/16] Remove old migration tests that have been replaced (#91842)

These tests were moved to test_migration_from_schema_32.py in `dev` and have changed.
Remove the old tests as they are no longer correct.
---
 tests/components/recorder/test_migrate.py | 322 +---------------------
 1 file changed, 1 insertion(+), 321 deletions(-)

diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py
index b75d536d152633..c9e49697585ac4 100644
--- a/tests/components/recorder/test_migrate.py
+++ b/tests/components/recorder/test_migrate.py
@@ -6,10 +6,9 @@
 import sys
 import threading
 from unittest.mock import Mock, PropertyMock, call, patch
-import uuid
 
 import pytest
-from sqlalchemy import create_engine, inspect, text
+from sqlalchemy import create_engine, text
 from sqlalchemy.exc import (
     DatabaseError,
     InternalError,
@@ -35,15 +34,12 @@
 from homeassistant.components.recorder.tasks import (
     EntityIDMigrationTask,
     EntityIDPostMigrationTask,
-    EventsContextIDMigrationTask,
     EventTypeIDMigrationTask,
-    StatesContextIDMigrationTask,
 )
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import recorder as recorder_helper
 import homeassistant.util.dt as dt_util
-from homeassistant.util.ulid import bytes_to_ulid
 
 from .common import (
     async_recorder_block_till_done,
@@ -603,322 +599,6 @@ def test_raise_if_exception_missing_empty_cause_str() -> None:
         migration.raise_if_exception_missing_str(programming_exc, ["not present"])
 
 
-@pytest.mark.parametrize("enable_migrate_context_ids", [True])
-async def test_migrate_events_context_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
-) -> None:
-    """Test we can migrate old uuid context ids and ulid context ids to binary format."""
-    instance = await async_setup_recorder_instance(hass)
-    await async_wait_recording_done(hass)
-
-    test_uuid = uuid.uuid4()
-    uuid_hex = test_uuid.hex
-    uuid_bin = test_uuid.bytes
-
-    def _insert_events():
-        with session_scope(hass=hass) as session:
-            session.add_all(
-                (
-                    Events(
-                        event_type="old_uuid_context_id_event",
-                        event_data=None,
-                        origin_idx=0,
-                        time_fired=None,
-                        time_fired_ts=1677721632.452529,
-                        context_id=uuid_hex,
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    Events(
-                        event_type="empty_context_id_event",
-                        event_data=None,
-                        origin_idx=0,
-                        time_fired=None,
-                        time_fired_ts=1677721632.552529,
-                        context_id=None,
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    Events(
-                        event_type="ulid_context_id_event",
-                        event_data=None,
-                        origin_idx=0,
-                        time_fired=None,
-                        time_fired_ts=1677721632.552529,
-                        context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
-                        context_id_bin=None,
-                        context_user_id="9400facee45711eaa9308bfd3d19e474",
-                        context_user_id_bin=None,
-                        context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
-                        context_parent_id_bin=None,
-                    ),
-                    Events(
-                        event_type="invalid_context_id_event",
-                        event_data=None,
-                        origin_idx=0,
-                        time_fired=None,
-                        time_fired_ts=1677721632.552529,
-                        context_id="invalid",
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    Events(
-                        event_type="garbage_context_id_event",
-                        event_data=None,
-                        origin_idx=0,
-                        time_fired=None,
-                        time_fired_ts=1677721632.552529,
-                        context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                )
-            )
-
-    await instance.async_add_executor_job(_insert_events)
-
-    await async_wait_recording_done(hass)
-    # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(EventsContextIDMigrationTask())
-    await async_recorder_block_till_done(hass)
-
-    def _object_as_dict(obj):
-        return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
-
-    def _fetch_migrated_events():
-        with session_scope(hass=hass) as session:
-            events = (
-                session.query(Events)
-                .filter(
-                    Events.event_type.in_(
-                        [
-                            "old_uuid_context_id_event",
-                            "empty_context_id_event",
-                            "ulid_context_id_event",
-                            "invalid_context_id_event",
-                            "garbage_context_id_event",
-                        ]
-                    )
-                )
-                .all()
-            )
-            assert len(events) == 5
-            return {event.event_type: _object_as_dict(event) for event in events}
-
-    events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
-
-    old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"]
-    assert old_uuid_context_id_event["context_id"] is None
-    assert old_uuid_context_id_event["context_user_id"] is None
-    assert old_uuid_context_id_event["context_parent_id"] is None
-    assert old_uuid_context_id_event["context_id_bin"] == uuid_bin
-    assert old_uuid_context_id_event["context_user_id_bin"] is None
-    assert old_uuid_context_id_event["context_parent_id_bin"] is None
-
-    empty_context_id_event = events_by_type["empty_context_id_event"]
-    assert empty_context_id_event["context_id"] is None
-    assert empty_context_id_event["context_user_id"] is None
-    assert empty_context_id_event["context_parent_id"] is None
-    assert empty_context_id_event["context_id_bin"] == b"\x00" * 16
-    assert empty_context_id_event["context_user_id_bin"] is None
-    assert empty_context_id_event["context_parent_id_bin"] is None
-
-    ulid_context_id_event = events_by_type["ulid_context_id_event"]
-    assert ulid_context_id_event["context_id"] is None
-    assert ulid_context_id_event["context_user_id"] is None
-    assert ulid_context_id_event["context_parent_id"] is None
-    assert (
-        bytes_to_ulid(ulid_context_id_event["context_id_bin"])
-        == "01ARZ3NDEKTSV4RRFFQ69G5FAV"
-    )
-    assert (
-        ulid_context_id_event["context_user_id_bin"]
-        == b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
-    )
-    assert (
-        bytes_to_ulid(ulid_context_id_event["context_parent_id_bin"])
-        == "01ARZ3NDEKTSV4RRFFQ69G5FA2"
-    )
-
-    invalid_context_id_event = events_by_type["invalid_context_id_event"]
-    assert invalid_context_id_event["context_id"] is None
-    assert invalid_context_id_event["context_user_id"] is None
-    assert invalid_context_id_event["context_parent_id"] is None
-    assert invalid_context_id_event["context_id_bin"] == b"\x00" * 16
-    assert invalid_context_id_event["context_user_id_bin"] is None
-    assert invalid_context_id_event["context_parent_id_bin"] is None
-
-    garbage_context_id_event = events_by_type["garbage_context_id_event"]
-    assert garbage_context_id_event["context_id"] is None
-    assert garbage_context_id_event["context_user_id"] is None
-    assert garbage_context_id_event["context_parent_id"] is None
-    assert garbage_context_id_event["context_id_bin"] == b"\x00" * 16
-    assert garbage_context_id_event["context_user_id_bin"] is None
-    assert garbage_context_id_event["context_parent_id_bin"] is None
-
-
-@pytest.mark.parametrize("enable_migrate_context_ids", [True])
-async def test_migrate_states_context_ids(
-    async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
-) -> None:
-    """Test we can migrate old uuid context ids and ulid context ids to binary format."""
-    instance = await async_setup_recorder_instance(hass)
-    await async_wait_recording_done(hass)
-
-    test_uuid = uuid.uuid4()
-    uuid_hex = test_uuid.hex
-    uuid_bin = test_uuid.bytes
-
-    def _insert_events():
-        with session_scope(hass=hass) as session:
-            session.add_all(
-                (
-                    States(
-                        entity_id="state.old_uuid_context_id",
-                        last_updated_ts=1677721632.452529,
-                        context_id=uuid_hex,
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    States(
-                        entity_id="state.empty_context_id",
-                        last_updated_ts=1677721632.552529,
-                        context_id=None,
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    States(
-                        entity_id="state.ulid_context_id",
-                        last_updated_ts=1677721632.552529,
-                        context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
-                        context_id_bin=None,
-                        context_user_id="9400facee45711eaa9308bfd3d19e474",
-                        context_user_id_bin=None,
-                        context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
-                        context_parent_id_bin=None,
-                    ),
-                    States(
-                        entity_id="state.invalid_context_id",
-                        last_updated_ts=1677721632.552529,
-                        context_id="invalid",
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                    States(
-                        entity_id="state.garbage_context_id",
-                        last_updated_ts=1677721632.552529,
-                        context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
-                        context_id_bin=None,
-                        context_user_id=None,
-                        context_user_id_bin=None,
-                        context_parent_id=None,
-                        context_parent_id_bin=None,
-                    ),
-                )
-            )
-
-    await instance.async_add_executor_job(_insert_events)
-
-    await async_wait_recording_done(hass)
-    # This is a threadsafe way to add a task to the recorder
-    instance.queue_task(StatesContextIDMigrationTask())
-    await async_recorder_block_till_done(hass)
-
-    def _object_as_dict(obj):
-        return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
-
-    def _fetch_migrated_states():
-        with session_scope(hass=hass) as session:
-            events = (
-                session.query(States)
-                .filter(
-                    States.entity_id.in_(
-                        [
-                            "state.old_uuid_context_id",
-                            "state.empty_context_id",
-                            "state.ulid_context_id",
-                            "state.invalid_context_id",
-                            "state.garbage_context_id",
-                        ]
-                    )
-                )
-                .all()
-            )
-            assert len(events) == 5
-            return {state.entity_id: _object_as_dict(state) for state in events}
-
-    states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
-
-    old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"]
-    assert old_uuid_context_id["context_id"] is None
-    assert old_uuid_context_id["context_user_id"] is None
-    assert old_uuid_context_id["context_parent_id"] is None
-    assert old_uuid_context_id["context_id_bin"] == uuid_bin
-    assert old_uuid_context_id["context_user_id_bin"] is None
-    assert old_uuid_context_id["context_parent_id_bin"] is None
-
-    empty_context_id = states_by_entity_id["state.empty_context_id"]
-    assert empty_context_id["context_id"] is None
-    assert empty_context_id["context_user_id"] is None
-    assert empty_context_id["context_parent_id"] is None
-    assert empty_context_id["context_id_bin"] == b"\x00" * 16
-    assert empty_context_id["context_user_id_bin"] is None
-    assert empty_context_id["context_parent_id_bin"] is None
-
-    ulid_context_id = states_by_entity_id["state.ulid_context_id"]
-    assert ulid_context_id["context_id"] is None
-    assert ulid_context_id["context_user_id"] is None
-    assert ulid_context_id["context_parent_id"] is None
-    assert (
-        bytes_to_ulid(ulid_context_id["context_id_bin"]) == "01ARZ3NDEKTSV4RRFFQ69G5FAV"
-    )
-    assert (
-        ulid_context_id["context_user_id_bin"]
-        == b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
-    )
-    assert (
-        bytes_to_ulid(ulid_context_id["context_parent_id_bin"])
-        == "01ARZ3NDEKTSV4RRFFQ69G5FA2"
-    )
-
-    invalid_context_id = states_by_entity_id["state.invalid_context_id"]
-    assert invalid_context_id["context_id"] is None
-    assert invalid_context_id["context_user_id"] is None
-    assert invalid_context_id["context_parent_id"] is None
-    assert invalid_context_id["context_id_bin"] == b"\x00" * 16
-    assert invalid_context_id["context_user_id_bin"] is None
-    assert invalid_context_id["context_parent_id_bin"] is None
-
-    garbage_context_id = states_by_entity_id["state.garbage_context_id"]
-    assert garbage_context_id["context_id"] is None
-    assert garbage_context_id["context_user_id"] is None
-    assert garbage_context_id["context_parent_id"] is None
-    assert garbage_context_id["context_id_bin"] == b"\x00" * 16
-    assert garbage_context_id["context_user_id_bin"] is None
-    assert garbage_context_id["context_parent_id_bin"] is None
-
-
 @pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
 async def test_migrate_event_type_ids(
     async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant