commit 4d5e809e9b
parent 71af693569
Author: Erik
Date: 2025-04-30 15:51:23 +02:00


@@ -642,6 +642,7 @@ def mock_registry(
         registry.entities[key] = entry
     hass.data[er.DATA_REGISTRY] = registry
-    er.async_get.cache_clear()
+    with suppress(AttributeError):
+        er.async_get.cache_clear()
     return registry
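
The same one-line guard recurs in every hunk below: cache_clear() only exists when async_get is an lru_cache-wrapped callable, so wrapping the call in contextlib.suppress(AttributeError) lets the mock helper work whether or not the cached variant is in place. A minimal sketch of the pattern, with get_registry and reset_helper as hypothetical names standing in for the real helpers:

    from contextlib import suppress
    from functools import lru_cache

    # Hypothetical stand-in for a helper such as er.async_get: depending on
    # the version it may or may not be an lru_cache-wrapped callable.
    @lru_cache(maxsize=1)
    def get_registry() -> dict:
        return {}

    def reset_helper(helper) -> None:
        # cache_clear() is only defined on lru_cache wrappers; suppressing
        # AttributeError turns the reset into a no-op for plain functions.
        with suppress(AttributeError):
            helper.cache_clear()

    reset_helper(get_registry)  # clears the memoized result
    reset_helper(dict)          # plain callable: AttributeError is suppressed
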
@@ -694,6 +695,7 @@ def mock_area_registry(
         registry.areas[key] = entry
     hass.data[ar.DATA_REGISTRY] = registry
-    ar.async_get.cache_clear()
+    with suppress(AttributeError):
+        ar.async_get.cache_clear()
     return registry
@@ -723,6 +725,7 @@ def mock_device_registry(
     registry.deleted_devices = dr.DeviceRegistryItems()
     hass.data[dr.DATA_REGISTRY] = registry
-    dr.async_get.cache_clear()
+    with suppress(AttributeError):
+        dr.async_get.cache_clear()
     return registry
@@ -1307,6 +1310,7 @@ def mock_restore_cache(hass: HomeAssistant, states: Sequence[State]) -> None:
     _LOGGER.debug("Restore cache: %s", data.last_states)
     assert len(data.last_states) == len(states), f"Duplicate entity_id? {states}"
-    rs.async_get.cache_clear()
+    with suppress(AttributeError):
+        rs.async_get.cache_clear()
     hass.data[key] = data
@@ -1335,6 +1339,7 @@ def mock_restore_cache_with_extra_data(
     _LOGGER.debug("Restore cache: %s", data.last_states)
     assert len(data.last_states) == len(states), f"Duplicate entity_id? {states}"
-    rs.async_get.cache_clear()
+    with suppress(AttributeError):
+        rs.async_get.cache_clear()
     hass.data[key] = data
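
Why the cache is cleared at all: if async_get memoizes its result, a test that swaps a fresh mock registry into hass.data would otherwise keep receiving the registry cached by an earlier call. A small self-contained demonstration of that failure mode (the names here are illustrative, not Home Assistant APIs):

    from functools import lru_cache

    data = {"registry": "old"}

    @lru_cache(maxsize=1)
    def cached_lookup() -> str:
        # Memoizes whatever the mapping held on the first call.
        return data["registry"]

    assert cached_lookup() == "old"
    data["registry"] = "new"          # analogous to hass.data[...] = registry
    assert cached_lookup() == "old"   # stale: the memoized value survives the swap
    cached_lookup.cache_clear()       # the step the mock helpers perform
    assert cached_lookup() == "new"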