Mirror of https://github.com/home-assistant/core.git (synced 2025-07-29 18:28:14 +02:00)

Merge branch 'dev' into epenet-20250527-1510
@@ -3,7 +3,8 @@
 from __future__ import annotations

 import asyncio
-from datetime import timedelta
+from dataclasses import dataclass
+from datetime import datetime, timedelta
 import logging
 import socket

@@ -26,8 +27,18 @@ from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE
 _LOGGER = logging.getLogger(__name__)

+type CloudflareConfigEntry = ConfigEntry[CloudflareRuntimeData]
+
-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+
+@dataclass
+class CloudflareRuntimeData:
+    """Runtime data for Cloudflare config entry."""
+
+    client: pycfdns.Client
+    dns_zone: pycfdns.ZoneModel
+
+
+async def async_setup_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
     """Set up Cloudflare from a config entry."""
     session = async_get_clientsession(hass)
     client = pycfdns.Client(
@@ -45,12 +56,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     except pycfdns.ComunicationException as error:
         raise ConfigEntryNotReady from error

-    async def update_records(now):
+    entry.runtime_data = CloudflareRuntimeData(client, dns_zone)
+
+    async def update_records(now: datetime) -> None:
         """Set up recurring update."""
         try:
-            await _async_update_cloudflare(
-                hass, client, dns_zone, entry.data[CONF_RECORDS]
-            )
+            await _async_update_cloudflare(hass, entry)
         except (
             pycfdns.AuthenticationException,
             pycfdns.ComunicationException,
@@ -60,9 +71,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     async def update_records_service(call: ServiceCall) -> None:
         """Set up service for manual trigger."""
         try:
-            await _async_update_cloudflare(
-                hass, client, dns_zone, entry.data[CONF_RECORDS]
-            )
+            await _async_update_cloudflare(hass, entry)
         except (
             pycfdns.AuthenticationException,
             pycfdns.ComunicationException,
@@ -79,7 +88,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
    """Unload Cloudflare config entry."""

    return True
@@ -87,10 +96,12 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:

 async def _async_update_cloudflare(
     hass: HomeAssistant,
-    client: pycfdns.Client,
-    dns_zone: pycfdns.ZoneModel,
-    target_records: list[str],
+    entry: CloudflareConfigEntry,
 ) -> None:
+    client = entry.runtime_data.client
+    dns_zone = entry.runtime_data.dns_zone
+    target_records: list[str] = entry.data[CONF_RECORDS]
+
     _LOGGER.debug("Starting update for zone %s", dns_zone["name"])

     records = await client.list_dns_records(zone_id=dns_zone["id"], type="A")
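Note: the refactor above moves per-entry state onto entry.runtime_data, typed through the ConfigEntry[CloudflareRuntimeData] alias. A minimal sketch of how a helper can then consume the typed entry without going through hass.data (the function name here is illustrative, not part of the diff):

async def _example_consumer(hass: HomeAssistant, entry: CloudflareConfigEntry) -> None:
    # runtime_data is typed as CloudflareRuntimeData, so these attribute
    # accesses are checked by the type checker.
    client = entry.runtime_data.client
    dns_zone = entry.runtime_data.dns_zone
    await client.list_dns_records(zone_id=dns_zone["id"], type="A")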
@@ -226,6 +226,7 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
     _static_info: _InfoT
     _state: _StateT
     _has_state: bool
+    unique_id: str

     def __init__(
         self,
@@ -78,7 +78,7 @@ class EsphomeMediaPlayer(
         if self._static_info.supports_pause:
             flags |= MediaPlayerEntityFeature.PAUSE | MediaPlayerEntityFeature.PLAY
         self._attr_supported_features = flags
-        self._entry_data.media_player_formats[static_info.unique_id] = cast(
+        self._entry_data.media_player_formats[self.unique_id] = cast(
             MediaPlayerInfo, static_info
         ).supported_formats

@@ -114,9 +114,8 @@ class EsphomeMediaPlayer(
         media_id = async_process_play_media_url(self.hass, media_id)
         announcement = kwargs.get(ATTR_MEDIA_ANNOUNCE)
         bypass_proxy = kwargs.get(ATTR_MEDIA_EXTRA, {}).get(ATTR_BYPASS_PROXY)

         supported_formats: list[MediaPlayerSupportedFormat] | None = (
-            self._entry_data.media_player_formats.get(self._static_info.unique_id)
+            self._entry_data.media_player_formats.get(self.unique_id)
         )

         if (
@@ -139,7 +138,7 @@ class EsphomeMediaPlayer(
     async def async_will_remove_from_hass(self) -> None:
         """Handle entity being removed."""
         await super().async_will_remove_from_hass()
-        self._entry_data.media_player_formats.pop(self.entity_id, None)
+        self._entry_data.media_player_formats.pop(self.unique_id, None)

     def _get_proxy_url(
         self,
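The media player hunks fix a key mismatch: formats were stored under static_info.unique_id but removed under self.entity_id, so stale entries could linger after entity removal. Keying store, lookup, and cleanup by the same self.unique_id restores the invariant; a toy sketch of that invariant (names illustrative, not from the diff):

formats: dict[str, list[str]] = {}

def store(unique_id: str, supported: list[str]) -> None:
    formats[unique_id] = supported

def cleanup(unique_id: str) -> None:
    # Must use the same key as store(); popping by a different id leaks entries.
    formats.pop(unique_id, None)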
@@ -2,21 +2,13 @@

 from __future__ import annotations

-import dataclasses
-
 import aiohttp
 from gassist_text import TextAssistant
 from google.oauth2.credentials import Credentials
-import voluptuous as vol

 from homeassistant.components import conversation
 from homeassistant.const import CONF_ACCESS_TOKEN, CONF_NAME, Platform
-from homeassistant.core import (
-    HomeAssistant,
-    ServiceCall,
-    ServiceResponse,
-    SupportsResponse,
-)
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv, discovery, intent
 from homeassistant.helpers.config_entry_oauth2_flow import (
@@ -31,21 +23,9 @@ from .helpers import (
     GoogleAssistantSDKConfigEntry,
     GoogleAssistantSDKRuntimeData,
     InMemoryStorage,
-    async_send_text_commands,
     best_matching_language_code,
 )
-
-SERVICE_SEND_TEXT_COMMAND = "send_text_command"
-SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND = "command"
-SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER = "media_player"
-SERVICE_SEND_TEXT_COMMAND_SCHEMA = vol.All(
-    {
-        vol.Required(SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND): vol.All(
-            cv.ensure_list, [vol.All(str, vol.Length(min=1))]
-        ),
-        vol.Optional(SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER): cv.comp_entity_ids,
-    },
-)
+from .services import async_setup_services

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

@@ -58,6 +38,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         )
     )

+    async_setup_services(hass)
+
     return True


@@ -81,8 +63,6 @@ async def async_setup_entry(
     mem_storage = InMemoryStorage(hass)
     hass.http.register_view(GoogleAssistantSDKAudioView(mem_storage))

-    await async_setup_service(hass)
-
     entry.runtime_data = GoogleAssistantSDKRuntimeData(
         session=session, mem_storage=mem_storage
     )
@@ -105,36 +85,6 @@ async def async_unload_entry(
     return True


-async def async_setup_service(hass: HomeAssistant) -> None:
-    """Add the services for Google Assistant SDK."""
-
-    async def send_text_command(call: ServiceCall) -> ServiceResponse:
-        """Send a text command to Google Assistant SDK."""
-        commands: list[str] = call.data[SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND]
-        media_players: list[str] | None = call.data.get(
-            SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER
-        )
-        command_response_list = await async_send_text_commands(
-            hass, commands, media_players
-        )
-        if call.return_response:
-            return {
-                "responses": [
-                    dataclasses.asdict(command_response)
-                    for command_response in command_response_list
-                ]
-            }
-        return None
-
-    hass.services.async_register(
-        DOMAIN,
-        SERVICE_SEND_TEXT_COMMAND,
-        send_text_command,
-        schema=SERVICE_SEND_TEXT_COMMAND_SCHEMA,
-        supports_response=SupportsResponse.OPTIONAL,
-    )
-
-
 class GoogleAssistantConversationAgent(conversation.AbstractConversationAgent):
     """Google Assistant SDK conversation agent."""
homeassistant/components/google_assistant_sdk/services.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+"""Support for Google Assistant SDK."""
+
+from __future__ import annotations
+
+import dataclasses
+
+import voluptuous as vol
+
+from homeassistant.core import (
+    HomeAssistant,
+    ServiceCall,
+    ServiceResponse,
+    SupportsResponse,
+)
+from homeassistant.helpers import config_validation as cv
+
+from .const import DOMAIN
+from .helpers import async_send_text_commands
+
+SERVICE_SEND_TEXT_COMMAND = "send_text_command"
+SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND = "command"
+SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER = "media_player"
+SERVICE_SEND_TEXT_COMMAND_SCHEMA = vol.All(
+    {
+        vol.Required(SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND): vol.All(
+            cv.ensure_list, [vol.All(str, vol.Length(min=1))]
+        ),
+        vol.Optional(SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER): cv.comp_entity_ids,
+    },
+)
+
+
+async def _send_text_command(call: ServiceCall) -> ServiceResponse:
+    """Send a text command to Google Assistant SDK."""
+    commands: list[str] = call.data[SERVICE_SEND_TEXT_COMMAND_FIELD_COMMAND]
+    media_players: list[str] | None = call.data.get(
+        SERVICE_SEND_TEXT_COMMAND_FIELD_MEDIA_PLAYER
+    )
+    command_response_list = await async_send_text_commands(
+        call.hass, commands, media_players
+    )
+    if call.return_response:
+        return {
+            "responses": [
+                dataclasses.asdict(command_response)
+                for command_response in command_response_list
+            ]
+        }
+    return None
+
+
+def async_setup_services(hass: HomeAssistant) -> None:
+    """Add the services for Google Assistant SDK."""
+
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_SEND_TEXT_COMMAND,
+        _send_text_command,
+        schema=SERVICE_SEND_TEXT_COMMAND_SCHEMA,
+        supports_response=SupportsResponse.OPTIONAL,
+    )
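For reference, a hedged sketch of invoking the relocated service, e.g. from a test. The service name and field names come from the constants above; the command text is made up:

response = await hass.services.async_call(
    "google_assistant_sdk",
    "send_text_command",
    {"command": ["turn on the hallway light"]},  # hypothetical command
    blocking=True,
    return_response=True,  # makes call.return_response truthy in the handler
)
# response has the shape {"responses": [...]} built by _send_text_command.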
@@ -12,3 +12,13 @@ async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationSe
         authorize_url=OAUTH2_AUTHORIZE,
         token_url=OAUTH2_TOKEN,
     )
+
+
+async def async_get_description_placeholders(hass: HomeAssistant) -> dict[str, str]:
+    """Return description placeholders for the credentials dialog."""
+    return {
+        "developer_dashboard_url": "https://developer.home-connect.com/",
+        "applications_url": "https://developer.home-connect.com/applications",
+        "register_application_url": "https://developer.home-connect.com/application/add",
+        "redirect_url": "https://my.home-assistant.io/redirect/oauth",
+    }
@@ -1,4 +1,7 @@
 {
+    "application_credentials": {
+        "description": "Login to Home Connect requires a client ID and secret. To acquire them, please follow the following steps.\n\n1. Visit the [Home Connect Developer Program website]({developer_dashboard_url}) and sign up for a development account.\n1. Enter the email of your login for the original Home Connect app under **Default Home Connect User Account for Testing** in the signup process.\n1. Go to the [Applications]({applications_url}) page and select [Register Application]({register_application_url}) and set the fields to the following values: \n\t* **Application ID**: Home Assistant (or any other name that makes sense)\n\t* **OAuth Flow**: Authorization Code Grant Flow\n\t* **Redirect URI**: {redirect_url}\n\nIn the newly created application's details, you will find the **Client ID** and the **Client Secret**."
+    },
     "common": {
         "confirmed": "Confirmed",
         "present": "Present"
@@ -13,7 +16,7 @@
             "description": "The Home Connect integration needs to re-authenticate your account"
         },
         "oauth_discovery": {
-            "description": "Home Assistant has found a Home Connect device on your network. Press **Submit** to continue setting up Home Connect."
+            "description": "Home Assistant has found a Home Connect device on your network. Be aware that the setup of Home Connect is more complicated than many other integrations. Press **Submit** to continue setting up Home Connect."
         }
     },
     "abort": {
@@ -41,5 +41,5 @@
   "iot_class": "local_push",
   "loggers": ["switchbot"],
   "quality_scale": "gold",
-  "requirements": ["PySwitchbot==0.65.0"]
+  "requirements": ["PySwitchbot==0.66.0"]
 }
@@ -56,7 +56,7 @@ EVENT_DEVICE_REGISTRY_UPDATED: EventType[EventDeviceRegistryUpdatedData] = Event
 )
 STORAGE_KEY = "core.device_registry"
 STORAGE_VERSION_MAJOR = 1
-STORAGE_VERSION_MINOR = 9
+STORAGE_VERSION_MINOR = 10

 CLEANUP_DELAY = 10

@@ -394,13 +394,17 @@ class DeviceEntry:
 class DeletedDeviceEntry:
     """Deleted Device Registry Entry."""

+    area_id: str | None = attr.ib()
     config_entries: set[str] = attr.ib()
     config_entries_subentries: dict[str, set[str | None]] = attr.ib()
     connections: set[tuple[str, str]] = attr.ib()
     created_at: datetime = attr.ib()
+    disabled_by: DeviceEntryDisabler | None = attr.ib()
     id: str = attr.ib()
     identifiers: set[tuple[str, str]] = attr.ib()
+    labels: set[str] = attr.ib()
     modified_at: datetime = attr.ib()
+    name_by_user: str | None = attr.ib()
     orphaned_timestamp: float | None = attr.ib()
     _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False)

@@ -413,14 +417,18 @@ class DeletedDeviceEntry:
     ) -> DeviceEntry:
         """Create DeviceEntry from DeletedDeviceEntry."""
         return DeviceEntry(
+            area_id=self.area_id,
             # type ignores: likely https://github.com/python/mypy/issues/8625
             config_entries={config_entry_id},  # type: ignore[arg-type]
             config_entries_subentries={config_entry_id: {config_subentry_id}},
             connections=self.connections & connections,  # type: ignore[arg-type]
             created_at=self.created_at,
+            disabled_by=self.disabled_by,
             identifiers=self.identifiers & identifiers,  # type: ignore[arg-type]
             id=self.id,
             is_new=True,
+            labels=self.labels,  # type: ignore[arg-type]
+            name_by_user=self.name_by_user,
         )

     @under_cached_property
@@ -429,6 +437,7 @@ class DeletedDeviceEntry:
         return json_fragment(
             json_bytes(
                 {
+                    "area_id": self.area_id,
                     # The config_entries list can be removed from the storage
                     # representation in HA Core 2026.2
                     "config_entries": list(self.config_entries),
@@ -438,9 +447,12 @@ class DeletedDeviceEntry:
                     },
                     "connections": list(self.connections),
                     "created_at": self.created_at,
+                    "disabled_by": self.disabled_by,
                     "identifiers": list(self.identifiers),
                     "id": self.id,
+                    "labels": list(self.labels),
                     "modified_at": self.modified_at,
+                    "name_by_user": self.name_by_user,
                     "orphaned_timestamp": self.orphaned_timestamp,
                 }
             )
@@ -540,6 +552,13 @@ class DeviceRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
                     config_entry_id: {None}
                     for config_entry_id in device["config_entries"]
                 }
+            if old_minor_version < 10:
+                # Introduced in 2025.6
+                for device in old_data["deleted_devices"]:
+                    device["area_id"] = None
+                    device["disabled_by"] = None
+                    device["labels"] = []
+                    device["name_by_user"] = None

         if old_major_version > 2:
             raise NotImplementedError
@@ -1238,13 +1257,17 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
         self.hass.verify_event_loop_thread("device_registry.async_remove_device")
         device = self.devices.pop(device_id)
         self.deleted_devices[device_id] = DeletedDeviceEntry(
+            area_id=device.area_id,
             config_entries=device.config_entries,
             config_entries_subentries=device.config_entries_subentries,
             connections=device.connections,
             created_at=device.created_at,
+            disabled_by=device.disabled_by,
             identifiers=device.identifiers,
             id=device.id,
+            labels=device.labels,
             modified_at=utcnow(),
+            name_by_user=device.name_by_user,
             orphaned_timestamp=None,
         )
         for other_device in list(self.devices.values()):
@@ -1316,6 +1339,7 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
             # Introduced in 0.111
             for device in data["deleted_devices"]:
                 deleted_devices[device["id"]] = DeletedDeviceEntry(
+                    area_id=device["area_id"],
                     config_entries=set(device["config_entries"]),
                     config_entries_subentries={
                         config_entry_id: set(subentries)
@@ -1325,9 +1349,16 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
                     },
                     connections={tuple(conn) for conn in device["connections"]},
                     created_at=datetime.fromisoformat(device["created_at"]),
+                    disabled_by=(
+                        DeviceEntryDisabler(device["disabled_by"])
+                        if device["disabled_by"]
+                        else None
+                    ),
                     identifiers={tuple(iden) for iden in device["identifiers"]},
                     id=device["id"],
+                    labels=set(device["labels"]),
                     modified_at=datetime.fromisoformat(device["modified_at"]),
+                    name_by_user=device["name_by_user"],
                     orphaned_timestamp=device["orphaned_timestamp"],
                 )

@@ -1448,12 +1479,26 @@ class DeviceRegistry(BaseRegistry[dict[str, list[dict[str, Any]]]]):
         """Clear area id from registry entries."""
         for device in self.devices.get_devices_for_area_id(area_id):
             self.async_update_device(device.id, area_id=None)
+        for deleted_device in list(self.deleted_devices.values()):
+            if deleted_device.area_id != area_id:
+                continue
+            self.deleted_devices[deleted_device.id] = attr.evolve(
+                deleted_device, area_id=None
+            )
+        self.async_schedule_save()

     @callback
     def async_clear_label_id(self, label_id: str) -> None:
         """Clear label from registry entries."""
         for device in self.devices.get_devices_for_label(label_id):
             self.async_update_device(device.id, labels=device.labels - {label_id})
+        for deleted_device in list(self.deleted_devices.values()):
+            if label_id not in deleted_device.labels:
+                continue
+            self.deleted_devices[deleted_device.id] = attr.evolve(
+                deleted_device, labels=deleted_device.labels - {label_id}
+            )
+        self.async_schedule_save()


     @callback
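Because DeletedDeviceEntry is an immutable attrs class, the new cleanup loops above swap entries via attr.evolve instead of mutating them in place. A standalone sketch of that pattern (the Example class and values are illustrative):

import attr

@attr.s(frozen=True)
class Example:
    labels: set = attr.ib()
    area_id: str | None = attr.ib()

entry = Example(labels={"label1", "label2"}, area_id="kitchen")
# attr.evolve returns a new frozen instance with the named fields replaced.
updated = attr.evolve(entry, labels=entry.labels - {"label1"})
assert updated.labels == {"label2"} and updated.area_id == "kitchen"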
@@ -79,7 +79,7 @@ EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = Event
 _LOGGER = logging.getLogger(__name__)

 STORAGE_VERSION_MAJOR = 1
-STORAGE_VERSION_MINOR = 17
+STORAGE_VERSION_MINOR = 18
 STORAGE_KEY = "core.entity_registry"

 CLEANUP_INTERVAL = 3600 * 24
@@ -406,12 +406,23 @@ class DeletedRegistryEntry:
     entity_id: str = attr.ib()
     unique_id: str = attr.ib()
     platform: str = attr.ib()

+    aliases: set[str] = attr.ib()
+    area_id: str | None = attr.ib()
+    categories: dict[str, str] = attr.ib()
     config_entry_id: str | None = attr.ib()
     config_subentry_id: str | None = attr.ib()
     created_at: datetime = attr.ib()
+    device_class: str | None = attr.ib()
+    disabled_by: RegistryEntryDisabler | None = attr.ib()
     domain: str = attr.ib(init=False, repr=False)
+    hidden_by: RegistryEntryHider | None = attr.ib()
+    icon: str | None = attr.ib()
     id: str = attr.ib()
+    labels: set[str] = attr.ib()
     modified_at: datetime = attr.ib()
+    name: str | None = attr.ib()
+    options: ReadOnlyEntityOptionsType = attr.ib(converter=_protect_entity_options)
     orphaned_timestamp: float | None = attr.ib()

     _cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False)
@@ -427,12 +438,22 @@ class DeletedRegistryEntry:
         return json_fragment(
             json_bytes(
                 {
+                    "aliases": list(self.aliases),
+                    "area_id": self.area_id,
+                    "categories": self.categories,
                     "config_entry_id": self.config_entry_id,
                     "config_subentry_id": self.config_subentry_id,
                     "created_at": self.created_at,
+                    "device_class": self.device_class,
+                    "disabled_by": self.disabled_by,
                     "entity_id": self.entity_id,
+                    "hidden_by": self.hidden_by,
+                    "icon": self.icon,
                     "id": self.id,
+                    "labels": list(self.labels),
                     "modified_at": self.modified_at,
+                    "name": self.name,
+                    "options": self.options,
                     "orphaned_timestamp": self.orphaned_timestamp,
                     "platform": self.platform,
                     "unique_id": self.unique_id,
@@ -556,6 +577,20 @@ class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
             for entity in data["entities"]:
                 entity["suggested_object_id"] = None

+        if old_minor_version < 18:
+            # Version 1.18 adds user customizations to deleted entities
+            for entity in data["deleted_entities"]:
+                entity["aliases"] = []
+                entity["area_id"] = None
+                entity["categories"] = {}
+                entity["device_class"] = None
+                entity["disabled_by"] = None
+                entity["hidden_by"] = None
+                entity["icon"] = None
+                entity["labels"] = []
+                entity["name"] = None
+                entity["options"] = {}
+
         if old_major_version > 1:
             raise NotImplementedError
         return data
@@ -916,15 +951,40 @@ class EntityRegistry(BaseRegistry):
         entity_registry_id: str | None = None
         created_at = utcnow()
         deleted_entity = self.deleted_entities.pop((domain, platform, unique_id), None)
+        options: Mapping[str, Mapping[str, Any]] | None
         if deleted_entity is not None:
-            # Restore id
-            entity_registry_id = deleted_entity.id
+            aliases = deleted_entity.aliases
+            area_id = deleted_entity.area_id
+            categories = deleted_entity.categories
+            created_at = deleted_entity.created_at
+            device_class = deleted_entity.device_class
+            disabled_by = deleted_entity.disabled_by
+            # Restore entity_id if it's available
+            if self._entity_id_available(deleted_entity.entity_id):
+                entity_id = deleted_entity.entity_id
+            entity_registry_id = deleted_entity.id
+            hidden_by = deleted_entity.hidden_by
+            icon = deleted_entity.icon
+            labels = deleted_entity.labels
+            name = deleted_entity.name
+            options = deleted_entity.options
+        else:
+            aliases = set()
+            area_id = None
+            categories = {}
+            device_class = None
+            icon = None
+            labels = set()
+            name = None
+            options = get_initial_options() if get_initial_options else None

-        entity_id = self.async_generate_entity_id(
-            domain,
-            suggested_object_id or calculated_object_id or f"{platform}_{unique_id}",
-        )
+        if not entity_id:
+            entity_id = self.async_generate_entity_id(
+                domain,
+                suggested_object_id
+                or calculated_object_id
+                or f"{platform}_{unique_id}",
+            )

         if (
             disabled_by is None
@@ -938,21 +998,26 @@ class EntityRegistry(BaseRegistry):
             """Return None if value is UNDEFINED, otherwise return value."""
             return None if value is UNDEFINED else value

-        initial_options = get_initial_options() if get_initial_options else None
-
         entry = RegistryEntry(
+            aliases=aliases,
+            area_id=area_id,
+            categories=categories,
             capabilities=none_if_undefined(capabilities),
             config_entry_id=none_if_undefined(config_entry_id),
             config_subentry_id=none_if_undefined(config_subentry_id),
             created_at=created_at,
+            device_class=device_class,
             device_id=none_if_undefined(device_id),
             disabled_by=disabled_by,
             entity_category=none_if_undefined(entity_category),
             entity_id=entity_id,
             hidden_by=hidden_by,
             has_entity_name=none_if_undefined(has_entity_name) or False,
+            icon=icon,
             id=entity_registry_id,
-            options=initial_options,
+            labels=labels,
+            name=name,
+            options=options,
             original_device_class=none_if_undefined(original_device_class),
             original_icon=none_if_undefined(original_icon),
             original_name=none_if_undefined(original_name),
@@ -986,12 +1051,22 @@ class EntityRegistry(BaseRegistry):
         # If the entity does not belong to a config entry, mark it as orphaned
         orphaned_timestamp = None if config_entry_id else time.time()
         self.deleted_entities[key] = DeletedRegistryEntry(
+            aliases=entity.aliases,
+            area_id=entity.area_id,
+            categories=entity.categories,
             config_entry_id=config_entry_id,
             config_subentry_id=entity.config_subentry_id,
             created_at=entity.created_at,
+            device_class=entity.device_class,
+            disabled_by=entity.disabled_by,
             entity_id=entity_id,
+            hidden_by=entity.hidden_by,
+            icon=entity.icon,
             id=entity.id,
+            labels=entity.labels,
             modified_at=utcnow(),
+            name=entity.name,
+            options=entity.options,
             orphaned_timestamp=orphaned_timestamp,
             platform=entity.platform,
             unique_id=entity.unique_id,
@@ -1420,12 +1495,30 @@ class EntityRegistry(BaseRegistry):
                     entity["unique_id"],
                 )
                 deleted_entities[key] = DeletedRegistryEntry(
+                    aliases=set(entity["aliases"]),
+                    area_id=entity["area_id"],
+                    categories=entity["categories"],
                     config_entry_id=entity["config_entry_id"],
                     config_subentry_id=entity["config_subentry_id"],
                     created_at=datetime.fromisoformat(entity["created_at"]),
+                    device_class=entity["device_class"],
+                    disabled_by=(
+                        RegistryEntryDisabler(entity["disabled_by"])
+                        if entity["disabled_by"]
+                        else None
+                    ),
                     entity_id=entity["entity_id"],
+                    hidden_by=(
+                        RegistryEntryHider(entity["hidden_by"])
+                        if entity["hidden_by"]
+                        else None
+                    ),
+                    icon=entity["icon"],
                     id=entity["id"],
+                    labels=set(entity["labels"]),
                     modified_at=datetime.fromisoformat(entity["modified_at"]),
+                    name=entity["name"],
+                    options=entity["options"],
                     orphaned_timestamp=entity["orphaned_timestamp"],
                     platform=entity["platform"],
                     unique_id=entity["unique_id"],
@@ -1455,12 +1548,29 @@ class EntityRegistry(BaseRegistry):
                 categories = entry.categories.copy()
                 del categories[scope]
                 self.async_update_entity(entity_id, categories=categories)
+        for key, deleted_entity in list(self.deleted_entities.items()):
+            if (
+                existing_category_id := deleted_entity.categories.get(scope)
+            ) and category_id == existing_category_id:
+                categories = deleted_entity.categories.copy()
+                del categories[scope]
+                self.deleted_entities[key] = attr.evolve(
+                    deleted_entity, categories=categories
+                )
+        self.async_schedule_save()

     @callback
     def async_clear_label_id(self, label_id: str) -> None:
         """Clear label from registry entries."""
         for entry in self.entities.get_entries_for_label(label_id):
             self.async_update_entity(entry.entity_id, labels=entry.labels - {label_id})
+        for key, deleted_entity in list(self.deleted_entities.items()):
+            if label_id not in deleted_entity.labels:
+                continue
+            self.deleted_entities[key] = attr.evolve(
+                deleted_entity, labels=deleted_entity.labels - {label_id}
+            )
+        self.async_schedule_save()

     @callback
     def async_clear_config_entry(self, config_entry_id: str) -> None:
@@ -1525,6 +1635,11 @@ class EntityRegistry(BaseRegistry):
         """Clear area id from registry entries."""
         for entry in self.entities.get_entries_for_area_id(area_id):
             self.async_update_entity(entry.entity_id, area_id=None)
+        for key, deleted_entity in list(self.deleted_entities.items()):
+            if deleted_entity.area_id != area_id:
+                continue
+            self.deleted_entities[key] = attr.evolve(deleted_entity, area_id=None)
+        self.async_schedule_save()


     @callback
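The minor-version bump to 18 pairs with the migration block above: records written by older stores gain the new keys with neutral defaults so DeletedRegistryEntry can always be constructed. A condensed sketch of that store-migration pattern (signature simplified, field list trimmed for brevity):

def migrate_sketch(old_major_version: int, old_minor_version: int, data: dict) -> dict:
    if old_minor_version < 18:
        # Older stores never recorded user customizations on deleted
        # entities; backfill the new keys with safe defaults.
        for entity in data["deleted_entities"]:
            entity["labels"] = []
            entity["name"] = None
            entity["options"] = {}
    if old_major_version > 1:
        raise NotImplementedError
    return data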
@@ -6,7 +6,7 @@ aiodns==3.4.0
 aiohasupervisor==0.3.1
 aiohttp-asyncmdnsresolver==0.1.1
 aiohttp-fast-zlib==0.3.0
-aiohttp==3.12.11
+aiohttp==3.12.12
 aiohttp_cors==0.8.1
 aiousbwatcher==1.1.1
 aiozoneinfo==0.2.3
@@ -50,7 +50,7 @@ orjson==3.10.18
 packaging>=23.1
 paho-mqtt==2.1.0
 Pillow==11.2.1
-propcache==0.3.1
+propcache==0.3.2
 psutil-home-assistant==0.0.1
 PyJWT==2.10.1
 pymicro-vad==1.0.1
@@ -74,7 +74,7 @@ voluptuous-openapi==0.1.0
 voluptuous-serialize==2.6.0
 voluptuous==0.15.2
 webrtc-models==0.3.0
-yarl==1.20.0
+yarl==1.20.1
 zeroconf==0.147.0

 # Constrain pycryptodome to avoid vulnerability
@@ -28,7 +28,7 @@ dependencies = [
     # change behavior based on presence of supervisor. Deprecated with #127228
     # Lib can be removed with 2025.11
     "aiohasupervisor==0.3.1",
-    "aiohttp==3.12.11",
+    "aiohttp==3.12.12",
     "aiohttp_cors==0.8.1",
     "aiohttp-fast-zlib==0.3.0",
     "aiohttp-asyncmdnsresolver==0.1.1",
@@ -84,7 +84,7 @@ dependencies = [
     # PyJWT has loose dependency. We want the latest one.
    "cryptography==45.0.3",
    "Pillow==11.2.1",
-    "propcache==0.3.1",
+    "propcache==0.3.2",
    "pyOpenSSL==25.1.0",
    "orjson==3.10.18",
    "packaging>=23.1",
@@ -121,7 +121,7 @@ dependencies = [
    "voluptuous==0.15.2",
    "voluptuous-serialize==2.6.0",
    "voluptuous-openapi==0.1.0",
-    "yarl==1.20.0",
+    "yarl==1.20.1",
    "webrtc-models==0.3.0",
    "zeroconf==0.147.0",
]
requirements.txt (generated, 6 lines changed)
@@ -5,7 +5,7 @@
 # Home Assistant Core
 aiodns==3.4.0
 aiohasupervisor==0.3.1
-aiohttp==3.12.11
+aiohttp==3.12.12
 aiohttp_cors==0.8.1
 aiohttp-fast-zlib==0.3.0
 aiohttp-asyncmdnsresolver==0.1.1
@@ -36,7 +36,7 @@ numpy==2.3.0
 PyJWT==2.10.1
 cryptography==45.0.3
 Pillow==11.2.1
-propcache==0.3.1
+propcache==0.3.2
 pyOpenSSL==25.1.0
 orjson==3.10.18
 packaging>=23.1
@@ -58,6 +58,6 @@ uv==0.7.1
 voluptuous==0.15.2
 voluptuous-serialize==2.6.0
 voluptuous-openapi==0.1.0
-yarl==1.20.0
+yarl==1.20.1
 webrtc-models==0.3.0
 zeroconf==0.147.0
requirements_all.txt (generated, 2 lines changed)
@@ -81,7 +81,7 @@ PyQRCode==1.2.1
 PyRMVtransport==0.3.3

 # homeassistant.components.switchbot
-PySwitchbot==0.65.0
+PySwitchbot==0.66.0

 # homeassistant.components.switchmate
 PySwitchmate==0.5.1
requirements_test_all.txt (generated, 2 lines changed)
@@ -78,7 +78,7 @@ PyQRCode==1.2.1
 PyRMVtransport==0.3.3

 # homeassistant.components.switchbot
-PySwitchbot==0.65.0
+PySwitchbot==0.66.0

 # homeassistant.components.syncthru
 PySyncThru==0.8.0
@@ -103,7 +103,10 @@ RUN --mount=from=ghcr.io/astral-sh/uv:{uv},source=/uv,target=/bin/uv \
     --no-cache \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
-    stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \
+    stdlib-list==0.10.0 \
+    pipdeptree=={pipdeptree} \
+    tqdm=={tqdm} \
+    ruff=={ruff} \
     {required_components_packages}

 LABEL "name"="hassfest"
@@ -169,7 +172,7 @@ def _generate_hassfest_dockerimage(
     return File(
         _HASSFEST_TEMPLATE.format(
             timeout=timeout,
-            required_components_packages=" ".join(sorted(packages)),
+            required_components_packages=" \\\n    ".join(sorted(packages)),
             **package_versions,
         ),
         config.root / "script/hassfest/docker/Dockerfile",
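The " \\\n    ".join(...) change is what produces the one-package-per-line RUN block in the generated Dockerfile below: each separator is a backslash continuation, a newline, and a four-space indent. A quick sketch with made-up package pins:

packages = ["hassil==2.2.3", "ha-ffmpeg==3.2.2", "mutagen==1.47.0"]
print(" \\\n    ".join(sorted(packages)))
# ha-ffmpeg==3.2.2 \
#     hassil==2.2.3 \
#     mutagen==1.47.0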
script/hassfest/docker/Dockerfile (generated, 14 lines changed)
@@ -24,8 +24,18 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.7.1,source=/uv,target=/bin/uv \
     --no-cache \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
-    stdlib-list==0.10.0 pipdeptree==2.26.1 tqdm==4.67.1 ruff==0.11.12 \
-    PyTurboJPEG==1.8.0 go2rtc-client==0.2.1 ha-ffmpeg==3.2.2 hassil==2.2.3 home-assistant-intents==2025.5.28 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
+    stdlib-list==0.10.0 \
+    pipdeptree==2.26.1 \
+    tqdm==4.67.1 \
+    ruff==0.11.12 \
+    PyTurboJPEG==1.8.0 \
+    go2rtc-client==0.2.1 \
+    ha-ffmpeg==3.2.2 \
+    hassil==2.2.3 \
+    home-assistant-intents==2025.5.28 \
+    mutagen==1.47.0 \
+    pymicro-vad==1.0.1 \
+    pyspeex-noise==1.0.2

 LABEL "name"="hassfest"
 LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
@@ -429,3 +429,105 @@ async def test_media_player_proxy(
     mock_async_create_proxy_url.assert_not_called()
     media_args = mock_client.media_player_command.call_args.kwargs
     assert media_args["media_url"] == media_url
+
+
+async def test_media_player_formats_reload_preserves_data(
+    hass: HomeAssistant,
+    mock_client: APIClient,
+    mock_esphome_device: MockESPHomeDeviceType,
+) -> None:
+    """Test that media player formats are properly managed on reload."""
+    # Create a media player with supported formats
+    supported_formats = [
+        MediaPlayerSupportedFormat(
+            format="mp3",
+            sample_rate=48000,
+            num_channels=2,
+            purpose=MediaPlayerFormatPurpose.DEFAULT,
+        ),
+        MediaPlayerSupportedFormat(
+            format="wav",
+            sample_rate=16000,
+            num_channels=1,
+            purpose=MediaPlayerFormatPurpose.ANNOUNCEMENT,
+            sample_bytes=2,
+        ),
+    ]
+
+    mock_device = await mock_esphome_device(
+        mock_client=mock_client,
+        entity_info=[
+            MediaPlayerInfo(
+                object_id="test_media_player",
+                key=1,
+                name="Test Media Player",
+                unique_id="test_unique_id",
+                supports_pause=True,
+                supported_formats=supported_formats,
+            )
+        ],
+        states=[
+            MediaPlayerEntityState(
+                key=1, volume=50, muted=False, state=MediaPlayerState.IDLE
+            )
+        ],
+    )
+    await hass.async_block_till_done()
+
+    # Verify entity was created
+    state = hass.states.get("media_player.test_test_media_player")
+    assert state is not None
+    assert state.state == "idle"
+
+    # Test that play_media works with proxy URL (which requires formats to be stored)
+    media_url = "http://127.0.0.1/test.mp3"
+
+    await hass.services.async_call(
+        MEDIA_PLAYER_DOMAIN,
+        SERVICE_PLAY_MEDIA,
+        {
+            ATTR_ENTITY_ID: "media_player.test_test_media_player",
+            ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
+            ATTR_MEDIA_CONTENT_ID: media_url,
+        },
+        blocking=True,
+    )
+
+    # Verify the API was called with a proxy URL (contains /api/esphome/ffmpeg_proxy/)
+    mock_client.media_player_command.assert_called_once()
+    call_args = mock_client.media_player_command.call_args
+    assert "/api/esphome/ffmpeg_proxy/" in call_args.kwargs["media_url"]
+    assert ".mp3" in call_args.kwargs["media_url"]  # Should use mp3 format for default
+    assert call_args.kwargs["announcement"] is None
+
+    mock_client.media_player_command.reset_mock()
+
+    # Reload the integration
+    await hass.config_entries.async_reload(mock_device.entry.entry_id)
+    await hass.async_block_till_done()
+
+    # Verify entity still exists after reload
+    state = hass.states.get("media_player.test_test_media_player")
+    assert state is not None
+
+    # Test that play_media still works after reload with announcement
+    await hass.services.async_call(
+        MEDIA_PLAYER_DOMAIN,
+        SERVICE_PLAY_MEDIA,
+        {
+            ATTR_ENTITY_ID: "media_player.test_test_media_player",
+            ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC,
+            ATTR_MEDIA_CONTENT_ID: media_url,
+            ATTR_MEDIA_ANNOUNCE: True,
+        },
+        blocking=True,
+    )
+
+    # Verify the API was called with a proxy URL using wav format for announcements
+    mock_client.media_player_command.assert_called_once()
+    call_args = mock_client.media_player_command.call_args
+    assert "/api/esphome/ffmpeg_proxy/" in call_args.kwargs["media_url"]
+    assert (
+        ".wav" in call_args.kwargs["media_url"]
+    )  # Should use wav format for announcement
+    assert call_args.kwargs["announcement"] is True
@@ -1,7 +1,9 @@
 """Tests for the Modern Forms integration."""

-from collections.abc import Callable
+from collections.abc import Callable, Coroutine
+from functools import partial
 import json
+from typing import Any

 from aiomodernforms.const import COMMAND_QUERY_STATIC_DATA

@@ -9,40 +11,52 @@ from homeassistant.components.modern_forms.const import DOMAIN
 from homeassistant.const import CONF_HOST, CONF_MAC, CONTENT_TYPE_JSON
 from homeassistant.core import HomeAssistant

-from tests.common import MockConfigEntry, load_fixture
+from tests.common import MockConfigEntry, async_load_fixture
 from tests.test_util.aiohttp import AiohttpClientMocker, AiohttpClientMockResponse


-async def modern_forms_call_mock(method, url, data):
+async def modern_forms_call_mock(
+    hass: HomeAssistant, method: str, url: str, data: dict[str, Any]
+) -> AiohttpClientMockResponse:
     """Set up the basic returns based on info or status request."""
     if COMMAND_QUERY_STATIC_DATA in data:
-        fixture = "modern_forms/device_info.json"
+        fixture = "device_info.json"
     else:
-        fixture = "modern_forms/device_status.json"
+        fixture = "device_status.json"
     return AiohttpClientMockResponse(
-        method=method, url=url, json=json.loads(load_fixture(fixture))
+        method=method,
+        url=url,
+        json=json.loads(await async_load_fixture(hass, fixture, DOMAIN)),
     )


-async def modern_forms_no_light_call_mock(method, url, data):
+async def modern_forms_no_light_call_mock(
+    hass: HomeAssistant, method: str, url: str, data: dict[str, Any]
+) -> AiohttpClientMockResponse:
     """Set up the basic returns based on info or status request."""
     if COMMAND_QUERY_STATIC_DATA in data:
-        fixture = "modern_forms/device_info_no_light.json"
+        fixture = "device_info_no_light.json"
     else:
-        fixture = "modern_forms/device_status_no_light.json"
+        fixture = "device_status_no_light.json"
     return AiohttpClientMockResponse(
-        method=method, url=url, json=json.loads(load_fixture(fixture))
+        method=method,
+        url=url,
+        json=json.loads(await async_load_fixture(hass, fixture, DOMAIN)),
     )


-async def modern_forms_timers_set_mock(method, url, data):
+async def modern_forms_timers_set_mock(
+    hass: HomeAssistant, method: str, url: str, data: dict[str, Any]
+) -> AiohttpClientMockResponse:
     """Set up the basic returns based on info or status request."""
     if COMMAND_QUERY_STATIC_DATA in data:
-        fixture = "modern_forms/device_info.json"
+        fixture = "device_info.json"
     else:
-        fixture = "modern_forms/device_status_timers_active.json"
+        fixture = "device_status_timers_active.json"
     return AiohttpClientMockResponse(
-        method=method, url=url, json=json.loads(load_fixture(fixture))
+        method=method,
+        url=url,
+        json=json.loads(await async_load_fixture(hass, fixture, DOMAIN)),
     )


@@ -51,13 +65,15 @@ async def init_integration(
     aioclient_mock: AiohttpClientMocker,
     rgbw: bool = False,
     skip_setup: bool = False,
-    mock_type: Callable = modern_forms_call_mock,
+    mock_type: Callable[
+        [str, str, dict[str, Any]], Coroutine[Any, Any, AiohttpClientMockResponse]
+    ] = modern_forms_call_mock,
 ) -> MockConfigEntry:
     """Set up the Modern Forms integration in Home Assistant."""

     aioclient_mock.post(
         "http://192.168.1.123:80/mf",
-        side_effect=mock_type,
+        side_effect=partial(mock_type, hass),
         headers={"Content-Type": CONTENT_TYPE_JSON},
     )
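Since the fixture-loading mocks now need hass as a first argument while the aiohttp mocker still calls side_effect(method, url, data), functools.partial pre-binds hass before the callable is registered. A self-contained sketch of the binding (toy stand-ins, not the real test code):

from functools import partial

async def call_mock(context: object, method: str, url: str, data: dict) -> str:
    # Stand-in for the mocks above; returns a string instead of a mock response.
    return f"{method} {url}"

side_effect = partial(call_mock, "hass-placeholder")
# side_effect(method, url, data) now matches the 3-argument signature.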
@@ -1680,6 +1680,7 @@ async def test_rapid_rediscover_unique(
         "homeassistant/binary_sensor/bla/config",
         '{ "name": "Beer", "state_topic": "test-topic", "unique_id": "even_uniquer" }',
     )
+    # Removal, immediately followed by rediscover
     async_fire_mqtt_message(hass, "homeassistant/binary_sensor/bla/config", "")
     async_fire_mqtt_message(
         hass,
@@ -1691,8 +1692,10 @@ async def test_rapid_rediscover_unique(
     assert len(hass.states.async_entity_ids("binary_sensor")) == 2
     state = hass.states.get("binary_sensor.ale")
     assert state is not None
-    state = hass.states.get("binary_sensor.milk")
+    state = hass.states.get("binary_sensor.beer")
     assert state is not None
+    state = hass.states.get("binary_sensor.milk")
+    assert state is None

     assert len(events) == 4
     # Add the entity
@@ -1702,7 +1705,7 @@ async def test_rapid_rediscover_unique(
     assert events[2].data["entity_id"] == "binary_sensor.beer"
     assert events[2].data["new_state"] is None
     # Add the entity
-    assert events[3].data["entity_id"] == "binary_sensor.milk"
+    assert events[3].data["entity_id"] == "binary_sensor.beer"
     assert events[3].data["old_state"] is None
@@ -166,12 +166,16 @@ async def test_discovery_update(

     await send_discovery_message(hass, payload)

-    # be sure that old relay are been removed
+    # entity id from the old relay configuration should be reused
     for i in range(8):
-        assert not hass.states.get(f"switch.first_test_relay_{i}")
+        state = hass.states.get(f"switch.first_test_relay_{i}")
+        assert state.state == STATE_UNKNOWN
+        assert not state.attributes.get(ATTR_ASSUMED_STATE)
+    for i in range(8):
+        assert not hass.states.get(f"switch.second_test_relay_{i}")

     # check new relay
-    for i in range(16):
+    for i in range(8, 16):
         state = hass.states.get(f"switch.second_test_relay_{i}")
         assert state.state == STATE_UNKNOWN
         assert not state.attributes.get(ATTR_ASSUMED_STATE)
@@ -344,13 +344,17 @@ async def test_loading_from_storage(
         ],
         "deleted_devices": [
             {
+                "area_id": "12345A",
                 "config_entries": [mock_config_entry.entry_id],
                 "config_entries_subentries": {mock_config_entry.entry_id: [None]},
                 "connections": [["Zigbee", "23.45.67.89.01"]],
                 "created_at": created_at,
+                "disabled_by": dr.DeviceEntryDisabler.USER,
                 "id": "bcdefghijklmn",
                 "identifiers": [["serial", "3456ABCDEF12"]],
+                "labels": {"label1", "label2"},
                 "modified_at": modified_at,
+                "name_by_user": "Test Friendly Name",
                 "orphaned_timestamp": None,
             }
         ],
@@ -363,13 +367,17 @@ async def test_loading_from_storage(
     assert len(registry.deleted_devices) == 1

     assert registry.deleted_devices["bcdefghijklmn"] == dr.DeletedDeviceEntry(
+        area_id="12345A",
         config_entries={mock_config_entry.entry_id},
         config_entries_subentries={mock_config_entry.entry_id: {None}},
         connections={("Zigbee", "23.45.67.89.01")},
         created_at=datetime.fromisoformat(created_at),
+        disabled_by=dr.DeviceEntryDisabler.USER,
         id="bcdefghijklmn",
         identifiers={("serial", "3456ABCDEF12")},
+        labels={"label1", "label2"},
         modified_at=datetime.fromisoformat(modified_at),
+        name_by_user="Test Friendly Name",
         orphaned_timestamp=None,
     )

@@ -417,15 +425,19 @@ async def test_loading_from_storage(
         model="model",
     )
     assert entry == dr.DeviceEntry(
+        area_id="12345A",
         config_entries={mock_config_entry.entry_id},
         config_entries_subentries={mock_config_entry.entry_id: {None}},
         connections={("Zigbee", "23.45.67.89.01")},
         created_at=datetime.fromisoformat(created_at),
+        disabled_by=dr.DeviceEntryDisabler.USER,
         id="bcdefghijklmn",
         identifiers={("serial", "3456ABCDEF12")},
+        labels={"label1", "label2"},
         manufacturer="manufacturer",
         model="model",
         modified_at=utcnow(),
+        name_by_user="Test Friendly Name",
         primary_config_entry=mock_config_entry.entry_id,
     )
     assert entry.id == "bcdefghijklmn"
@@ -566,13 +578,17 @@ async def test_migration_from_1_1(
         ],
         "deleted_devices": [
             {
+                "area_id": None,
                 "config_entries": ["123456"],
                 "config_entries_subentries": {"123456": [None]},
                 "connections": [],
                 "created_at": "1970-01-01T00:00:00+00:00",
+                "disabled_by": None,
                 "id": "deletedid",
                 "identifiers": [["serial", "123456ABCDFF"]],
+                "labels": [],
                 "modified_at": "1970-01-01T00:00:00+00:00",
+                "name_by_user": None,
                 "orphaned_timestamp": None,
             }
         ],
@@ -2066,6 +2082,49 @@ async def test_removing_area_id(
     assert entry_w_area != entry_wo_area


+async def test_removing_area_id_deleted_device(
+    device_registry: dr.DeviceRegistry, mock_config_entry: MockConfigEntry
+) -> None:
+    """Make sure we can clear area id."""
+    entry1 = device_registry.async_get_or_create(
+        config_entry_id=mock_config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+        manufacturer="manufacturer",
+        model="model",
+    )
+    entry2 = device_registry.async_get_or_create(
+        config_entry_id=mock_config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:FF")},
+        identifiers={("bridgeid", "1234")},
+        manufacturer="manufacturer",
+        model="model",
+    )
+
+    entry1_w_area = device_registry.async_update_device(entry1.id, area_id="12345A")
+    entry2_w_area = device_registry.async_update_device(entry2.id, area_id="12345B")
+
+    device_registry.async_remove_device(entry1.id)
+    device_registry.async_remove_device(entry2.id)
+
+    device_registry.async_clear_area_id("12345A")
+    entry1_restored = device_registry.async_get_or_create(
+        config_entry_id=mock_config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+    )
+    entry2_restored = device_registry.async_get_or_create(
+        config_entry_id=mock_config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:FF")},
+        identifiers={("bridgeid", "1234")},
+    )
+
+    assert not entry1_restored.area_id
+    assert entry2_restored.area_id == "12345B"
+    assert entry1_w_area != entry1_restored
+    assert entry2_w_area != entry2_restored
+
+
 async def test_specifying_via_device_create(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
@@ -3276,7 +3335,8 @@ async def test_restore_device(
         suggested_area=None,
         sw_version=None,
     )
-    # This will restore the original device
+    # This will restore the original device, user customizations of
+    # area_id, disabled_by, labels and name_by_user will be restored
     entry3 = device_registry.async_get_or_create(
         config_entry_id=entry_id,
         config_subentry_id=subentry_id,
@@ -3295,23 +3355,23 @@ async def test_restore_device(
         via_device="via_device_id_new",
     )
     assert entry3 == dr.DeviceEntry(
-        area_id="suggested_area_new",
+        area_id="12345A",
         config_entries={entry_id},
         config_entries_subentries={entry_id: {subentry_id}},
         configuration_url="http://config_url_new.bla",
         connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef")},
         created_at=utcnow(),
-        disabled_by=None,
+        disabled_by=dr.DeviceEntryDisabler.USER,
         entry_type=None,
         hw_version="hw_version_new",
         id=entry.id,
         identifiers={("bridgeid", "0123")},
-        labels={},
+        labels={"label1", "label2"},
         manufacturer="manufacturer_new",
         model="model_new",
         model_id="model_id_new",
         modified_at=utcnow(),
-        name_by_user=None,
+        name_by_user="Test Friendly Name",
         name="name_new",
         primary_config_entry=entry_id,
         serial_number="serial_no_new",
@@ -3466,7 +3526,8 @@ async def test_restore_shared_device(
     assert len(device_registry.deleted_devices) == 1

     # config_entry_1 restores the original device, only the supplied config entry,
-    # config subentry, connections, and identifiers will be restored
+    # config subentry, connections, and identifiers will be restored, user
+    # customizations of area_id, disabled_by, labels and name_by_user will be restored.
     entry2 = device_registry.async_get_or_create(
         config_entry_id=config_entry_1.entry_id,
         config_subentry_id="mock-subentry-id-1-1",
@@ -3486,23 +3547,23 @@ async def test_restore_shared_device(
     )

     assert entry2 == dr.DeviceEntry(
-        area_id="suggested_area_new_1",
+        area_id="12345A",
         config_entries={config_entry_1.entry_id},
         config_entries_subentries={config_entry_1.entry_id: {"mock-subentry-id-1-1"}},
         configuration_url="http://config_url_new_1.bla",
         connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef")},
         created_at=utcnow(),
-        disabled_by=None,
+        disabled_by=dr.DeviceEntryDisabler.USER,
         entry_type=dr.DeviceEntryType.SERVICE,
         hw_version="hw_version_new_1",
         id=entry.id,
         identifiers={("entry_123", "0123")},
-        labels={},
+        labels={"label1", "label2"},
         manufacturer="manufacturer_new_1",
         model="model_new_1",
         model_id="model_id_new_1",
         modified_at=utcnow(),
-        name_by_user=None,
+        name_by_user="Test Friendly Name",
         name="name_new_1",
         primary_config_entry=config_entry_1.entry_id,
         serial_number="serial_no_new_1",
@@ -3521,7 +3582,8 @@ async def test_restore_shared_device(
     device_registry.async_remove_device(entry.id)

     # config_entry_2 restores the original device, only the supplied config entry,
-    # config subentry, connections, and identifiers will be restored
+    # config subentry, connections, and identifiers will be restored, user
+    # customizations of area_id, disabled_by, labels and name_by_user will be restored.
     entry3 = device_registry.async_get_or_create(
         config_entry_id=config_entry_2.entry_id,
         configuration_url="http://config_url_new_2.bla",
@@ -3540,7 +3602,7 @@ async def test_restore_shared_device(
     )

     assert entry3 == dr.DeviceEntry(
-        area_id="suggested_area_new_2",
+        area_id="12345A",
         config_entries={config_entry_2.entry_id},
         config_entries_subentries={
             config_entry_2.entry_id: {None},
@@ -3548,17 +3610,17 @@ async def test_restore_shared_device(
         configuration_url="http://config_url_new_2.bla",
         connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef")},
         created_at=utcnow(),
-        disabled_by=None,
+        disabled_by=dr.DeviceEntryDisabler.USER,
         entry_type=None,
         hw_version="hw_version_new_2",
         id=entry.id,
         identifiers={("entry_234", "2345")},
-        labels={},
+        labels={"label1", "label2"},
         manufacturer="manufacturer_new_2",
         model="model_new_2",
         model_id="model_id_new_2",
         modified_at=utcnow(),
-        name_by_user=None,
+        name_by_user="Test Friendly Name",
         name="name_new_2",
         primary_config_entry=config_entry_2.entry_id,
         serial_number="serial_no_new_2",
@@ -3593,7 +3655,7 @@ async def test_restore_shared_device(
     )

     assert entry4 == dr.DeviceEntry(
-        area_id="suggested_area_new_2",
+        area_id="12345A",
         config_entries={config_entry_1.entry_id, config_entry_2.entry_id},
         config_entries_subentries={
             config_entry_1.entry_id: {"mock-subentry-id-1-1"},
@@ -3602,17 +3664,17 @@ async def test_restore_shared_device(
         configuration_url="http://config_url_new_1.bla",
         connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef")},
         created_at=utcnow(),
-        disabled_by=None,
+        disabled_by=dr.DeviceEntryDisabler.USER,
         entry_type=dr.DeviceEntryType.SERVICE,
         hw_version="hw_version_new_1",
         id=entry.id,
         identifiers={("entry_123", "0123"), ("entry_234", "2345")},
-        labels={},
+        labels={"label1", "label2"},
         manufacturer="manufacturer_new_1",
         model="model_new_1",
         model_id="model_id_new_1",
         modified_at=utcnow(),
-        name_by_user=None,
+        name_by_user="Test Friendly Name",
         name="name_new_1",
         primary_config_entry=config_entry_2.entry_id,
         serial_number="serial_no_new_1",
@@ -4069,6 +4131,65 @@ async def test_removing_labels(
     assert not entry_cleared_label2.labels


+async def test_removing_labels_deleted_device(
+    hass: HomeAssistant, device_registry: dr.DeviceRegistry
+) -> None:
+    """Make sure we can clear labels."""
+    config_entry = MockConfigEntry()
+    config_entry.add_to_hass(hass)
+    entry1 = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+        manufacturer="manufacturer",
+        model="model",
+    )
+    entry1 = device_registry.async_update_device(entry1.id, labels={"label1", "label2"})
+    entry2 = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:FF")},
+        identifiers={("bridgeid", "1234")},
+        manufacturer="manufacturer",
+        model="model",
+    )
+    entry2 = device_registry.async_update_device(entry2.id, labels={"label3"})
+
+    device_registry.async_remove_device(entry1.id)
+    device_registry.async_remove_device(entry2.id)
+
+    device_registry.async_clear_label_id("label1")
+    entry1_cleared_label1 = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+    )
+
+    device_registry.async_remove_device(entry1.id)
+
+    device_registry.async_clear_label_id("label2")
+    entry1_cleared_label2 = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+    )
+    entry2_restored = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:FF")},
+        identifiers={("bridgeid", "1234")},
+    )
+
+    assert entry1_cleared_label1
+    assert entry1_cleared_label2
+    assert entry1 != entry1_cleared_label1
+    assert entry1 != entry1_cleared_label2
+    assert entry1_cleared_label1 != entry1_cleared_label2
+    assert entry1.labels == {"label1", "label2"}
+    assert entry1_cleared_label1.labels == {"label2"}
+    assert not entry1_cleared_label2.labels
+    assert entry2 != entry2_restored
+    assert entry2_restored.labels == {"label3"}
+
+
 async def test_entries_for_label(
     hass: HomeAssistant, device_registry: dr.DeviceRegistry
 ) -> None:
@@ -583,23 +583,43 @@ async def test_load_bad_data(
             ],
             "deleted_entities": [
                 {
+                    "aliases": [],
+                    "area_id": None,
+                    "categories": {},
                     "config_entry_id": None,
                     "config_subentry_id": None,
                     "created_at": "2024-02-14T12:00:00.900075+00:00",
+                    "device_class": None,
+                    "disabled_by": None,
                     "entity_id": "test.test3",
+                    "hidden_by": None,
+                    "icon": None,
                     "id": "00003",
+                    "labels": [],
                     "modified_at": "2024-02-14T12:00:00.900075+00:00",
+                    "name": None,
+                    "options": None,
                     "orphaned_timestamp": None,
                     "platform": "super_platform",
                     "unique_id": 234,  # Should not load
                 },
                 {
+                    "aliases": [],
+                    "area_id": None,
+                    "categories": {},
                     "config_entry_id": None,
                     "config_subentry_id": None,
                     "created_at": "2024-02-14T12:00:00.900075+00:00",
+                    "device_class": None,
+                    "disabled_by": None,
                     "entity_id": "test.test4",
+                    "hidden_by": None,
+                    "icon": None,
                     "id": "00004",
+                    "labels": [],
                     "modified_at": "2024-02-14T12:00:00.900075+00:00",
+                    "name": None,
+                    "options": None,
                     "orphaned_timestamp": None,
                     "platform": "super_platform",
                     "unique_id": ["also", "not", "valid"],  # Should not load
@@ -870,6 +890,33 @@ async def test_removing_area_id(entity_registry: er.EntityRegistry) -> None:
     assert entry_w_area != entry_wo_area


+async def test_removing_area_id_deleted_entity(
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Make sure we can clear area id."""
+    entry1 = entity_registry.async_get_or_create("light", "hue", "5678")
+    entry2 = entity_registry.async_get_or_create("light", "hue", "1234")
+
+    entry1_w_area = entity_registry.async_update_entity(
+        entry1.entity_id, area_id="12345A"
+    )
+    entry2_w_area = entity_registry.async_update_entity(
+        entry2.entity_id, area_id="12345B"
+    )
+
+    entity_registry.async_remove(entry1.entity_id)
+    entity_registry.async_remove(entry2.entity_id)
+
+    entity_registry.async_clear_area_id("12345A")
+    entry1_restored = entity_registry.async_get_or_create("light", "hue", "5678")
+    entry2_restored = entity_registry.async_get_or_create("light", "hue", "1234")
+
+    assert not entry1_restored.area_id
+    assert entry2_restored.area_id == "12345B"
+    assert entry1_w_area != entry1_restored
+    assert entry2_w_area != entry2_restored
+
+
 @pytest.mark.parametrize("load_registries", [False])
 async def test_migration_1_1(hass: HomeAssistant, hass_storage: dict[str, Any]) -> None:
     """Test migration from version 1.1."""
@@ -1119,12 +1166,22 @@ async def test_migration_1_11(
             ],
             "deleted_entities": [
                 {
+                    "aliases": [],
+                    "area_id": None,
+                    "categories": {},
                     "config_entry_id": None,
                     "config_subentry_id": None,
                     "created_at": "1970-01-01T00:00:00+00:00",
+                    "device_class": None,
+                    "disabled_by": None,
                     "entity_id": "test.deleted_entity",
+                    "hidden_by": None,
+                    "icon": None,
                     "id": "23456",
+                    "labels": [],
                     "modified_at": "1970-01-01T00:00:00+00:00",
+                    "name": None,
+                    "options": {},
                     "orphaned_timestamp": None,
                     "platform": "super_duper_platform",
                     "unique_id": "very_very_unique",
@@ -2453,7 +2510,7 @@ async def test_restore_entity(
     entity_registry: er.EntityRegistry,
     freezer: FrozenDateTimeFactory,
 ) -> None:
-    """Make sure entity registry id is stable."""
+    """Make sure entity registry id is stable and user configurations are restored."""
     update_events = async_capture_events(hass, er.EVENT_ENTITY_REGISTRY_UPDATED)
     config_entry = MockConfigEntry(
         domain="light",
@@ -2511,6 +2568,13 @@ async def test_restore_entity(
         config_entry=config_entry,
         config_subentry_id="mock-subentry-id-1-1",
     )
+    entry3 = entity_registry.async_get_or_create(
+        "light",
+        "hue",
+        "abcd",
+        disabled_by=er.RegistryEntryDisabler.INTEGRATION,
+        hidden_by=er.RegistryEntryHider.INTEGRATION,
+    )

     # Apply user customizations
     entry1 = entity_registry.async_update_entity(
@@ -2532,8 +2596,9 @@ async def test_restore_entity(

     entity_registry.async_remove(entry1.entity_id)
     entity_registry.async_remove(entry2.entity_id)
+    entity_registry.async_remove(entry3.entity_id)
     assert len(entity_registry.entities) == 0
-    assert len(entity_registry.deleted_entities) == 2
+    assert len(entity_registry.deleted_entities) == 3

     # Re-add entities, integration has changed
     entry1_restored = entity_registry.async_get_or_create(
@@ -2557,32 +2622,46 @@ async def test_restore_entity(
         translation_key="translation_key_2",
         unit_of_measurement="unit_2",
     )
-    entry2_restored = entity_registry.async_get_or_create("light", "hue", "5678")
+    # Add back the second entity without config entry and with different
+    # disabled_by and hidden_by settings
+    entry2_restored = entity_registry.async_get_or_create(
+        "light",
+        "hue",
+        "5678",
+        disabled_by=er.RegistryEntryDisabler.INTEGRATION,
+        hidden_by=er.RegistryEntryHider.INTEGRATION,
+    )
+    # Add back the third entity with different disabled_by and hidden_by settings
+    entry3_restored = entity_registry.async_get_or_create("light", "hue", "abcd")

-    assert len(entity_registry.entities) == 2
+    assert len(entity_registry.entities) == 3
     assert len(entity_registry.deleted_entities) == 0
     assert entry1 != entry1_restored
-    # entity_id and user customizations are not restored. new integration options are
+    # entity_id and user customizations are restored. new integration options are
     # respected.
     assert entry1_restored == er.RegistryEntry(
-        entity_id="light.suggested_2",
+        entity_id="light.custom_1",
         unique_id="1234",
         platform="hue",
+        aliases={"alias1", "alias2"},
+        area_id="12345A",
+        categories={"scope1": "id", "scope2": "id"},
         capabilities={"key2": "value2"},
         config_entry_id=config_entry.entry_id,
         config_subentry_id="mock-subentry-id-1-2",
         created_at=utcnow(),
-        device_class=None,
+        device_class="device_class_user",
         device_id=device_entry_2.id,
-        disabled_by=er.RegistryEntryDisabler.INTEGRATION,
+        disabled_by=er.RegistryEntryDisabler.USER,
         entity_category=EntityCategory.CONFIG,
         has_entity_name=False,
-        hidden_by=None,
-        icon=None,
+        hidden_by=er.RegistryEntryHider.USER,
+        icon="icon_user",
         id=entry1.id,
+        labels={"label1", "label2"},
         modified_at=utcnow(),
-        name=None,
-        options={"test_domain": {"key2": "value2"}},
+        name="Test Friendly Name",
+        options={"options_domain": {"key": "value"}, "test_domain": {"key1": "value1"}},
         original_device_class="device_class_2",
         original_icon="original_icon_2",
         original_name="original_name_2",
@@ -2594,14 +2673,21 @@ async def test_restore_entity(
     assert entry2 != entry2_restored
     # Config entry and subentry are not restored
     assert (
-        attr.evolve(entry2, config_entry_id=None, config_subentry_id=None)
+        attr.evolve(
+            entry2,
+            config_entry_id=None,
+            config_subentry_id=None,
+            disabled_by=None,
+            hidden_by=None,
+        )
         == entry2_restored
     )
+    assert entry3 == entry3_restored

     # Remove two of the entities again, then bump time
     entity_registry.async_remove(entry1_restored.entity_id)
     entity_registry.async_remove(entry2.entity_id)
-    assert len(entity_registry.entities) == 0
+    assert len(entity_registry.entities) == 1
     assert len(entity_registry.deleted_entities) == 2
     freezer.tick(timedelta(seconds=er.ORPHANED_ENTITY_KEEP_SECONDS + 1))
     async_fire_time_changed(hass)
@@ -2612,14 +2698,14 @@ async def test_restore_entity(
         "light", "hue", "1234", config_entry=config_entry
     )
     entry2_restored = entity_registry.async_get_or_create("light", "hue", "5678")
-    assert len(entity_registry.entities) == 2
+    assert len(entity_registry.entities) == 3
     assert len(entity_registry.deleted_entities) == 0
     assert entry1.id == entry1_restored.id
     assert entry2.id != entry2_restored.id

     # Remove the first entity, then its config entry, finally bump time
     entity_registry.async_remove(entry1_restored.entity_id)
-    assert len(entity_registry.entities) == 1
+    assert len(entity_registry.entities) == 2
     assert len(entity_registry.deleted_entities) == 1
     entity_registry.async_clear_config_entry(config_entry.entry_id)
     freezer.tick(timedelta(seconds=er.ORPHANED_ENTITY_KEEP_SECONDS + 1))
@@ -2630,39 +2716,36 @@ async def test_restore_entity(
     entry1_restored = entity_registry.async_get_or_create(
         "light", "hue", "1234", config_entry=config_entry
     )
-    assert len(entity_registry.entities) == 2
+    assert len(entity_registry.entities) == 3
     assert len(entity_registry.deleted_entities) == 0
     assert entry1.id != entry1_restored.id

     # Check the events
     await hass.async_block_till_done()
-    assert len(update_events) == 14
+    assert len(update_events) == 17
     assert update_events[0].data == {
         "action": "create",
         "entity_id": "light.suggested_1",
     }
     assert update_events[1].data == {"action": "create", "entity_id": "light.hue_5678"}
-    assert update_events[2].data["action"] == "update"
+    assert update_events[2].data == {"action": "create", "entity_id": "light.hue_abcd"}
     assert update_events[3].data["action"] == "update"
-    assert update_events[4].data == {"action": "remove", "entity_id": "light.custom_1"}
-    assert update_events[5].data == {"action": "remove", "entity_id": "light.hue_5678"}
+    assert update_events[4].data["action"] == "update"
+    assert update_events[5].data == {"action": "remove", "entity_id": "light.custom_1"}
+    assert update_events[6].data == {"action": "remove", "entity_id": "light.hue_5678"}
+    assert update_events[7].data == {"action": "remove", "entity_id": "light.hue_abcd"}
     # Restore entities the 1st time
-    assert update_events[6].data == {
-        "action": "create",
-        "entity_id": "light.suggested_2",
-    }
-    assert update_events[7].data == {"action": "create", "entity_id": "light.hue_5678"}
-    assert update_events[8].data == {
-        "action": "remove",
-        "entity_id": "light.suggested_2",
-    }
-    assert update_events[9].data == {"action": "remove", "entity_id": "light.hue_5678"}
+    assert update_events[8].data == {"action": "create", "entity_id": "light.custom_1"}
+    assert update_events[9].data == {"action": "create", "entity_id": "light.hue_5678"}
+    assert update_events[10].data == {"action": "create", "entity_id": "light.hue_abcd"}
+    assert update_events[11].data == {"action": "remove", "entity_id": "light.custom_1"}
+    assert update_events[12].data == {"action": "remove", "entity_id": "light.hue_5678"}
     # Restore entities the 2nd time
-    assert update_events[10].data == {"action": "create", "entity_id": "light.hue_1234"}
-    assert update_events[11].data == {"action": "create", "entity_id": "light.hue_5678"}
-    assert update_events[12].data == {"action": "remove", "entity_id": "light.hue_1234"}
+    assert update_events[13].data == {"action": "create", "entity_id": "light.custom_1"}
+    assert update_events[14].data == {"action": "create", "entity_id": "light.hue_5678"}
+    assert update_events[15].data == {"action": "remove", "entity_id": "light.custom_1"}
     # Restore entities the 3rd time
-    assert update_events[13].data == {"action": "create", "entity_id": "light.hue_1234"}
+    assert update_events[16].data == {"action": "create", "entity_id": "light.hue_1234"}


 async def test_async_migrate_entry_delete_self(
@@ -2763,6 +2846,49 @@ async def test_removing_labels(entity_registry: er.EntityRegistry) -> None:
     assert not entry_cleared_label2.labels


+async def test_removing_labels_deleted_entity(
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Make sure we can clear labels."""
+    entry1 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+    entry1 = entity_registry.async_update_entity(
+        entry1.entity_id, labels={"label1", "label2"}
+    )
+    entry2 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="1234"
+    )
+    entry2 = entity_registry.async_update_entity(entry2.entity_id, labels={"label3"})
+
+    entity_registry.async_remove(entry1.entity_id)
+    entity_registry.async_remove(entry2.entity_id)
+    entity_registry.async_clear_label_id("label1")
+    entry1_cleared_label1 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+
+    entity_registry.async_remove(entry1.entity_id)
+    entity_registry.async_clear_label_id("label2")
+    entry1_cleared_label2 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+    entry2_restored = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="1234"
+    )
+
+    assert entry1_cleared_label1
+    assert entry1_cleared_label2
+    assert entry1 != entry1_cleared_label1
+    assert entry1 != entry1_cleared_label2
+    assert entry1_cleared_label1 != entry1_cleared_label2
+    assert entry1.labels == {"label1", "label2"}
+    assert entry1_cleared_label1.labels == {"label2"}
+    assert not entry1_cleared_label2.labels
+    assert entry2 != entry2_restored
+    assert entry2_restored.labels == {"label3"}
+
+
 async def test_entries_for_label(entity_registry: er.EntityRegistry) -> None:
     """Test getting entity entries by label."""
     entity_registry.async_get_or_create(
@@ -2830,6 +2956,39 @@ async def test_removing_categories(entity_registry: er.EntityRegistry) -> None:
     assert not entry_cleared_scope2.categories


+async def test_removing_categories_deleted_entity(
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Make sure we can clear categories."""
+    entry = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+    entry = entity_registry.async_update_entity(
+        entry.entity_id, categories={"scope1": "id", "scope2": "id"}
+    )
+
+    entity_registry.async_remove(entry.entity_id)
+    entity_registry.async_clear_category_id("scope1", "id")
+    entry_cleared_scope1 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+
+    entity_registry.async_remove(entry.entity_id)
+    entity_registry.async_clear_category_id("scope2", "id")
+    entry_cleared_scope2 = entity_registry.async_get_or_create(
+        domain="light", platform="hue", unique_id="5678"
+    )
+
+    assert entry_cleared_scope1
+    assert entry_cleared_scope2
+    assert entry != entry_cleared_scope1
+    assert entry != entry_cleared_scope2
+    assert entry_cleared_scope1 != entry_cleared_scope2
+    assert entry.categories == {"scope1": "id", "scope2": "id"}
+    assert entry_cleared_scope1.categories == {"scope2": "id"}
+    assert not entry_cleared_scope2.categories
+
+
 async def test_entries_for_category(entity_registry: er.EntityRegistry) -> None:
     """Test getting entity entries by category."""
     entity_registry.async_get_or_create(