Compare commits

..

19 Commits

Author SHA1 Message Date
Stefan Agner
0888dcc1da Fix test_unix_socket_started_with_supervisor pytest 2026-03-17 11:45:27 +01:00
Stefan Agner
63bc4564b2 Merge branch 'dev' into use-unix-socket-for-supervisor 2026-03-17 11:08:32 +01:00
Stefan Agner
03817ccc07 Check for Supervisor user existence before starting Unix socket 2026-03-11 20:01:31 +01:00
Stefan Agner
f0c56d74a4 Use get_running_loop() in tests
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-03-11 19:36:34 +01:00
Stefan Agner
58d8824a44 Fail hard if Supervisor user does not exist 2026-03-11 19:04:02 +01:00
Stefan Agner
d93b45fe35 Create Unix socket only after hassio is loaded
This avoids a race condition where the Supervisor user has not been
created yet, which causes unix socket authentication bypass to fail.
2026-03-11 19:02:13 +01:00
Stefan Agner
88b9e6cd83 Move Unix socket websocket auth bypass into AuthPhase
Consolidate all connection-creation logic in the auth module by adding
async_handle_unix_socket() to AuthPhase, instead of constructing
ActiveConnection directly in http.py. This moves ActiveConnection back
to a TYPE_CHECKING-only import in http.py and keeps auth logic in one
place.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-11 18:39:13 +01:00
Stefan Agner
fdde93187a Handle missing refresh token id gracefully 2026-03-11 16:57:12 +01:00
Stefan Agner
da29f06c2c Move potentially blocking I/O into executor 2026-03-11 16:17:49 +01:00
Stefan Agner
cccb252b8d Add comment about why we delay start serving 2026-03-11 15:30:11 +01:00
Stefan Agner
ea556d65cb Improve removing Unix socket on shutdown
Handle OSErrors when removing the Unix socket on shutdown, and run
the unlink in the executor to avoid blocking the event loop.
2026-03-11 15:14:26 +01:00
Stefan Agner
f499a0b45b Extend docs and add comments to Unix socket authentication logic 2026-03-11 14:58:00 +01:00
Stefan Agner
95d76e8e80 Merge branch 'dev' into use-unix-socket-for-supervisor 2026-03-11 14:55:20 +01:00
Stefan Agner
c3be74c1cd Merge branch 'dev' into use-unix-socket-for-supervisor 2026-03-09 15:10:13 +01:00
Stefan Agner
b6be7a12b1 Patch loop instance instead of private asyncio class in tests
Replace patching asyncio.unix_events._UnixSelectorEventLoop with
patch.object on the running loop instance. This avoids depending
on a private CPython implementation detail.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-24 13:52:38 +01:00
Stefan Agner
72db92b17b Restrict Unix socket permissions before accepting connections
Create the socket with start_serving=False, chmod to 0600, then
start serving. This avoids a race window where the socket could
accept connections before permissions are restricted.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-24 13:43:02 +01:00
Stefan Agner
c5889082c0 Authenticate Unix socket requests as the Supervisor user
Requests arriving over the Unix socket are implicitly trusted and
authenticated as the Supervisor system user, removing the need for
token-based authentication on this channel. The ban middleware also
skips IP-based checks for Unix socket connections since there is no
remote IP address.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-24 00:06:22 +01:00
Stefan Agner
68d94badc6 Use SUPERVISOR_CORE_API_SOCKET env var for unix socket path
Replace the hardcoded socket path constant with the
SUPERVISOR_CORE_API_SOCKET environment variable, allowing
Supervisor to specify where Core should listen. Only absolute
paths are accepted; relative paths are rejected with an error.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-23 23:10:35 +01:00
Stefan Agner
275374ec0d Add Unix socket listener for Supervisor to Core communication
When running under Supervisor (detected via SUPERVISOR env var),
the HTTP server now additionally listens on a Unix socket at
/run/core/http.sock. This enables efficient local IPC between
Supervisor and Core without going through TCP.

The Unix socket shares the same aiohttp app and runner, so all
routes, middleware, and authentication are shared with the TCP
server. The socket is started before the TCP site and cleaned up
on shutdown.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-23 23:00:14 +01:00
1106 changed files with 29638 additions and 44933 deletions

View File

@@ -46,10 +46,19 @@ async def async_setup_entry(
api.AsyncConfigEntryAuth(aiohttp_client.async_get_clientsession(hass), session)
)
coordinator = AladdinConnectCoordinator(hass, entry, client)
await coordinator.async_config_entry_first_refresh()
try:
doors = await client.get_doors()
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(err) from err
raise ConfigEntryNotReady from err
except aiohttp.ClientError as err:
raise ConfigEntryNotReady from err
entry.runtime_data = coordinator
entry.runtime_data = {
door.unique_id: AladdinConnectCoordinator(hass, entry, client, door)
for door in doors
}
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -91,7 +100,7 @@ def remove_stale_devices(
device_entries = dr.async_entries_for_config_entry(
device_registry, config_entry.entry_id
)
all_device_ids = set(config_entry.runtime_data.data)
all_device_ids = set(config_entry.runtime_data)
for device_entry in device_entries:
device_id: str | None = None

View File

@@ -11,24 +11,22 @@ from genie_partner_sdk.model import GarageDoor
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
_LOGGER = logging.getLogger(__name__)
type AladdinConnectConfigEntry = ConfigEntry[AladdinConnectCoordinator]
type AladdinConnectConfigEntry = ConfigEntry[dict[str, AladdinConnectCoordinator]]
SCAN_INTERVAL = timedelta(seconds=15)
class AladdinConnectCoordinator(DataUpdateCoordinator[dict[str, GarageDoor]]):
class AladdinConnectCoordinator(DataUpdateCoordinator[GarageDoor]):
"""Coordinator for Aladdin Connect integration."""
config_entry: AladdinConnectConfigEntry
def __init__(
self,
hass: HomeAssistant,
entry: AladdinConnectConfigEntry,
client: AladdinConnectClient,
garage_door: GarageDoor,
) -> None:
"""Initialize the coordinator."""
super().__init__(
@@ -39,16 +37,18 @@ class AladdinConnectCoordinator(DataUpdateCoordinator[dict[str, GarageDoor]]):
update_interval=SCAN_INTERVAL,
)
self.client = client
self.data = garage_door
async def _async_update_data(self) -> dict[str, GarageDoor]:
async def _async_update_data(self) -> GarageDoor:
"""Fetch data from the Aladdin Connect API."""
try:
doors = await self.client.get_doors()
except aiohttp.ClientResponseError as err:
if 400 <= err.status < 500:
raise ConfigEntryAuthFailed(err) from err
raise UpdateFailed(f"Error communicating with API: {err}") from err
await self.client.update_door(self.data.device_id, self.data.door_number)
except aiohttp.ClientError as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
return {door.unique_id: door for door in doors}
self.data.status = self.client.get_door_status(
self.data.device_id, self.data.door_number
)
self.data.battery_level = self.client.get_battery_status(
self.data.device_id, self.data.door_number
)
return self.data

View File

@@ -7,7 +7,7 @@ from typing import Any
import aiohttp
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -24,22 +24,11 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the cover platform."""
coordinator = entry.runtime_data
known_devices: set[str] = set()
coordinators = entry.runtime_data
@callback
def _async_add_new_devices() -> None:
"""Detect and add entities for new doors."""
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AladdinCoverEntity(coordinator, door_id) for door_id in new_devices
)
_async_add_new_devices()
entry.async_on_unload(coordinator.async_add_listener(_async_add_new_devices))
async_add_entities(
AladdinCoverEntity(coordinator) for coordinator in coordinators.values()
)
class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
@@ -49,10 +38,10 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
_attr_supported_features = SUPPORTED_FEATURES
_attr_name = None
def __init__(self, coordinator: AladdinConnectCoordinator, door_id: str) -> None:
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
"""Initialize the Aladdin Connect cover."""
super().__init__(coordinator, door_id)
self._attr_unique_id = door_id
super().__init__(coordinator)
self._attr_unique_id = coordinator.data.unique_id
async def async_open_cover(self, **kwargs: Any) -> None:
"""Issue open command to cover."""
@@ -77,16 +66,16 @@ class AladdinCoverEntity(AladdinConnectEntity, CoverEntity):
@property
def is_closed(self) -> bool | None:
"""Update is closed attribute."""
if (status := self.door.status) is None:
if (status := self.coordinator.data.status) is None:
return None
return status == "closed"
@property
def is_closing(self) -> bool | None:
"""Update is closing attribute."""
return self.door.status == "closing"
return self.coordinator.data.status == "closing"
@property
def is_opening(self) -> bool | None:
"""Update is opening attribute."""
return self.door.status == "opening"
return self.coordinator.data.status == "opening"

View File

@@ -20,13 +20,13 @@ async def async_get_config_entry_diagnostics(
"config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT),
"doors": {
uid: {
"device_id": door.device_id,
"door_number": door.door_number,
"name": door.name,
"status": door.status,
"link_status": door.link_status,
"battery_level": door.battery_level,
"device_id": coordinator.data.device_id,
"door_number": coordinator.data.door_number,
"name": coordinator.data.name,
"status": coordinator.data.status,
"link_status": coordinator.data.link_status,
"battery_level": coordinator.data.battery_level,
}
for uid, door in config_entry.runtime_data.data.items()
for uid, coordinator in config_entry.runtime_data.items()
},
}

View File

@@ -1,7 +1,6 @@
"""Base class for Aladdin Connect entities."""
from genie_partner_sdk.client import AladdinConnectClient
from genie_partner_sdk.model import GarageDoor
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -15,28 +14,17 @@ class AladdinConnectEntity(CoordinatorEntity[AladdinConnectCoordinator]):
_attr_has_entity_name = True
def __init__(self, coordinator: AladdinConnectCoordinator, door_id: str) -> None:
def __init__(self, coordinator: AladdinConnectCoordinator) -> None:
"""Initialize Aladdin Connect entity."""
super().__init__(coordinator)
self._door_id = door_id
door = self.door
device = coordinator.data
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, door.unique_id)},
identifiers={(DOMAIN, device.unique_id)},
manufacturer="Aladdin Connect",
name=door.name,
name=device.name,
)
self._device_id = door.device_id
self._number = door.door_number
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self._door_id in self.coordinator.data
@property
def door(self) -> GarageDoor:
"""Return the garage door data."""
return self.coordinator.data[self._door_id]
self._device_id = device.device_id
self._number = device.door_number
@property
def client(self) -> AladdinConnectClient:

View File

@@ -57,7 +57,7 @@ rules:
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices: done
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done

View File

@@ -14,7 +14,7 @@ from homeassistant.components.sensor import (
SensorStateClass,
)
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AladdinConnectConfigEntry, AladdinConnectCoordinator
@@ -49,24 +49,13 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Aladdin Connect sensor devices."""
coordinator = entry.runtime_data
known_devices: set[str] = set()
coordinators = entry.runtime_data
@callback
def _async_add_new_devices() -> None:
"""Detect and add entities for new doors."""
current_devices = set(coordinator.data)
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
AladdinConnectSensor(coordinator, door_id, description)
for door_id in new_devices
for description in SENSOR_TYPES
)
_async_add_new_devices()
entry.async_on_unload(coordinator.async_add_listener(_async_add_new_devices))
async_add_entities(
AladdinConnectSensor(coordinator, description)
for coordinator in coordinators.values()
for description in SENSOR_TYPES
)
class AladdinConnectSensor(AladdinConnectEntity, SensorEntity):
@@ -77,15 +66,14 @@ class AladdinConnectSensor(AladdinConnectEntity, SensorEntity):
def __init__(
self,
coordinator: AladdinConnectCoordinator,
door_id: str,
entity_description: AladdinConnectSensorEntityDescription,
) -> None:
"""Initialize the Aladdin Connect sensor."""
super().__init__(coordinator, door_id)
super().__init__(coordinator)
self.entity_description = entity_description
self._attr_unique_id = f"{door_id}-{entity_description.key}"
self._attr_unique_id = f"{coordinator.data.unique_id}-{entity_description.key}"
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.door)
return self.entity_description.value_fn(self.coordinator.data)

View File

@@ -153,8 +153,8 @@ def websocket_get_entities(
{
vol.Required("type"): "config/entity_registry/update",
vol.Required("entity_id"): cv.entity_id,
vol.Optional("aliases"): [vol.Any(str, None)],
# If passed in, we update value. Passing None will remove old value.
vol.Optional("aliases"): list,
vol.Optional("area_id"): vol.Any(str, None),
# Categories is a mapping of key/value (scope/category_id) pairs.
# If passed in, we update/adjust only the provided scope(s).
@@ -225,15 +225,10 @@ def websocket_update_entity(
changes[key] = msg[key]
if "aliases" in msg:
# Sanitize aliases by removing:
# - Trailing and leading whitespace characters in the individual aliases
# Create a set for the aliases without:
# - Empty strings
changes["aliases"] = aliases = []
for alias in msg["aliases"]:
if alias is None:
aliases.append(er.COMPUTED_NAME)
elif alias := alias.strip():
aliases.append(alias)
# - Trailing and leading whitespace characters in the individual aliases
changes["aliases"] = {s_strip for s in msg["aliases"] if (s_strip := s.strip())}
if "labels" in msg:
# Convert labels to a set

View File

@@ -992,11 +992,18 @@ class DefaultAgent(ConversationEntity):
continue
context[attr] = state.attributes[attr]
entity_entry = entity_registry.async_get(state.entity_id)
for name in intent.async_get_entity_aliases(
self.hass, entity_entry, state=state
):
yield (name, name, context)
if (
entity := entity_registry.async_get(state.entity_id)
) and entity.aliases:
for alias in entity.aliases:
alias = alias.strip()
if not alias:
continue
yield (alias, alias, context)
# Default name
yield (state.name, state.name, context)
def _recognize_strict(
self,

View File

@@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.condition import Condition, EntityConditionBase
from .const import ATTR_IS_CLOSED, DOMAIN, CoverDeviceClass
from .models import CoverDomainSpec
from .trigger import CoverDomainSpec
class CoverConditionBase(EntityConditionBase[CoverDomainSpec]):

View File

@@ -1,12 +0,0 @@
"""Data models for the cover integration."""
from dataclasses import dataclass
from homeassistant.helpers.automation import DomainSpec
@dataclass(frozen=True, slots=True)
class CoverDomainSpec(DomainSpec):
"""DomainSpec with a target value for comparison."""
target_value: str | bool | None = None

View File

@@ -1,11 +1,20 @@
"""Provides triggers for covers."""
from dataclasses import dataclass
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
from .const import ATTR_IS_CLOSED, DOMAIN, CoverDeviceClass
from .models import CoverDomainSpec
@dataclass(frozen=True, slots=True)
class CoverDomainSpec(DomainSpec):
"""DomainSpec with a target value for comparison."""
target_value: str | bool | None = None
class CoverTriggerBase(EntityTriggerBase[CoverDomainSpec]):

View File

@@ -9,12 +9,9 @@ from typing import Any
from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.event import async_track_utc_time_change
from . import DOMAIN
OPEN_CLOSE_DELAY = 2 # Used to give a realistic open/close experience in frontend
@@ -26,10 +23,10 @@ async def async_setup_entry(
"""Set up the Demo config entry."""
async_add_entities(
[
DemoValve("valve_1", "Front Garden", ValveState.OPEN),
DemoValve("valve_2", "Orchard", ValveState.CLOSED),
DemoValve("valve_3", "Back Garden", ValveState.CLOSED, position=70),
DemoValve("valve_4", "Trees", ValveState.CLOSED, position=30),
DemoValve("Front Garden", ValveState.OPEN),
DemoValve("Orchard", ValveState.CLOSED),
DemoValve("Back Garden", ValveState.CLOSED, position=70),
DemoValve("Trees", ValveState.CLOSED, position=30),
]
)
@@ -37,24 +34,17 @@ async def async_setup_entry(
class DemoValve(ValveEntity):
"""Representation of a Demo valve."""
_attr_has_entity_name = True
_attr_name = None
_attr_should_poll = False
def __init__(
self,
unique_id: str,
name: str,
state: str,
moveable: bool = True,
position: int | None = None,
) -> None:
"""Initialize the valve."""
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
name=name,
)
self._attr_name = name
if moveable:
self._attr_supported_features = (
ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE

View File

@@ -48,7 +48,7 @@ def async_redact_data[_T](data: _T, to_redact: Iterable[Any]) -> _T:
def _entity_entry_filter(a: attr.Attribute, _: Any) -> bool:
return a.name not in ("_cache", "compat_aliases", "compat_name")
return a.name != "_cache"
@callback

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from aiohttp import ClientError
@@ -57,42 +56,3 @@ class FreshrFlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, _user_input: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauthentication."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauthentication confirmation."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
client = FreshrClient(session=async_get_clientsession(self.hass))
try:
await client.login(
reauth_entry.data[CONF_USERNAME], user_input[CONF_PASSWORD]
)
except LoginError:
errors["base"] = "invalid_auth"
except ClientError:
errors["base"] = "cannot_connect"
except Exception: # noqa: BLE001
LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
description_placeholders={CONF_USERNAME: reauth_entry.data[CONF_USERNAME]},
errors=errors,
)

View File

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/freshr",
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"quality_scale": "bronze",
"requirements": ["pyfreshr==1.2.0"]
}

View File

@@ -36,7 +36,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
reauthentication-flow: todo
test-coverage: done
# Gold

View File

@@ -2,7 +2,9 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "Cannot change the account username."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -10,15 +12,6 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::freshr::config::step::user::data_description::password%]"
},
"description": "Re-enter the password for your Fresh-r account `{username}`."
},
"user": {
"data": {
"password": "[%key:common::config_flow::data::password%]",

View File

@@ -4,9 +4,9 @@ set_guest_wifi_password:
required: true
selector:
device:
integration: fritz
entity:
integration: fritz
domain: update
device_class: connectivity
password:
required: false
selector:
@@ -23,9 +23,9 @@ dial:
required: true
selector:
device:
integration: fritz
entity:
integration: fritz
domain: update
device_class: connectivity
number:
required: true
selector:

View File

@@ -29,7 +29,6 @@ from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
intent,
start,
)
from homeassistant.helpers.event import async_call_later
@@ -598,6 +597,7 @@ class GoogleEntity:
state = self.state
traits = self.traits()
entity_config = self.config.entity_config.get(state.entity_id, {})
name = (entity_config.get(CONF_NAME) or state.name).strip()
# Find entity/device/area registry entries
entity_entry, device_entry, area_entry = _get_registry_entries(
@@ -607,6 +607,7 @@ class GoogleEntity:
# Build the device info
device = {
"id": state.entity_id,
"name": {"name": name},
"attributes": {},
"traits": [trait.name for trait in traits],
"willReportState": self.config.should_report_state,
@@ -614,18 +615,13 @@ class GoogleEntity:
state.domain, state.attributes.get(ATTR_DEVICE_CLASS)
),
}
# Add name and aliases.
# The entity's alias list is ordered: the first slot naturally serves
# as the primary name (set to the auto-generated full entity name by
# default), while the rest serve as alternative names (nicknames).
aliases = intent.async_get_entity_aliases(
self.hass, entity_entry, state=state, allow_empty=False
)
name, *aliases = aliases
name = entity_config.get(CONF_NAME) or name
device["name"] = {"name": name}
if (config_aliases := entity_config.get(CONF_ALIASES, [])) or aliases:
device["name"]["nicknames"] = [name, *config_aliases, *aliases]
# Add aliases
if (config_aliases := entity_config.get(CONF_ALIASES, [])) or (
entity_entry and entity_entry.aliases
):
device["name"]["nicknames"] = [name, *config_aliases]
if entity_entry:
device["name"]["nicknames"].extend(entity_entry.aliases)
# Add local SDK info if enabled
if self.config.is_local_sdk_active and self.should_expose_local():

View File

@@ -7,6 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["growattServer"],
"quality_scale": "silver",
"requirements": ["growattServer==1.9.0"]
}

View File

@@ -89,18 +89,18 @@
"step": {
"advanced": {
"data": {
"api_key": "API token",
"api_key": "API Token",
"api_user": "User ID",
"url": "[%key:common::config_flow::data::url%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"api_key": "API token of the Habitica account",
"api_key": "API Token of the Habitica account",
"api_user": "User ID of your Habitica account",
"url": "URL of the Habitica installation to connect to. Defaults to `{default_url}`",
"verify_ssl": "Enable SSL certificate verification for secure connections. Disable only if connecting to a Habitica instance using a self-signed certificate"
},
"description": "You can retrieve your 'User ID' and 'API token' from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to",
"description": "You can retrieve your `User ID` and `API Token` from [**Settings -> Site Data**]({site_data}) on Habitica or the instance you want to connect to",
"title": "[%key:component::habitica::config::step::user::menu_options::advanced%]"
},
"login": {
@@ -126,7 +126,7 @@
"api_key": "[%key:component::habitica::config::step::advanced::data_description::api_key%]"
},
"description": "Enter your new API token below. You can find it in Habitica under 'Settings -> Site Data'",
"name": "Re-authorize via API token"
"name": "Re-authorize via API Token"
},
"reauth_login": {
"data": {

View File

@@ -965,7 +965,7 @@ class HKDevice:
# visible on the network.
self.async_set_available_state(False)
return
except (AccessoryDisconnectedError, EncryptionError, TimeoutError):
except (AccessoryDisconnectedError, EncryptionError):
# Temporary connection failure. Device may still available but our
# connection was dropped or we are reconnecting
self._poll_failures += 1

View File

@@ -10,6 +10,7 @@ from functools import partial
from ipaddress import IPv4Network, IPv6Network, ip_network
import logging
import os
from pathlib import Path
import socket
import ssl
from tempfile import NamedTemporaryFile
@@ -33,6 +34,7 @@ from homeassistant.components.network import async_get_source_ip
from homeassistant.const import (
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
HASSIO_USER_NAME,
SERVER_PORT,
)
from homeassistant.core import Event, HomeAssistant, callback
@@ -69,7 +71,7 @@ from .headers import setup_headers
from .request_context import setup_request_context
from .security_filter import setup_security_filter
from .static import CACHE_HEADERS, CachingStaticResource
from .web_runner import HomeAssistantTCPSite
from .web_runner import HomeAssistantTCPSite, HomeAssistantUnixSite
CONF_SERVER_HOST: Final = "server_host"
CONF_SERVER_PORT: Final = "server_port"
@@ -235,6 +237,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
source_ip_task = create_eager_task(async_get_source_ip(hass))
unix_socket_path: Path | None = None
if socket_env := os.environ.get("SUPERVISOR_CORE_API_SOCKET"):
socket_path = Path(socket_env)
if socket_path.is_absolute():
unix_socket_path = socket_path
else:
_LOGGER.error(
"Invalid unix socket path %s: path must be absolute", socket_env
)
server = HomeAssistantHTTP(
hass,
server_host=server_host,
@@ -244,6 +256,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
ssl_key=ssl_key,
trusted_proxies=trusted_proxies,
ssl_profile=ssl_profile,
unix_socket_path=unix_socket_path,
)
await server.async_initialize(
cors_origins=cors_origins,
@@ -267,6 +280,21 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_when_setup_or_start(hass, "frontend", start_server)
if server.unix_socket_path is not None:
async def start_unix_socket(*_: Any) -> None:
"""Start the Unix socket after the Supervisor user is available."""
if any(
user
for user in await hass.auth.async_get_users()
if user.system_generated and user.name == HASSIO_USER_NAME
):
await server.async_start_unix_socket()
else:
_LOGGER.error("Supervisor user not found; not starting Unix socket")
async_when_setup_or_start(hass, "hassio", start_unix_socket)
hass.http = server
local_ip = await source_ip_task
@@ -366,6 +394,7 @@ class HomeAssistantHTTP:
server_port: int,
trusted_proxies: list[IPv4Network | IPv6Network],
ssl_profile: str,
unix_socket_path: Path | None = None,
) -> None:
"""Initialize the HTTP Home Assistant server."""
self.app = HomeAssistantApplication(
@@ -384,8 +413,10 @@ class HomeAssistantHTTP:
self.server_port = server_port
self.trusted_proxies = trusted_proxies
self.ssl_profile = ssl_profile
self.unix_socket_path = unix_socket_path
self.runner: web.AppRunner | None = None
self.site: HomeAssistantTCPSite | None = None
self.unix_site: HomeAssistantUnixSite | None = None
self.context: ssl.SSLContext | None = None
async def async_initialize(
@@ -610,6 +641,29 @@ class HomeAssistantHTTP:
context.load_cert_chain(cert_pem.name, key_pem.name)
return context
async def async_start_unix_socket(self) -> None:
"""Start listening on the Unix socket.
This is called separately from start() to delay serving the Unix
socket until the Supervisor user exists (created by the hassio
integration). Without this delay, Supervisor could connect before
its user is available and receive 401 responses it won't retry.
"""
if self.unix_socket_path is None or self.runner is None:
return
self.unix_site = HomeAssistantUnixSite(self.runner, self.unix_socket_path)
try:
await self.unix_site.start()
except OSError as error:
_LOGGER.error(
"Failed to create HTTP server on unix socket %s: %s",
self.unix_socket_path,
error,
)
self.unix_site = None
else:
_LOGGER.info("Now listening on unix socket %s", self.unix_socket_path)
async def start(self) -> None:
"""Start the aiohttp server."""
# Aiohttp freezes apps after start so that no changes can be made.
@@ -637,6 +691,19 @@ class HomeAssistantHTTP:
async def stop(self) -> None:
"""Stop the aiohttp server."""
if self.unix_site is not None:
await self.unix_site.stop()
if self.unix_socket_path is not None:
try:
await self.hass.async_add_executor_job(
self.unix_socket_path.unlink, True
)
except OSError as err:
_LOGGER.warning(
"Could not remove unix socket %s: %s",
self.unix_socket_path,
err,
)
if self.site is not None:
await self.site.stop()
if self.runner is not None:

View File

@@ -11,7 +11,13 @@ import time
from typing import Any, Final
from aiohttp import hdrs
from aiohttp.web import Application, Request, StreamResponse, middleware
from aiohttp.web import (
Application,
HTTPInternalServerError,
Request,
StreamResponse,
middleware,
)
import jwt
from jwt import api_jws
from yarl import URL
@@ -20,6 +26,7 @@ from homeassistant.auth import jwt_wrapper
from homeassistant.auth.const import GROUP_ID_READ_ONLY
from homeassistant.auth.models import User
from homeassistant.components import websocket_api
from homeassistant.const import HASSIO_USER_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.http import current_request
from homeassistant.helpers.json import json_bytes
@@ -27,7 +34,12 @@ from homeassistant.helpers.network import is_cloud_connection
from homeassistant.helpers.storage import Store
from homeassistant.util.network import is_local
from .const import KEY_AUTHENTICATED, KEY_HASS_REFRESH_TOKEN_ID, KEY_HASS_USER
from .const import (
KEY_AUTHENTICATED,
KEY_HASS_REFRESH_TOKEN_ID,
KEY_HASS_USER,
is_unix_socket_request,
)
_LOGGER = logging.getLogger(__name__)
@@ -117,7 +129,7 @@ def async_user_not_allowed_do_auth(
return "User cannot authenticate remotely"
async def async_setup_auth(
async def async_setup_auth( # noqa: C901
hass: HomeAssistant,
app: Application,
) -> None:
@@ -207,6 +219,41 @@ async def async_setup_auth(
request[KEY_HASS_REFRESH_TOKEN_ID] = refresh_token.id
return True
supervisor_user_id: str | None = None
async def async_authenticate_unix_socket(request: Request) -> bool:
    """Authenticate a request from a Unix socket as the Supervisor user.

    The Unix socket is dedicated and only available to Supervisor. To
    avoid the extra overhead and round trips for the authentication and
    refresh tokens, we directly authenticate requests from the socket as
    the Supervisor user.

    Sets KEY_HASS_USER on the request and returns True on success.
    Raises HTTPInternalServerError if the Supervisor user does not
    exist, which should never happen once hassio has loaded.
    """
    nonlocal supervisor_user_id
    # Fast path: use cached user ID
    if supervisor_user_id is not None:
        if user := await hass.auth.async_get_user(supervisor_user_id):
            request[KEY_HASS_USER] = user
            return True
        # Cached ID no longer resolves to a user (e.g. user was removed);
        # invalidate the cache and fall through to the slow path below.
        supervisor_user_id = None
    # Slow path: find the Supervisor user by name
    for user in await hass.auth.async_get_users():
        if user.system_generated and user.name == HASSIO_USER_NAME:
            supervisor_user_id = user.id
            # Not setting KEY_HASS_REFRESH_TOKEN_ID since Supervisor user
            # doesn't use refresh tokens.
            request[KEY_HASS_USER] = user
            return True
    # The Unix socket should not be serving before the hassio integration
    # has created the Supervisor user. If we get here, something is wrong.
    _LOGGER.error(
        "Supervisor user not found; cannot authenticate Unix socket request"
    )
    raise HTTPInternalServerError
@middleware
async def auth_middleware(
request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
@@ -214,7 +261,11 @@ async def async_setup_auth(
"""Authenticate as middleware."""
authenticated = False
if hdrs.AUTHORIZATION in request.headers and async_validate_auth_header(
if is_unix_socket_request(request):
authenticated = await async_authenticate_unix_socket(request)
auth_type = "unix socket"
elif hdrs.AUTHORIZATION in request.headers and async_validate_auth_header(
request
):
authenticated = True
@@ -233,7 +284,7 @@ async def async_setup_auth(
if authenticated and _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug(
"Authenticated %s for %s using %s",
request.remote,
request.remote or "unknown",
request.path,
auth_type,
)

View File

@@ -30,7 +30,7 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import get_supervisor_ip, is_hassio
from homeassistant.util import dt as dt_util, yaml as yaml_util
from .const import KEY_HASS
from .const import KEY_HASS, is_unix_socket_request
from .view import HomeAssistantView
_LOGGER: Final = logging.getLogger(__name__)
@@ -72,6 +72,10 @@ async def ban_middleware(
request: Request, handler: Callable[[Request], Awaitable[StreamResponse]]
) -> StreamResponse:
"""IP Ban middleware."""
# Unix socket connections are trusted, skip ban checks
if is_unix_socket_request(request):
return await handler(request)
if (ban_manager := request.app.get(KEY_BAN_MANAGER)) is None:
_LOGGER.error("IP Ban middleware loaded but banned IPs not loaded")
return await handler(request)

View File

@@ -1,10 +1,22 @@
"""HTTP specific constants."""
import socket
from typing import Final
from aiohttp.web import Request
from homeassistant.helpers.http import KEY_AUTHENTICATED, KEY_HASS # noqa: F401
DOMAIN: Final = "http"
KEY_HASS_USER: Final = "hass_user"
KEY_HASS_REFRESH_TOKEN_ID: Final = "hass_refresh_token_id"
def is_unix_socket_request(request: Request) -> bool:
    """Return True when the request was received over a Unix domain socket."""
    transport = request.transport
    if transport is None:
        return False
    raw_socket = transport.get_extra_info("socket")
    return raw_socket is not None and raw_socket.family == socket.AF_UNIX

View File

@@ -3,6 +3,8 @@
from __future__ import annotations
import asyncio
from pathlib import Path
import socket
from ssl import SSLContext
from aiohttp import web
@@ -68,3 +70,62 @@ class HomeAssistantTCPSite(web.BaseSite):
reuse_address=self._reuse_address,
reuse_port=self._reuse_port,
)
class HomeAssistantUnixSite(web.BaseSite):
    """HomeAssistant specific aiohttp UnixSite.

    Listens on a Unix socket for local inter-process communication,
    used for Supervisor to Core communication.
    """

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: web.BaseRunner,
        path: Path,
        *,
        backlog: int = 128,
    ) -> None:
        """Initialize HomeAssistantUnixSite."""
        super().__init__(
            runner,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        """Return server URL."""
        return f"http://unix:{self._path}:"

    def _create_unix_socket(self) -> socket.socket:
        """Create and bind a Unix domain socket.

        Performs blocking filesystem I/O (mkdir, unlink, chmod) and is
        intended to be run in an executor. Permissions are set after bind
        but before the socket is handed to the event loop, so no
        connections can arrive on an unrestricted socket.
        """
        self._path.parent.mkdir(parents=True, exist_ok=True)
        # Remove a stale socket file left behind by a previous run.
        self._path.unlink(missing_ok=True)
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        try:
            sock.bind(str(self._path))
        except OSError:
            sock.close()
            raise
        try:
            self._path.chmod(0o600)
        except OSError:
            # Don't leak the socket object or leave a socket file with
            # unrestricted permissions behind if chmod fails.
            sock.close()
            self._path.unlink(missing_ok=True)
            raise
        return sock

    async def start(self) -> None:
        """Start server."""
        await super().start()
        loop = asyncio.get_running_loop()
        # Socket creation performs blocking filesystem I/O; keep it off
        # the event loop.
        sock = await loop.run_in_executor(None, self._create_unix_socket)
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_unix_server(
            server, sock=sock, backlog=self._backlog
        )

View File

@@ -8,7 +8,6 @@ from homeassistant.core import HomeAssistant
from .coordinator import IndevoltConfigEntry, IndevoltCoordinator
PLATFORMS: list[Platform] = [
Platform.BUTTON,
Platform.NUMBER,
Platform.SELECT,
Platform.SENSOR,

View File

@@ -1,70 +0,0 @@
"""Button platform for Indevolt integration."""
from __future__ import annotations
from dataclasses import dataclass, field
from typing import Final
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import IndevoltConfigEntry
from .coordinator import IndevoltCoordinator
from .entity import IndevoltEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class IndevoltButtonEntityDescription(ButtonEntityDescription):
"""Custom entity description class for Indevolt button entities."""
generation: list[int] = field(default_factory=lambda: [1, 2])
BUTTONS: Final = (
IndevoltButtonEntityDescription(
key="stop",
translation_key="stop",
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: IndevoltConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the button platform for Indevolt."""
coordinator = entry.runtime_data
device_gen = coordinator.generation
# Button initialization
async_add_entities(
IndevoltButtonEntity(coordinator=coordinator, description=description)
for description in BUTTONS
if device_gen in description.generation
)
class IndevoltButtonEntity(IndevoltEntity, ButtonEntity):
"""Represents a button entity for Indevolt devices."""
entity_description: IndevoltButtonEntityDescription
def __init__(
self,
coordinator: IndevoltCoordinator,
description: IndevoltButtonEntityDescription,
) -> None:
"""Initialize the Indevolt button entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{self.serial_number}_{description.key}"
async def async_press(self) -> None:
"""Handle the button press."""
await self.coordinator.async_execute_realtime_action([0, 0, 0])

View File

@@ -1,27 +1,16 @@
"""Constants for the Indevolt integration."""
from typing import Final
DOMAIN: Final = "indevolt"
# Default configurations
DEFAULT_PORT: Final = 8080
DOMAIN = "indevolt"
# Config entry fields
CONF_SERIAL_NUMBER: Final = "serial_number"
CONF_GENERATION: Final = "generation"
CONF_SERIAL_NUMBER = "serial_number"
CONF_GENERATION = "generation"
# API write/read keys for energy and value for outdoor/portable mode
ENERGY_MODE_READ_KEY: Final = "7101"
ENERGY_MODE_WRITE_KEY: Final = "47005"
PORTABLE_MODE: Final = 0
# API write key and value for real-time control mode
REALTIME_ACTION_KEY: Final = "47015"
REALTIME_ACTION_MODE: Final = 4
# Default values
DEFAULT_PORT = 8080
# API key fields
SENSOR_KEYS: Final[dict[int, list[str]]] = {
SENSOR_KEYS = {
1: [
"606",
"7101",

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any, Final
from typing import Any
from aiohttp import ClientError
from indevolt_api import IndevoltAPI, TimeOutException
@@ -21,37 +21,20 @@ from .const import (
CONF_SERIAL_NUMBER,
DEFAULT_PORT,
DOMAIN,
ENERGY_MODE_READ_KEY,
ENERGY_MODE_WRITE_KEY,
PORTABLE_MODE,
REALTIME_ACTION_KEY,
REALTIME_ACTION_MODE,
SENSOR_KEYS,
)
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL: Final = 30
SCAN_INTERVAL = 30
type IndevoltConfigEntry = ConfigEntry[IndevoltCoordinator]
class DeviceTimeoutError(HomeAssistantError):
"""Raised when device push times out."""
class DeviceConnectionError(HomeAssistantError):
"""Raised when device push fails due to connection issues."""
class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Coordinator for fetching and pushing data to indevolt devices."""
friendly_name: str
config_entry: IndevoltConfigEntry
firmware_version: str | None
serial_number: str
device_model: str
generation: int
def __init__(self, hass: HomeAssistant, entry: IndevoltConfigEntry) -> None:
"""Initialize the indevolt coordinator."""
@@ -70,7 +53,6 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
session=async_get_clientsession(hass),
)
self.friendly_name = entry.title
self.serial_number = entry.data[CONF_SERIAL_NUMBER]
self.device_model = entry.data[CONF_MODEL]
self.generation = entry.data[CONF_GENERATION]
@@ -103,67 +85,6 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
try:
return await self.api.set_data(sensor_key, value)
except TimeOutException as err:
raise DeviceTimeoutError(f"Device push timed out: {err}") from err
raise HomeAssistantError(f"Device push timed out: {err}") from err
except (ClientError, ConnectionError, OSError) as err:
raise DeviceConnectionError(f"Device push failed: {err}") from err
async def async_switch_energy_mode(
self, target_mode: int, refresh: bool = True
) -> None:
"""Attempt to switch device to given energy mode."""
current_mode = self.data.get(ENERGY_MODE_READ_KEY)
# Ensure current energy mode is known
if current_mode is None:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_retrieve_current_energy_mode",
)
# Ensure device is not in "Outdoor/Portable mode"
if current_mode == PORTABLE_MODE:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="energy_mode_change_unavailable_outdoor_portable",
)
# Switch energy mode if required
if current_mode != target_mode:
try:
success = await self.async_push_data(ENERGY_MODE_WRITE_KEY, target_mode)
except (DeviceTimeoutError, DeviceConnectionError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_switch_energy_mode",
) from err
if not success:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_switch_energy_mode",
)
if refresh:
await self.async_request_refresh()
async def async_execute_realtime_action(self, action: list[int]) -> None:
"""Switch mode, execute action, and refresh for real-time control."""
await self.async_switch_energy_mode(REALTIME_ACTION_MODE, refresh=False)
try:
success = await self.async_push_data(REALTIME_ACTION_KEY, action)
except (DeviceTimeoutError, DeviceConnectionError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_execute_realtime_action",
) from err
if not success:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="failed_to_execute_realtime_action",
)
await self.async_request_refresh()
raise HomeAssistantError(f"Device push failed: {err}") from err

View File

@@ -35,11 +35,6 @@
}
},
"entity": {
"button": {
"stop": {
"name": "Enable standby mode"
}
},
"number": {
"discharge_limit": {
"name": "Discharge limit"
@@ -294,19 +289,5 @@
"name": "LED indicator"
}
}
},
"exceptions": {
"energy_mode_change_unavailable_outdoor_portable": {
"message": "Energy mode cannot be changed when the device is in outdoor/portable mode"
},
"failed_to_execute_realtime_action": {
"message": "Failed to execute real-time action"
},
"failed_to_retrieve_current_energy_mode": {
"message": "Failed to retrieve current energy mode"
},
"failed_to_switch_energy_mode": {
"message": "Failed to switch to requested energy mode"
}
}
}

View File

@@ -4,21 +4,11 @@ from typing import Any
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers import device_registry as dr
from .client_wrapper import CannotConnect, InvalidAuth, create_client, validate_input
from .const import CONF_CLIENT_DEVICE_ID, DEFAULT_NAME, DOMAIN, PLATFORMS
from .coordinator import JellyfinConfigEntry, JellyfinDataUpdateCoordinator
from .services import async_setup_services
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Jellyfin component."""
await async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: JellyfinConfigEntry) -> bool:

View File

@@ -38,8 +38,6 @@ PLAYABLE_MEDIA_TYPES = [
MediaType.EPISODE,
MediaType.MOVIE,
MediaType.MUSIC,
MediaType.SEASON,
MediaType.TVSHOW,
]
@@ -100,8 +98,8 @@ async def build_item_response(
media_content_id: str,
) -> BrowseMedia:
"""Create response payload for the provided media query."""
title, media, thumbnail, media_type = await get_media_info(
hass, client, user_id, media_content_id
title, media, thumbnail = await get_media_info(
hass, client, user_id, media_content_type, media_content_id
)
if title is None or media is None:
@@ -113,12 +111,12 @@ async def build_item_response(
response = BrowseMedia(
media_class=CONTAINER_TYPES_SPECIFIC_MEDIA_CLASS.get(
str(media_type), MediaClass.DIRECTORY
str(media_content_type), MediaClass.DIRECTORY
),
media_content_id=media_content_id,
media_content_type=str(media_type),
media_content_type=str(media_content_type),
title=title,
can_play=bool(media_type in PLAYABLE_MEDIA_TYPES and media_content_id),
can_play=bool(media_content_type in PLAYABLE_MEDIA_TYPES and media_content_id),
can_expand=True,
children=children,
thumbnail=thumbnail,
@@ -209,18 +207,18 @@ async def get_media_info(
hass: HomeAssistant,
client: JellyfinClient,
user_id: str,
media_content_type: str | None,
media_content_id: str,
) -> tuple[str | None, list[dict[str, Any]] | None, str | None, str | None]:
) -> tuple[str | None, list[dict[str, Any]] | None, str | None]:
"""Fetch media info."""
thumbnail: str | None = None
title: str | None = None
media: list[dict[str, Any]] | None = None
media_type: str | None = None
item = await hass.async_add_executor_job(fetch_item, client, media_content_id)
if item is None:
return None, None, None, None
return None, None, None
title = item["Name"]
thumbnail = get_artwork_url(client, item)
@@ -233,6 +231,4 @@ async def get_media_info(
if not media or len(media) == 0:
media = None
media_type = CONTENT_TYPE_MAP.get(item["Type"], MEDIA_TYPE_NONE)
return title, media, thumbnail, media_type
return title, media, thumbnail

View File

@@ -74,10 +74,9 @@ MEDIA_CLASS_MAP = {
"MusicAlbum": MediaClass.ALBUM,
"MusicArtist": MediaClass.ARTIST,
"Audio": MediaClass.MUSIC,
"Series": MediaClass.TV_SHOW,
"Series": MediaClass.DIRECTORY,
"Movie": MediaClass.MOVIE,
"CollectionFolder": MediaClass.DIRECTORY,
"AggregateFolder": MediaClass.DIRECTORY,
"Folder": MediaClass.DIRECTORY,
"BoxSet": MediaClass.DIRECTORY,
"Episode": MediaClass.EPISODE,

View File

@@ -5,10 +5,5 @@
"default": "mdi:television-play"
}
}
},
"services": {
"play_media_shuffle": {
"service": "mdi:shuffle-variant"
}
}
}

View File

@@ -6,9 +6,7 @@ import logging
from typing import Any
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE,
BrowseMedia,
MediaPlayerEnqueue,
MediaPlayerEntity,
MediaPlayerEntityFeature,
MediaPlayerState,
@@ -205,7 +203,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
| MediaPlayerEntityFeature.STOP
| MediaPlayerEntityFeature.SEEK
| MediaPlayerEntityFeature.SEARCH_MEDIA
| MediaPlayerEntityFeature.MEDIA_ENQUEUE
)
if "Mute" in commands and "Unmute" in commands:
@@ -248,20 +245,8 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
self, media_type: MediaType | str, media_id: str, **kwargs: Any
) -> None:
"""Play a piece of media."""
command = "PlayNow"
enqueue = kwargs.get(ATTR_MEDIA_ENQUEUE)
if enqueue == MediaPlayerEnqueue.NEXT:
command = "PlayNext"
elif enqueue == MediaPlayerEnqueue.ADD:
command = "PlayLast"
self.coordinator.api_client.jellyfin.remote_play_media(
self.session_id, [media_id], command
)
def play_media_shuffle(self, media_content_id: str) -> None:
"""Play a piece of media on shuffle."""
self.coordinator.api_client.jellyfin.remote_play_media(
self.session_id, [media_content_id], "PlayShuffle"
self.session_id, [media_id]
)
def set_volume_level(self, volume: float) -> None:

View File

@@ -1,55 +0,0 @@
"""Services for the Jellyfin integration."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA,
ATTR_MEDIA_CONTENT_ID,
DOMAIN as MP_DOMAIN,
MediaPlayerEntityFeature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, service
from .const import DOMAIN
JELLYFIN_PLAY_MEDIA_SHUFFLE_SCHEMA = {
vol.Required(ATTR_MEDIA_CONTENT_ID): cv.string,
}
def _promote_media_fields(data: dict[str, Any]) -> dict[str, Any]:
    """Promote fields nested under the 'media' key to the top level of data."""
    if isinstance(data.get(ATTR_MEDIA), dict):
        # Reject ambiguous calls that supply the content id both nested
        # and at the top level.
        if ATTR_MEDIA_CONTENT_ID in data:
            raise vol.Invalid(
                f"Play media cannot contain both '{ATTR_MEDIA}' and '{ATTR_MEDIA_CONTENT_ID}'"
            )
        media_payload = data.pop(ATTR_MEDIA)
        if ATTR_MEDIA_CONTENT_ID in media_payload:
            data[ATTR_MEDIA_CONTENT_ID] = media_payload[ATTR_MEDIA_CONTENT_ID]
    return data
async def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for the Jellyfin component."""
service.async_register_platform_entity_service(
hass,
DOMAIN,
"play_media_shuffle",
entity_domain=MP_DOMAIN,
schema=vol.All(
_promote_media_fields,
cv.make_entity_service_schema(JELLYFIN_PLAY_MEDIA_SHUFFLE_SCHEMA),
),
func="play_media_shuffle",
required_features=MediaPlayerEntityFeature.PLAY_MEDIA,
)

View File

@@ -1,11 +0,0 @@
play_media_shuffle:
target:
entity:
integration: jellyfin
domain: media_player
fields:
media:
required: true
selector:
media:
example: '{"media_content_id": "a656b907eb3a73532e40e44b968d0225"}'

View File

@@ -42,17 +42,5 @@
}
}
}
},
"services": {
"play_media_shuffle": {
"description": "Starts playing specified media shuffled. Overwrites current play queue.",
"fields": {
"media": {
"description": "The media selected to play.",
"name": "Media"
}
},
"name": "Play media shuffled"
}
}
}

View File

@@ -47,14 +47,6 @@ COLOR_MODE_MAP = {
clusters.ColorControl.Enums.ColorModeEnum.kColorTemperatureMireds: ColorMode.COLOR_TEMP,
}
# Maximum Mireds value per the Matter spec is 65279
# Conversion between Kelvin and Mireds is 1,000,000 / Kelvin, so this corresponds to a minimum color temperature of ~15.3K
# Which is shown in UI as 15 Kelvin due to rounding.
# But converting 15 Kelvin back to Mireds gives 66666 which is above the maximum,
# and causes Invoke error, so cap values over maximum when sending
MATTER_MAX_MIREDS = 65279
# there's a bug in (at least) Espressif's implementation of light transitions
# on devices based on Matter 1.0. Mark potential devices with this issue.
# https://github.com/home-assistant/core/issues/113775
@@ -160,7 +152,7 @@ class MatterLight(MatterEntity, LightEntity):
)
await self.send_device_command(
clusters.ColorControl.Commands.MoveToColorTemperature(
colorTemperatureMireds=min(color_temp_mired, MATTER_MAX_MIREDS),
colorTemperatureMireds=color_temp_mired,
# transition in matter is measured in tenths of a second
transitionTime=int(transition * 10),
# allow setting the color while the light is off,

View File

@@ -203,80 +203,105 @@ class MoldIndicator(SensorEntity):
def _async_setup_sensor(self) -> None:
"""Set up the sensor and start tracking state changes."""
self.async_on_remove(
@callback
def mold_indicator_sensors_state_listener(
event: Event[EventStateChangedData],
) -> None:
"""Handle for state changes for dependent sensors."""
new_state = event.data["new_state"]
old_state = event.data["old_state"]
entity = event.data["entity_id"]
_LOGGER.debug(
"Sensor state change for %s that had old state %s and new state %s",
entity,
old_state,
new_state,
)
if self._update_sensor(entity, old_state, new_state):
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(
calculated_state.state, calculated_state.attributes
)
# only write state to the state machine if we are not in preview mode
else:
self.async_schedule_update_ha_state(True)
@callback
def mold_indicator_startup() -> None:
"""Add listeners and get 1st state."""
_LOGGER.debug("Startup for %s", self.entity_id)
async_track_state_change_event(
self.hass,
self._entities.values(),
self._async_mold_indicator_sensor_state_listener,
)
)
# Replay current state of source entities
for entity_id in self._entities.values():
state = self.hass.states.get(entity_id)
state_event: Event[EventStateChangedData] = Event(
"", {"entity_id": entity_id, "new_state": state, "old_state": None}
)
self._async_mold_indicator_sensor_state_listener(
state_event, update_state=False
list(self._entities.values()),
mold_indicator_sensors_state_listener,
)
self._recalculate()
# Read initial state
indoor_temp = self.hass.states.get(self._entities[CONF_INDOOR_TEMP])
outdoor_temp = self.hass.states.get(self._entities[CONF_OUTDOOR_TEMP])
indoor_hum = self.hass.states.get(self._entities[CONF_INDOOR_HUMIDITY])
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(calculated_state.state, calculated_state.attributes)
schedule_update = self._update_sensor(
self._entities[CONF_INDOOR_TEMP], None, indoor_temp
)
@callback
def _async_mold_indicator_sensor_state_listener(
self, event: Event[EventStateChangedData], update_state: bool = True
) -> None:
"""Handle state changes for dependent sensors."""
entity_id = event.data["entity_id"]
new_state = event.data["new_state"]
schedule_update = (
False
if not self._update_sensor(
self._entities[CONF_OUTDOOR_TEMP], None, outdoor_temp
)
else schedule_update
)
_LOGGER.debug(
"Sensor state change for %s that had old state %s and new state %s",
entity_id,
event.data["old_state"],
new_state,
)
schedule_update = (
False
if not self._update_sensor(
self._entities[CONF_INDOOR_HUMIDITY], None, indoor_hum
)
else schedule_update
)
# update state depending on which sensor changed
if entity_id == self._entities[CONF_INDOOR_TEMP]:
if schedule_update and not self._preview_callback:
self.async_schedule_update_ha_state(True)
if self._preview_callback:
# re-calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
if self._attr_native_value is None:
self._attr_available = False
else:
self._attr_available = True
calculated_state = self._async_calculate_state()
self._preview_callback(
calculated_state.state, calculated_state.attributes
)
mold_indicator_startup()
def _update_sensor(
self, entity: str, old_state: State | None, new_state: State | None
) -> bool:
"""Update information based on new sensor states."""
_LOGGER.debug("Sensor update for %s", entity)
if new_state is None:
return False
# If old_state is not set and new state is unknown then it means
# that the sensor just started up
if old_state is None and new_state.state == STATE_UNKNOWN:
return False
if entity == self._entities[CONF_INDOOR_TEMP]:
self._indoor_temp = self._get_temperature_from_state(new_state)
elif entity_id == self._entities[CONF_OUTDOOR_TEMP]:
elif entity == self._entities[CONF_OUTDOOR_TEMP]:
self._outdoor_temp = self._get_temperature_from_state(new_state)
elif entity_id == self._entities[CONF_INDOOR_HUMIDITY]:
elif entity == self._entities[CONF_INDOOR_HUMIDITY]:
self._indoor_hum = self._get_humidity_from_state(new_state)
if not update_state:
return
self._recalculate()
if self._preview_callback:
calculated_state = self._async_calculate_state()
self._preview_callback(calculated_state.state, calculated_state.attributes)
# only write state to the state machine if we are not in preview mode
else:
self.async_write_ha_state()
@callback
def _recalculate(self) -> None:
"""Recalculate mold indicator from cached sensor values."""
# Check if all sensors are available
if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
self._attr_available = False
self._attr_native_value = None
self._dewpoint = None
self._crit_temp = None
return
# Calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
self._attr_available = self._attr_native_value is not None
return True
def _get_value_from_state(
self,
@@ -351,6 +376,26 @@ class MoldIndicator(SensorEntity):
return self._get_value_from_state(state, validate_humidity)
async def async_update(self) -> None:
"""Calculate latest state."""
_LOGGER.debug("Update state for %s", self.entity_id)
# check all sensors
if None in (self._indoor_temp, self._indoor_hum, self._outdoor_temp):
self._attr_available = False
self._dewpoint = None
self._crit_temp = None
return
# re-calculate dewpoint and mold indicator
self._calc_dewpoint()
self._calc_moldindicator()
if self._attr_native_value is None:
self._attr_available = False
self._dewpoint = None
self._crit_temp = None
else:
self._attr_available = True
def _calc_dewpoint(self) -> None:
"""Calculate the dewpoint for the indoor air."""
# Use magnus approximation to calculate the dew point

View File

@@ -283,7 +283,10 @@ class IntegrationOnboardingView(_BaseOnboardingStepView):
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
"""Handle token creation."""
hass = request.app[KEY_HASS]
refresh_token_id = request[KEY_HASS_REFRESH_TOKEN_ID]
if not (refresh_token_id := request.get(KEY_HASS_REFRESH_TOKEN_ID)):
return self.json_message(
"Refresh token not available", HTTPStatus.FORBIDDEN
)
async with self._lock:
if self._async_is_done():

View File

@@ -51,7 +51,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: PortainerConfigEntry) ->
session=async_create_clientsession(
hass=hass, verify_ssl=entry.data[CONF_VERIFY_SSL]
),
request_timeout=30,
max_retries=API_MAX_RETRIES,
)

View File

@@ -14,7 +14,7 @@ from .coordinator import PranaConfigEntry, PranaCoordinator
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.FAN, Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
PLATFORMS = [Platform.FAN, Platform.SENSOR, Platform.SWITCH]
async def async_setup_entry(hass: HomeAssistant, entry: PranaConfigEntry) -> bool:

View File

@@ -21,8 +21,8 @@ from homeassistant.util.percentage import (
)
from homeassistant.util.scaling import int_states_in_range
from .coordinator import PranaConfigEntry, PranaCoordinator
from .entity import PranaBaseEntity
from . import PranaConfigEntry
from .entity import PranaBaseEntity, PranaCoordinator
PARALLEL_UPDATES = 1

View File

@@ -8,20 +8,6 @@
"default": "mdi:arrow-expand-left"
}
},
"number": {
"display_brightness": {
"default": "mdi:brightness-6",
"state": {
"0": "mdi:brightness-2",
"1": "mdi:brightness-4",
"2": "mdi:brightness-4",
"3": "mdi:brightness-5",
"4": "mdi:brightness-5",
"5": "mdi:brightness-7",
"6": "mdi:brightness-7"
}
}
},
"sensor": {
"inside_temperature": {
"default": "mdi:home-thermometer"

View File

@@ -1,80 +0,0 @@
"""Number platform for Prana integration."""
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from typing import Any
from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PranaConfigEntry, PranaCoordinator
from .entity import PranaBaseEntity
PARALLEL_UPDATES = 1
class PranaNumberType(StrEnum):
"""Enumerates Prana number types exposed by the device API."""
DISPLAY_BRIGHTNESS = "display_brightness"
@dataclass(frozen=True, kw_only=True)
class PranaNumberEntityDescription(NumberEntityDescription):
"""Description of a Prana number entity."""
key: PranaNumberType
value_fn: Callable[[PranaCoordinator], float | None]
set_value_fn: Callable[[Any, float], Any]
ENTITIES: tuple[PranaNumberEntityDescription, ...] = (
PranaNumberEntityDescription(
key=PranaNumberType.DISPLAY_BRIGHTNESS,
translation_key="display_brightness",
native_min_value=0,
native_max_value=6,
native_step=1,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
value_fn=lambda coord: coord.data.brightness,
set_value_fn=lambda api, val: api.set_brightness(
0 if val == 0 else 2 ** (int(val) - 1)
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: PranaConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Prana number entities from a config entry."""
async_add_entities(
PranaNumber(entry.runtime_data, entity_description)
for entity_description in ENTITIES
)
class PranaNumber(PranaBaseEntity, NumberEntity):
"""Representation of a Prana number entity."""
entity_description: PranaNumberEntityDescription
@property
def native_value(self) -> float | None:
"""Return the entity value."""
return self.entity_description.value_fn(self.coordinator)
async def async_set_native_value(self, value: float) -> None:
"""Set new value."""
await self.entity_description.set_value_fn(self.coordinator.api_client, value)
await self.coordinator.async_refresh()

View File

@@ -21,8 +21,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PranaConfigEntry, PranaCoordinator
from .entity import PranaBaseEntity
from . import PranaConfigEntry
from .entity import PranaBaseEntity, PranaCoordinator
PARALLEL_UPDATES = 1

View File

@@ -49,11 +49,6 @@
}
}
},
"number": {
"display_brightness": {
"name": "Display brightness"
}
},
"sensor": {
"inside_temperature": {
"name": "Inside temperature"

View File

@@ -9,7 +9,7 @@ from homeassistant.components.switch import SwitchEntity, SwitchEntityDescriptio
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import PranaConfigEntry, PranaCoordinator
from . import PranaConfigEntry, PranaCoordinator
from .entity import PranaBaseEntity
PARALLEL_UPDATES = 1

View File

@@ -4,11 +4,10 @@ from __future__ import annotations
import mimetypes
from aiodns.error import DNSError
import pycountry
from radios import FilterBy, Order, RadioBrowser, RadioBrowserError, Station
from radios import FilterBy, Order, RadioBrowser, Station
from homeassistant.components.media_player import BrowseError, MediaClass, MediaType
from homeassistant.components.media_player import MediaClass, MediaType
from homeassistant.components.media_source import (
BrowseMediaSource,
MediaSource,
@@ -16,7 +15,6 @@ from homeassistant.components.media_source import (
PlayMedia,
Unresolvable,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.location import vincenty
@@ -57,20 +55,9 @@ class RadioMediaSource(MediaSource):
async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
"""Resolve selected Radio station to a streaming URL."""
if self.entry.state != ConfigEntryState.LOADED:
raise Unresolvable(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
)
radios = self.radios
try:
station = await radios.station(uuid=item.identifier)
except (DNSError, RadioBrowserError) as e:
raise Unresolvable(
translation_domain=DOMAIN,
translation_key="radio_browser_error",
) from e
station = await radios.station(uuid=item.identifier)
if not station:
raise Unresolvable("Radio station is no longer available")
@@ -87,37 +74,25 @@ class RadioMediaSource(MediaSource):
item: MediaSourceItem,
) -> BrowseMediaSource:
"""Return media."""
if self.entry.state != ConfigEntryState.LOADED:
raise BrowseError(
translation_domain=DOMAIN,
translation_key="config_entry_not_ready",
)
radios = self.radios
try:
return BrowseMediaSource(
domain=DOMAIN,
identifier=None,
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.MUSIC,
title=self.entry.title,
can_play=False,
can_expand=True,
children_media_class=MediaClass.DIRECTORY,
children=[
*await self._async_build_popular(radios, item),
*await self._async_build_by_tag(radios, item),
*await self._async_build_by_language(radios, item),
*await self._async_build_local(radios, item),
*await self._async_build_by_country(radios, item),
],
)
except (DNSError, RadioBrowserError) as e:
raise BrowseError(
translation_domain=DOMAIN,
translation_key="radio_browser_error",
) from e
return BrowseMediaSource(
domain=DOMAIN,
identifier=None,
media_class=MediaClass.CHANNEL,
media_content_type=MediaType.MUSIC,
title=self.entry.title,
can_play=False,
can_expand=True,
children_media_class=MediaClass.DIRECTORY,
children=[
*await self._async_build_popular(radios, item),
*await self._async_build_by_tag(radios, item),
*await self._async_build_by_language(radios, item),
*await self._async_build_local(radios, item),
*await self._async_build_by_country(radios, item),
],
)
@callback
@staticmethod

View File

@@ -5,13 +5,5 @@
"description": "Do you want to add Radio Browser to Home Assistant?"
}
}
},
"exceptions": {
"config_entry_not_ready": {
"message": "Radio Browser integration is not ready"
},
"radio_browser_error": {
"message": "Error occurred while communicating with Radio Browser"
}
}
}

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==4.25.0",
"python-roborock==4.20.0",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from datetime import datetime
from datetime import date, datetime
import ephem
@@ -12,7 +12,7 @@ from homeassistant.const import CONF_TYPE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from homeassistant.util.dt import utcnow
from .const import DOMAIN, TYPE_ASTRONOMICAL
@@ -50,7 +50,7 @@ async def async_setup_entry(
def get_season(
current_datetime: datetime, hemisphere: str, season_tracking_type: str
current_date: date, hemisphere: str, season_tracking_type: str
) -> str | None:
"""Calculate the current season."""
@@ -58,36 +58,22 @@ def get_season(
return None
if season_tracking_type == TYPE_ASTRONOMICAL:
spring_start = (
ephem.next_equinox(str(current_datetime.year))
.datetime()
.replace(tzinfo=dt_util.UTC)
)
summer_start = (
ephem.next_solstice(str(current_datetime.year))
.datetime()
.replace(tzinfo=dt_util.UTC)
)
autumn_start = (
ephem.next_equinox(spring_start).datetime().replace(tzinfo=dt_util.UTC)
)
winter_start = (
ephem.next_solstice(summer_start).datetime().replace(tzinfo=dt_util.UTC)
)
spring_start = ephem.next_equinox(str(current_date.year)).datetime()
summer_start = ephem.next_solstice(str(current_date.year)).datetime()
autumn_start = ephem.next_equinox(spring_start).datetime()
winter_start = ephem.next_solstice(summer_start).datetime()
else:
spring_start = current_datetime.replace(
month=3, day=1, hour=0, minute=0, second=0, microsecond=0
)
spring_start = datetime(2017, 3, 1).replace(year=current_date.year)
summer_start = spring_start.replace(month=6)
autumn_start = spring_start.replace(month=9)
winter_start = spring_start.replace(month=12)
season = STATE_WINTER
if spring_start <= current_datetime < summer_start:
if spring_start <= current_date < summer_start:
season = STATE_SPRING
elif summer_start <= current_datetime < autumn_start:
elif summer_start <= current_date < autumn_start:
season = STATE_SUMMER
elif autumn_start <= current_datetime < winter_start:
elif autumn_start <= current_date < winter_start:
season = STATE_AUTUMN
# If user is located in the southern hemisphere swap the season
@@ -118,4 +104,6 @@ class SeasonSensorEntity(SensorEntity):
def update(self) -> None:
"""Update season."""
self._attr_native_value = get_season(dt_util.now(), self.hemisphere, self.type)
self._attr_native_value = get_season(
utcnow().replace(tzinfo=None), self.hemisphere, self.type
)

View File

@@ -105,9 +105,6 @@
"robot_cleaner_driving_mode": {
"default": "mdi:car-cog"
},
"robot_cleaner_sound_mode": {
"default": "mdi:bell-cog"
},
"robot_cleaner_water_spray_level": {
"default": "mdi:spray-bottle"
},

View File

@@ -26,12 +26,6 @@ LAMP_TO_HA = {
"off": "off",
}
SOUND_MODE_TO_HA = {
"voice": "voice",
"beep": "tone",
"mute": "mute",
}
DRIVING_MODE_TO_HA = {
"areaThenWalls": "area_then_walls",
"wallFirst": "walls_first",
@@ -250,16 +244,6 @@ CAPABILITIES_TO_SELECT: dict[Capability | str, SmartThingsSelectDescription] = {
entity_category=EntityCategory.CONFIG,
value_is_integer=True,
),
Capability.SAMSUNG_CE_ROBOT_CLEANER_SYSTEM_SOUND_MODE: SmartThingsSelectDescription(
key=Capability.SAMSUNG_CE_ROBOT_CLEANER_SYSTEM_SOUND_MODE,
translation_key="robot_cleaner_sound_mode",
options_attribute=Attribute.SUPPORTED_SOUND_MODES,
status_attribute=Attribute.SOUND_MODE,
command=Command.SET_SOUND_MODE,
options_map=SOUND_MODE_TO_HA,
entity_category=EntityCategory.CONFIG,
entity_registry_enabled_default=False,
),
Capability.SAMSUNG_CE_ROBOT_CLEANER_CLEANING_TYPE: SmartThingsSelectDescription(
key=Capability.SAMSUNG_CE_ROBOT_CLEANER_CLEANING_TYPE,
translation_key="robot_cleaner_cleaning_type",

View File

@@ -254,14 +254,6 @@
"walls_first": "Walls first"
}
},
"robot_cleaner_sound_mode": {
"name": "Sound mode",
"state": {
"mute": "Mute",
"tone": "Tone",
"voice": "Voice"
}
},
"robot_cleaner_water_spray_level": {
"name": "Water level",
"state": {

View File

@@ -195,22 +195,9 @@ class TeslaFleetEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]])
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
if not isinstance(data, dict):
LOGGER.debug(
"%s got unexpected live status response type: %s",
self.name,
type(data).__name__,
)
return self.data
# Convert Wall Connectors from array to dict
wall_connectors = data.get("wall_connectors")
if not isinstance(wall_connectors, list):
wall_connectors = []
data["wall_connectors"] = {
wc["din"]: wc
for wc in wall_connectors
if isinstance(wc, dict) and "din" in wc
wc["din"]: wc for wc in (data.get("wall_connectors") or [])
}
self.updated_once = True

View File

@@ -55,7 +55,7 @@ class TibberRuntimeData:
time_zone=dt_util.get_default_time_zone(),
ssl=ssl_util.get_default_context(),
)
await self._client.set_access_token(access_token)
self._client.set_access_token(access_token)
return self._client

View File

@@ -2,10 +2,9 @@
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
from datetime import timedelta
import logging
from typing import TYPE_CHECKING, TypedDict, cast
from typing import TYPE_CHECKING, cast
from aiohttp.client_exceptions import ClientError
import tibber
@@ -39,58 +38,6 @@ FIVE_YEARS = 5 * 365 * 24
_LOGGER = logging.getLogger(__name__)
class TibberHomeData(TypedDict):
"""Data for a Tibber home used by the price sensor."""
currency: str
price_unit: str
current_price: float | None
current_price_time: datetime | None
intraday_price_ranking: float | None
max_price: float
avg_price: float
min_price: float
off_peak_1: float
peak: float
off_peak_2: float
month_cost: float | None
peak_hour: float | None
peak_hour_time: datetime | None
month_cons: float | None
app_nickname: str | None
grid_company: str | None
estimated_annual_consumption: int | None
def _build_home_data(home: tibber.TibberHome) -> TibberHomeData:
"""Build TibberHomeData from a TibberHome for the price sensor."""
current_price, last_updated, price_rank = home.current_price_data()
attributes = home.current_attributes()
result: TibberHomeData = {
"currency": home.currency,
"price_unit": home.price_unit,
"current_price": current_price,
"current_price_time": last_updated,
"intraday_price_ranking": price_rank,
"max_price": attributes["max_price"],
"avg_price": attributes["avg_price"],
"min_price": attributes["min_price"],
"off_peak_1": attributes["off_peak_1"],
"peak": attributes["peak"],
"off_peak_2": attributes["off_peak_2"],
"month_cost": home.month_cost,
"peak_hour": home.peak_hour,
"peak_hour_time": home.peak_hour_time,
"month_cons": home.month_cons,
"app_nickname": home.info["viewer"]["home"].get("appNickname"),
"grid_company": home.info["viewer"]["home"]["meteringPointData"]["gridCompany"],
"estimated_annual_consumption": home.info["viewer"]["home"][
"meteringPointData"
]["estimatedAnnualConsumption"],
}
return result
class TibberDataCoordinator(DataUpdateCoordinator[None]):
"""Handle Tibber data and insert statistics."""
@@ -110,16 +57,13 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
name=f"Tibber {tibber_connection.name}",
update_interval=timedelta(minutes=20),
)
self._tibber_connection = tibber_connection
async def _async_update_data(self) -> None:
"""Update data via API."""
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
try:
await tibber_connection.fetch_consumption_data_active_homes()
await tibber_connection.fetch_production_data_active_homes()
await self._tibber_connection.fetch_consumption_data_active_homes()
await self._tibber_connection.fetch_production_data_active_homes()
await self._insert_statistics()
except tibber.RetryableHttpExceptionError as err:
raise UpdateFailed(f"Error communicating with API ({err.status})") from err
@@ -131,10 +75,7 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
async def _insert_statistics(self) -> None:
"""Insert Tibber statistics."""
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
for home in tibber_connection.get_homes():
for home in self._tibber_connection.get_homes():
sensors: list[tuple[str, bool, str | None, str]] = []
if home.hourly_consumption_data:
sensors.append(
@@ -253,76 +194,6 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
async_add_external_statistics(self.hass, metadata, statistics)
class TibberPriceCoordinator(DataUpdateCoordinator[dict[str, TibberHomeData]]):
"""Handle Tibber price data and insert statistics."""
config_entry: TibberConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: TibberConfigEntry,
) -> None:
"""Initialize the price coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN} price",
update_interval=timedelta(minutes=1),
)
def _seconds_until_next_15_minute(self) -> float:
"""Return seconds until the next 15-minute boundary (0, 15, 30, 45) in UTC."""
now = dt_util.utcnow()
next_minute = ((now.minute // 15) + 1) * 15
if next_minute >= 60:
next_run = now.replace(minute=0, second=0, microsecond=0) + timedelta(
hours=1
)
else:
next_run = now.replace(
minute=next_minute, second=0, microsecond=0, tzinfo=dt_util.UTC
)
return (next_run - now).total_seconds()
async def _async_update_data(self) -> dict[str, TibberHomeData]:
"""Update data via API and return per-home data for sensors."""
tibber_connection = await self.config_entry.runtime_data.async_get_client(
self.hass
)
active_homes = tibber_connection.get_homes(only_active=True)
try:
await asyncio.gather(
tibber_connection.fetch_consumption_data_active_homes(),
tibber_connection.fetch_production_data_active_homes(),
)
now = dt_util.now()
homes_to_update = [
home
for home in active_homes
if (
(last_data_timestamp := home.last_data_timestamp) is None
or (last_data_timestamp - now).total_seconds() < 11 * 3600
)
]
if homes_to_update:
await asyncio.gather(
*(home.update_info_and_price_info() for home in homes_to_update)
)
except tibber.RetryableHttpExceptionError as err:
raise UpdateFailed(f"Error communicating with API ({err.status})") from err
except tibber.FatalHttpExceptionError as err:
raise UpdateFailed(f"Error communicating with API ({err.status})") from err
result = {home.home_id: _build_home_data(home) for home in active_homes}
self.update_interval = timedelta(seconds=self._seconds_until_next_15_minute())
return result
class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
"""Fetch and cache Tibber Data API device capabilities."""

View File

@@ -3,8 +3,10 @@
from __future__ import annotations
from collections.abc import Callable
import datetime
from datetime import timedelta
import logging
from random import randrange
from typing import Any
import aiohttp
@@ -40,20 +42,18 @@ from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.util import dt as dt_util
from homeassistant.util import Throttle, dt as dt_util
from .const import DOMAIN, MANUFACTURER, TibberConfigEntry
from .coordinator import (
TibberDataAPICoordinator,
TibberDataCoordinator,
TibberPriceCoordinator,
)
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:currency-usd"
SCAN_INTERVAL = timedelta(minutes=1)
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
PARALLEL_UPDATES = 0
TWENTY_MINUTES = 20 * 60
RT_SENSORS_UNIQUE_ID_MIGRATION = {
"accumulated_consumption_last_hour": "accumulated consumption current hour",
@@ -610,7 +610,6 @@ async def _async_setup_graphql_sensors(
entity_registry = er.async_get(hass)
coordinator: TibberDataCoordinator | None = None
price_coordinator: TibberPriceCoordinator | None = None
entities: list[TibberSensor] = []
for home in tibber_connection.get_homes(only_active=False):
try:
@@ -627,9 +626,7 @@ async def _async_setup_graphql_sensors(
raise PlatformNotReady from err
if home.has_active_subscription:
if price_coordinator is None:
price_coordinator = TibberPriceCoordinator(hass, entry)
entities.append(TibberSensorElPrice(price_coordinator, home))
entities.append(TibberSensorElPrice(home))
if coordinator is None:
coordinator = TibberDataCoordinator(hass, entry, tibber_connection)
entities.extend(
@@ -740,21 +737,19 @@ class TibberSensor(SensorEntity):
return device_info
class TibberSensorElPrice(TibberSensor, CoordinatorEntity[TibberPriceCoordinator]):
class TibberSensorElPrice(TibberSensor):
"""Representation of a Tibber sensor for el price."""
_attr_state_class = SensorStateClass.MEASUREMENT
_attr_translation_key = "electricity_price"
def __init__(
self,
coordinator: TibberPriceCoordinator,
tibber_home: TibberHome,
) -> None:
def __init__(self, tibber_home: TibberHome) -> None:
"""Initialize the sensor."""
super().__init__(coordinator=coordinator, tibber_home=tibber_home)
super().__init__(tibber_home=tibber_home)
self._last_updated: datetime.datetime | None = None
self._spread_load_constant = randrange(TWENTY_MINUTES)
self._attr_available = False
self._attr_native_unit_of_measurement = tibber_home.price_unit
self._attr_extra_state_attributes = {
"app_nickname": None,
"grid_company": None,
@@ -773,38 +768,51 @@ class TibberSensorElPrice(TibberSensor, CoordinatorEntity[TibberPriceCoordinator
self._device_name = self._home_name
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
data = self.coordinator.data
if not data or (
(home_data := data.get(self._tibber_home.home_id)) is None
or (current_price := home_data.get("current_price")) is None
async def async_update(self) -> None:
"""Get the latest data and updates the states."""
now = dt_util.now()
if (
not self._tibber_home.last_data_timestamp
or (self._tibber_home.last_data_timestamp - now).total_seconds()
< 10 * 3600 - self._spread_load_constant
or not self.available
):
_LOGGER.debug("Asking for new data")
await self._fetch_data()
elif (
self._tibber_home.price_total
and self._last_updated
and self._last_updated.hour == now.hour
and now - self._last_updated < timedelta(minutes=15)
and self._tibber_home.last_data_timestamp
):
self._attr_available = False
self.async_write_ha_state()
return
self._attr_native_unit_of_measurement = home_data.get(
"price_unit", self._tibber_home.price_unit
)
self._attr_native_value = current_price
self._attr_extra_state_attributes["intraday_price_ranking"] = home_data.get(
"intraday_price_ranking"
)
self._attr_extra_state_attributes["max_price"] = home_data["max_price"]
self._attr_extra_state_attributes["avg_price"] = home_data["avg_price"]
self._attr_extra_state_attributes["min_price"] = home_data["min_price"]
self._attr_extra_state_attributes["off_peak_1"] = home_data["off_peak_1"]
self._attr_extra_state_attributes["peak"] = home_data["peak"]
self._attr_extra_state_attributes["off_peak_2"] = home_data["off_peak_2"]
self._attr_extra_state_attributes["app_nickname"] = home_data["app_nickname"]
self._attr_extra_state_attributes["grid_company"] = home_data["grid_company"]
self._attr_extra_state_attributes["estimated_annual_consumption"] = home_data[
"estimated_annual_consumption"
res = self._tibber_home.current_price_data()
self._attr_native_value, self._last_updated, price_rank = res
self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank
attrs = self._tibber_home.current_attributes()
self._attr_extra_state_attributes.update(attrs)
self._attr_available = self._attr_native_value is not None
self._attr_native_unit_of_measurement = self._tibber_home.price_unit
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def _fetch_data(self) -> None:
_LOGGER.debug("Fetching data")
try:
await self._tibber_home.update_info_and_price_info()
except TimeoutError, aiohttp.ClientError:
return
data = self._tibber_home.info["viewer"]["home"]
self._attr_extra_state_attributes["app_nickname"] = data["appNickname"]
self._attr_extra_state_attributes["grid_company"] = data["meteringPointData"][
"gridCompany"
]
self._attr_available = True
self.async_write_ha_state()
self._attr_extra_state_attributes["estimated_annual_consumption"] = data[
"meteringPointData"
]["estimatedAnnualConsumption"]
class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):

View File

@@ -10,6 +10,7 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error
from homeassistant.components.http.ban import process_success_login, process_wrong_login
from homeassistant.components.http.const import KEY_HASS_USER
from homeassistant.const import __version__
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.json import json_bytes
@@ -68,6 +69,19 @@ class AuthPhase:
# send_bytes_text will directly send a message to the client.
self._send_bytes_text = send_bytes_text
async def async_handle_unix_socket(self) -> ActiveConnection:
"""Handle a pre-authenticated Unix socket connection."""
conn = ActiveConnection(
self._logger,
self._hass,
self._send_message,
self._request[KEY_HASS_USER],
refresh_token=None,
)
await self._send_bytes_text(AUTH_OK_MESSAGE)
self._logger.debug("Auth OK (unix socket)")
return conn
async def async_handle(self, msg: JsonValueType) -> ActiveConnection:
"""Handle authentication."""
try:

View File

@@ -59,14 +59,14 @@ class ActiveConnection:
hass: HomeAssistant,
send_message: Callable[[bytes | str | dict[str, Any]], None],
user: User,
refresh_token: RefreshToken,
refresh_token: RefreshToken | None,
) -> None:
"""Initialize an active connection."""
self.logger = logger
self.hass = hass
self.send_message = send_message
self.user = user
self.refresh_token_id = refresh_token.id
self.refresh_token_id = refresh_token.id if refresh_token else None
self.subscriptions: dict[Hashable, Callable[[], Any]] = {}
self.last_id = 0
self.can_coalesce = False

View File

@@ -14,6 +14,7 @@ from aiohttp import WSMsgType, web
from aiohttp.http_websocket import WebSocketWriter
from homeassistant.components.http import KEY_HASS, HomeAssistantView
from homeassistant.components.http.const import is_unix_socket_request
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, EVENT_LOGGING_CHANGED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -36,12 +37,12 @@ from .error import Disconnect
from .messages import message_to_json_bytes
from .util import describe_request
CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING}
AUTH_MESSAGE_TIMEOUT = 10 # seconds
if TYPE_CHECKING:
from .connection import ActiveConnection
CLOSE_MSG_TYPES = {WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING}
AUTH_MESSAGE_TIMEOUT = 10 # seconds
_WS_LOGGER: Final = logging.getLogger(f"{__name__}.connection")
@@ -386,37 +387,45 @@ class WebSocketHandler:
send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]],
) -> ActiveConnection:
"""Handle the auth phase of the websocket connection."""
await send_bytes_text(AUTH_REQUIRED_MESSAGE)
request = self._request
# Auth Phase
try:
msg = await self._wsock.receive(AUTH_MESSAGE_TIMEOUT)
except TimeoutError as err:
raise Disconnect(
f"Did not receive auth message within {AUTH_MESSAGE_TIMEOUT} seconds"
) from err
if is_unix_socket_request(request):
# Unix socket requests are pre-authenticated by the HTTP
# auth middleware — skip the token exchange.
connection = await auth.async_handle_unix_socket()
else:
await send_bytes_text(AUTH_REQUIRED_MESSAGE)
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING):
raise Disconnect("Received close message during auth phase")
if msg.type is not WSMsgType.TEXT:
if msg.type is WSMsgType.ERROR:
# msg.data is the exception
# Auth Phase
try:
msg = await self._wsock.receive(AUTH_MESSAGE_TIMEOUT)
except TimeoutError as err:
raise Disconnect(
f"Received error message during auth phase: {msg.data}"
f"Did not receive auth message within {AUTH_MESSAGE_TIMEOUT} seconds"
) from err
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING):
raise Disconnect("Received close message during auth phase")
if msg.type is not WSMsgType.TEXT:
if msg.type is WSMsgType.ERROR:
# msg.data is the exception
raise Disconnect(
f"Received error message during auth phase: {msg.data}"
)
raise Disconnect(
f"Received non-Text message of type {msg.type} during auth phase"
)
raise Disconnect(
f"Received non-Text message of type {msg.type} during auth phase"
)
try:
auth_msg_data = json_loads(msg.data)
except ValueError as err:
raise Disconnect("Received invalid JSON during auth phase") from err
try:
auth_msg_data = json_loads(msg.data)
except ValueError as err:
raise Disconnect("Received invalid JSON during auth phase") from err
if self._debug:
self._logger.debug("%s: Received %s", self.description, auth_msg_data)
connection = await auth.async_handle(auth_msg_data)
if self._debug:
self._logger.debug("%s: Received %s", self.description, auth_msg_data)
connection = await auth.async_handle(auth_msg_data)
# As the webserver is now started before the start
# event we do not want to block for websocket responses
#

View File

@@ -2,9 +2,9 @@
from __future__ import annotations
from collections.abc import Callable
from collections.abc import Generator
import contextlib
import logging
from typing import Any
from pywemo.exceptions import ActionException
@@ -64,20 +64,23 @@ class WemoEntity(CoordinatorEntity[DeviceCoordinator]):
"""Return the device info."""
return self._device_info
async def _async_wemo_call(self, message: str, action: Callable[[], Any]) -> None:
"""Run a WeMo device action in the executor and update listeners.
@contextlib.contextmanager
def _wemo_call_wrapper(self, message: str) -> Generator[None]:
"""Wrap calls to the device that change its state.
Handles errors from the device and ensures all entities sharing the
same coordinator are aware of updates to the device state.
1. Takes care of making available=False when communications with the
device fails.
2. Ensures all entities sharing the same coordinator are aware of
updates to the device state.
"""
try:
await self.hass.async_add_executor_job(action)
yield
except ActionException as err:
_LOGGER.warning("Could not %s for %s (%s)", message, self.name, err)
self.coordinator.last_exception = err
self.coordinator.last_update_success = False
self.coordinator.last_update_success = False # Used for self.available.
finally:
self.coordinator.async_update_listeners()
self.hass.add_job(self.coordinator.async_update_listeners)
class WemoBinaryStateEntity(WemoEntity):

View File

@@ -3,7 +3,6 @@
from __future__ import annotations
from datetime import timedelta
import functools as ft
import math
from typing import Any
@@ -61,16 +60,14 @@ async def async_setup_entry(
platform = entity_platform.async_get_current_platform()
# This will call WemoHumidifier.async_set_humidity(target_humidity=VALUE)
# This will call WemoHumidifier.set_humidity(target_humidity=VALUE)
platform.async_register_entity_service(
SERVICE_SET_HUMIDITY,
SET_HUMIDITY_SCHEMA,
WemoHumidifier.async_set_humidity.__name__,
SERVICE_SET_HUMIDITY, SET_HUMIDITY_SCHEMA, WemoHumidifier.set_humidity.__name__
)
# This will call WemoHumidifier.async_reset_filter_life()
# This will call WemoHumidifier.reset_filter_life()
platform.async_register_entity_service(
SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.async_reset_filter_life.__name__
SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.reset_filter_life.__name__
)
@@ -127,26 +124,25 @@ class WemoHumidifier(WemoBinaryStateEntity, FanEntity):
self._last_fan_on_mode = self.wemo.fan_mode
super()._handle_coordinator_update()
async def async_turn_on(
def turn_on(
self,
percentage: int | None = None,
preset_mode: str | None = None,
**kwargs: Any,
) -> None:
"""Turn the fan on."""
await self._async_set_percentage(percentage)
self._set_percentage(percentage)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the fan off."""
await self._async_wemo_call(
"turn off", ft.partial(self.wemo.set_state, FanMode.Off)
)
def turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
with self._wemo_call_wrapper("turn off"):
self.wemo.set_state(FanMode.Off)
async def async_set_percentage(self, percentage: int) -> None:
def set_percentage(self, percentage: int) -> None:
"""Set the fan_mode of the Humidifier."""
await self._async_set_percentage(percentage)
self._set_percentage(percentage)
async def _async_set_percentage(self, percentage: int | None) -> None:
def _set_percentage(self, percentage: int | None) -> None:
if percentage is None:
named_speed = self._last_fan_on_mode
elif percentage == 0:
@@ -156,11 +152,10 @@ class WemoHumidifier(WemoBinaryStateEntity, FanEntity):
math.ceil(percentage_to_ranged_value(SPEED_RANGE, percentage))
)
await self._async_wemo_call(
"set speed", ft.partial(self.wemo.set_state, named_speed)
)
with self._wemo_call_wrapper("set speed"):
self.wemo.set_state(named_speed)
async def async_set_humidity(self, target_humidity: float) -> None:
def set_humidity(self, target_humidity: float) -> None:
"""Set the target humidity level for the Humidifier."""
if target_humidity < 50:
pywemo_humidity = DesiredHumidity.FortyFivePercent
@@ -173,10 +168,10 @@ class WemoHumidifier(WemoBinaryStateEntity, FanEntity):
elif target_humidity >= 100:
pywemo_humidity = DesiredHumidity.OneHundredPercent
await self._async_wemo_call(
"set humidity", ft.partial(self.wemo.set_humidity, pywemo_humidity)
)
with self._wemo_call_wrapper("set humidity"):
self.wemo.set_humidity(pywemo_humidity)
async def async_reset_filter_life(self) -> None:
def reset_filter_life(self) -> None:
"""Reset the filter life to 100%."""
await self._async_wemo_call("reset filter life", self.wemo.reset_filter_life)
with self._wemo_call_wrapper("reset filter life"):
self.wemo.reset_filter_life()

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
import functools as ft
from typing import Any, cast
from pywemo import Bridge, BridgeLight, Dimmer
@@ -167,7 +166,7 @@ class WemoLight(WemoEntity, LightEntity):
"""Return true if device is on."""
return self.light.state.get("onoff", WEMO_OFF) != WEMO_OFF
async def async_turn_on(self, **kwargs: Any) -> None:
def turn_on(self, **kwargs: Any) -> None:
"""Turn the light on."""
xy_color = None
@@ -185,7 +184,7 @@ class WemoLight(WemoEntity, LightEntity):
"force_update": False,
}
def _turn_on() -> None:
with self._wemo_call_wrapper("turn on"):
if xy_color is not None:
self.light.set_color(xy_color, transition=transition_time)
@@ -196,14 +195,12 @@ class WemoLight(WemoEntity, LightEntity):
self.light.turn_on(**turn_on_kwargs)
await self._async_wemo_call("turn on", _turn_on)
async def async_turn_off(self, **kwargs: Any) -> None:
def turn_off(self, **kwargs: Any) -> None:
"""Turn the light off."""
transition_time = int(kwargs.get(ATTR_TRANSITION, 0))
await self._async_wemo_call(
"turn off", ft.partial(self.light.turn_off, transition=transition_time)
)
with self._wemo_call_wrapper("turn off"):
self.light.turn_off(transition=transition_time)
class WemoDimmer(WemoBinaryStateEntity, LightEntity):
@@ -219,19 +216,20 @@ class WemoDimmer(WemoBinaryStateEntity, LightEntity):
wemo_brightness: int = self.wemo.get_brightness()
return int((wemo_brightness * 255) / 100)
async def async_turn_on(self, **kwargs: Any) -> None:
def turn_on(self, **kwargs: Any) -> None:
"""Turn the dimmer on."""
# Wemo dimmer switches use a range of [0, 100] to control
# brightness. Level 255 might mean to set it to previous value
if ATTR_BRIGHTNESS in kwargs:
brightness = kwargs[ATTR_BRIGHTNESS]
brightness = int((brightness / 255) * 100)
await self._async_wemo_call(
"set brightness", ft.partial(self.wemo.set_brightness, brightness)
)
with self._wemo_call_wrapper("set brightness"):
self.wemo.set_brightness(brightness)
else:
await self._async_wemo_call("turn on", self.wemo.on)
with self._wemo_call_wrapper("turn on"):
self.wemo.on()
async def async_turn_off(self, **kwargs: Any) -> None:
def turn_off(self, **kwargs: Any) -> None:
"""Turn the dimmer off."""
await self._async_wemo_call("turn off", self.wemo.off)
with self._wemo_call_wrapper("turn off"):
self.wemo.off()

View File

@@ -119,10 +119,12 @@ class WemoSwitch(WemoBinaryStateEntity, SwitchEntity):
return "mdi:coffee"
return None
async def async_turn_on(self, **kwargs: Any) -> None:
def turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self._async_wemo_call("turn on", self.wemo.on)
with self._wemo_call_wrapper("turn on"):
self.wemo.on()
async def async_turn_off(self, **kwargs: Any) -> None:
def turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self._async_wemo_call("turn off", self.wemo.off)
with self._wemo_call_wrapper("turn off"):
self.wemo.off()

View File

@@ -44,16 +44,6 @@ MAP_JOIN_RESTRICTIONS = {
"followed": "joinable",
}
MAP_PLATFORM_NAME = {
"Android": "Android",
"iOS": "iOS",
"Nintendo": "Nintendo Switch",
"Scarlett": "Xbox Series X|S",
"WindowsOneCore": "Windows",
"Xbox360": "Xbox 360",
"XboxOne": "Xbox One",
}
class XboxSensor(StrEnum):
"""Xbox sensor."""
@@ -73,9 +63,6 @@ class XboxSensor(StrEnum):
FREE_STORAGE = "free_storage"
PRESENCE_ACTIVE = "Active"
@dataclass(kw_only=True, frozen=True)
class XboxSensorEntityDescription(XboxBaseEntityDescription, SensorEntityDescription):
"""Xbox sensor description."""
@@ -92,7 +79,7 @@ class XboxStorageDeviceSensorEntityDescription(
value_fn: Callable[[StorageDevice], StateType]
def now_playing_attributes(person: Person, title: Title | None) -> dict[str, Any]:
def now_playing_attributes(_: Person, title: Title | None) -> dict[str, Any]:
"""Attributes of the currently played title."""
attributes: dict[str, Any] = {
"short_description": None,
@@ -104,35 +91,9 @@ def now_playing_attributes(person: Person, title: Title | None) -> dict[str, Any
"achievements": None,
"gamerscore": None,
"progress": None,
"platform": None,
}
if person.presence_details:
active_entry = next(
(
d
for d in person.presence_details
if d.state == PRESENCE_ACTIVE and d.is_game
),
None,
) or next(
(d for d in person.presence_details if d.state == PRESENCE_ACTIVE),
None,
)
if active_entry:
platform = active_entry.device
if platform == "Scarlett" and title and title.devices:
if "Xbox360" in title.devices:
platform = "Xbox360"
elif "XboxOne" in title.devices:
platform = "XboxOne"
attributes["platform"] = MAP_PLATFORM_NAME.get(platform, platform)
if not title:
return attributes
if title.detail is not None:
attributes.update(
{

View File

@@ -141,7 +141,6 @@
},
"genres": { "name": "Genres" },
"min_age": { "name": "Minimum age" },
"platform": { "name": "Platform" },
"progress": { "name": "Progress" },
"publisher": { "name": "Publisher" },
"release_date": { "name": "Release date" },

View File

@@ -13,7 +13,7 @@ from __future__ import annotations
from collections import defaultdict
from collections.abc import Callable, Hashable, KeysView, Mapping
from datetime import datetime, timedelta
from enum import Enum, StrEnum
from enum import StrEnum
import logging
import time
from typing import TYPE_CHECKING, Any, Literal, NotRequired, TypedDict
@@ -80,7 +80,7 @@ EVENT_ENTITY_REGISTRY_UPDATED: EventType[EventEntityRegistryUpdatedData] = Event
_LOGGER = logging.getLogger(__name__)
STORAGE_VERSION_MAJOR = 1
STORAGE_VERSION_MINOR = 21
STORAGE_VERSION_MINOR = 20
STORAGE_KEY = "core.entity_registry"
CLEANUP_INTERVAL = 3600 * 24
@@ -91,28 +91,6 @@ ENTITY_CATEGORY_VALUE_TO_INDEX: dict[EntityCategory | None, int] = {
}
ENTITY_CATEGORY_INDEX_TO_VALUE = dict(enumerate(EntityCategory))
class ComputedNameType(Enum):
"""Singleton representing the computed full entity name in aliases."""
_singleton = 0
COMPUTED_NAME = ComputedNameType._singleton # noqa: SLF001
type AliasEntry = str | ComputedNameType
def _serialize_aliases(aliases: list[AliasEntry]) -> list[str | None]:
"""Convert aliases to a JSON-serializable list."""
return [None if a is COMPUTED_NAME else a for a in aliases]
def _deserialize_aliases(aliases: list[str | None]) -> list[AliasEntry]:
"""Convert aliases from JSON to internal representation."""
return [COMPUTED_NAME if a is None else a for a in aliases]
# Attributes relevant to describing entity
# to external services.
ENTITY_DESCRIBING_ATTRIBUTES = {
@@ -206,7 +184,7 @@ class RegistryEntry:
unique_id: str = attr.ib()
platform: str = attr.ib()
previous_unique_id: str | None = attr.ib(default=None)
aliases: list[AliasEntry] = attr.ib(factory=list)
aliases: set[str] = attr.ib(factory=set)
area_id: str | None = attr.ib(default=None)
categories: dict[str, str] = attr.ib(factory=dict)
capabilities: Mapping[str, Any] | None = attr.ib()
@@ -237,11 +215,6 @@ class RegistryEntry:
supported_features: int = attr.ib()
translation_key: str | None = attr.ib()
unit_of_measurement: str | None = attr.ib()
# For backwards compatibility, should be removed in the future
compat_aliases: list[str] = attr.ib(factory=list, eq=False)
compat_name: str | None = attr.ib(default=None, eq=False)
_cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False)
@domain.default
@@ -279,7 +252,7 @@ class RegistryEntry:
display_dict["hb"] = True
if self.has_entity_name:
display_dict["hn"] = True
name = self.name if self.name is not None else self.original_name
name = self.name or self.original_name
if name is not None:
display_dict["en"] = name
if self.domain == "sensor" and (sensor_options := self.options.get("sensor")):
@@ -347,7 +320,7 @@ class RegistryEntry:
# it every time
return {
**self.as_partial_dict,
"aliases": _serialize_aliases(self.aliases),
"aliases": list(self.aliases),
"capabilities": self.capabilities,
"device_class": self.device_class,
"original_device_class": self.original_device_class,
@@ -376,8 +349,7 @@ class RegistryEntry:
return json_fragment(
json_bytes(
{
"aliases": self.compat_aliases,
"aliases_v2": _serialize_aliases(self.aliases),
"aliases": list(self.aliases),
"area_id": self.area_id,
"categories": self.categories,
"capabilities": self.capabilities,
@@ -395,8 +367,7 @@ class RegistryEntry:
"has_entity_name": self.has_entity_name,
"labels": list(self.labels),
"modified_at": self.modified_at,
"name": self.compat_name,
"name_v2": self.name,
"name": self.name,
"object_id_base": self.object_id_base,
"options": self.options,
"original_device_class": self.original_device_class,
@@ -443,7 +414,7 @@ class RegistryEntry:
@callback
def _async_get_full_entity_name(
def _async_get_full_entity_name_generic(
hass: HomeAssistant,
*,
device_id: str | None,
@@ -459,14 +430,13 @@ def _async_get_full_entity_name(
Used for both full entity name and entity ID.
"""
use_device = False
if name is not None:
use_device = True
elif overridden_name is not None:
name = overridden_name
else:
name = original_name
if has_entity_name:
use_device = True
if name is None:
if overridden_name is not None:
name = overridden_name
else:
name = original_name
if has_entity_name:
use_device = True
device = (
dr.async_get(hass).async_get(device_id)
@@ -497,7 +467,7 @@ def async_get_full_entity_name(
original_name = (
original_name if original_name is not UNDEFINED else entry.original_name
)
return _async_get_full_entity_name(
return _async_get_full_entity_name_generic(
hass,
device_id=entry.device_id,
fallback="",
@@ -507,82 +477,6 @@ def async_get_full_entity_name(
)
@callback
def async_get_entity_aliases(
hass: HomeAssistant,
entry: RegistryEntry,
*,
allow_empty: bool = True,
) -> list[str]:
"""Get all names/aliases for an entity.
Processes entry aliases where COMPUTED_NAME entries are replaced with the
computed full entity name. String entries are used as-is.
The returned list preserves the order set by the user.
"""
entry_aliases = entry.aliases
if not entry_aliases:
if allow_empty:
return []
entry_aliases = [COMPUTED_NAME]
aliases = []
for alias in entry_aliases:
if alias is COMPUTED_NAME:
alias = async_get_full_entity_name(hass, entry)
aliases.append(alias.strip())
return aliases
@callback
def _async_strip_prefix_from_entity_name(
entity_name: str | None, prefix: str | None
) -> str | None:
"""Strip prefix from entity name.
Returns None if the prefix does not meaningfully match.
"""
if not entity_name or not prefix:
return None
prefix_lower = prefix.casefold()
prefix_len = len(prefix_lower)
candidate = entity_name[:prefix_len]
true_prefix_len = len(candidate)
candidate = candidate.casefold()
if not candidate.startswith(prefix_lower):
return None
# Casefolded string can differ in length
prefix_diff = len(candidate) - prefix_len
while prefix_diff > 0:
true_prefix_len -= 1
prefix_diff -= len(entity_name[true_prefix_len].casefold())
# Casefolded string matched in a middle of a character, not a valid prefix
if prefix_diff < 0:
return None
new_name = entity_name[true_prefix_len:].lstrip(" -:")
if not new_name:
return ""
# Must have at least one separator character
if len(new_name) == len(entity_name) - true_prefix_len:
return None
first_word = new_name.partition(" ")[0]
# Preserve a mixed-case word, capitalize lowercase
if not first_word.islower():
return new_name
return new_name[0].upper() + new_name[1:]
@attr.s(frozen=True, slots=True)
class DeletedRegistryEntry:
"""Deleted Entity Registry Entry."""
@@ -591,7 +485,7 @@ class DeletedRegistryEntry:
unique_id: str = attr.ib()
platform: str = attr.ib()
aliases: list[AliasEntry] = attr.ib()
aliases: set[str] = attr.ib()
area_id: str | None = attr.ib()
categories: dict[str, str] = attr.ib()
config_entry_id: str | None = attr.ib()
@@ -611,10 +505,6 @@ class DeletedRegistryEntry:
)
orphaned_timestamp: float | None = attr.ib()
# For backwards compatibility, should be removed in the future
compat_aliases: list[str] = attr.ib(factory=list, eq=False)
compat_name: str | None = attr.ib(default=None, eq=False)
_cache: dict[str, Any] = attr.ib(factory=dict, eq=False, init=False)
@domain.default
@@ -628,8 +518,7 @@ class DeletedRegistryEntry:
return json_fragment(
json_bytes(
{
"aliases": self.compat_aliases,
"aliases_v2": _serialize_aliases(self.aliases),
"aliases": list(self.aliases),
"area_id": self.area_id,
"categories": self.categories,
"config_entry_id": self.config_entry_id,
@@ -649,8 +538,7 @@ class DeletedRegistryEntry:
"id": self.id,
"labels": list(self.labels),
"modified_at": self.modified_at,
"name": self.compat_name,
"name_v2": self.name,
"name": self.name,
"options": self.options if self.options is not UNDEFINED else {},
"options_undefined": self.options is UNDEFINED,
"orphaned_timestamp": self.orphaned_timestamp,
@@ -803,48 +691,6 @@ class EntityRegistryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
for entity in data["entities"]:
entity["object_id_base"] = entity["original_name"]
if old_minor_version < 21:
# Version 1.21 migrates the full name to include device name,
# even if entity name is overwritten by user.
# It also adds support for COMPUTED_NAME in aliases and starts preserving their order.
# To avoid a major version bump, we keep the old name and aliases as-is
# and use new name_v2 and aliases_v2 fields instead.
device_registry = dr.async_get(self.hass)
for entity in data["entities"]:
alias_to_add: str | None = None
if (
(name := entity["name"])
and (device_id := entity["device_id"]) is not None
and (device := device_registry.async_get(device_id)) is not None
and (device_name := device.name_by_user or device.name)
):
# Strip the device name prefix from the entity name if present,
# and add the full generated name as an alias.
# If the name doesn't have the device name prefix and the
# entity is exposed to a voice assistant, add the previous
# name as an alias instead to preserve backwards compatibility.
if (
new_name := _async_strip_prefix_from_entity_name(
name, device_name
)
) is not None:
name = new_name
elif any(
entity.get("options", {}).get(key, {}).get("should_expose")
for key in ("conversation", "cloud.google_assistant")
):
alias_to_add = name
entity["name_v2"] = name
entity["aliases_v2"] = [alias_to_add, *entity["aliases"]]
for entity in data["deleted_entities"]:
# We don't know what the device name was, so the only thing we can do
# is to clear the overwritten name to not mislead users.
entity["name_v2"] = None
entity["aliases_v2"] = [None, *entity["aliases"]]
if old_major_version > 1:
raise NotImplementedError
return data
@@ -1183,15 +1029,13 @@ class EntityRegistry(BaseRegistry):
`name` is the name set by the user, not the original name from the integration.
`name` has priority over `suggested_object_id`, which has priority
over `object_id_base`.
`name` will always be prefixed with the device name.
`suggested_object_id` will not be prefixed with the device name.
`object_id_base` will be prefixed with the device name if
`has_entity_name` is True.
`name` and `suggested_object_id` will never be prefixed with the device name,
`object_id_base` will be if `has_entity_name` is True.
Entity ID conflicts are checked against registered and currently
existing entities, as well as provided `reserved_entity_ids`.
"""
object_id = _async_get_full_entity_name(
object_id = _async_get_full_entity_name_generic(
self.hass,
device_id=device_id,
fallback=f"{platform}_{unique_id}",
@@ -1315,8 +1159,6 @@ class EntityRegistry(BaseRegistry):
aliases = deleted_entity.aliases
area_id = deleted_entity.area_id
categories = deleted_entity.categories
compat_aliases = deleted_entity.compat_aliases
compat_name = deleted_entity.compat_name
created_at = deleted_entity.created_at
device_class = deleted_entity.device_class
if deleted_entity.disabled_by is not UNDEFINED:
@@ -1344,11 +1186,9 @@ class EntityRegistry(BaseRegistry):
else:
options = get_initial_options() if get_initial_options else None
else:
aliases = [COMPUTED_NAME]
aliases = set()
area_id = None
categories = {}
compat_aliases = []
compat_name = None
device_class = None
icon = None
labels = set()
@@ -1390,8 +1230,6 @@ class EntityRegistry(BaseRegistry):
area_id=area_id,
categories=categories,
capabilities=none_if_undefined(capabilities),
compat_aliases=compat_aliases,
compat_name=compat_name,
config_entry_id=none_if_undefined(config_entry_id),
config_subentry_id=none_if_undefined(config_subentry_id),
created_at=created_at,
@@ -1452,8 +1290,6 @@ class EntityRegistry(BaseRegistry):
aliases=entity.aliases,
area_id=entity.area_id,
categories=entity.categories,
compat_aliases=entity.compat_aliases,
compat_name=entity.compat_name,
config_entry_id=config_entry_id,
config_subentry_id=entity.config_subentry_id,
created_at=entity.created_at,
@@ -1586,7 +1422,7 @@ class EntityRegistry(BaseRegistry):
self,
entity_id: str,
*,
aliases: list[AliasEntry] | UndefinedType = UNDEFINED,
aliases: set[str] | UndefinedType = UNDEFINED,
area_id: str | None | UndefinedType = UNDEFINED,
categories: dict[str, str] | UndefinedType = UNDEFINED,
capabilities: Mapping[str, Any] | None | UndefinedType = UNDEFINED,
@@ -1737,7 +1573,7 @@ class EntityRegistry(BaseRegistry):
self,
entity_id: str,
*,
aliases: list[AliasEntry] | UndefinedType = UNDEFINED,
aliases: set[str] | UndefinedType = UNDEFINED,
area_id: str | None | UndefinedType = UNDEFINED,
categories: dict[str, str] | UndefinedType = UNDEFINED,
capabilities: Mapping[str, Any] | None | UndefinedType = UNDEFINED,
@@ -1879,12 +1715,10 @@ class EntityRegistry(BaseRegistry):
continue
entities[entity["entity_id"]] = RegistryEntry(
aliases=_deserialize_aliases(entity["aliases_v2"]),
aliases=set(entity["aliases"]),
area_id=entity["area_id"],
categories=entity["categories"],
capabilities=entity["capabilities"],
compat_aliases=entity["aliases"],
compat_name=entity["name"],
config_entry_id=entity["config_entry_id"],
config_subentry_id=entity["config_subentry_id"],
created_at=datetime.fromisoformat(entity["created_at"]),
@@ -1905,7 +1739,7 @@ class EntityRegistry(BaseRegistry):
has_entity_name=entity["has_entity_name"],
labels=set(entity["labels"]),
modified_at=datetime.fromisoformat(entity["modified_at"]),
name=entity["name_v2"],
name=entity["name"],
object_id_base=entity.get("object_id_base"),
options=entity["options"],
original_device_class=entity["original_device_class"],
@@ -1951,11 +1785,9 @@ class EntityRegistry(BaseRegistry):
entity["unique_id"],
)
deleted_entities[key] = DeletedRegistryEntry(
aliases=_deserialize_aliases(entity["aliases_v2"]),
aliases=set(entity["aliases"]),
area_id=entity["area_id"],
categories=entity["categories"],
compat_aliases=entity["aliases"],
compat_name=entity["name"],
config_entry_id=entity["config_entry_id"],
config_subentry_id=entity["config_subentry_id"],
created_at=datetime.fromisoformat(entity["created_at"]),
@@ -1975,7 +1807,7 @@ class EntityRegistry(BaseRegistry):
id=entity["id"],
labels=set(entity["labels"]),
modified_at=datetime.fromisoformat(entity["modified_at"]),
name=entity["name_v2"],
name=entity["name"],
options=entity["options"]
if not entity["options_undefined"]
else UNDEFINED,

View File

@@ -415,7 +415,6 @@ def _normalize_name(name: str) -> str:
def _filter_by_name(
hass: HomeAssistant,
name: str,
candidates: Iterable[MatchTargetsCandidate],
) -> Iterable[MatchTargetsCandidate]:
@@ -423,19 +422,31 @@ def _filter_by_name(
name_norm = _normalize_name(name)
for candidate in candidates:
# Accept entity id
if candidate.state.entity_id == name:
# Accept name or entity id
if (candidate.state.entity_id == name) or _normalize_name(
candidate.state.name
) == name_norm:
candidate.matched_name = name
yield candidate
continue
for candidate_name in async_get_entity_aliases(
hass, candidate.entity, state=candidate.state
if candidate.entity is None:
continue
if candidate.entity.name and (
_normalize_name(candidate.entity.name) == name_norm
):
if _normalize_name(candidate_name) == name_norm:
candidate.matched_name = name
yield candidate
break
candidate.matched_name = name
yield candidate
continue
# Check aliases
if candidate.entity.aliases:
for alias in candidate.entity.aliases:
if _normalize_name(alias) == name_norm:
candidate.matched_name = name
yield candidate
break
def _filter_by_features(
@@ -572,7 +583,7 @@ def async_match_targets( # noqa: C901
if constraints.name:
# Filter by entity name or alias
candidates = list(_filter_by_name(hass, constraints.name, candidates))
candidates = list(_filter_by_name(constraints.name, candidates))
if not candidates:
return MatchTargetsResult(False, MatchFailedReason.NAME)
@@ -1490,25 +1501,3 @@ class IntentResponse:
response_dict["data"] = response_data
return response_dict
@callback
def async_get_entity_aliases(
hass: HomeAssistant,
entity_entry: er.RegistryEntry | None,
*,
state: State,
allow_empty: bool = True,
) -> list[str]:
"""Get all names/aliases for an entity.
If no entity registry entry is provided, returns a list with just the
state name. Otherwise, delegates to the entity registry to resolve aliases,
where COMPUTED_NAME aliases are replaced with the computed full entity name.
The returned list preserves the order set by the user.
"""
if entity_entry is None:
return [state.name.strip()]
return er.async_get_entity_aliases(hass, entity_entry, allow_empty=allow_empty)

View File

@@ -659,34 +659,26 @@ def _get_exposed_entities(
continue
entity_entry = entity_registry.async_get(state.entity_id)
device_entry = (
device_registry.async_get(entity_entry.device_id)
if entity_entry is not None and entity_entry.device_id is not None
else None
)
names = intent.async_get_entity_aliases(hass, entity_entry, state=state)
names = [state.name]
area_names = []
if entity_entry is not None:
if (
entity_entry.area_id is not None
and (area_entry := area_registry.async_get_area(entity_entry.area_id))
is not None
names.extend(entity_entry.aliases)
if entity_entry.area_id and (
area := area_registry.async_get_area(entity_entry.area_id)
):
# Entity is in area
area_names.append(area_entry.name)
area_names.extend(area_entry.aliases)
elif device_entry is not None:
area_names.append(area.name)
area_names.extend(area.aliases)
elif entity_entry.device_id and (
device := device_registry.async_get(entity_entry.device_id)
):
# Check device area
if (
device_entry.area_id is not None
and (
area_entry := area_registry.async_get_area(device_entry.area_id)
)
is not None
if device.area_id and (
area := area_registry.async_get_area(device.area_id)
):
area_names.append(area_entry.name)
area_names.extend(area_entry.aliases)
area_names.append(area.name)
area_names.extend(area.aliases)
info: dict[str, Any] = {
"names": ", ".join(names),
@@ -927,10 +919,12 @@ def _get_cached_action_parameters(
entity_registry = er.async_get(hass)
if (
entity_id := entity_registry.async_get_entity_id(domain, domain, action)
) is not None and (
entity_entry := entity_registry.async_get(entity_id)
) is not None:
aliases = er.async_get_entity_aliases(hass, entity_entry)
) and (entity_entry := entity_registry.async_get(entity_id)):
aliases: list[str] = []
if entity_entry.name:
aliases.append(entity_entry.name)
if entity_entry.aliases:
aliases.extend(entity_entry.aliases)
if aliases:
if description:
description = description + ". Aliases: " + str(list(aliases))

2
requirements_all.txt generated
View File

@@ -2651,7 +2651,7 @@ python-rabbitair==0.0.8
python-ripple-api==0.0.3
# homeassistant.components.roborock
python-roborock==4.25.0
python-roborock==4.20.0
# homeassistant.components.smarttub
python-smarttub==0.0.47

View File

@@ -2247,7 +2247,7 @@ python-pooldose==0.8.6
python-rabbitair==0.0.8
# homeassistant.components.roborock
python-roborock==4.25.0
python-roborock==4.20.0
# homeassistant.components.smarttub
python-smarttub==0.0.47

View File

@@ -1419,6 +1419,7 @@ INTEGRATIONS_WITHOUT_SCALE = [
"greenwave",
"group",
"gtfs",
"growatt_server",
"guardian",
"harman_kardon_avr",
"harmony",

View File

@@ -1 +1,909 @@
"""The tests for components."""
from collections.abc import Iterable
from enum import StrEnum
import itertools
from typing import Any, TypedDict
import pytest
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_DEVICE_ID,
ATTR_FLOOR_ID,
ATTR_LABEL_ID,
CONF_ABOVE,
CONF_BELOW,
CONF_CONDITION,
CONF_ENTITY_ID,
CONF_OPTIONS,
CONF_PLATFORM,
CONF_TARGET,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
floor_registry as fr,
label_registry as lr,
)
from homeassistant.helpers.condition import (
ConditionCheckerTypeOptional,
async_from_config as async_condition_from_config,
)
from homeassistant.helpers.trigger import (
CONF_LOWER_LIMIT,
CONF_THRESHOLD_TYPE,
CONF_UPPER_LIMIT,
ThresholdType,
)
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, mock_device_registry
async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[str]]:
    """Create multiple entities associated with different targets.

    Sets up one floor, one area (on that floor), one label and one mocked
    device (in the area, carrying the label), then registers test entities
    tied to the area, the device and the label.

    Returns a dict with the following keys:
    - included: List of entity_ids meant to be targeted.
    - excluded: List of entity_ids not meant to be targeted.
    """
    config_entry = MockConfigEntry(domain="test")
    config_entry.add_to_hass(hass)

    floor_registry = fr.async_get(hass)
    floor = floor_registry.async_get_floor_by_name(
        "Test Floor"
    ) or floor_registry.async_create("Test Floor")

    area_registry = ar.async_get(hass)
    area = area_registry.async_get_area_by_name(
        "Test Area"
    ) or area_registry.async_create("Test Area", floor_id=floor.floor_id)

    label_registry = lr.async_get(hass)
    label = label_registry.async_get_label_by_name(
        "Test Label"
    ) or label_registry.async_create("Test Label")

    # Single mocked device: lives in the test area and carries the test label.
    device = dr.DeviceEntry(id="test_device", area_id=area.id, labels={label.label_id})
    mock_device_registry(hass, {device.id: device})

    entity_registry = er.async_get(hass)

    def register(suffix: str, object_id: str, **extra: Any) -> er.RegistryEntry:
        """Register one test entity and return its registry entry."""
        return entity_registry.async_get_or_create(
            domain=domain,
            platform="test",
            unique_id=f"{domain}_{suffix}",
            suggested_object_id=object_id,
            **extra,
        )

    # Entities associated with the area
    for suffix, object_id in (
        ("area", f"area_{domain}"),
        ("area_excluded", f"area_{domain}_excluded"),
    ):
        entry = register(suffix, object_id)
        entity_registry.async_update_entity(entry.entity_id, area_id=area.id)

    # Entities associated with the device
    for suffix, object_id in (
        ("device", f"device_{domain}"),
        ("device2", f"device2_{domain}"),
        ("device_excluded", f"device_{domain}_excluded"),
    ):
        register(suffix, object_id, device_id=device.id)

    # Entities associated with the label
    for suffix, object_id in (
        ("label", f"label_{domain}"),
        ("label_excluded", f"label_{domain}_excluded"),
    ):
        entry = register(suffix, object_id)
        entity_registry.async_update_entity(entry.entity_id, labels={label.label_id})

    # Return all available entities (the standalone ones are created by state
    # only, so they have no registry entry here).
    return {
        "included": [
            f"{domain}.standalone_{domain}",
            f"{domain}.standalone2_{domain}",
            f"{domain}.label_{domain}",
            f"{domain}.area_{domain}",
            f"{domain}.device_{domain}",
            f"{domain}.device2_{domain}",
        ],
        "excluded": [
            f"{domain}.standalone_{domain}_excluded",
            f"{domain}.label_{domain}_excluded",
            f"{domain}.area_{domain}_excluded",
            f"{domain}.device_{domain}_excluded",
        ],
    }
def parametrize_target_entities(domain: str) -> list[tuple[dict, str, int]]:
    """Parametrize target entities for different target types.

    Meant to be used with target_entities. Each case is a tuple of
    (target selector, expected first entity_id, expected entity count).
    """
    standalone = f"{domain}.standalone_{domain}"
    standalone2 = f"{domain}.standalone2_{domain}"
    label_entity = f"{domain}.label_{domain}"
    area_entity = f"{domain}.area_{domain}"
    device_entity = f"{domain}.device_{domain}"

    cases: list[tuple[dict, str, int]] = [
        ({CONF_ENTITY_ID: [standalone, standalone2]}, standalone, 2),
        ({ATTR_LABEL_ID: "test_label"}, label_entity, 3),
        ({ATTR_AREA_ID: "test_area"}, area_entity, 3),
        ({ATTR_FLOOR_ID: "test_floor"}, area_entity, 3),
        ({ATTR_LABEL_ID: "test_label"}, device_entity, 3),
        ({ATTR_AREA_ID: "test_area"}, device_entity, 3),
        ({ATTR_FLOOR_ID: "test_floor"}, device_entity, 3),
        ({ATTR_DEVICE_ID: "test_device"}, device_entity, 2),
    ]
    return cases
class _StateDescription(TypedDict):
    """Test state with attributes."""
    # State value to set on an entity; None represents the initial
    # "no state yet" case used by the trigger/condition scenarios.
    state: str | None
    # State attributes to set alongside the state value.
    attributes: dict
class TriggerStateDescription(TypedDict):
    """Test state and expected service call count.

    One step of a trigger scenario: states to apply to the included and
    excluded entities, plus the number of service calls the trigger is
    expected to produce after the step.
    """
    included: _StateDescription  # State for entities meant to be targeted
    excluded: _StateDescription  # State for entities not meant to be targeted
    count: int  # Expected service call count
class ConditionStateDescription(TypedDict):
    """Test state and expected condition evaluation.

    One step of a condition scenario: states to apply to the included and
    excluded entities, plus the expected condition outcomes after the step.
    """
    included: _StateDescription  # State for entities meant to be targeted
    excluded: _StateDescription  # State for entities not meant to be targeted
    condition_true: bool  # If the condition is expected to evaluate to true
    condition_true_first_entity: bool  # If the condition is expected to evaluate to true for the first targeted entity
def _parametrize_condition_states(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None,
    condition_true_if_invalid: bool,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize states and expected condition evaluations.

    The target_states and other_states iterables are either iterables of
    states or iterables of (state, attributes) tuples.

    Returns a list of tuples with (condition, condition options, list of states),
    where states is a list of ConditionStateDescription dicts.
    """
    extra_attributes = additional_attributes or {}
    options = condition_options or {}

    def describe(
        state: str | None | tuple[str | None, dict],
        condition_true: bool,
        condition_true_first_entity: bool,
    ) -> ConditionStateDescription:
        """Build a ConditionStateDescription for a bare or (state, attrs) input."""
        if isinstance(state, tuple):
            value, attrs = state
        else:
            value, attrs = state, {}
        return {
            "included": {
                "state": value,
                # Included entities also get the additional attributes.
                "attributes": attrs | extra_attributes,
            },
            "excluded": {
                "state": value,
                "attributes": attrs,
            },
            "condition_true": condition_true,
            "condition_true_first_entity": condition_true_first_entity,
        }

    # First scenario: the invalid states (None / unavailable / unknown)
    # followed by every non-matching state.
    invalid_then_others = [
        describe(None, condition_true_if_invalid, True),
        describe(STATE_UNAVAILABLE, condition_true_if_invalid, True),
        describe(STATE_UNKNOWN, condition_true_if_invalid, True),
    ]
    invalid_then_others.extend(
        describe(other_state, False, False) for other_state in other_states
    )
    scenarios = [(condition, options, invalid_then_others)]

    # Test each target state individually to isolate condition_true expectations
    for target_state in target_states:
        scenarios.append(
            (
                condition,
                options,
                [
                    describe(other_states[0], False, False),
                    describe(target_state, True, False),
                ],
            )
        )
    return scenarios
def parametrize_condition_states_any(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None = None,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize condition scenarios for "any entity matches" semantics.

    The target_states and other_states iterables are either iterables of
    states or iterables of (state, attributes) tuples. Invalid states
    (None / unavailable / unknown) are expected NOT to satisfy the condition.

    Returns a list of tuples with (condition, condition options, list of states),
    where states is a list of ConditionStateDescription dicts.
    """
    return _parametrize_condition_states(
        condition_true_if_invalid=False,
        condition=condition,
        condition_options=condition_options,
        target_states=target_states,
        other_states=other_states,
        additional_attributes=additional_attributes,
    )
def parametrize_condition_states_all(
    *,
    condition: str,
    condition_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    additional_attributes: dict | None = None,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
    """Parametrize condition scenarios for "all entities match" semantics.

    The target_states and other_states iterables are either iterables of
    states or iterables of (state, attributes) tuples. Invalid states
    (None / unavailable / unknown) are expected to satisfy the condition.

    Returns a list of tuples with (condition, condition options, list of states),
    where states is a list of ConditionStateDescription dicts.
    """
    return _parametrize_condition_states(
        condition_true_if_invalid=True,
        condition=condition,
        condition_options=condition_options,
        target_states=target_states,
        other_states=other_states,
        additional_attributes=additional_attributes,
    )
def parametrize_trigger_states(
    *,
    trigger: str,
    trigger_options: dict[str, Any] | None = None,
    target_states: list[str | None | tuple[str | None, dict]],
    other_states: list[str | None | tuple[str | None, dict]],
    extra_invalid_states: list[str | None | tuple[str | None, dict]] | None = None,
    additional_attributes: dict | None = None,
    trigger_from_none: bool = True,
    retrigger_on_target_state: bool = False,
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
    """Parametrize states and expected service call counts.

    Builds scenario tables for target-based trigger tests: each scenario is a
    sequence of states to apply to an entity, paired with the number of
    automation service calls expected after each state change.

    The target_states, other_states, and extra_invalid_states iterables are
    either iterables of states or iterables of (state, attributes) tuples.
    Set `trigger_from_none` to False if the trigger is not expected to fire
    when the initial state is None, this is relevant for triggers that limit
    entities to a certain device class because the device class can't be
    determined when the state is None.
    Set `retrigger_on_target_state` to True if the trigger is expected to fire
    when the state changes to another target state.
    Returns a list of tuples with (trigger, trigger options, list of states),
    where states is a list of TriggerStateDescription dicts.
    """
    extra_invalid_states = extra_invalid_states or []
    # Unavailable/unknown are always treated as invalid initial states.
    invalid_states = [STATE_UNAVAILABLE, STATE_UNKNOWN, *extra_invalid_states]
    additional_attributes = additional_attributes or {}
    trigger_options = trigger_options or {}
    def state_with_attributes(
        state: str | None | tuple[str | None, dict], count: int
    ) -> TriggerStateDescription:
        """Return TriggerStateDescription dict.

        "included" is the state applied to entities matched by the trigger
        target; "excluded" is applied to entities outside the target. When no
        additional_attributes are given, the excluded state is None (i.e. the
        entity is removed) so excluded entities can never match the trigger.
        """
        # A plain state (or None) carries only the additional attributes.
        if isinstance(state, str) or state is None:
            return {
                "included": {
                    "state": state,
                    "attributes": additional_attributes,
                },
                "excluded": {
                    "state": state if additional_attributes else None,
                    "attributes": {},
                },
            "count": count,
            }
        # A (state, attributes) tuple: merge its attributes with the
        # additional ones for included entities; excluded entities get only
        # the tuple's own attributes.
        return {
            "included": {
                "state": state[0],
                "attributes": state[1] | additional_attributes,
            },
            "excluded": {
                "state": state[0] if additional_attributes else None,
                "attributes": state[1],
            },
            "count": count,
        }
    # Each scenario below is the cross product over all (target, other) state
    # pairs; the integer passed to state_with_attributes is the expected
    # number of trigger firings for that state change.
    tests = [
        # Initial state None
        (
            trigger,
            trigger_options,
            list(
                itertools.chain.from_iterable(
                    (
                        state_with_attributes(None, 0),
                        state_with_attributes(target_state, 0),
                        state_with_attributes(other_state, 0),
                        state_with_attributes(
                            target_state, 1 if trigger_from_none else 0
                        ),
                    )
                    for target_state in target_states
                    for other_state in other_states
                )
            ),
        ),
        # Initial state different from target state
        (
            trigger,
            trigger_options,
            # other_state,
            list(
                itertools.chain.from_iterable(
                    (
                        state_with_attributes(other_state, 0),
                        state_with_attributes(target_state, 1),
                        state_with_attributes(other_state, 0),
                        state_with_attributes(target_state, 1),
                    )
                    for target_state in target_states
                    for other_state in other_states
                )
            ),
        ),
        # Initial state same as target state
        (
            trigger,
            trigger_options,
            list(
                itertools.chain.from_iterable(
                    (
                        state_with_attributes(target_state, 0),
                        state_with_attributes(target_state, 0),
                        state_with_attributes(other_state, 0),
                        state_with_attributes(target_state, 1),
                        # Repeat target state to test retriggering
                        state_with_attributes(target_state, 0),
                        state_with_attributes(STATE_UNAVAILABLE, 0),
                    )
                    for target_state in target_states
                    for other_state in other_states
                )
            ),
        ),
        # Initial state unavailable / unknown + extra invalid states
        (
            trigger,
            trigger_options,
            list(
                itertools.chain.from_iterable(
                    (
                        state_with_attributes(invalid_state, 0),
                        state_with_attributes(target_state, 0),
                        state_with_attributes(other_state, 0),
                        state_with_attributes(target_state, 1),
                    )
                    for invalid_state in invalid_states
                    for target_state in target_states
                    for other_state in other_states
                )
            ),
        ),
    ]
    if len(target_states) > 1:
        # If more than one target state, test state change between target states
        tests.append(
            (
                trigger,
                trigger_options,
                list(
                    itertools.chain.from_iterable(
                        (
                            state_with_attributes(target_states[idx - 1], 0),
                            state_with_attributes(
                                target_state, 1 if retrigger_on_target_state else 0
                            ),
                            state_with_attributes(other_state, 0),
                            state_with_attributes(target_states[idx - 1], 1),
                            state_with_attributes(
                                target_state, 1 if retrigger_on_target_state else 0
                            ),
                            state_with_attributes(STATE_UNAVAILABLE, 0),
                        )
                        for idx, target_state in enumerate(target_states[1:], start=1)
                        for other_state in other_states
                    )
                ),
            ),
        )
    return tests
def parametrize_numerical_attribute_changed_trigger_states(
    trigger: str, state: str, attribute: str
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
    """Parametrize states and expected service call counts for numerical changed triggers."""

    def attr_states(values: list[int | None]) -> list[tuple[str, dict]]:
        """Build (state, attributes) tuples for the given attribute values."""
        return [(state, {attribute: value}) for value in values]

    # (trigger options, target attribute values, other attribute values)
    scenarios: list[tuple[dict[str, Any], list[int | None], list[int | None]]] = [
        ({}, [0, 50, 100], [None]),
        ({CONF_ABOVE: 10}, [50, 100], [None, 0]),
        ({CONF_BELOW: 90}, [0, 50], [None, 100]),
    ]
    cases: list[tuple[str, dict[str, Any], list[TriggerStateDescription]]] = []
    for options, target_values, other_values in scenarios:
        cases.extend(
            parametrize_trigger_states(
                trigger=trigger,
                trigger_options=options,
                target_states=attr_states(target_values),
                other_states=attr_states(other_values),
                # A "changed" trigger fires on every change between targets.
                retrigger_on_target_state=True,
            )
        )
    return cases
def parametrize_numerical_attribute_crossed_threshold_trigger_states(
    trigger: str, state: str, attribute: str
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
    """Parametrize states and expected service call counts for numerical crossed threshold triggers."""

    def attr_states(values: list[int | None]) -> list[tuple[str, dict]]:
        """Build (state, attributes) tuples for the given attribute values."""
        return [(state, {attribute: value}) for value in values]

    # (trigger options, target attribute values, other attribute values)
    scenarios: list[tuple[dict[str, Any], list[int | None], list[int | None]]] = [
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
                CONF_LOWER_LIMIT: 10,
                CONF_UPPER_LIMIT: 90,
            },
            [50, 60],
            [None, 0, 100],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
                CONF_LOWER_LIMIT: 10,
                CONF_UPPER_LIMIT: 90,
            },
            [0, 100],
            [None, 50, 60],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
                CONF_LOWER_LIMIT: 10,
            },
            [50, 100],
            [None, 0],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
                CONF_UPPER_LIMIT: 90,
            },
            [0, 50],
            [None, 100],
        ),
    ]
    cases: list[tuple[str, dict[str, Any], list[TriggerStateDescription]]] = []
    for options, target_values, other_values in scenarios:
        # Crossed-threshold triggers fire only when entering the target range,
        # so retrigger_on_target_state stays at its default (False).
        cases.extend(
            parametrize_trigger_states(
                trigger=trigger,
                trigger_options=options,
                target_states=attr_states(target_values),
                other_states=attr_states(other_values),
            )
        )
    return cases
def parametrize_numerical_state_value_changed_trigger_states(
    trigger: str, device_class: str
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
    """Parametrize states and expected service call counts for numerical state-value changed triggers.

    Unlike parametrize_numerical_attribute_changed_trigger_states, this is for
    entities where the tracked numerical value is in state.state (e.g. sensor
    entities), not in an attribute.
    """
    from homeassistant.const import ATTR_DEVICE_CLASS  # noqa: PLC0415

    device_class_attrs = {ATTR_DEVICE_CLASS: device_class}
    # (trigger options, target state values, other state values)
    scenarios: list[tuple[dict[str, Any], list[str], list[str]]] = [
        ({}, ["0", "50", "100"], ["none"]),
        ({CONF_ABOVE: 10}, ["50", "100"], ["none", "0"]),
        ({CONF_BELOW: 90}, ["0", "50"], ["none", "100"]),
    ]
    cases: list[tuple[str, dict[str, Any], list[TriggerStateDescription]]] = []
    for options, target_values, other_values in scenarios:
        cases.extend(
            parametrize_trigger_states(
                trigger=trigger,
                trigger_options=options,
                target_states=target_values,
                other_states=other_values,
                additional_attributes=device_class_attrs,
                # A "changed" trigger fires on every change between targets.
                retrigger_on_target_state=True,
                # Device class can't be determined while the state is None.
                trigger_from_none=False,
            )
        )
    return cases
def parametrize_numerical_state_value_crossed_threshold_trigger_states(
    trigger: str, device_class: str
) -> list[tuple[str, dict[str, Any], list[TriggerStateDescription]]]:
    """Parametrize states and expected service call counts for numerical state-value crossed threshold triggers.

    Unlike parametrize_numerical_attribute_crossed_threshold_trigger_states,
    this is for entities where the tracked numerical value is in state.state
    (e.g. sensor entities), not in an attribute.
    """
    from homeassistant.const import ATTR_DEVICE_CLASS  # noqa: PLC0415

    device_class_attrs = {ATTR_DEVICE_CLASS: device_class}
    # (trigger options, target state values, other state values)
    scenarios: list[tuple[dict[str, Any], list[str], list[str]]] = [
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.BETWEEN,
                CONF_LOWER_LIMIT: 10,
                CONF_UPPER_LIMIT: 90,
            },
            ["50", "60"],
            ["none", "0", "100"],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.OUTSIDE,
                CONF_LOWER_LIMIT: 10,
                CONF_UPPER_LIMIT: 90,
            },
            ["0", "100"],
            ["none", "50", "60"],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.ABOVE,
                CONF_LOWER_LIMIT: 10,
            },
            ["50", "100"],
            ["none", "0"],
        ),
        (
            {
                CONF_THRESHOLD_TYPE: ThresholdType.BELOW,
                CONF_UPPER_LIMIT: 90,
            },
            ["0", "50"],
            ["none", "100"],
        ),
    ]
    cases: list[tuple[str, dict[str, Any], list[TriggerStateDescription]]] = []
    for options, target_values, other_values in scenarios:
        cases.extend(
            parametrize_trigger_states(
                trigger=trigger,
                trigger_options=options,
                target_states=target_values,
                other_states=other_values,
                additional_attributes=device_class_attrs,
                # Device class can't be determined while the state is None.
                trigger_from_none=False,
            )
        )
    return cases
async def arm_trigger(
    hass: HomeAssistant,
    trigger: str,
    trigger_options: dict[str, Any] | None,
    trigger_target: dict,
) -> None:
    """Arm the specified trigger, call service test.automation when it triggers."""
    # Local include to avoid importing the automation component unnecessarily
    from homeassistant.components import automation  # noqa: PLC0415

    trigger_config: dict[str, Any] = {
        CONF_PLATFORM: trigger,
        CONF_TARGET: {**trigger_target},
    }
    # Options are only included when explicitly provided (None means omit).
    if trigger_options is not None:
        trigger_config[CONF_OPTIONS] = {**trigger_options}
    await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: {
                "trigger": trigger_config,
                "action": {
                    "service": "test.automation",
                    "data_template": {CONF_ENTITY_ID: "{{ trigger.entity_id }}"},
                },
            }
        },
    )
async def create_target_condition(
    hass: HomeAssistant,
    *,
    condition: str,
    target: dict,
    behavior: str,
) -> ConditionCheckerTypeOptional:
    """Create a target condition."""
    condition_config = {
        CONF_CONDITION: condition,
        CONF_TARGET: target,
        CONF_OPTIONS: {"behavior": behavior},
    }
    return await async_condition_from_config(hass, condition_config)
def set_or_remove_state(
    hass: HomeAssistant,
    entity_id: str,
    state: TriggerStateDescription,
) -> None:
    """Set or remove the state of an entity."""
    new_state = state["state"]
    # A None state means the entity should not exist at all.
    if new_state is None:
        hass.states.async_remove(entity_id)
        return
    hass.states.async_set(
        entity_id, new_state, state["attributes"], force_update=True
    )
def other_states(state: StrEnum | Iterable[StrEnum]) -> list[str]:
"""Return a sorted list with all states except the specified one."""
if isinstance(state, StrEnum):
excluded_values = {state.value}
enum_class = state.__class__
else:
if len(state) == 0:
raise ValueError("state iterable must not be empty")
excluded_values = {s.value for s in state}
enum_class = list(state)[0].__class__
return sorted({s.value for s in enum_class} - excluded_values)
async def assert_condition_gated_by_labs_flag(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, condition: str
) -> None:
    """Helper to check that a condition is gated by the labs flag."""
    # Local include to avoid importing the automation component unnecessarily
    from homeassistant.components import automation  # noqa: PLC0415

    automation_config = {
        automation.DOMAIN: {
            "trigger": {"platform": "event", "event_type": "test_event"},
            "condition": {
                CONF_CONDITION: condition,
                CONF_TARGET: {ATTR_LABEL_ID: "test_label"},
                CONF_OPTIONS: {"behavior": "any"},
            },
            "action": {
                "service": "test.automation",
            },
        }
    }
    await async_setup_component(hass, automation.DOMAIN, automation_config)
    expected_message = (
        "Unnamed automation failed to setup conditions and has been disabled: "
        f"Condition '{condition}' requires the experimental 'New triggers and "
        "conditions' feature to be enabled in Home Assistant Labs settings "
        "(feature flag: 'new_triggers_conditions')"
    )
    assert expected_message in caplog.text
async def assert_trigger_gated_by_labs_flag(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, trigger: str
) -> None:
    """Helper to check that a trigger is gated by the labs flag."""
    await arm_trigger(hass, trigger, None, {ATTR_LABEL_ID: "test_label"})
    expected_message = (
        "Unnamed automation failed to setup triggers and has been disabled: Trigger "
        f"'{trigger}' requires the experimental 'New triggers and conditions' "
        "feature to be enabled in Home Assistant Labs settings (feature flag: "
        "'new_triggers_conditions')"
    )
    assert expected_message in caplog.text
async def assert_trigger_behavior_any(
    hass: HomeAssistant,
    *,
    service_calls: list[ServiceCall],
    target_entities: dict[str, list[str]],
    trigger_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    trigger: str,
    trigger_options: dict[str, Any],
    states: list[TriggerStateDescription],
) -> None:
    """Test trigger fires in mode any.

    Applies the state sequence in `states` first to `entity_id` alone, then to
    all remaining entities inside and outside the trigger target, asserting
    after each step that exactly the expected number of automation service
    calls fired (per-step "count" in each TriggerStateDescription).
    """
    # Entities inside the target other than the one under test, and entities
    # explicitly outside the target.
    other_entity_ids = set(target_entities["included"]) - {entity_id}
    excluded_entity_ids = set(target_entities["excluded"]) - {entity_id}
    # Seed all entities with the initial state before arming the trigger, so
    # the setup itself cannot fire the automation.
    for eid in target_entities["included"]:
        set_or_remove_state(hass, eid, states[0]["included"])
    await hass.async_block_till_done()
    for eid in excluded_entity_ids:
        set_or_remove_state(hass, eid, states[0]["excluded"])
    await hass.async_block_till_done()
    await arm_trigger(hass, trigger, trigger_options, trigger_target_config)
    for state in states[1:]:
        excluded_state = state["excluded"]
        included_state = state["included"]
        # Step 1: change only the entity under test; expect exactly
        # state["count"] service calls, each reporting this entity.
        set_or_remove_state(hass, entity_id, included_state)
        await hass.async_block_till_done()
        assert len(service_calls) == state["count"]
        for service_call in service_calls:
            assert service_call.data[CONF_ENTITY_ID] == entity_id
        service_calls.clear()
        # Step 2: apply the same change to every other targeted entity, and
        # the excluded state to every non-targeted entity. Only the targeted
        # entities may fire: (entities_in_target - 1) * count calls total.
        for other_entity_id in other_entity_ids:
            set_or_remove_state(hass, other_entity_id, included_state)
            await hass.async_block_till_done()
        for excluded_entity_id in excluded_entity_ids:
            set_or_remove_state(hass, excluded_entity_id, excluded_state)
            await hass.async_block_till_done()
        assert len(service_calls) == (entities_in_target - 1) * state["count"]
        service_calls.clear()

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_binary_sensors[binary_sensor.lunar_ddeeff_timer_running-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_buttons[button.lunar_ddeeff_reset_timer-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -51,9 +50,8 @@
# ---
# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -101,9 +99,8 @@
# ---
# name: test_buttons[button.lunar_ddeeff_tare-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_sensors[sensor.lunar_ddeeff_battery-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -56,9 +55,8 @@
# ---
# name: test_sensors[sensor.lunar_ddeeff_volume_flow_rate-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -114,9 +112,8 @@
# ---
# name: test_sensors[sensor.lunar_ddeeff_weight-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,

File diff suppressed because it is too large Load Diff

View File

@@ -418,9 +418,8 @@
# ---
# name: test_weather[weather.home-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_climate_entities[climate.living_room-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'hvac_modes': list([
@@ -74,9 +73,8 @@
# ---
# name: test_climate_entities[climate.test_system-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'fan_modes': list([

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_switch_entities[switch.test_system_away_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -51,9 +50,8 @@
# ---
# name: test_switch_entities[switch.test_system_continuous_fan-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -101,9 +99,8 @@
# ---
# name: test_switch_entities[switch.test_system_quiet_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -151,9 +148,8 @@
# ---
# name: test_switch_entities[switch.test_system_turbo_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_fallback_to_get_rooms[sensor.room_1_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
@@ -62,9 +61,8 @@
# ---
# name: test_fallback_to_get_rooms[sensor.room_1_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -120,9 +118,8 @@
# ---
# name: test_multiple_devices_create_individual_sensors[sensor.room_1_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
@@ -181,9 +178,8 @@
# ---
# name: test_multiple_devices_create_individual_sensors[sensor.room_1_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -239,9 +235,8 @@
# ---
# name: test_multiple_devices_create_individual_sensors[sensor.room_2_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
@@ -300,9 +295,8 @@
# ---
# name: test_multiple_devices_create_individual_sensors[sensor.room_2_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -358,9 +352,8 @@
# ---
# name: test_sensor_cloud[sensor.room_1_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.TOTAL_INCREASING: 'total_increasing'>,
@@ -419,9 +412,8 @@
# ---
# name: test_sensor_cloud[sensor.room_1_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_sensors[sensor.adguard_home_average_processing_speed-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -52,9 +51,8 @@
# ---
# name: test_sensors[sensor.adguard_home_dns_queries-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -103,9 +101,8 @@
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -154,9 +151,8 @@
# ---
# name: test_sensors[sensor.adguard_home_dns_queries_blocked_ratio-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -205,9 +201,8 @@
# ---
# name: test_sensors[sensor.adguard_home_parental_control_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -256,9 +251,8 @@
# ---
# name: test_sensors[sensor.adguard_home_rules_count-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -307,9 +301,8 @@
# ---
# name: test_sensors[sensor.adguard_home_safe_browsing_blocked-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -358,9 +351,8 @@
# ---
# name: test_sensors[sensor.adguard_home_safe_searches_enforced-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_switch[switch.adguard_home_filtering-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -51,9 +50,8 @@
# ---
# name: test_switch[switch.adguard_home_parental_control-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -101,9 +99,8 @@
# ---
# name: test_switch[switch.adguard_home_protection-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -151,9 +148,8 @@
# ---
# name: test_switch[switch.adguard_home_query_log-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -201,9 +197,8 @@
# ---
# name: test_switch[switch.adguard_home_safe_browsing-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -251,9 +246,8 @@
# ---
# name: test_switch[switch.adguard_home_safe_search-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_update[update.adguard_home-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[indoor][button.airgradient_calibrate_co2_sensor-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -51,9 +50,8 @@
# ---
# name: test_all_entities[indoor][button.airgradient_test_led_bar-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -101,9 +99,8 @@
# ---
# name: test_all_entities[outdoor][button.airgradient_calibrate_co2_sensor-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[number.airgradient_display_brightness-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,
@@ -61,9 +60,8 @@
# ---
# name: test_all_entities[number.airgradient_led_bar_brightness-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 100,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[indoor][select.airgradient_co2_automatic_baseline_duration-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -68,9 +67,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_configuration_source-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -127,9 +125,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_display_pm_standard-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -186,9 +183,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_display_temperature_unit-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -245,9 +241,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_led_bar_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -306,9 +301,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_nox_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -371,9 +365,8 @@
# ---
# name: test_all_entities[indoor][select.airgradient_voc_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -436,9 +429,8 @@
# ---
# name: test_all_entities[outdoor][select.airgradient_co2_automatic_baseline_duration-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -503,9 +495,8 @@
# ---
# name: test_all_entities[outdoor][select.airgradient_configuration_source-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -562,9 +553,8 @@
# ---
# name: test_all_entities[outdoor][select.airgradient_nox_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -627,9 +617,8 @@
# ---
# name: test_all_entities[outdoor][select.airgradient_voc_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -56,9 +55,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -111,9 +109,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_display_brightness-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -162,9 +159,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_display_pm_standard-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -222,9 +218,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_display_temperature_unit-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -282,9 +277,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -337,9 +331,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_led_bar_brightness-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -388,9 +381,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_led_bar_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
@@ -450,9 +442,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_nox_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -503,9 +494,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_nox_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -558,9 +548,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_pm0_3-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -612,9 +601,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_pm1-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -667,9 +655,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_pm10-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -722,9 +709,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_pm2_5-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -777,9 +763,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_raw_nox-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -831,9 +816,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_raw_pm2_5-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -886,9 +870,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_raw_voc-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -940,9 +923,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_signal_strength-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -995,9 +977,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1053,9 +1034,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_voc_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1106,9 +1086,8 @@
# ---
# name: test_all_entities[indoor][sensor.airgradient_voc_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -1161,9 +1140,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_carbon_dioxide_automatic_baseline_calibration-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -1216,9 +1194,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_nox_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1269,9 +1246,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_nox_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -1324,9 +1300,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_raw_nox-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1378,9 +1353,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_raw_voc-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1432,9 +1406,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_signal_strength-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1487,9 +1460,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_voc_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -1540,9 +1512,8 @@
# ---
# name: test_all_entities[outdoor][sensor.airgradient_voc_index_learning_offset-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[switch.airgradient_post_data_to_airgradient-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_all_entities[update.airgradient_firmware-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_sensor[sensor.home_carbon_monoxide-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -61,9 +60,8 @@
# ---
# name: test_sensor[sensor.home_common_air_quality_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -119,9 +117,8 @@
# ---
# name: test_sensor[sensor.home_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -178,9 +175,8 @@
# ---
# name: test_sensor[sensor.home_nitrogen_dioxide-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -239,9 +235,8 @@
# ---
# name: test_sensor[sensor.home_ozone-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -300,9 +295,8 @@
# ---
# name: test_sensor[sensor.home_pm1-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -359,9 +353,8 @@
# ---
# name: test_sensor[sensor.home_pm10-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -420,9 +413,8 @@
# ---
# name: test_sensor[sensor.home_pm2_5-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -481,9 +473,8 @@
# ---
# name: test_sensor[sensor.home_pressure-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -540,9 +531,8 @@
# ---
# name: test_sensor[sensor.home_sulphur_dioxide-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
@@ -601,9 +591,8 @@
# ---
# name: test_sensor[sensor.home_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_buttons[button.test_thermostat_recalibrate_co2_sensor-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
@@ -51,9 +50,8 @@
# ---
# name: test_buttons[button.test_thermostat_restart-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_climate_entities[climate.test_thermostat-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'hvac_modes': list([

View File

@@ -1,9 +1,8 @@
# serializer version: 1
# name: test_number_entities[number.test_thermostat_hysteresis_band-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'max': 0.5,

Some files were not shown because too many files have changed in this diff Show More