Compare commits

..

43 Commits

Author SHA1 Message Date
Marcel van der Veldt
6113288662 Address Copilot review: robust cleanup on cancellation
Ensure pipeline cleanup runs to completion even when execute() is
cancelled mid-run (WebSocket unsubscribe, timeout). If cancellation
hits one cleanup step, the remaining steps still run and the
CancelledError is re-raised at the end.

Also correct the queue size comment to match the actual 10ms chunks
used by the pipeline (256 * 10ms = ~2.6s, not ~4s).
2026-04-15 14:13:28 +02:00
Marcel van der Veldt
e0b1e99211 Add coverage for _close_async_generators and document queue bound 2026-04-15 12:53:55 +02:00
Marcel van der Veldt
a3a2557259 Clean up async generator lifecycle in assist pipeline
Close the STT audio stream async generators (process_enhance_audio /
process_volume_only / buffer_then_audio_stream) in PipelineInput.execute()
finally block so buffered audio chunks and the audio enhancer VAD state
are released promptly on early exit instead of waiting on garbage
collection.

Also bound the WebSocket audio queue to 256 frames (~4s of 16kHz mono)
and drop frames on overflow rather than growing without limit. Send a
stop sentinel on disconnect to unblock the stt_stream generator.
2026-04-14 21:41:25 +02:00
Paulus Schoutsen
599c548264 Bump serialx to 1.2.2 (#168229) 2026-04-14 21:21:26 +02:00
Franck Nijhof
b18602cd18 Disable Renovate vulnerability alerts flow (#168233) 2026-04-14 21:11:07 +02:00
Stefan Agner
a45e2d74ec Split hassio data coordinator and add dedicated stats coordinator (#167080)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2026-04-14 20:51:13 +02:00
Franck Nijhof
a952636c28 Refine Renovate config with built-in manager and review follow-ups (#168225) 2026-04-14 20:27:59 +02:00
Daniel Hjelseth Høyer
ccd1d9f8ea Bump pyTibber to 0.37.1 (#168208) 2026-04-14 19:41:05 +02:00
Franck Nijhof
a4d4fe3722 Add Renovate config for allow-listed Python dependency updates (#168192) 2026-04-14 18:56:51 +02:00
Denis Shulyaka
98b41d25f3 Add send_message_draft action to telegram_bot (#165682)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-04-14 17:57:10 +02:00
Franck Nijhof
d8c8f82c7e Translate coordinator exceptions for PVOutput (#168076)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-04-14 17:55:15 +02:00
Florent Thoumie
8695d32b32 iaqualink: enable _attr_has_entity_name (#167810)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-04-14 17:45:44 +02:00
Marcel van der Veldt
073d22d046 Fix Wyoming satellite memory leak on disconnect (#168152) 2026-04-14 10:37:36 -05:00
Raphael Hehl
939412717f Add binary sensor platform for MELCloud ATW devices (#168128)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-04-14 17:28:56 +02:00
Marc Mueller
8217d3683a Fix mqtt test ResourceWarnings (#168182) 2026-04-14 17:24:50 +02:00
Ronald van der Meer
fa9185b755 Add sensor platform to Duco integration (#167920) 2026-04-14 17:21:46 +02:00
Erik Montnemery
f2f59eb8b7 Add todo conditions (#167752) 2026-04-14 17:15:56 +02:00
Erik Montnemery
16edfc9624 Add remote conditions (#167750) 2026-04-14 16:34:42 +02:00
Niracler
177d244b91 Add diagnostics platform to Sunricher DALI integration (#168074) 2026-04-14 16:33:54 +02:00
Andres Ruiz
dd8a79bd0e Add energy backfill support for waterfurnace (#167955)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-14 16:33:03 +02:00
Retha Runolfsson
3c46ecb93a Fix Switchbot Keypad Vision doorbell detection (#168098)
Co-authored-by: Ariel Ebersberger <31776703+justanotherariel@users.noreply.github.com>
2026-04-14 16:32:03 +02:00
Raphael Hehl
66b2d4477b Fix unifi_discovery deepcopy crash on Python 3.14 (#168153) 2026-04-14 16:31:04 +02:00
Robert Resch
2ba66fb722 Use runtime_data in plaato integration (#167900)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-14 16:26:10 +02:00
Arjan
9fab53d083 MeteoFrance - Add wind gusts for hourly forecast (re) (#168166) 2026-04-14 16:17:52 +02:00
Shay Levy
41c3db9ebd Revert "Replace "custom" with "community" in analytics_insights" (#168160) 2026-04-14 16:09:20 +02:00
Arie Catsman
3d0d048d1f Bump pyenphase from 2.4.6 to 2.4.8 (#168190) 2026-04-14 16:07:15 +02:00
Marc Mueller
c57a666921 Fix matrix ResourceWarning (#168186) 2026-04-14 16:05:32 +02:00
Marc Mueller
3cd67cea53 Fix test fixture tests ResourceWarning (#168183) 2026-04-14 16:05:29 +02:00
Kurt Chrisford
e05622f8d0 Mark entity-translations and icon-translations as done for Actron Air (#167150) 2026-04-14 15:24:41 +02:00
Marc Mueller
c17d3584cb Fix backup test ResourceWarnings (#168180) 2026-04-14 15:18:34 +02:00
Marc Mueller
3f3b3db913 Fix go2rtc ResourceWarnings (#168184) 2026-04-14 15:17:34 +02:00
Marc Mueller
44a0e964ef Fix homekit ResourceWarnings (#168185) 2026-04-14 15:17:11 +02:00
Marc Mueller
d6e56b41b1 Fix mcp_server ResourceWarnings (#168187) 2026-04-14 15:16:49 +02:00
Marc Mueller
4191bbf504 Fix octoprint ResourceWarnings (#168188) 2026-04-14 15:16:45 +02:00
Artur Pragacz
041fed4b48 Fix missing async_request_call in single-entity service call path (#168171) 2026-04-14 14:35:28 +02:00
Shay Levy
6311e6feec Revert "Replace 'custom component' with 'community integration' in bmw_connected_drive" (#168159) 2026-04-14 14:03:29 +02:00
Jan Čermák
582a0a5ae3 Add MariaDB 11.4 to CI tests (#168111) 2026-04-14 13:39:55 +02:00
Christopher Fenner
1a3f75c6fc Add additional codeowner to ViCare integration (#168169) 2026-04-14 13:38:29 +02:00
Shay Levy
21301e43a9 Revert "Update "custom component" to "community integration" in Shelly" (#168162) 2026-04-14 14:25:50 +03:00
Christian Lackas
cbe7823fd5 Bump homematicip to 2.8.0 (#168168) 2026-04-14 13:10:01 +02:00
Raphael Hehl
7a5951b72d Add discovery support to unifi_access via unifi_discovery (#168085) 2026-04-14 13:00:06 +02:00
Shay Levy
42771ed0a7 Revert "Replace "custom" with "community" in homeassistant" (#168161) 2026-04-14 12:58:33 +02:00
Aidan Timson
ded34b4430 Fix device_class removal in template binary sensors (#167775) 2026-04-14 11:40:13 +02:00
133 changed files with 5161 additions and 1170 deletions

161
.github/renovate.json vendored Normal file
View File

@@ -0,0 +1,161 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["config:recommended"],
"enabledManagers": [
"pep621",
"pip_requirements",
"pre-commit",
"homeassistant-manifest"
],
"pre-commit": {
"enabled": true
},
"pip_requirements": {
"managerFilePatterns": [
"/(^|/)requirements[\\w_-]*\\.txt$/",
"/(^|/)homeassistant/package_constraints\\.txt$/"
]
},
"homeassistant-manifest": {
"managerFilePatterns": [
"/^homeassistant/components/[^/]+/manifest\\.json$/"
]
},
"minimumReleaseAge": "7 days",
"prConcurrentLimit": 10,
"prHourlyLimit": 2,
"schedule": ["before 6am"],
"semanticCommits": "disabled",
"commitMessageAction": "Update",
"commitMessageTopic": "{{depName}}",
"commitMessageExtra": "to {{newVersion}}",
"automerge": false,
"vulnerabilityAlerts": {
"enabled": false
},
"packageRules": [
{
"description": "Deny all by default — allowlist below re-enables specific packages",
"matchPackageNames": ["*"],
"enabled": false
},
{
"description": "Core runtime dependencies (allowlisted)",
"matchPackageNames": [
"aiohttp",
"aiohttp-fast-zlib",
"aiohttp_cors",
"aiohttp-asyncmdnsresolver",
"yarl",
"httpx",
"requests",
"urllib3",
"certifi",
"orjson",
"PyYAML",
"Jinja2",
"cryptography",
"pyOpenSSL",
"PyJWT",
"SQLAlchemy",
"Pillow",
"attrs",
"uv",
"voluptuous",
"voluptuous-serialize",
"voluptuous-openapi",
"zeroconf"
],
"enabled": true,
"labels": ["dependency", "core"]
},
{
"description": "Test dependencies (allowlisted)",
"matchPackageNames": [
"pytest",
"pytest-asyncio",
"pytest-aiohttp",
"pytest-cov",
"pytest-freezer",
"pytest-github-actions-annotate-failures",
"pytest-socket",
"pytest-sugar",
"pytest-timeout",
"pytest-unordered",
"pytest-picked",
"pytest-xdist",
"pylint",
"pylint-per-file-ignores",
"astroid",
"coverage",
"freezegun",
"syrupy",
"respx",
"requests-mock",
"ruff",
"codespell",
"yamllint",
"zizmor"
],
"enabled": true,
"labels": ["dependency"]
},
{
"description": "For types-* stubs, only allow patch updates. Major/minor bumps track the upstream runtime package version and must be manually coordinated with the corresponding pin.",
"matchPackageNames": ["/^types-/"],
"matchUpdateTypes": ["patch"],
"enabled": true,
"labels": ["dependency"]
},
{
"description": "Pre-commit hook repos (allowlisted, matched by owner/repo)",
"matchPackageNames": [
"astral-sh/ruff-pre-commit",
"codespell-project/codespell",
"adrienverge/yamllint",
"zizmorcore/zizmor-pre-commit"
],
"enabled": true,
"labels": ["dependency"]
},
{
"description": "Group ruff pre-commit hook with its PyPI twin into one PR",
"matchPackageNames": ["astral-sh/ruff-pre-commit", "ruff"],
"groupName": "ruff",
"groupSlug": "ruff"
},
{
"description": "Group codespell pre-commit hook with its PyPI twin into one PR",
"matchPackageNames": ["codespell-project/codespell", "codespell"],
"groupName": "codespell",
"groupSlug": "codespell"
},
{
"description": "Group yamllint pre-commit hook with its PyPI twin into one PR",
"matchPackageNames": ["adrienverge/yamllint", "yamllint"],
"groupName": "yamllint",
"groupSlug": "yamllint"
},
{
"description": "Group zizmor pre-commit hook with its PyPI twin into one PR",
"matchPackageNames": ["zizmorcore/zizmor-pre-commit", "zizmor"],
"groupName": "zizmor",
"groupSlug": "zizmor"
},
{
"description": "Group pylint with astroid (their versions are linked and must move together)",
"matchPackageNames": ["pylint", "astroid"],
"groupName": "pylint",
"groupSlug": "pylint"
}
]
}

View File

@@ -50,9 +50,11 @@ env:
# - 10.10.3 is the latest (as of 6 Feb 2023)
# 10.11 is the latest long-term-support
# - 10.11.2 is the version currently shipped with Synology (as of 11 Oct 2023)
# 11.4 is an LTS with support until May 2029
# - 11.4.9 is used in Alpine 3.23 (used in latest HA base images as of 11 Apr 2026)
# mysql 8.0.32 does not always behave the same as MariaDB
# and some queries that work on MariaDB do not work on MySQL
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mysql:8.0.32']"
MARIADB_VERSIONS: "['mariadb:10.3.32','mariadb:10.6.10','mariadb:10.10.3','mariadb:10.11.2','mariadb:11.4.9','mysql:8.0.32']"
# 12 is the oldest supported version
# - 12.14 is the latest (as of 9 Feb 2023)
# 15 is the latest version
@@ -1062,7 +1064,9 @@ jobs:
- 3306:3306
env:
MYSQL_ROOT_PASSWORD: password
options: --health-cmd="mysqladmin ping -uroot -ppassword" --health-interval=5s --health-timeout=2s --health-retries=3
options: >-
--health-cmd="if command -v mariadb-admin >/dev/null; then mariadb-admin ping -uroot -ppassword; else mysqladmin ping -uroot -ppassword; fi"
--health-interval=5s --health-timeout=2s --health-retries=3
needs:
- info
- base

4
CODEOWNERS generated
View File

@@ -1877,8 +1877,8 @@ CLAUDE.md @home-assistant/core
/tests/components/version/ @ludeeus
/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
/homeassistant/components/vicare/ @CFenner
/tests/components/vicare/ @CFenner
/homeassistant/components/vicare/ @CFenner @lackas
/tests/components/vicare/ @CFenner @lackas
/homeassistant/components/victron_ble/ @rajlaud
/tests/components/victron_ble/ @rajlaud
/homeassistant/components/victron_gx/ @tomer-w

View File

@@ -57,9 +57,9 @@ rules:
entity-category: done
entity-device-class: todo
entity-disabled-by-default: todo
entity-translations: todo
entity-translations: done
exception-translations: done
icon-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues:
status: exempt

View File

@@ -11,12 +11,12 @@
"user": {
"data": {
"tracked_apps": "Apps",
"tracked_custom_integrations": "Community integrations",
"tracked_custom_integrations": "Custom integrations",
"tracked_integrations": "Integrations"
},
"data_description": {
"tracked_apps": "Select the apps you want to track",
"tracked_custom_integrations": "Select the community integrations you want to track",
"tracked_custom_integrations": "Select the custom integrations you want to track",
"tracked_integrations": "Select the integrations you want to track"
}
}
@@ -31,7 +31,7 @@
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
},
"custom_integrations": {
"name": "{custom_integration_domain} (community)",
"name": "{custom_integration_domain} (custom)",
"unit_of_measurement": "[%key:component::analytics_insights::entity::sensor::apps::unit_of_measurement%]"
},
"total_active_installations": {

View File

@@ -1631,6 +1631,29 @@ def _pipeline_debug_recording_thread_proc(
wav_writer.close()
async def _close_async_generators(
*generators: AsyncIterable[Any] | None,
) -> None:
"""Close async generators, suppressing non-cancellation errors.
If ``aclose()`` on one generator is cancelled, the others are still
attempted; the cancellation is re-raised once all generators have
been processed.
"""
cancelled_exc: asyncio.CancelledError | None = None
for gen in generators:
aclose = getattr(gen, "aclose", None)
if aclose is not None:
try:
await aclose()
except asyncio.CancelledError as exc:
cancelled_exc = exc
except Exception: # noqa: BLE001
pass
if cancelled_exc is not None:
raise cancelled_exc
@dataclass(kw_only=True)
class PipelineInput:
"""Input to a pipeline run."""
@@ -1680,12 +1703,16 @@ class PipelineInput:
)
current_stage: PipelineStage | None = self.run.start_stage
# Track async generators so they can be closed on early exit
# (validation error, no wake word, cancellation, etc.).
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
stt_input_stream: AsyncIterable[EnhancedAudioChunk] | None = None
try:
if validation_error is not None:
raise validation_error
stt_audio_buffer: list[EnhancedAudioChunk] = []
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
if self.stt_stream is not None:
if self.run.audio_settings.needs_processor:
@@ -1800,9 +1827,44 @@ class PipelineInput:
)
)
finally:
# Always end the run since it needs to shut down the debug recording
# thread, etc.
await self._cleanup(stt_input_stream, stt_processed_stream)
async def _cleanup(
self,
stt_input_stream: AsyncIterable[EnhancedAudioChunk] | None,
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None,
) -> None:
"""Release pipeline resources.
Close the STT audio stream async generators so buffered audio
chunks and the audio enhancer's VAD state are released promptly
instead of waiting on garbage collection (especially slow on
Python 3.14+). Close the wrapper first, then the upstream; skip
if both refer to the same object to avoid double-close.
Catch CancelledError around each cleanup step so a cancelled
pipeline (WebSocket unsubscribe, timeout) still runs the full
cleanup chain — otherwise cancellation reintroduces the very
leaks this code is trying to prevent. Re-raise at the end.
"""
cancelled_exc: asyncio.CancelledError | None = None
try:
await _close_async_generators(
None if stt_input_stream is stt_processed_stream else stt_input_stream,
stt_processed_stream,
)
except asyncio.CancelledError as exc:
cancelled_exc = exc
try:
# Always end the run since it needs to shut down the debug
# recording thread, etc.
await self.run.end()
except asyncio.CancelledError as exc:
cancelled_exc = cancelled_exc or exc
if cancelled_exc is not None:
raise cancelled_exc
async def validate(self) -> None:
"""Validate pipeline input against start stage."""

View File

@@ -155,7 +155,10 @@ async def websocket_run(
if start_stage in (PipelineStage.WAKE_WORD, PipelineStage.STT):
# Audio pipeline that will receive audio as binary websocket messages
msg_input = msg["input"]
audio_queue: asyncio.Queue[bytes] = asyncio.Queue()
# ~2.6s of 16kHz mono audio at 10ms chunks — enough to absorb
# brief stalls, but bounded so a stalled consumer cannot consume
# unbounded memory.
audio_queue: asyncio.Queue[bytes] = asyncio.Queue(maxsize=256)
incoming_sample_rate = msg_input["sample_rate"]
wake_word_phrase: str | None = None
@@ -188,8 +191,11 @@ async def websocket_run(
_connection: websocket_api.ActiveConnection,
data: bytes,
) -> None:
# Forward to STT audio stream
audio_queue.put_nowait(data)
# Forward to STT audio stream.
# Drop frames if the pipeline can't keep up rather than
# growing the queue without bound.
with contextlib.suppress(asyncio.QueueFull):
audio_queue.put_nowait(data)
handler_id, unregister_handler = connection.async_register_binary_handler(
handle_binary
@@ -273,6 +279,20 @@ async def websocket_run(
# Unregister binary handler
unregister_handler()
# Send stop signal to unblock the stt_stream generator.
# Empty bytes is falsy and causes the ``while chunk :=``
# loop to exit cleanly. If the bounded queue is full,
# discard queued audio until there is room for the stop
# sentinel, so the stream can always exit.
while True:
try:
audio_queue.put_nowait(b"")
except asyncio.QueueFull:
with contextlib.suppress(asyncio.QueueEmpty):
audio_queue.get_nowait()
else:
break
@callback
@websocket_api.require_admin

View File

@@ -143,6 +143,7 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
"occupancy",
"person",
"power",
"remote",
"schedule",
"select",
"siren",
@@ -150,6 +151,7 @@ _EXPERIMENTAL_CONDITION_PLATFORMS = {
"temperature",
"text",
"timer",
"todo",
"vacuum",
"valve",
"water_heater",

View File

@@ -1,7 +1,7 @@
{
"issues": {
"integration_removed": {
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a [community integration]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
"description": "The BMW Connected Drive integration has been removed from Home Assistant.\n\nIn September 2025, BMW blocked third-party access to their servers by adding additional security measures. For EU-registered cars, a community-developed [custom component]({custom_component_url}) using BMW's CarData API is available as an alternative.\n\nTo resolve this issue, please remove the (now defunct) integration entries from your Home Assistant setup. [Click here to see your existing BMW Connected Drive integration entries]({entries}).",
"title": "The BMW Connected Drive integration has been removed"
}
}

View File

@@ -1,88 +1,14 @@
"""Provides conditions for device trackers."""
from typing import TYPE_CHECKING
import voluptuous as vol
from homeassistant.components.zone import ENTITY_ID_HOME as ENTITY_ID_HOME_ZONE
from homeassistant.const import CONF_OPTIONS, CONF_ZONE, STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.condition import (
ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL,
Condition,
ConditionConfig,
EntityConditionBase,
make_entity_state_condition,
)
from .const import ATTR_IN_ZONES, DOMAIN
ZONE_CONDITION_SCHEMA = ENTITY_STATE_CONDITION_SCHEMA_ANY_ALL.extend(
{
vol.Required(CONF_OPTIONS): {
vol.Required(CONF_ZONE): vol.All(
cv.ensure_list,
vol.Length(min=1),
[cv.entity_domain("zone")],
),
},
}
)
_IN_ZONES_SPEC = {DOMAIN: DomainSpec(value_source=ATTR_IN_ZONES)}
class ZoneConditionBase(EntityConditionBase):
"""Base for zone-based device tracker conditions."""
_domain_specs = _IN_ZONES_SPEC
_schema = ZONE_CONDITION_SCHEMA
def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize the condition."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
self._zones: set[str] = set(config.options[CONF_ZONE])
def _in_target_zones(self, state: State) -> bool:
"""Check if the device is in any of the selected zones.
For GPS-based trackers, uses the in_zones attribute.
For scanner-based trackers (no in_zones attribute), infers from
state: 'home' means the device is in zone.home.
"""
if (in_zones := self._get_tracked_value(state)) is not None:
return bool(set(in_zones).intersection(self._zones))
# Scanner tracker: state 'home' means in zone.home
if state.state == STATE_HOME:
return ENTITY_ID_HOME_ZONE in self._zones
return False
class InZoneCondition(ZoneConditionBase):
"""Condition that tests if a device tracker is in one of the selected zones."""
def is_valid_state(self, entity_state: State) -> bool:
"""Check that the device is in at least one of the selected zones."""
return self._in_target_zones(entity_state)
class NotInZoneCondition(ZoneConditionBase):
"""Condition that tests if a device tracker is not in any of the selected zones."""
def is_valid_state(self, entity_state: State) -> bool:
"""Check that the device is not in any of the selected zones."""
return not self._in_target_zones(entity_state)
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from .const import DOMAIN
CONDITIONS: dict[str, type[Condition]] = {
"in_zone": InZoneCondition,
"is_home": make_entity_state_condition(DOMAIN, STATE_HOME),
"is_not_home": make_entity_state_condition(DOMAIN, STATE_NOT_HOME),
"not_in_zone": NotInZoneCondition,
}

View File

@@ -1,9 +1,9 @@
.condition_common: &condition_common
target: &condition_target
target:
entity:
domain: device_tracker
fields:
behavior: &condition_behavior
behavior:
required: true
default: any
selector:
@@ -13,18 +13,5 @@
- all
- any
.condition_zone: &condition_zone
<<: *condition_common
fields:
behavior: *condition_behavior
zone:
required: true
selector:
entity:
domain: zone
multiple: true
in_zone: *condition_zone
is_home: *condition_common
is_not_home: *condition_common
not_in_zone: *condition_zone

View File

@@ -1,16 +1,10 @@
{
"conditions": {
"in_zone": {
"condition": "mdi:map-marker-check"
},
"is_home": {
"condition": "mdi:account"
},
"is_not_home": {
"condition": "mdi:account-arrow-right"
},
"not_in_zone": {
"condition": "mdi:map-marker-remove"
}
},
"entity_component": {

View File

@@ -1,24 +1,9 @@
{
"common": {
"condition_behavior_name": "Condition passes if",
"condition_zone_description": "The zones to check for.",
"condition_zone_name": "Zone",
"trigger_behavior_name": "Trigger when"
},
"conditions": {
"in_zone": {
"description": "Tests if one or more device trackers are in a zone.",
"fields": {
"behavior": {
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
},
"zone": {
"description": "[%key:component::device_tracker::common::condition_zone_description%]",
"name": "[%key:component::device_tracker::common::condition_zone_name%]"
}
},
"name": "Device tracker is in zone"
},
"is_home": {
"description": "Tests if one or more device trackers are home.",
"fields": {
@@ -36,19 +21,6 @@
}
},
"name": "Device tracker is not home"
},
"not_in_zone": {
"description": "Tests if one or more device trackers are not in a zone.",
"fields": {
"behavior": {
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
},
"zone": {
"description": "[%key:component::device_tracker::common::condition_zone_description%]",
"name": "[%key:component::device_tracker::common::condition_zone_name%]"
}
},
"name": "Device tracker is not in zone"
}
},
"device_automation": {

View File

@@ -5,5 +5,5 @@ from datetime import timedelta
from homeassistant.const import Platform
DOMAIN = "duco"
PLATFORMS = [Platform.FAN]
PLATFORMS = [Platform.FAN, Platform.SENSOR]
SCAN_INTERVAL = timedelta(seconds=30)

View File

@@ -0,0 +1,15 @@
{
"entity": {
"sensor": {
"iaq_co2": {
"default": "mdi:molecule-co2"
},
"iaq_rh": {
"default": "mdi:water-percent"
},
"ventilation_state": {
"default": "mdi:tune-variant"
}
}
}
}

View File

@@ -71,11 +71,11 @@ rules:
Users can pair new modules (CO2 sensors, humidity sensors, zone valves)
to their Duco box. Dynamic device support to be added in a follow-up PR.
entity-category: todo
entity-device-class: todo
entity-disabled-by-default: todo
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices:

View File

@@ -0,0 +1,119 @@
"""Sensor platform for the Duco integration."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from duco.models import Node, NodeType, VentilationState
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import CONCENTRATION_PARTS_PER_MILLION, PERCENTAGE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import DucoConfigEntry, DucoCoordinator
from .entity import DucoEntity
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class DucoSensorEntityDescription(SensorEntityDescription):
"""Duco sensor entity description."""
value_fn: Callable[[Node], int | float | str | None]
node_types: tuple[NodeType, ...]
SENSOR_DESCRIPTIONS: tuple[DucoSensorEntityDescription, ...] = (
DucoSensorEntityDescription(
key="ventilation_state",
translation_key="ventilation_state",
device_class=SensorDeviceClass.ENUM,
options=[s.lower() for s in VentilationState],
value_fn=lambda node: (
node.ventilation.state.lower() if node.ventilation else None
),
node_types=(NodeType.BOX,),
),
DucoSensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
value_fn=lambda node: node.sensor.co2 if node.sensor else None,
node_types=(NodeType.UCCO2,),
),
DucoSensorEntityDescription(
key="iaq_co2",
translation_key="iaq_co2",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
value_fn=lambda node: node.sensor.iaq_co2 if node.sensor else None,
node_types=(NodeType.UCCO2,),
),
DucoSensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
value_fn=lambda node: node.sensor.rh if node.sensor else None,
node_types=(NodeType.BSRH,),
),
DucoSensorEntityDescription(
key="iaq_rh",
translation_key="iaq_rh",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
entity_registry_enabled_default=False,
value_fn=lambda node: node.sensor.iaq_rh if node.sensor else None,
node_types=(NodeType.BSRH,),
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: DucoConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Duco sensor entities."""
coordinator = entry.runtime_data
async_add_entities(
DucoSensorEntity(coordinator, node, description)
for node in coordinator.data.values()
for description in SENSOR_DESCRIPTIONS
if node.general.node_type in description.node_types
)
class DucoSensorEntity(DucoEntity, SensorEntity):
"""Sensor entity for a Duco node."""
entity_description: DucoSensorEntityDescription
def __init__(
self,
coordinator: DucoCoordinator,
node: Node,
description: DucoSensorEntityDescription,
) -> None:
"""Initialize the sensor entity."""
super().__init__(coordinator, node)
self.entity_description = description
self._attr_unique_id = (
f"{coordinator.config_entry.unique_id}_{node.node_id}_{description.key}"
)
@property
def native_value(self) -> int | float | str | None:
"""Return the sensor value."""
return self.entity_description.value_fn(self._node)

View File

@@ -29,6 +29,36 @@
}
}
}
},
"sensor": {
"iaq_co2": {
"name": "CO2 air quality index"
},
"iaq_rh": {
"name": "Humidity air quality index"
},
"ventilation_state": {
"name": "Ventilation state",
"state": {
"aut1": "Automatic boost (15 min)",
"aut2": "Automatic boost (30 min)",
"aut3": "Automatic boost (45 min)",
"auto": "Automatic",
"cnt1": "Continuous low speed",
"cnt2": "Continuous medium speed",
"cnt3": "Continuous high speed",
"empt": "Empty house",
"man1": "Manual low speed (15 min)",
"man1x2": "Manual low speed (30 min)",
"man1x3": "Manual low speed (45 min)",
"man2": "Manual medium speed (15 min)",
"man2x2": "Manual medium speed (30 min)",
"man2x3": "Manual medium speed (45 min)",
"man3": "Manual high speed (15 min)",
"man3x2": "Manual high speed (30 min)",
"man3x3": "Manual high speed (45 min)"
}
}
}
},
"exceptions": {

View File

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["pyenphase"],
"quality_scale": "platinum",
"requirements": ["pyenphase==2.4.6"],
"requirements": ["pyenphase==2.4.8"],
"zeroconf": [
{
"type": "_enphase-envoy._tcp.local."

View File

@@ -175,6 +175,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await server.start()
except Exception: # noqa: BLE001
_LOGGER.warning("Could not start go2rtc server", exc_info=True)
await session.close()
return False
async def on_stop(event: Event) -> None:

View File

@@ -91,10 +91,14 @@ from .const import (
DATA_STORE,
DATA_SUPERVISOR_INFO,
DOMAIN,
HASSIO_UPDATE_INTERVAL,
HASSIO_MAIN_UPDATE_INTERVAL,
MAIN_COORDINATOR,
STATS_COORDINATOR,
)
from .coordinator import (
HassioDataUpdateCoordinator,
HassioAddOnDataUpdateCoordinator,
HassioMainDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
get_addons_info,
get_addons_list,
get_addons_stats,
@@ -384,12 +388,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
]
hass.data[DATA_SUPERVISOR_INFO]["addons"] = hass.data[DATA_ADDONS_LIST]
async_call_later(
hass,
HASSIO_UPDATE_INTERVAL,
HassJob(update_info_data, cancel_on_shutdown=True),
)
# Fetch data
update_info_task = hass.async_create_task(update_info_data(), eager_start=True)
@@ -436,7 +434,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
# os info not yet fetched from supervisor, retry later
async_call_later(
hass,
HASSIO_UPDATE_INTERVAL,
HASSIO_MAIN_UPDATE_INTERVAL,
async_setup_hardware_integration_job,
)
return
@@ -462,9 +460,20 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
dev_reg = dr.async_get(hass)
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
coordinator = HassioMainDataUpdateCoordinator(hass, entry, dev_reg)
await coordinator.async_config_entry_first_refresh()
hass.data[ADDONS_COORDINATOR] = coordinator
hass.data[MAIN_COORDINATOR] = coordinator
addon_coordinator = HassioAddOnDataUpdateCoordinator(
hass, entry, dev_reg, coordinator.jobs
)
await addon_coordinator.async_config_entry_first_refresh()
hass.data[ADDONS_COORDINATOR] = addon_coordinator
stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry)
await stats_coordinator.async_config_entry_first_refresh()
hass.data[STATS_COORDINATOR] = stats_coordinator
def deprecated_setup_issue() -> None:
os_info = get_os_info(hass)
@@ -531,10 +540,12 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
# Unload coordinator
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
coordinator: HassioMainDataUpdateCoordinator = hass.data[MAIN_COORDINATOR]
coordinator.unload()
# Pop coordinator
# Pop coordinators
hass.data.pop(MAIN_COORDINATOR, None)
hass.data.pop(ADDONS_COORDINATOR, None)
hass.data.pop(STATS_COORDINATOR, None)
return unload_ok

View File

@@ -22,6 +22,7 @@ from .const import (
ATTR_STATE,
DATA_KEY_ADDONS,
DATA_KEY_MOUNTS,
MAIN_COORDINATOR,
)
from .entity import HassioAddonEntity, HassioMountEntity
@@ -60,17 +61,18 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Binary sensor set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
addons_coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[MAIN_COORDINATOR]
async_add_entities(
itertools.chain(
[
HassioAddonBinarySensor(
addon=addon,
coordinator=coordinator,
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
],
[

View File

@@ -77,7 +77,9 @@ EVENT_JOB = "job"
UPDATE_KEY_SUPERVISOR = "supervisor"
STARTUP_COMPLETE = "complete"
MAIN_COORDINATOR = "hassio_main_coordinator"
ADDONS_COORDINATOR = "hassio_addons_coordinator"
STATS_COORDINATOR = "hassio_stats_coordinator"
DATA_COMPONENT: HassKey[HassIO] = HassKey(DOMAIN)
@@ -94,7 +96,9 @@ DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_ADDONS_LIST = "hassio_addons_list"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_MAIN_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)
HASSIO_STATS_UPDATE_INTERVAL = timedelta(seconds=60)
ATTR_AUTO_UPDATE = "auto_update"
ATTR_VERSION = "version"

View File

@@ -7,7 +7,7 @@ from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
import logging
from typing import TYPE_CHECKING, Any, cast
from typing import TYPE_CHECKING, Any
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import (
@@ -15,9 +15,9 @@ from aiohasupervisor.models import (
CIFSMountResponse,
InstalledAddon,
NFSMountResponse,
ResponseData,
StoreInfo,
)
from aiohasupervisor.models.base import ResponseData
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
@@ -35,13 +35,11 @@ from .const import (
ATTR_SLUG,
ATTR_URL,
ATTR_VERSION,
CONTAINER_INFO,
CONTAINER_STATS,
CORE_CONTAINER,
DATA_ADDONS_INFO,
DATA_ADDONS_LIST,
DATA_ADDONS_STATS,
DATA_COMPONENT,
DATA_CORE_INFO,
DATA_CORE_STATS,
DATA_HOST_INFO,
@@ -59,7 +57,9 @@ from .const import (
DATA_SUPERVISOR_INFO,
DATA_SUPERVISOR_STATS,
DOMAIN,
HASSIO_UPDATE_INTERVAL,
HASSIO_ADDON_UPDATE_INTERVAL,
HASSIO_MAIN_UPDATE_INTERVAL,
HASSIO_STATS_UPDATE_INTERVAL,
REQUEST_REFRESH_DELAY,
SUPERVISOR_CONTAINER,
SupervisorEntityModel,
@@ -318,7 +318,314 @@ def async_remove_devices_from_dev_reg(
dev_reg.async_remove_device(dev.id)
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to retrieve Hass.io container stats."""
config_entry: ConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_STATS_UPDATE_INTERVAL,
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.supervisor_client = get_supervisor_client(hass)
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
async def _async_update_data(self) -> dict[str, Any]:
"""Update stats data via library."""
try:
await self._fetch_stats()
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
new_data: dict[str, Any] = {}
new_data[DATA_KEY_CORE] = get_core_stats(self.hass)
new_data[DATA_KEY_SUPERVISOR] = get_supervisor_stats(self.hass)
new_data[DATA_KEY_ADDONS] = get_addons_stats(self.hass)
return new_data
async def _fetch_stats(self) -> None:
"""Fetch container stats for subscribed entities."""
container_updates = self._container_updates
data = self.hass.data
client = self.supervisor_client
# Fetch core and supervisor stats
updates: dict[str, Awaitable] = {}
if container_updates.get(CORE_CONTAINER, {}).get(CONTAINER_STATS):
updates[DATA_CORE_STATS] = client.homeassistant.stats()
if container_updates.get(SUPERVISOR_CONTAINER, {}).get(CONTAINER_STATS):
updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()
if updates:
api_results: list[ResponseData] = await asyncio.gather(*updates.values())
for key, result in zip(updates, api_results, strict=True):
data[key] = result.to_dict()
# Fetch addon stats
addons_list = get_addons_list(self.hass) or []
started_addons = {
addon[ATTR_SLUG]
for addon in addons_list
if addon.get("state") in {AddonState.STARTED, AddonState.STARTUP}
}
addons_stats: dict[str, Any] = data.setdefault(DATA_ADDONS_STATS, {})
# Clean up cache for stopped/removed addons
for slug in addons_stats.keys() - started_addons:
del addons_stats[slug]
# Fetch stats for addons with subscribed entities
addon_stats_results = dict(
await asyncio.gather(
*[
self._update_addon_stats(slug)
for slug in started_addons
if container_updates.get(slug, {}).get(CONTAINER_STATS)
]
)
)
addons_stats.update(addon_stats_results)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Update single addon stats."""
try:
stats = await self.supervisor_client.addons.addon_stats(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
return (slug, stats.to_dict())
@callback
def async_enable_container_updates(
self, slug: str, entity_id: str, types: set[str]
) -> CALLBACK_TYPE:
"""Enable stats updates for a container."""
enabled_updates = self._container_updates[slug]
for key in types:
enabled_updates[key].add(entity_id)
@callback
def _remove() -> None:
for key in types:
enabled_updates[key].discard(entity_id)
if not enabled_updates[key]:
del enabled_updates[key]
if not enabled_updates:
self._container_updates.pop(slug, None)
return _remove
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to retrieve Hass.io Add-on status."""
config_entry: ConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
dev_reg: dr.DeviceRegistry,
jobs: SupervisorJobs,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# hammering the Supervisor API on startup
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self._addon_info_subscriptions: defaultdict[str, set[str]] = defaultdict(set)
self.supervisor_client = get_supervisor_client(hass)
self.jobs = jobs
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
try:
installed_addons: list[InstalledAddon] = await client.addons.list()
all_addons = {addon.slug for addon in installed_addons}
# Fetch addon info for all addons on first update, or only
# for addons with subscribed entities on subsequent updates.
addon_info_results = dict(
await asyncio.gather(
*[
self._update_addon_info(slug)
for slug in all_addons
if is_first_update or self._addon_info_subscriptions.get(slug)
]
)
)
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
# Update hass.data for legacy accessor functions
data = self.hass.data
addons_list_dicts = [addon.to_dict() for addon in installed_addons]
data[DATA_ADDONS_LIST] = addons_list_dicts
# Update addon info cache in hass.data
addon_info_cache: dict[str, Any] = data.setdefault(DATA_ADDONS_INFO, {})
for slug in addon_info_cache.keys() - all_addons:
del addon_info_cache[slug]
addon_info_cache.update(addon_info_results)
# Deprecated 2026.4.0: Folding addons.list results into supervisor_info
# for compatibility. Written to hass.data only, not coordinator data.
if DATA_SUPERVISOR_INFO in data:
data[DATA_SUPERVISOR_INFO]["addons"] = addons_list_dicts
# Build clean coordinator data
store_data = get_store(self.hass)
if store_data:
repositories = {
repo.slug: repo.name
for repo in StoreInfo.from_dict(store_data).repositories
}
else:
repositories = {}
new_data: dict[str, Any] = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
**addon,
ATTR_AUTO_UPDATE: (addon_info_cache.get(slug) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
),
}
for addon in addons_list_dicts
}
# If this is the initial refresh, register all addons
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
# Remove add-ons that are no longer installed from device registry
supervisor_addon_devices = {
list(device.identifiers)[0][1]
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
self.entry_id
)
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# If there are new add-ons, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
)
return {}
return new_data
async def get_changelog(self, addon_slug: str) -> str | None:
"""Get the changelog for an add-on."""
try:
return await self.supervisor_client.store.addon_changelog(addon_slug)
except SupervisorNotFoundError:
return None
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Return the info for an addon."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)
# Translate to legacy hassio names for compatibility
info_dict = info.to_dict()
info_dict["hassio_api"] = info_dict.pop("supervisor_api")
info_dict["hassio_role"] = info_dict.pop("supervisor_role")
return (slug, info_dict)
@callback
def async_enable_addon_info_updates(
self, slug: str, entity_id: str
) -> CALLBACK_TYPE:
"""Enable info updates for an add-on."""
self._addon_info_subscriptions[slug].add(entity_id)
@callback
def _remove() -> None:
self._addon_info_subscriptions[slug].discard(entity_id)
if not self._addon_info_subscriptions[slug]:
del self._addon_info_subscriptions[slug]
return _remove
async def _async_refresh(
self,
log_failures: bool = True,
raise_on_auth_failed: bool = False,
scheduled: bool = False,
raise_on_entry_error: bool = False,
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force reloading add-on updates for non-scheduled
# updates.
#
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.store.reload()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)
await super()._async_refresh(
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to retrieve Hass.io status."""
config_entry: ConfigEntry
@@ -332,82 +639,77 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_UPDATE_INTERVAL,
update_interval=HASSIO_MAIN_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# fetching the container stats right away and avoid hammering
# the Supervisor API on startup
# hammering the Supervisor API on startup
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.hassio = hass.data[DATA_COMPONENT]
self.data = {}
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
self.supervisor_client = get_supervisor_client(hass)
self.jobs = SupervisorJobs(hass)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
try:
await self.force_data_refresh(is_first_update)
(
info,
core_info,
supervisor_info,
os_info,
host_info,
store_info,
network_info,
) = await asyncio.gather(
client.info(),
client.homeassistant.info(),
client.supervisor.info(),
client.os.info(),
client.host.info(),
client.store.info(),
client.network.info(),
)
mounts_info = await client.mounts.info()
await self.jobs.refresh_data(is_first_update)
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
# Build clean coordinator data
new_data: dict[str, Any] = {}
supervisor_info = get_supervisor_info(self.hass) or {}
addons_info = get_addons_info(self.hass) or {}
addons_stats = get_addons_stats(self.hass)
store_data = get_store(self.hass)
mounts_info = await self.supervisor_client.mounts.info()
addons_list = get_addons_list(self.hass) or []
if store_data:
repositories = {
repo.slug: repo.name
for repo in StoreInfo.from_dict(store_data).repositories
}
else:
repositories = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
**addon,
**(addons_stats.get(slug) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(slug) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
),
}
for addon in addons_list
}
if self.is_hass_os:
new_data[DATA_KEY_OS] = get_os_info(self.hass)
new_data[DATA_KEY_CORE] = {
**(get_core_info(self.hass) or {}),
**get_core_stats(self.hass),
}
new_data[DATA_KEY_SUPERVISOR] = {
**supervisor_info,
**get_supervisor_stats(self.hass),
}
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
new_data[DATA_KEY_CORE] = core_info.to_dict()
new_data[DATA_KEY_SUPERVISOR] = supervisor_info.to_dict()
new_data[DATA_KEY_HOST] = host_info.to_dict()
new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
if self.is_hass_os:
new_data[DATA_KEY_OS] = os_info.to_dict()
# If this is the initial refresh, register all addons and return the dict
# Update hass.data for legacy accessor functions
data = self.hass.data
data[DATA_INFO] = info.to_dict()
data[DATA_CORE_INFO] = new_data[DATA_KEY_CORE]
data[DATA_OS_INFO] = new_data.get(DATA_KEY_OS, os_info.to_dict())
data[DATA_HOST_INFO] = new_data[DATA_KEY_HOST]
data[DATA_STORE] = store_info.to_dict()
data[DATA_NETWORK_INFO] = network_info.to_dict()
# Separate dict for hass.data supervisor info since we add deprecated
# compat keys that should not be in coordinator data
supervisor_info_dict = supervisor_info.to_dict()
# Deprecated 2026.4.0: Folding repositories and addons into
# supervisor_info for compatibility. Written to hass.data only, not
# coordinator data. Preserve the addons key from the addon coordinator.
supervisor_info_dict["repositories"] = data[DATA_STORE][ATTR_REPOSITORIES]
if (prev := data.get(DATA_SUPERVISOR_INFO)) and "addons" in prev:
supervisor_info_dict["addons"] = prev["addons"]
data[DATA_SUPERVISOR_INFO] = supervisor_info_dict
# If this is the initial refresh, register all main components
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
async_register_mounts_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
)
@@ -423,17 +725,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
)
# Remove add-ons that are no longer installed from device registry
supervisor_addon_devices = {
list(device.identifiers)[0][1]
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
self.entry_id
)
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# Remove mounts that no longer exists from device registry
supervisor_mount_devices = {
device.name
@@ -453,12 +744,11 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
# If there are new add-ons or mounts, we should reload the config entry so we can
# If there are new mounts, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
set(new_data[DATA_KEY_MOUNTS]) - set(self.data.get(DATA_KEY_MOUNTS, {}))
):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
@@ -467,146 +757,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
return new_data
async def get_changelog(self, addon_slug: str) -> str | None:
"""Get the changelog for an add-on."""
try:
return await self.supervisor_client.store.addon_changelog(addon_slug)
except SupervisorNotFoundError:
return None
async def force_data_refresh(self, first_update: bool) -> None:
"""Force update of the addon info."""
container_updates = self._container_updates
data = self.hass.data
client = self.supervisor_client
updates: dict[str, Awaitable[ResponseData]] = {
DATA_INFO: client.info(),
DATA_CORE_INFO: client.homeassistant.info(),
DATA_SUPERVISOR_INFO: client.supervisor.info(),
DATA_OS_INFO: client.os.info(),
DATA_STORE: client.store.info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = client.homeassistant.stats()
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()
# Pull off addons.list results for further processing before caching
addons_list, *results = await asyncio.gather(
client.addons.list(), *updates.values()
)
for key, result in zip(updates, cast(list[ResponseData], results), strict=True):
data[key] = result.to_dict()
installed_addons = cast(list[InstalledAddon], addons_list)
data[DATA_ADDONS_LIST] = [addon.to_dict() for addon in installed_addons]
# Deprecated 2026.4.0: Folding repositories and addons.list results into supervisor_info for compatibility
# Can drop this after removal period
data[DATA_SUPERVISOR_INFO].update(
{
"repositories": data[DATA_STORE][ATTR_REPOSITORIES],
"addons": [addon.to_dict() for addon in installed_addons],
}
)
all_addons = {addon.slug for addon in installed_addons}
started_addons = {
addon.slug
for addon in installed_addons
if addon.state in {AddonState.STARTED, AddonState.STARTUP}
}
#
# Update addon info if its the first update or
# there is at least one entity that needs the data.
#
# When entities are added they call async_enable_container_updates
# to enable updates for the endpoints they need via
# async_added_to_hass. This ensures that we only update
# the data for the endpoints that are needed to avoid unnecessary
# API calls since otherwise we would fetch stats for all containers
# and throw them away.
#
for data_key, update_func, enabled_key, wanted_addons, needs_first_update in (
(
DATA_ADDONS_STATS,
self._update_addon_stats,
CONTAINER_STATS,
started_addons,
False,
),
(
DATA_ADDONS_INFO,
self._update_addon_info,
CONTAINER_INFO,
all_addons,
True,
),
):
container_data: dict[str, Any] = data.setdefault(data_key, {})
# Clean up cache
for slug in container_data.keys() - wanted_addons:
del container_data[slug]
# Update cache from API
container_data.update(
dict(
await asyncio.gather(
*[
update_func(slug)
for slug in wanted_addons
if (first_update and needs_first_update)
or enabled_key in container_updates[slug]
]
)
)
)
# Refresh jobs data
await self.jobs.refresh_data(first_update)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Update single addon stats."""
try:
stats = await self.supervisor_client.addons.addon_stats(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
return (slug, stats.to_dict())
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Return the info for an addon."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)
# Translate to legacy hassio names for compatibility
info_dict = info.to_dict()
info_dict["hassio_api"] = info_dict.pop("supervisor_api")
info_dict["hassio_role"] = info_dict.pop("supervisor_role")
return (slug, info_dict)
@callback
def async_enable_container_updates(
self, slug: str, entity_id: str, types: set[str]
) -> CALLBACK_TYPE:
"""Enable updates for an add-on."""
enabled_updates = self._container_updates[slug]
for key in types:
enabled_updates[key].add(entity_id)
@callback
def _remove() -> None:
for key in types:
enabled_updates[key].remove(entity_id)
return _remove
async def _async_refresh(
self,
log_failures: bool = True,
@@ -616,14 +766,16 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force refreshing updates for non-scheduled updates
# Force reloading updates of main components for
# non-scheduled updates.
#
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.refresh_updates()
await self.supervisor_client.reload_updates()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)
@@ -631,18 +783,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
@callback
def unload(self) -> None:
"""Clean up when config entry unloaded."""

View File

@@ -11,8 +11,12 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import ADDONS_COORDINATOR
from .coordinator import HassioDataUpdateCoordinator
from .const import ADDONS_COORDINATOR, MAIN_COORDINATOR, STATS_COORDINATOR
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioMainDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
)
async def async_get_config_entry_diagnostics(
@@ -20,7 +24,9 @@ async def async_get_config_entry_diagnostics(
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
coordinator: HassioMainDataUpdateCoordinator = hass.data[MAIN_COORDINATOR]
addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
stats_coordinator: HassioStatsDataUpdateCoordinator = hass.data[STATS_COORDINATOR]
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
@@ -53,5 +59,7 @@ async def async_get_config_entry_diagnostics(
return {
"coordinator_data": coordinator.data,
"addons_coordinator_data": addons_coordinator.data,
"stats_coordinator_data": stats_coordinator.data,
"devices": devices,
}

View File

@@ -13,7 +13,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_SLUG,
CONTAINER_STATS,
CORE_CONTAINER,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
@@ -21,20 +20,79 @@ from .const import (
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DOMAIN,
KEY_TO_UPDATE_TYPES,
SUPERVISOR_CONTAINER,
)
from .coordinator import HassioDataUpdateCoordinator
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioMainDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
)
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]):
"""Base entity for container stats (CPU, memory)."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioStatsDataUpdateCoordinator,
entity_description: EntityDescription,
*,
container_id: str,
data_key: str,
device_id: str,
unique_id_prefix: str,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._container_id = container_id
self._data_key = data_key
self._attr_unique_id = f"{unique_id_prefix}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
@property
def available(self) -> bool:
"""Return True if entity is available."""
if self._data_key == DATA_KEY_ADDONS:
return (
super().available
and DATA_KEY_ADDONS in self.coordinator.data
and self.entity_description.key
in (
self.coordinator.data[DATA_KEY_ADDONS].get(self._container_id) or {}
)
)
return (
super().available
and self._data_key in self.coordinator.data
and self.entity_description.key in self.coordinator.data[self._data_key]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to stats updates."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_enable_container_updates(
self._container_id, self.entity_id, {CONTAINER_STATS}
)
)
# Stats are only fetched for containers with subscribed entities.
# The first coordinator refresh (before entities exist) has no
# subscribers, so no stats are fetched. Schedule a debounced
# refresh so that all stats entities registering during platform
# setup are batched into a single API call.
await self.coordinator.async_request_refresh()
class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
"""Base entity for a Hass.io add-on."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioAddOnDataUpdateCoordinator,
entity_description: EntityDescription,
addon: dict[str, Any],
) -> None:
@@ -56,26 +114,23 @@ class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
)
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
"""Subscribe to addon info updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_container_updates(
self._addon_slug, self.entity_id, update_types
self.coordinator.async_enable_addon_info_updates(
self._addon_slug, self.entity_id
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioOSEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Hass.io OS."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioMainDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
@@ -94,14 +149,14 @@ class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
)
class HassioHostEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioHostEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Hass.io host."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioMainDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
@@ -120,14 +175,14 @@ class HassioHostEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
)
class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioSupervisorEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Supervisor."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioMainDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
@@ -146,27 +201,15 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
in self.coordinator.data[DATA_KEY_SUPERVISOR]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_container_updates(
SUPERVISOR_CONTAINER, self.entity_id, update_types
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioCoreEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Core."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioMainDataUpdateCoordinator,
entity_description: EntityDescription,
) -> None:
"""Initialize base entity."""
@@ -184,27 +227,15 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_container_updates(
CORE_CONTAINER, self.entity_id, update_types
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioMountEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Mount."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioMainDataUpdateCoordinator,
entity_description: EntityDescription,
mount: CIFSMountResponse | NFSMountResponse,
) -> None:

View File

@@ -28,7 +28,6 @@ from homeassistant.helpers.issue_registry import (
)
from .const import (
ADDONS_COORDINATOR,
ATTR_DATA,
ATTR_HEALTHY,
ATTR_SLUG,
@@ -54,6 +53,7 @@ from .const import (
ISSUE_KEY_SYSTEM_DOCKER_CONFIG,
ISSUE_KEY_SYSTEM_FREE_SPACE,
ISSUE_MOUNT_MOUNT_FAILED,
MAIN_COORDINATOR,
PLACEHOLDER_KEY_ADDON,
PLACEHOLDER_KEY_ADDON_URL,
PLACEHOLDER_KEY_FREE_SPACE,
@@ -62,7 +62,7 @@ from .const import (
STARTUP_COMPLETE,
UPDATE_KEY_SUPERVISOR,
)
from .coordinator import HassioDataUpdateCoordinator, get_addons_list, get_host_info
from .coordinator import HassioMainDataUpdateCoordinator, get_addons_list, get_host_info
from .handler import get_supervisor_client
ISSUE_KEY_UNHEALTHY = "unhealthy"
@@ -417,8 +417,8 @@ class SupervisorIssues:
def _async_coordinator_refresh(self) -> None:
"""Refresh coordinator to update latest data in entities."""
coordinator: HassioDataUpdateCoordinator | None
if coordinator := self._hass.data.get(ADDONS_COORDINATOR):
coordinator: HassioMainDataUpdateCoordinator | None
if coordinator := self._hass.data.get(MAIN_COORDINATOR):
coordinator.config_entry.async_create_task(
self._hass, coordinator.async_refresh()
)

View File

@@ -17,20 +17,24 @@ from .const import (
ADDONS_COORDINATOR,
ATTR_CPU_PERCENT,
ATTR_MEMORY_PERCENT,
ATTR_SLUG,
ATTR_VERSION,
ATTR_VERSION_LATEST,
CORE_CONTAINER,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
MAIN_COORDINATOR,
STATS_COORDINATOR,
SUPERVISOR_CONTAINER,
)
from .entity import (
HassioAddonEntity,
HassioCoreEntity,
HassioHostEntity,
HassioOSEntity,
HassioSupervisorEntity,
HassioStatsEntity,
)
COMMON_ENTITY_DESCRIPTIONS = (
@@ -63,10 +67,7 @@ STATS_ENTITY_DESCRIPTIONS = (
),
)
ADDON_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS + STATS_ENTITY_DESCRIPTIONS
CORE_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS
OS_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS
SUPERVISOR_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS
HOST_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
@@ -114,36 +115,64 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Sensor set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
addons_coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[MAIN_COORDINATOR]
stats_coordinator = hass.data[STATS_COORDINATOR]
entities: list[
HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
] = [
entities: list[SensorEntity] = []
# Add-on non-stats sensors (version, version_latest)
entities.extend(
HassioAddonSensor(
addon=addon,
coordinator=coordinator,
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
]
entities.extend(
CoreSensor(
coordinator=coordinator,
entity_description=entity_description,
)
for entity_description in CORE_ENTITY_DESCRIPTIONS
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in COMMON_ENTITY_DESCRIPTIONS
)
# Add-on stats sensors (cpu_percent, memory_percent)
entities.extend(
SupervisorSensor(
coordinator=coordinator,
HassioStatsSensor(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=addon[ATTR_SLUG],
data_key=DATA_KEY_ADDONS,
device_id=addon[ATTR_SLUG],
unique_id_prefix=addon[ATTR_SLUG],
)
for entity_description in SUPERVISOR_ENTITY_DESCRIPTIONS
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in STATS_ENTITY_DESCRIPTIONS
)
# Core stats sensors
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=CORE_CONTAINER,
data_key=DATA_KEY_CORE,
device_id="core",
unique_id_prefix="home_assistant_core",
)
for entity_description in STATS_ENTITY_DESCRIPTIONS
)
# Supervisor stats sensors
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=SUPERVISOR_CONTAINER,
data_key=DATA_KEY_SUPERVISOR,
device_id="supervisor",
unique_id_prefix="home_assistant_supervisor",
)
for entity_description in STATS_ENTITY_DESCRIPTIONS
)
# Host sensors
entities.extend(
HostSensor(
coordinator=coordinator,
@@ -152,6 +181,7 @@ async def async_setup_entry(
for entity_description in HOST_ENTITY_DESCRIPTIONS
)
# OS sensors
if coordinator.is_hass_os:
entities.extend(
HassioOSSensor(
@@ -175,8 +205,21 @@ class HassioAddonSensor(HassioAddonEntity, SensorEntity):
]
class HassioStatsSensor(HassioStatsEntity, SensorEntity):
"""Sensor to track container stats."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
if self._data_key == DATA_KEY_ADDONS:
return self.coordinator.data[DATA_KEY_ADDONS][self._container_id][
self.entity_description.key
]
return self.coordinator.data[self._data_key][self.entity_description.key]
class HassioOSSensor(HassioOSEntity, SensorEntity):
"""Sensor to track a Hass.io add-on attribute."""
"""Sensor to track a Hass.io OS attribute."""
@property
def native_value(self) -> str:
@@ -184,24 +227,6 @@ class HassioOSSensor(HassioOSEntity, SensorEntity):
return self.coordinator.data[DATA_KEY_OS][self.entity_description.key]
class CoreSensor(HassioCoreEntity, SensorEntity):
"""Sensor to track a core attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_CORE][self.entity_description.key]
class SupervisorSensor(HassioSupervisorEntity, SensorEntity):
"""Sensor to track a supervisor attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_SUPERVISOR][self.entity_description.key]
class HostSensor(HassioHostEntity, SensorEntity):
"""Sensor to track a host attribute."""

View File

@@ -32,7 +32,6 @@ from homeassistant.helpers import (
from homeassistant.util.dt import now
from .const import (
ADDONS_COORDINATOR,
ATTR_ADDON,
ATTR_ADDONS,
ATTR_APP,
@@ -46,9 +45,10 @@ from .const import (
ATTR_PASSWORD,
ATTR_SLUG,
DOMAIN,
MAIN_COORDINATOR,
SupervisorEntityModel,
)
from .coordinator import HassioDataUpdateCoordinator, get_addons_info
from .coordinator import HassioMainDataUpdateCoordinator, get_addons_info
SERVICE_ADDON_START = "addon_start"
SERVICE_ADDON_STOP = "addon_stop"
@@ -406,7 +406,7 @@ def async_register_network_storage_services(
async def async_mount_reload(service: ServiceCall) -> None:
"""Handle service calls for Hass.io."""
coordinator: HassioDataUpdateCoordinator | None = None
coordinator: HassioMainDataUpdateCoordinator | None = None
if (device := dev_reg.async_get(service.data[ATTR_DEVICE_ID])) is None:
raise ServiceValidationError(
@@ -417,7 +417,7 @@ def async_register_network_storage_services(
if (
device.name is None
or device.model != SupervisorEntityModel.MOUNT
or (coordinator := hass.data.get(ADDONS_COORDINATOR)) is None
or (coordinator := hass.data.get(MAIN_COORDINATOR)) is None
or coordinator.entry_id not in device.config_entries
):
raise ServiceValidationError(

View File

@@ -29,6 +29,7 @@ from .const import (
DATA_KEY_CORE,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
MAIN_COORDINATOR,
)
from .entity import (
HassioAddonEntity,
@@ -51,9 +52,9 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Supervisor update based on a config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[MAIN_COORDINATOR]
entities = [
entities: list[UpdateEntity] = [
SupervisorSupervisorUpdateEntity(
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
@@ -64,15 +65,6 @@ async def async_setup_entry(
),
]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
)
if coordinator.is_hass_os:
entities.append(
SupervisorOSUpdateEntity(
@@ -81,6 +73,16 @@ async def async_setup_entry(
)
)
addons_coordinator = hass.data[ADDONS_COORDINATOR]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=addons_coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
)
async_add_entities(entities)

View File

@@ -148,7 +148,7 @@
},
"step": {
"init": {
"description": "The integration `{domain}` could not be found. This happens when a (community) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
"description": "The integration `{domain}` could not be found. This happens when a (custom) integration was removed from Home Assistant, but there are still configurations for this `integration`. Please use the buttons below to either remove the previous configurations for `{domain}` or ignore this.",
"menu_options": {
"confirm": "Remove previous configurations",
"ignore": "Ignore"
@@ -236,7 +236,7 @@
"description": "Restarts Home Assistant.",
"fields": {
"safe_mode": {
"description": "Disable community integrations and community cards.",
"description": "Disable custom integrations and custom cards.",
"name": "Safe mode"
}
},

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
"integration_type": "system",
"requirements": [
"serialx==1.1.1",
"serialx==1.2.2",
"universal-silabs-flasher==1.0.3",
"ha-silabs-firmware-client==0.3.0"
]

View File

@@ -625,10 +625,13 @@ def _get_test_socket() -> socket.socket:
@callback
def async_port_is_available(port: int) -> bool:
"""Check to see if a port is available."""
test_socket = _get_test_socket()
try:
_get_test_socket().bind(("", port))
test_socket.bind(("", port))
except OSError:
return False
finally:
test_socket.close()
return True

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["homematicip"],
"requirements": ["homematicip==2.7.0"]
"requirements": ["homematicip==2.8.0"]
}

View File

@@ -42,7 +42,6 @@ class HassAqualinkBinarySensor(
) -> None:
"""Initialize AquaLink binary sensor."""
super().__init__(coordinator, dev)
self._attr_name = dev.label
if dev.label == "Freeze Protection":
self._attr_device_class = BinarySensorDeviceClass.COLD

View File

@@ -57,7 +57,6 @@ class HassAqualinkThermostat(AqualinkEntity[AqualinkThermostat], ClimateEntity):
) -> None:
"""Initialize AquaLink thermostat."""
super().__init__(coordinator, dev)
self._attr_name = dev.label.split(" ")[0]
self._attr_temperature_unit = (
UnitOfTemperature.FAHRENHEIT
if dev.unit == "F"

View File

@@ -22,6 +22,9 @@ class AqualinkEntity[AqualinkDeviceT: AqualinkDevice](
entity update flow.
"""
_attr_has_entity_name = True
_attr_name = None
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkDeviceT
) -> None:

View File

@@ -46,7 +46,6 @@ class HassAqualinkLight(AqualinkEntity[AqualinkLight], LightEntity):
) -> None:
"""Initialize AquaLink light."""
super().__init__(coordinator, dev)
self._attr_name = dev.label
if dev.supports_effect:
self._attr_effect_list = list(dev.supported_effects)
self._attr_supported_features = LightEntityFeature.EFFECT

View File

@@ -38,7 +38,6 @@ class HassAqualinkSensor(AqualinkEntity[AqualinkSensor], SensorEntity):
) -> None:
"""Initialize AquaLink sensor."""
super().__init__(coordinator, dev)
self._attr_name = dev.label
if not dev.name.endswith("_temp"):
return
self._attr_device_class = SensorDeviceClass.TEMPERATURE

View File

@@ -40,7 +40,7 @@ class HassAqualinkSwitch(AqualinkEntity[AqualinkSwitch], SwitchEntity):
) -> None:
"""Initialize AquaLink switch."""
super().__init__(coordinator, dev)
name = self._attr_name = dev.label
name = dev.label
if name == "Cleaner":
self._attr_icon = "mdi:robot-vacuum"
elif name == "Waterfall" or name.endswith("Dscnt"):

View File

@@ -127,6 +127,12 @@ CONFIG_SCHEMA = vol.Schema(
)
def _read_image_size(image_path: str) -> tuple[int, int]:
"""Open image to determine image size."""
with Image.open(image_path) as image:
return image.size
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Matrix bot component."""
config = config[DOMAIN]
@@ -504,8 +510,9 @@ class MatrixBot:
return
# Get required image metadata.
image = await self.hass.async_add_executor_job(Image.open, image_path)
(width, height) = image.size
(width, height) = await self.hass.async_add_executor_job(
_read_image_size, image_path
)
mime_type = mimetypes.guess_type(image_path)[0]
file_stat = await aiofiles.os.stat(image_path)

View File

@@ -24,6 +24,8 @@ See https://modelcontextprotocol.io/docs/concepts/transports
"""
import asyncio
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from dataclasses import dataclass
from http import HTTPStatus
import logging
@@ -102,17 +104,29 @@ class Streams:
write_stream: MemoryObjectSendStream[SessionMessage]
write_stream_reader: MemoryObjectReceiveStream[SessionMessage]
async def aclose(self) -> None:
"""Close open memory streams."""
await self.read_stream.aclose()
await self.read_stream_writer.aclose()
await self.write_stream.aclose()
await self.write_stream_reader.aclose()
def create_streams() -> Streams:
@asynccontextmanager
async def create_streams() -> AsyncGenerator[Streams]:
"""Create a new pair of streams for MCP server communication."""
read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
return Streams(
streams = Streams(
read_stream=read_stream,
read_stream_writer=read_stream_writer,
write_stream=write_stream,
write_stream_reader=write_stream_reader,
)
try:
yield streams
finally:
await streams.aclose()
async def create_mcp_server(
@@ -155,9 +169,9 @@ class ModelContextProtocolSSEView(HomeAssistantView):
session_manager = entry.runtime_data
server, options = await create_mcp_server(hass, self.context(request), entry)
streams = create_streams()
async with (
create_streams() as streams,
sse_response(request) as response,
session_manager.create(Session(streams.read_stream_writer)) as session_id,
):
@@ -261,21 +275,24 @@ class ModelContextProtocolStreamableView(HomeAssistantView):
# request is sent to the MCP server and we wait for a single response
# then shut down the server.
server, options = await create_mcp_server(hass, self.context(request), entry)
streams = create_streams()
async def run_server() -> None:
await server.run(
streams.read_stream, streams.write_stream, options, stateless=True
async with create_streams() as streams:
async def run_server() -> None:
await server.run(
streams.read_stream, streams.write_stream, options, stateless=True
)
async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
tg.start_soon(run_server)
await streams.read_stream_writer.send(SessionMessage(message))
session_message = await anext(streams.write_stream_reader)
tg.cancel_scope.cancel()
_LOGGER.debug("Sending response: %s", session_message)
return web.json_response(
data=session_message.message.model_dump(
by_alias=True, exclude_none=True
),
)
async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
tg.start_soon(run_server)
await streams.read_stream_writer.send(SessionMessage(message))
session_message = await anext(streams.write_stream_reader)
tg.cancel_scope.cancel()
_LOGGER.debug("Sending response: %s", session_message)
return web.json_response(
data=session_message.message.model_dump(by_alias=True, exclude_none=True),
)

View File

@@ -19,7 +19,12 @@ from homeassistant.helpers.update_coordinator import UpdateFailed
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.SENSOR,
Platform.WATER_HEATER,
]
async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) -> bool:

View File

@@ -0,0 +1,175 @@
"""Support for MelCloud device binary sensors."""
from __future__ import annotations
from collections.abc import Callable
import dataclasses
from typing import Any
from pymelcloud import DEVICE_TYPE_ATW
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity
@dataclasses.dataclass(frozen=True, kw_only=True)
class MelcloudBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Melcloud binary sensor entity."""
value_fn: Callable[[Any], bool | None]
enabled: Callable[[Any], bool]
ATW_BINARY_SENSORS: tuple[MelcloudBinarySensorEntityDescription, ...] = (
MelcloudBinarySensorEntityDescription(
key="boiler_status",
translation_key="boiler_status",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.boiler_status,
enabled=lambda data: data.device.boiler_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="booster_heater1_status",
translation_key="booster_heater_status",
translation_placeholders={"number": "1"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.booster_heater1_status,
enabled=lambda data: data.device.booster_heater1_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="booster_heater2_status",
translation_key="booster_heater_status",
translation_placeholders={"number": "2"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: data.device.booster_heater2_status,
enabled=lambda data: data.device.booster_heater2_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="booster_heater2plus_status",
translation_key="booster_heater_status",
translation_placeholders={"number": "2+"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: data.device.booster_heater2plus_status,
enabled=lambda data: data.device.booster_heater2plus_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="immersion_heater_status",
translation_key="immersion_heater_status",
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.immersion_heater_status,
enabled=lambda data: data.device.immersion_heater_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="water_pump1_status",
translation_key="water_pump_status",
translation_placeholders={"number": "1"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.water_pump1_status,
enabled=lambda data: data.device.water_pump1_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="water_pump2_status",
translation_key="water_pump_status",
translation_placeholders={"number": "2"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.water_pump2_status,
enabled=lambda data: data.device.water_pump2_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="water_pump3_status",
translation_key="water_pump_status",
translation_placeholders={"number": "3"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: data.device.water_pump3_status,
enabled=lambda data: data.device.water_pump3_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="water_pump4_status",
translation_key="water_pump_status",
translation_placeholders={"number": "4"},
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: data.device.water_pump4_status,
enabled=lambda data: data.device.water_pump4_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="valve_3way_status",
translation_key="valve_3way_status",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda data: data.device.valve_3way_status,
enabled=lambda data: data.device.valve_3way_status is not None,
),
MelcloudBinarySensorEntityDescription(
key="valve_2way_status",
translation_key="valve_2way_status",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
value_fn=lambda data: data.device.valve_2way_status,
enabled=lambda data: data.device.valve_2way_status is not None,
),
)
async def async_setup_entry(
_hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MELCloud device binary sensors based on config_entry."""
coordinator = entry.runtime_data
if DEVICE_TYPE_ATW not in coordinator:
return
entities: list[MelDeviceBinarySensor] = [
MelDeviceBinarySensor(coord, description)
for description in ATW_BINARY_SENSORS
for coord in coordinator[DEVICE_TYPE_ATW]
if description.enabled(coord)
]
async_add_entities(entities)
class MelDeviceBinarySensor(MelCloudEntity, BinarySensorEntity):
"""Representation of a Binary Sensor."""
entity_description: MelcloudBinarySensorEntityDescription
def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
description: MelcloudBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = (
f"{coordinator.device.serial}-{coordinator.device.mac}-{description.key}"
)
self._attr_device_info = coordinator.device_info
@property
def is_on(self) -> bool | None:
"""Return the state of the binary sensor."""
return self.entity_description.value_fn(self.coordinator)

View File

@@ -1,5 +1,25 @@
{
"entity": {
"binary_sensor": {
"boiler_status": {
"default": "mdi:water-boiler-off",
"state": {
"on": "mdi:water-boiler"
}
},
"valve_2way_status": {
"default": "mdi:valve-closed",
"state": {
"on": "mdi:valve-open"
}
},
"valve_3way_status": {
"default": "mdi:valve-closed",
"state": {
"on": "mdi:valve-open"
}
}
},
"sensor": {
"energy_consumed": {
"default": "mdi:factory"

View File

@@ -42,6 +42,26 @@
}
},
"entity": {
"binary_sensor": {
"boiler_status": {
"name": "Boiler"
},
"booster_heater_status": {
"name": "Booster heater {number}"
},
"immersion_heater_status": {
"name": "Immersion heater"
},
"valve_2way_status": {
"name": "2-way valve"
},
"valve_3way_status": {
"name": "3-way valve"
},
"water_pump_status": {
"name": "Water pump {number}"
}
},
"sensor": {
"condensing_temperature": {
"name": "Condensing temperature"

View File

@@ -11,6 +11,7 @@ from homeassistant.components.weather import (
ATTR_FORECAST_NATIVE_PRECIPITATION,
ATTR_FORECAST_NATIVE_TEMP,
ATTR_FORECAST_NATIVE_TEMP_LOW,
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED,
ATTR_FORECAST_NATIVE_WIND_SPEED,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
@@ -184,6 +185,9 @@ class MeteoFranceWeather(
ATTR_FORECAST_NATIVE_TEMP: forecast["T"]["value"],
ATTR_FORECAST_NATIVE_PRECIPITATION: forecast["rain"].get("1h"),
ATTR_FORECAST_NATIVE_WIND_SPEED: forecast["wind"]["speed"],
ATTR_FORECAST_NATIVE_WIND_GUST_SPEED: forecast["wind"].get(
"gust"
),
ATTR_FORECAST_WIND_BEARING: forecast["wind"]["direction"]
if forecast["wind"]["direction"] != -1
else None,

View File

@@ -154,6 +154,8 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
except ApiError as err:
_LOGGER.error("Failed to connect to printer")
raise CannotConnect from err
finally:
await self._sessions.pop().close()
await self.async_set_unique_id(discovery.upnp_uuid, raise_on_progress=False)
self._abort_if_unique_id_configured()
@@ -262,9 +264,12 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
assert self._user_input is not None
octoprint = self._get_octoprint_client(self._user_input)
self._user_input[CONF_API_KEY] = await octoprint.request_app_key(
"Home Assistant", self._user_input[CONF_USERNAME], 300
)
try:
self._user_input[CONF_API_KEY] = await octoprint.request_app_key(
"Home Assistant", self._user_input[CONF_USERNAME], 300
)
finally:
await self._sessions.pop().close()
def _get_octoprint_client(self, user_input: dict[str, Any]) -> OctoprintClient:
"""Build an octoprint client from the user_input."""
@@ -287,11 +292,6 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN):
path=user_input[CONF_PATH],
)
def async_remove(self) -> None:
"""Detach the session."""
for session in self._sessions:
session.detach()
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""

View File

@@ -23,7 +23,6 @@ import voluptuous as vol
from homeassistant.components import webhook
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_SCAN_INTERVAL,
CONF_TOKEN,
@@ -39,21 +38,15 @@ from .const import (
CONF_DEVICE_NAME,
CONF_DEVICE_TYPE,
CONF_USE_WEBHOOK,
COORDINATOR,
DEFAULT_SCAN_INTERVAL,
DEVICE,
DEVICE_ID,
DEVICE_NAME,
DEVICE_TYPE,
DOMAIN,
PLATFORMS,
SENSOR_DATA,
UNDO_UPDATE_LISTENER,
)
from .coordinator import PlaatoCoordinator
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ["webhook"]
SENSOR_UPDATE = f"{DOMAIN}_sensor_update"
@@ -82,15 +75,15 @@ WEBHOOK_SCHEMA = vol.Schema(
)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
"""Configure based on config entry."""
hass.data.setdefault(DOMAIN, {})
if entry.data[CONF_USE_WEBHOOK]:
async_setup_webhook(hass, entry)
else:
await async_setup_coordinator(hass, entry)
entry.async_on_unload(entry.add_update_listener(_async_update_listener))
await hass.config_entries.async_forward_entry_setups(
entry, [platform for platform in PLATFORMS if entry.options.get(platform, True)]
)
@@ -99,19 +92,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
@callback
def async_setup_webhook(hass: HomeAssistant, entry: ConfigEntry):
def async_setup_webhook(hass: HomeAssistant, entry: PlaatoConfigEntry) -> None:
"""Init webhook based on config entry."""
webhook_id = entry.data[CONF_WEBHOOK_ID]
device_name = entry.data[CONF_DEVICE_NAME]
_set_entry_data(entry, hass)
entry.runtime_data = PlaatoData(
coordinator=None,
device_name=entry.data[CONF_DEVICE_NAME],
device_type=entry.data[CONF_DEVICE_TYPE],
device_id=None,
)
webhook.async_register(
hass, DOMAIN, f"{DOMAIN}.{device_name}", webhook_id, handle_webhook
)
async def async_setup_coordinator(hass: HomeAssistant, entry: ConfigEntry):
async def async_setup_coordinator(
hass: HomeAssistant, entry: PlaatoConfigEntry
) -> None:
"""Init auth token based on config entry."""
auth_token = entry.data[CONF_TOKEN]
device_type = entry.data[CONF_DEVICE_TYPE]
@@ -126,62 +126,44 @@ async def async_setup_coordinator(hass: HomeAssistant, entry: ConfigEntry):
)
await coordinator.async_config_entry_first_refresh()
_set_entry_data(entry, hass, coordinator, auth_token)
entry.runtime_data = PlaatoData(
coordinator=coordinator,
device_name=entry.data[CONF_DEVICE_NAME],
device_type=entry.data[CONF_DEVICE_TYPE],
device_id=auth_token,
)
for platform in PLATFORMS:
if entry.options.get(platform, True):
coordinator.platforms.append(platform)
def _set_entry_data(entry, hass, coordinator=None, device_id=None):
device = {
DEVICE_NAME: entry.data[CONF_DEVICE_NAME],
DEVICE_TYPE: entry.data[CONF_DEVICE_TYPE],
DEVICE_ID: device_id,
}
hass.data[DOMAIN][entry.entry_id] = {
COORDINATOR: coordinator,
DEVICE: device,
SENSOR_DATA: None,
UNDO_UPDATE_LISTENER: entry.add_update_listener(_async_update_listener),
}
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
"""Unload a config entry."""
use_webhook = entry.data[CONF_USE_WEBHOOK]
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
if use_webhook:
if entry.data[CONF_USE_WEBHOOK]:
return await async_unload_webhook(hass, entry)
return await async_unload_coordinator(hass, entry)
async def async_unload_webhook(hass: HomeAssistant, entry: ConfigEntry):
async def async_unload_webhook(hass: HomeAssistant, entry: PlaatoConfigEntry) -> bool:
"""Unload webhook based entry."""
if entry.data[CONF_WEBHOOK_ID] is not None:
webhook.async_unregister(hass, entry.data[CONF_WEBHOOK_ID])
return await async_unload_platforms(hass, entry, PLATFORMS)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
async def async_unload_coordinator(hass: HomeAssistant, entry: ConfigEntry):
async def async_unload_coordinator(
hass: HomeAssistant, entry: PlaatoConfigEntry
) -> bool:
"""Unload auth token based entry."""
coordinator = hass.data[DOMAIN][entry.entry_id][COORDINATOR]
return await async_unload_platforms(hass, entry, coordinator.platforms)
coordinator = entry.runtime_data.coordinator
return await hass.config_entries.async_unload_platforms(
entry, coordinator.platforms if coordinator else PLATFORMS
)
async def async_unload_platforms(hass: HomeAssistant, entry: ConfigEntry, platforms):
"""Unload platforms."""
unloaded = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unloaded:
hass.data[DOMAIN].pop(entry.entry_id)
return unloaded
async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def _async_update_listener(hass: HomeAssistant, entry: PlaatoConfigEntry) -> None:
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)

View File

@@ -8,17 +8,17 @@ from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import CONF_USE_WEBHOOK, COORDINATOR, DOMAIN
from .const import CONF_USE_WEBHOOK
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
from .entity import PlaatoEntity
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: PlaatoConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Plaato from a config entry."""
@@ -26,10 +26,12 @@ async def async_setup_entry(
if config_entry.data[CONF_USE_WEBHOOK]:
return
coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
entry_data = config_entry.runtime_data
coordinator = entry_data.coordinator
assert coordinator is not None
async_add_entities(
PlaatoBinarySensor(
hass.data[DOMAIN][config_entry.entry_id],
entry_data,
sensor_type,
coordinator,
)
@@ -40,7 +42,12 @@ async def async_setup_entry(
class PlaatoBinarySensor(PlaatoEntity, BinarySensorEntity):
"""Representation of a Binary Sensor."""
def __init__(self, data, sensor_type, coordinator=None) -> None:
def __init__(
self,
data: PlaatoData,
sensor_type: str,
coordinator: PlaatoCoordinator | None = None,
) -> None:
"""Initialize plaato binary sensor."""
super().__init__(data, sensor_type, coordinator)
if sensor_type is PlaatoKeg.Pins.LEAK_DETECTION:

View File

@@ -19,13 +19,7 @@ PLACEHOLDER_DEVICE_TYPE = "device_type"
PLACEHOLDER_DEVICE_NAME = "device_name"
DOCS_URL = "https://www.home-assistant.io/integrations/plaato/"
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
SENSOR_DATA = "sensor_data"
COORDINATOR = "coordinator"
DEVICE = "device"
DEVICE_NAME = "device_name"
DEVICE_TYPE = "device_type"
DEVICE_ID = "device_id"
UNDO_UPDATE_LISTENER = "undo_update_listener"
DEFAULT_SCAN_INTERVAL = 5
MIN_UPDATE_INTERVAL = timedelta(minutes=1)

View File

@@ -1,8 +1,10 @@
"""Coordinator for Plaato devices."""
from dataclasses import dataclass, field
from datetime import timedelta
import logging
from pyplaato.models.device import PlaatoDevice
from pyplaato.plaato import Plaato, PlaatoDeviceType
from homeassistant.config_entries import ConfigEntry
@@ -16,15 +18,29 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class PlaatoCoordinator(DataUpdateCoordinator):
@dataclass
class PlaatoData:
"""Runtime data for the Plaato integration."""
coordinator: PlaatoCoordinator | None
device_name: str
device_type: str
device_id: str | None
sensor_data: PlaatoDevice | None = field(default=None)
type PlaatoConfigEntry = ConfigEntry[PlaatoData]
class PlaatoCoordinator(DataUpdateCoordinator[PlaatoDevice]):
"""Class to manage fetching data from the API."""
config_entry: ConfigEntry
config_entry: PlaatoConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: PlaatoConfigEntry,
auth_token: str,
device_type: PlaatoDeviceType,
update_interval: timedelta,
@@ -42,7 +58,7 @@ class PlaatoCoordinator(DataUpdateCoordinator):
update_interval=update_interval,
)
async def _async_update_data(self):
async def _async_update_data(self) -> PlaatoDevice:
"""Update data via library."""
return await self.api.get_data(
session=aiohttp_client.async_get_clientsession(self.hass),

View File

@@ -1,6 +1,6 @@
"""PlaatoEntity class."""
from typing import Any
from typing import Any, cast
from pyplaato.models.device import PlaatoDevice
@@ -8,16 +8,8 @@ from homeassistant.helpers import entity
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
DEVICE,
DEVICE_ID,
DEVICE_NAME,
DEVICE_TYPE,
DOMAIN,
EXTRA_STATE_ATTRIBUTES,
SENSOR_DATA,
SENSOR_SIGNAL,
)
from .const import DOMAIN, EXTRA_STATE_ATTRIBUTES, SENSOR_SIGNAL
from .coordinator import PlaatoCoordinator, PlaatoData
class PlaatoEntity(entity.Entity):
@@ -25,14 +17,20 @@ class PlaatoEntity(entity.Entity):
_attr_should_poll = False
def __init__(self, data, sensor_type, coordinator=None):
def __init__(
self,
data: PlaatoData,
sensor_type: str,
coordinator: PlaatoCoordinator | None = None,
) -> None:
"""Initialize the sensor."""
self._coordinator = coordinator
self._entry_data = data
self._sensor_type = sensor_type
self._device_id = data[DEVICE][DEVICE_ID]
self._device_type = data[DEVICE][DEVICE_TYPE]
self._device_name = data[DEVICE][DEVICE_NAME]
assert self._entry_data.device_id is not None
self._device_id = cast(str, data.device_id)
self._device_type = data.device_type
self._device_name = data.device_name
self._attr_unique_id = f"{self._device_id}_{self._sensor_type}"
self._attr_name = f"{DOMAIN} {self._device_type} {self._device_name} {self._sensor_name}".title()
sw_version = None
@@ -58,7 +56,7 @@ class PlaatoEntity(entity.Entity):
def _sensor_data(self) -> PlaatoDevice:
if self._coordinator:
return self._coordinator.data
return self._entry_data[SENSOR_DATA]
return self._entry_data.sensor_data
@property
def extra_state_attributes(self) -> dict[str, Any] | None:

View File

@@ -6,7 +6,6 @@ from pyplaato.models.device import PlaatoDevice
from pyplaato.plaato import PlaatoKeg
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
@@ -19,15 +18,8 @@ from homeassistant.helpers.entity_platform import (
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import ATTR_TEMP, SENSOR_UPDATE
from .const import (
CONF_USE_WEBHOOK,
COORDINATOR,
DEVICE,
DEVICE_ID,
DOMAIN,
SENSOR_DATA,
SENSOR_SIGNAL,
)
from .const import CONF_USE_WEBHOOK, SENSOR_SIGNAL
from .coordinator import PlaatoConfigEntry, PlaatoCoordinator, PlaatoData
from .entity import PlaatoEntity
@@ -42,19 +34,19 @@ async def async_setup_platform(
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: PlaatoConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Plaato from a config entry."""
entry_data = hass.data[DOMAIN][entry.entry_id]
entry_data = entry.runtime_data
@callback
def _async_update_from_webhook(device_id, sensor_data: PlaatoDevice):
"""Update/Create the sensors."""
entry_data[SENSOR_DATA] = sensor_data
entry_data.sensor_data = sensor_data
if device_id != entry_data[DEVICE][DEVICE_ID]:
entry_data[DEVICE][DEVICE_ID] = device_id
if device_id != entry_data.device_id:
entry_data.device_id = device_id
async_add_entities(
[
PlaatoSensor(entry_data, sensor_type)
@@ -68,7 +60,8 @@ async def async_setup_entry(
if entry.data[CONF_USE_WEBHOOK]:
async_dispatcher_connect(hass, SENSOR_UPDATE, _async_update_from_webhook)
else:
coordinator = entry_data[COORDINATOR]
coordinator = entry_data.coordinator
assert coordinator is not None
async_add_entities(
PlaatoSensor(entry_data, sensor_type, coordinator)
for sensor_type in coordinator.data.sensors
@@ -78,18 +71,23 @@ async def async_setup_entry(
class PlaatoSensor(PlaatoEntity, SensorEntity):
"""Representation of a Plaato Sensor."""
def __init__(self, data, sensor_type, coordinator=None) -> None:
def __init__(
self,
data: PlaatoData,
sensor_type: str,
coordinator: PlaatoCoordinator | None = None,
) -> None:
"""Initialize plaato sensor."""
super().__init__(data, sensor_type, coordinator)
if sensor_type is PlaatoKeg.Pins.TEMPERATURE or sensor_type == ATTR_TEMP:
self._attr_device_class = SensorDeviceClass.TEMPERATURE
@property
def native_value(self):
def native_value(self) -> str | int | float | None:
"""Return the state of the sensor."""
return self._sensor_data.sensors.get(self._sensor_type)
@property
def native_unit_of_measurement(self):
def native_unit_of_measurement(self) -> str | None:
"""Return the unit of measurement."""
return self._sensor_data.get_unit_of_measurement(self._sensor_type)

View File

@@ -2,7 +2,14 @@
from __future__ import annotations
from pvo import PVOutput, PVOutputAuthenticationError, PVOutputNoDataError, Status
from pvo import (
PVOutput,
PVOutputAuthenticationError,
PVOutputConnectionError,
PVOutputError,
PVOutputNoDataError,
Status,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY
@@ -37,7 +44,20 @@ class PVOutputDataUpdateCoordinator(DataUpdateCoordinator[Status]):
"""Fetch system status from PVOutput."""
try:
return await self.pvoutput.status()
except PVOutputNoDataError as err:
raise UpdateFailed("PVOutput has no data available") from err
except PVOutputAuthenticationError as err:
raise ConfigEntryAuthFailed from err
except PVOutputNoDataError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="no_data_available",
) from err
except PVOutputConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="communication_error",
) from err
except PVOutputError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="unknown_error",
) from err

View File

@@ -42,5 +42,16 @@
"name": "Power generation"
}
}
},
"exceptions": {
"communication_error": {
"message": "An error occurred while communicating with the PVOutput service."
},
"no_data_available": {
"message": "The PVOutput service has no data available for this system."
},
"unknown_error": {
"message": "An unknown error occurred while communicating with the PVOutput service."
}
}
}

View File

@@ -0,0 +1,17 @@
"""Provides conditions for remotes."""
from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import Condition, make_entity_state_condition
from . import DOMAIN
# Mapping of condition key -> condition class registered for the remote domain.
CONDITIONS: dict[str, type[Condition]] = dict(
    is_off=make_entity_state_condition(DOMAIN, STATE_OFF),
    is_on=make_entity_state_condition(DOMAIN, STATE_ON),
)


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
    """Return the conditions provided by the remote integration."""
    return CONDITIONS

View File

@@ -0,0 +1,17 @@
.condition_common: &condition_common
target:
entity:
domain: remote
fields:
behavior:
required: true
default: any
selector:
select:
translation_key: condition_behavior
options:
- all
- any
is_off: *condition_common
is_on: *condition_common

View File

@@ -1,4 +1,12 @@
{
"conditions": {
"is_off": {
"condition": "mdi:remote-off"
},
"is_on": {
"condition": "mdi:remote"
}
},
"entity_component": {
"_": {
"default": "mdi:remote",

View File

@@ -1,7 +1,28 @@
{
"common": {
"condition_behavior_name": "Condition passes if",
"trigger_behavior_name": "Trigger when"
},
"conditions": {
"is_off": {
"description": "Tests if one or more remotes are off.",
"fields": {
"behavior": {
"name": "[%key:component::remote::common::condition_behavior_name%]"
}
},
"name": "Remote is off"
},
"is_on": {
"description": "Tests if one or more remotes are on.",
"fields": {
"behavior": {
"name": "[%key:component::remote::common::condition_behavior_name%]"
}
},
"name": "Remote is on"
}
},
"device_automation": {
"action_type": {
"toggle": "[%key:common::device_automation::action_type::toggle%]",
@@ -31,6 +52,12 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"trigger_behavior": {
"options": {
"any": "Any",

View File

@@ -128,16 +128,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ShellyConfigEntry) -> bo
"""Set up Shelly from a config entry."""
entry.runtime_data = ShellyEntryData([])
# The community integration for Shelly devices uses Shelly domain as well as Core
# integration. If the user removes the community integration but doesn't remove
# the config entry, Core integration will try to configure that config entry with
# an error. The config entry data for this community integration doesn't contain
# host value, so if host isn't present, config entry will not be configured.
# The custom component for Shelly devices uses shelly domain as well as core
# integration. If the user removes the custom component but doesn't remove the
# config entry, core integration will try to configure that config entry with an
# error. The config entry data for this custom component doesn't contain host
# value, so if host isn't present, config entry will not be configured.
if not entry.data.get(CONF_HOST):
LOGGER.warning(
(
"The config entry %s probably comes from a community integration, "
"please remove it if you want to use the Core Shelly integration"
"The config entry %s probably comes from a custom integration, please"
" remove it if you want to use core Shelly integration"
),
entry.title,
)

View File

@@ -0,0 +1,98 @@
"""Diagnostics support for Sunricher DALI."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from homeassistant.components.diagnostics import REDACTED, async_redact_data
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from .const import CONF_SERIAL_NUMBER
from .types import DaliCenterConfigEntry
if TYPE_CHECKING:
from PySrDaliGateway import Device, Scene
from PySrDaliGateway.types import SceneDeviceType
TO_REDACT = {
CONF_HOST,
CONF_USERNAME,
CONF_PASSWORD,
CONF_SERIAL_NUMBER,
"dev_sn",
}
ALLOWED_ENTRY_KEYS: tuple[str, ...] = (
CONF_HOST,
CONF_PORT,
CONF_NAME,
CONF_USERNAME,
CONF_PASSWORD,
CONF_SERIAL_NUMBER,
)
def _serialize_entry_data(entry: DaliCenterConfigEntry) -> dict[str, Any]:
    """Return the config entry data restricted to the allow-listed keys."""
    entry_data = entry.data
    return {
        allowed_key: entry_data[allowed_key]
        for allowed_key in ALLOWED_ENTRY_KEYS
        if allowed_key in entry_data
    }
def _serialize_device(device: Device) -> dict[str, Any]:
"""Return a whitelisted dict view of a Device."""
return {
"dev_id": device.dev_id,
"unique_id": device.unique_id,
"name": device.name,
"dev_type": device.dev_type,
"channel": device.channel,
"address": device.address,
"status": device.status,
"dev_sn": device.dev_sn,
"area_name": getattr(device, "area_name", None),
"area_id": getattr(device, "area_id", None),
"model": device.model,
}
def _serialize_scene(scene: Scene) -> dict[str, Any]:
"""Return a whitelisted dict view of a Scene."""
members: list[SceneDeviceType] = scene.devices
return {
"scene_id": scene.scene_id,
"name": scene.name,
"channel": scene.channel,
"area_id": getattr(scene, "area_id", None),
"unique_id": scene.unique_id,
"device_unique_ids": [member["unique_id"] for member in members],
}
def _strip_gw_sn(data: Any, gw_sn: str) -> Any:
    """Recursively replace gw_sn in string values and list items."""
    # Strings are scrubbed directly; containers are rebuilt with each
    # element scrubbed; every other type passes through unchanged.
    if isinstance(data, str):
        return data.replace(gw_sn, REDACTED)
    if isinstance(data, list):
        return [_strip_gw_sn(item, gw_sn) for item in data]
    if isinstance(data, dict):
        return {name: _strip_gw_sn(value, gw_sn) for name, value in data.items()}
    return data
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: DaliCenterConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    runtime = entry.runtime_data
    diagnostics: dict[str, Any] = {
        "entry_data": _serialize_entry_data(entry),
        "devices": [_serialize_device(device) for device in runtime.devices],
        "scenes": [_serialize_scene(scene) for scene in runtime.scenes],
    }
    # Redact the known-sensitive keys first, then scrub any remaining
    # occurrence of the gateway serial embedded inside string values.
    redacted = async_redact_data(diagnostics, TO_REDACT)
    return _strip_gw_sn(redacted, runtime.gateway.gw_sn)

View File

@@ -46,7 +46,7 @@ rules:
test-coverage: done
# Gold
devices: done
diagnostics: todo
diagnostics: done
discovery-update-info: done
discovery:
status: exempt

View File

@@ -52,12 +52,14 @@ class SwitchbotEventEntity(SwitchbotEntity, EventEntity):
self._event = event
self.entity_description = description
self._attr_unique_id = f"{coordinator.base_unique_id}-{event}"
self._previous_value = False
self._previous_doorbell_seq = int(
coordinator.device.parsed_data.get("doorbell_seq", 0)
)
@callback
def _async_update_attrs(self) -> None:
"""Update the entity attributes."""
value = bool(self.parsed_data.get(self._event, False))
if value and not self._previous_value:
seq = int(self.parsed_data.get("doorbell_seq", 0))
if seq not in (0, self._previous_doorbell_seq):
self._trigger_event("ring")
self._previous_value = value
self._previous_doorbell_seq = seq

View File

@@ -62,6 +62,7 @@ from .const import (
ATTR_DIRECTORY_PATH,
ATTR_DISABLE_NOTIF,
ATTR_DISABLE_WEB_PREV,
ATTR_DRAFT_ID,
ATTR_FILE,
ATTR_FILE_ID,
ATTR_FILE_NAME,
@@ -129,6 +130,7 @@ from .const import (
SERVICE_SEND_LOCATION,
SERVICE_SEND_MEDIA_GROUP,
SERVICE_SEND_MESSAGE,
SERVICE_SEND_MESSAGE_DRAFT,
SERVICE_SEND_PHOTO,
SERVICE_SEND_POLL,
SERVICE_SEND_STICKER,
@@ -176,6 +178,19 @@ SERVICE_SCHEMA_SEND_MESSAGE = vol.All(
),
)
SERVICE_SCHEMA_SEND_MESSAGE_DRAFT = vol.Schema(
{
vol.Optional(ATTR_ENTITY_ID): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ATTR_TARGET): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(CONF_CONFIG_ENTRY_ID): cv.string,
vol.Optional(ATTR_CHAT_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
vol.Optional(ATTR_MESSAGE_THREAD_ID): vol.Coerce(int),
vol.Required(ATTR_DRAFT_ID): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Required(ATTR_MESSAGE): cv.string,
vol.Optional(ATTR_PARSER): ATTR_PARSER_SCHEMA,
}
)
SERVICE_SCHEMA_SEND_CHAT_ACTION = vol.All(
cv.deprecated(ATTR_TIMEOUT),
vol.Schema(
@@ -424,6 +439,7 @@ SERVICE_SCHEMA_DOWNLOAD_FILE = vol.Schema(
SERVICE_MAP: dict[str, VolSchemaType] = {
SERVICE_SEND_MESSAGE: SERVICE_SCHEMA_SEND_MESSAGE,
SERVICE_SEND_MESSAGE_DRAFT: SERVICE_SCHEMA_SEND_MESSAGE_DRAFT,
SERVICE_SEND_CHAT_ACTION: SERVICE_SCHEMA_SEND_CHAT_ACTION,
SERVICE_SEND_PHOTO: SERVICE_SCHEMA_SEND_FILE,
SERVICE_SEND_MEDIA_GROUP: SERVICE_SCHEMA_SEND_MEDIA_GROUP,
@@ -615,6 +631,8 @@ async def _call_service(
await notify_service.set_message_reaction(context=service.context, **kwargs)
elif service_name == SERVICE_EDIT_MESSAGE_MEDIA:
await notify_service.edit_message_media(context=service.context, **kwargs)
elif service_name == SERVICE_SEND_MESSAGE_DRAFT:
await notify_service.send_message_draft(context=service.context, **kwargs)
elif service_name == SERVICE_DOWNLOAD_FILE:
return await notify_service.download_file(context=service.context, **kwargs)
else:

View File

@@ -1013,6 +1013,36 @@ class TelegramNotificationService:
context=context,
)
async def send_message_draft(
    self,
    message: str,
    chat_id: int,
    draft_id: int,
    context: Context | None = None,
    **kwargs: dict[str, Any],
) -> None:
    """Stream a partial message to a user while the message is being generated."""
    # Resolve thread ID, parse mode and timeout from the service call data.
    msg_options = self._get_msg_kwargs(kwargs)
    _LOGGER.debug(
        "Sending message draft %s in chat ID %s with params: %s",
        draft_id,
        chat_id,
        msg_options,
    )
    # Delegate to the shared sender wrapper; the second positional
    # argument (no stored message reference) is None for drafts.
    await self._send_msg(
        self.bot.send_message_draft,
        None,
        chat_id=chat_id,
        draft_id=draft_id,
        text=message,
        parse_mode=msg_options[ATTR_PARSER],
        message_thread_id=msg_options[ATTR_MESSAGE_THREAD_ID],
        read_timeout=msg_options[ATTR_TIMEOUT],
        context=context,
    )
async def download_file(
self,
file_id: str,

View File

@@ -31,6 +31,7 @@ DEFAULT_TRUSTED_NETWORKS = [ip_network("149.154.160.0/20"), ip_network("91.108.4
SERVICE_SEND_CHAT_ACTION = "send_chat_action"
SERVICE_SEND_MESSAGE = "send_message"
SERVICE_SEND_MESSAGE_DRAFT = "send_message_draft"
SERVICE_SEND_PHOTO = "send_photo"
SERVICE_SEND_MEDIA_GROUP = "send_media_group"
SERVICE_SEND_STICKER = "send_sticker"
@@ -90,6 +91,7 @@ ATTR_DATE = "date"
ATTR_DISABLE_NOTIF = "disable_notification"
ATTR_DISABLE_WEB_PREV = "disable_web_page_preview"
ATTR_DIRECTORY_PATH = "directory_path"
ATTR_DRAFT_ID = "draft_id"
ATTR_EDITED_MSG = "edited_message"
ATTR_FILE = "file"
ATTR_FILE_ID = "file_id"

View File

@@ -49,6 +49,9 @@
"send_message": {
"service": "mdi:send"
},
"send_message_draft": {
"service": "mdi:chat-processing"
},
"send_photo": {
"service": "mdi:camera"
},

View File

@@ -1198,3 +1198,50 @@ download_file:
example: "my_downloaded_file"
selector:
text:
send_message_draft:
fields:
entity_id:
selector:
entity:
filter:
domain: notify
integration: telegram_bot
multiple: true
reorder: true
message_thread_id:
selector:
number:
mode: box
draft_id:
required: true
selector:
number:
mode: box
min: 1
message:
example: The garage door has been o
required: true
selector:
text:
parse_mode:
selector:
select:
options:
- "html"
- "markdown"
- "markdownv2"
- "plain_text"
translation_key: "parse_mode"
advanced:
collapsed: true
fields:
config_entry_id:
selector:
config_entry:
integration: telegram_bot
chat_id:
example: "[12345, 67890] or 12345"
selector:
text:
multiple: true

View File

@@ -951,6 +951,45 @@
}
}
},
"send_message_draft": {
"description": "Stream a partial message to a user while the message is being generated.",
"fields": {
"chat_id": {
"description": "One or more pre-authorized chat IDs to send the message draft to.",
"name": "[%key:component::telegram_bot::services::edit_message::fields::chat_id::name%]"
},
"config_entry_id": {
"description": "The config entry representing the Telegram bot to send the message draft.",
"name": "[%key:component::telegram_bot::services::send_message::fields::config_entry_id::name%]"
},
"draft_id": {
"description": "Unique identifier of the message draft. Changes of drafts with the same identifier are animated.",
"name": "Draft ID"
},
"entity_id": {
"description": "[%key:component::telegram_bot::services::send_message::fields::entity_id::description%]",
"name": "[%key:component::telegram_bot::services::send_message::fields::entity_id::name%]"
},
"message": {
"description": "Available part of the message for temporary notification.\nCan't parse entities? Format your message according to the [formatting options]({formatting_options_url}).",
"name": "[%key:component::telegram_bot::services::send_message::fields::message::name%]"
},
"message_thread_id": {
"description": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::description%]",
"name": "[%key:component::telegram_bot::services::send_message::fields::message_thread_id::name%]"
},
"parse_mode": {
"description": "[%key:component::telegram_bot::services::send_message::fields::parse_mode::description%]",
"name": "[%key:component::telegram_bot::services::send_message::fields::parse_mode::name%]"
}
},
"name": "Send message draft",
"sections": {
"advanced": {
"name": "[%key:component::telegram_bot::services::send_message::sections::advanced::name%]"
}
}
},
"send_photo": {
"description": "Sends a photo.",
"fields": {

View File

@@ -180,18 +180,16 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema:
}
if domain == Platform.BINARY_SENSOR:
schema |= _SCHEMA_STATE
if flow_type == "config":
schema |= {
vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
selector.SelectSelectorConfig(
options=[cls.value for cls in BinarySensorDeviceClass],
mode=selector.SelectSelectorMode.DROPDOWN,
translation_key="binary_sensor_device_class",
sort=True,
),
schema |= _SCHEMA_STATE | {
vol.Optional(CONF_DEVICE_CLASS): selector.SelectSelector(
selector.SelectSelectorConfig(
options=[cls.value for cls in BinarySensorDeviceClass],
mode=selector.SelectSelectorMode.DROPDOWN,
translation_key="binary_sensor_device_class",
sort=True,
),
}
),
}
if domain == Platform.BUTTON:
schema |= {

View File

@@ -608,6 +608,7 @@
},
"binary_sensor": {
"data": {
"device_class": "[%key:component::template::common::device_class%]",
"device_id": "[%key:common::config_flow::data::device%]",
"state": "[%key:component::template::common::state%]"
},

View File

@@ -8,5 +8,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tibber"],
"requirements": ["pyTibber==0.37.0"]
"requirements": ["pyTibber==0.37.1"]
}

View File

@@ -0,0 +1,20 @@
"""Provides conditions for to-do lists."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import (
Condition,
make_entity_numerical_condition,
make_entity_state_condition,
)
from .const import DOMAIN
# Mapping of condition key -> condition class registered for the to-do domain.
CONDITIONS: dict[str, type[Condition]] = {
    # The entity state is compared against "0" for all_completed.
    "all_completed": make_entity_state_condition(DOMAIN, "0"),
    "incomplete": make_entity_numerical_condition(DOMAIN),
}


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
    """Return the conditions provided by the to-do list integration."""
    return CONDITIONS

View File

@@ -0,0 +1,37 @@
.condition_common: &condition_common
target: &condition_todo_target
entity:
domain: todo
fields:
behavior: &condition_behavior
required: true
default: any
selector:
select:
translation_key: condition_behavior
options:
- all
- any
.incomplete_threshold_entity: &incomplete_threshold_entity
- domain: input_number
- domain: number
- domain: sensor
.incomplete_threshold_number: &incomplete_threshold_number
min: 0
mode: box
all_completed: *condition_common
incomplete:
target: *condition_todo_target
fields:
behavior: *condition_behavior
threshold:
required: true
selector:
numeric_threshold:
entity: *incomplete_threshold_entity
mode: is
number: *incomplete_threshold_number

View File

@@ -1,4 +1,12 @@
{
"conditions": {
"all_completed": {
"condition": "mdi:clipboard-check"
},
"incomplete": {
"condition": "mdi:clipboard-alert"
}
},
"entity_component": {
"_": {
"default": "mdi:clipboard-list"

View File

@@ -1,4 +1,31 @@
{
"common": {
"condition_behavior_name": "Condition passes if",
"condition_threshold_name": "Threshold type"
},
"conditions": {
"all_completed": {
"description": "Tests if all to-do items are completed in one or more to-do lists.",
"fields": {
"behavior": {
"name": "[%key:component::todo::common::condition_behavior_name%]"
}
},
"name": "All to-do items completed"
},
"incomplete": {
"description": "Tests the number of incomplete to-do items in one or more to-do lists.",
"fields": {
"behavior": {
"name": "[%key:component::todo::common::condition_behavior_name%]"
},
"threshold": {
"name": "[%key:component::todo::common::condition_threshold_name%]"
}
},
"name": "Incomplete to-do items"
}
},
"entity_component": {
"_": {
"name": "[%key:component::todo::title%]"
@@ -13,6 +40,12 @@
}
},
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any"
}
},
"status": {
"options": {
"completed": "Completed",

View File

@@ -9,9 +9,10 @@ from typing import Any
from unifi_access_api import ApiAuthError, ApiConnectionError, UnifiAccessApiClient
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import SOURCE_IGNORE, ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.util.ssl import create_no_verify_ssl_context
from .const import DOMAIN
@@ -25,6 +26,11 @@ class UnifiAccessConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
MINOR_VERSION = 1
def __init__(self) -> None:
"""Init the config flow."""
super().__init__()
self._discovered_device: dict[str, Any] = {}
async def _validate_input(self, user_input: dict[str, Any]) -> dict[str, str]:
"""Validate user input and return errors dict."""
errors: dict[str, str] = {}
@@ -117,6 +123,66 @@ class UnifiAccessConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_integration_discovery(
    self, discovery_info: DiscoveryInfoType
) -> ConfigFlowResult:
    """Handle discovery via unifi_discovery."""
    # Stash the payload for async_step_discovery_confirm, which reads
    # source_ip / hw_addr / hostname / platform from it.
    self._discovered_device = discovery_info
    source_ip = discovery_info["source_ip"]
    # Normalized MAC (colons stripped, uppercased) is the unique ID.
    mac = discovery_info["hw_addr"].replace(":", "").upper()
    await self.async_set_unique_id(mac)
    # If an existing (non-ignored) entry already points at this host,
    # backfill its unique ID when it has none, then abort — no new flow.
    for entry in self._async_current_entries():
        if entry.source == SOURCE_IGNORE:
            continue
        if entry.data.get(CONF_HOST) == source_ip:
            if not entry.unique_id:
                self.hass.config_entries.async_update_entry(entry, unique_id=mac)
            return self.async_abort(reason="already_configured")
    # Abort when this unique ID is already configured, updating the stored
    # host in case the device's IP address changed.
    self._abort_if_unique_id_configured(updates={CONF_HOST: source_ip})
    return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Confirm discovery and collect API token."""
    errors: dict[str, str] = {}
    # Populated by async_step_integration_discovery before this step runs.
    discovery_info = self._discovered_device
    source_ip = discovery_info["source_ip"]
    if user_input is not None:
        # The host comes from discovery; only token and SSL choice come
        # from the user form.
        merged_input = {
            CONF_HOST: source_ip,
            CONF_API_TOKEN: user_input[CONF_API_TOKEN],
            CONF_VERIFY_SSL: user_input.get(CONF_VERIFY_SSL, False),
        }
        errors = await self._validate_input(merged_input)
        if not errors:
            return self.async_create_entry(
                title="UniFi Access",
                data=merged_input,
            )
    # Pick a display name: hostname, then platform, then a fallback built
    # from the last six characters of the MAC address.
    name = discovery_info.get("hostname") or discovery_info.get("platform")
    if not name:
        short_mac = discovery_info["hw_addr"].replace(":", "").upper()[-6:]
        name = f"Access {short_mac}"
    placeholders = {
        "name": name,
        "ip_address": source_ip,
    }
    # Title placeholders feed the discovery card shown in the UI.
    self.context["title_placeholders"] = placeholders
    return self.async_show_form(
        step_id="discovery_confirm",
        data_schema=vol.Schema(
            {
                vol.Required(CONF_API_TOKEN): str,
                vol.Required(CONF_VERIFY_SSL, default=False): bool,
            }
        ),
        description_placeholders=placeholders,
        errors=errors,
    )
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:

View File

@@ -3,6 +3,7 @@
"name": "UniFi Access",
"codeowners": ["@imhotep", "@RaHehl"],
"config_flow": true,
"dependencies": ["unifi_discovery"],
"documentation": "https://www.home-assistant.io/integrations/unifi_access",
"integration_type": "hub",
"iot_class": "local_push",

View File

@@ -42,8 +42,10 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info: todo
discovery: todo
discovery-update-info: done
discovery:
status: exempt
comment: Discovery is handled via unifi_discovery dependency using SOURCE_INTEGRATION_DISCOVERY.
docs-data-update: done
docs-examples: done
docs-known-limitations: done

View File

@@ -12,6 +12,17 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"discovery_confirm": {
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]",
"verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
},
"data_description": {
"api_token": "[%key:component::unifi_access::config::step::user::data_description::api_token%]",
"verify_ssl": "[%key:component::unifi_access::config::step::user::data_description::verify_ssl%]"
},
"description": "A UniFi Access controller was discovered at {ip_address} ({name})."
},
"reauth_confirm": {
"data": {
"api_token": "[%key:common::config_flow::data::api_token%]"

View File

@@ -9,4 +9,5 @@ DOMAIN = "unifi_discovery"
# when initial discovery runs — the same pattern DHCP/SSDP use with manifest matchers.
CONSUMER_MAPPING: dict[UnifiService, str] = {
UnifiService.Protect: "unifiprotect",
UnifiService.Access: "unifi_access",
}

View File

@@ -2,7 +2,8 @@
from __future__ import annotations
from dataclasses import asdict
from collections.abc import Mapping
from dataclasses import fields
from datetime import timedelta
import logging
from typing import Any
@@ -24,6 +25,24 @@ DISCOVERY_INTERVAL = timedelta(minutes=60)
DATA_DISCOVERY_STARTED: HassKey[bool] = HassKey(DOMAIN)
def _device_to_dict(device: UnifiDevice) -> dict[str, Any]:
"""Convert a UnifiDevice to a plain dict.
Avoid dataclasses.asdict() because it calls copy.deepcopy() on non-builtin
types. On Python 3.14+ deepcopy cannot pickle mappingproxy objects, and
Enum members (used as dict keys in ``services``) internally reference
``__members__`` which is a mappingproxy. This causes asdict() to crash
with ``TypeError: cannot pickle 'mappingproxy' object``.
"""
data: dict[str, Any] = {}
for f in fields(device):
value = getattr(device, f.name)
if isinstance(value, Mapping):
value = dict(value)
data[f.name] = value
return data
@callback
def async_start_discovery(hass: HomeAssistant) -> None:
"""Start discovery of UniFi devices."""
@@ -74,5 +93,5 @@ def async_trigger_discovery(
hass,
domain,
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data=asdict(device),
data=_device_to_dict(device),
)

View File

@@ -1,7 +1,7 @@
{
"domain": "vicare",
"name": "Viessmann ViCare",
"codeowners": ["@CFenner"],
"codeowners": ["@CFenner", "@lackas"],
"config_flow": true,
"dhcp": [
{

View File

@@ -14,6 +14,7 @@ from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.start import async_at_started
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN, INTEGRATION_TITLE
@@ -115,10 +116,13 @@ async def _async_setup_coordinator(
energy_coordinator = WaterFurnaceEnergyCoordinator(
hass, device_client, entry, device_client.gwid
)
# Use async_refresh() instead of async_config_entry_first_refresh() so that
# energy data failures (e.g. WFNoDataError for new accounts) don't block
# the integration from loading. Realtime sensor data is the primary concern.
await energy_coordinator.async_refresh()
# Defer the first energy refresh until HA has fully started so the
# potentially large initial backfill doesn't compete with startup I/O.
async def _async_start_energy(hass: HomeAssistant) -> None:
await energy_coordinator.async_refresh()
entry.async_on_unload(async_at_started(hass, _async_start_energy))
return device_client.gwid, WaterFurnaceDeviceData(
realtime=coordinator, energy=energy_coordinator

View File

@@ -2,9 +2,12 @@
from __future__ import annotations
import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
import math
import random
from typing import TYPE_CHECKING
from waterfurnace.waterfurnace import (
@@ -39,6 +42,13 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)
BACKFILL_BATCH_DAYS = 5
BACKFILL_LOOKBACK_DAYS = 395 # 13 Months
BACKFILL_GAP_THRESHOLD = timedelta(days=BACKFILL_BATCH_DAYS)
BACKFILL_DELAY_MIN_SECONDS = 5
BACKFILL_DELAY_MAX_SECONDS = 30
BACKFILL_MAX_EMPTY_DAYS = 15
@dataclass
class WaterFurnaceDeviceData:
@@ -115,6 +125,7 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
self.client = client
self.gwid = gwid
self.statistic_id = f"{DOMAIN}:{gwid.lower()}_energy"
self._backfill_task: asyncio.Task | None = None
self._statistic_metadata = StatisticMetaData(
has_sum=True,
mean_type=StatisticMeanType.NONE,
@@ -144,28 +155,43 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
if not last_stat:
return None
entry = last_stat[self.statistic_id][0]
if entry["sum"] is None:
if "sum" not in entry or "start" not in entry or entry["sum"] is None:
return None
return (entry["start"], entry["sum"])
def _fetch_energy_data(
self, start_date: str, end_date: str
) -> list[tuple[datetime, float]]:
"""Fetch energy data and return list of (timestamp, kWh) tuples."""
# Re-login to refresh the HTTP session token, which expires between
# the 2-hour polling intervals.
"""Fetch energy data and return list of (timestamp, kWh) tuples.
On auth failure, re-login once and retry the request.
"""
try:
self.client.login()
except WFCredentialError as err:
raise UpdateFailed(
"Authentication failed during energy data fetch"
) from err
data = self.client.get_energy_data(
start_date,
end_date,
frequency="1H",
timezone_str=self.hass.config.time_zone,
)
data = self.client.get_energy_data(
start_date,
end_date,
frequency="1H",
timezone_str=self.hass.config.time_zone,
)
except WFCredentialError:
try:
self.client.login()
except WFCredentialError as err:
raise UpdateFailed(
"Authentication failed during energy data fetch"
) from err
try:
data = self.client.get_energy_data(
start_date,
end_date,
frequency="1H",
timezone_str=self.hass.config.time_zone,
)
except WFCredentialError as err:
raise UpdateFailed(
"Authentication failed during energy data fetch"
) from err
return [
(reading.timestamp, reading.total_power)
for reading in data
@@ -177,10 +203,14 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
readings: list[tuple[datetime, float]],
last_ts: float,
last_sum: float,
now: datetime,
current_hour_ts: float | None = None,
) -> list[StatisticData]:
"""Build hourly statistics from readings, skipping already-recorded ones."""
current_hour_ts = now.replace(minute=0, second=0, microsecond=0).timestamp()
"""Build hourly statistics from readings, skipping already-recorded ones.
When provided, current_hour_ts acts as an exclusive cutoff so readings at
or after that timestamp are excluded, such as to skip the incomplete
current hour during normal polling and backfill.
"""
statistics: list[StatisticData] = []
seen_hours: set[float] = set()
running_sum = last_sum
@@ -188,7 +218,7 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
ts = timestamp.timestamp()
if ts <= last_ts:
continue
if ts >= current_hour_ts:
if current_hour_ts is not None and ts >= current_hour_ts:
continue
hour_ts = timestamp.replace(minute=0, second=0, microsecond=0).timestamp()
if hour_ts in seen_hours:
@@ -204,23 +234,140 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
)
return statistics
async def _async_backfill(
    self,
    start_dt: datetime,
    end_dt: datetime,
    initial_sum: float = 0.0,
    last_ts: float = -math.inf,
) -> None:
    """Backfill energy statistics by walking backwards in batches.

    Collects all readings into memory, then inserts them chronologically
    in a single pass. Stops early if no data is found for
    BACKFILL_MAX_EMPTY_DAYS consecutive days.

    Args:
        start_dt: Oldest point in time to fetch (exclusive loop bound).
        end_dt: Newest point in time; batches walk backwards from here.
        initial_sum: Statistic sum to continue accumulating from.
        last_ts: Timestamp of the newest already-recorded statistic;
            readings at or before it are skipped by _build_statistics.
    """
    all_readings: list[tuple[datetime, float]] = []
    batch_end = end_dt
    local_tz = dt_util.DEFAULT_TIME_ZONE
    consecutive_empty_days = 0
    while batch_end > start_dt:
        batch_start = max(batch_end - timedelta(days=BACKFILL_BATCH_DAYS), start_dt)
        start_str = batch_start.astimezone(local_tz).strftime("%Y-%m-%d")
        end_str = batch_end.astimezone(local_tz).strftime("%Y-%m-%d")
        try:
            parsed = await self.hass.async_add_executor_job(
                self._fetch_energy_data, start_str, end_str
            )
        except WFNoDataError:
            _LOGGER.debug(
                "No energy data for %s to %s, skipping", start_str, end_str
            )
            consecutive_empty_days += BACKFILL_BATCH_DAYS
            if consecutive_empty_days >= BACKFILL_MAX_EMPTY_DAYS:
                _LOGGER.debug(
                    "No data for %d consecutive days, stopping backfill",
                    consecutive_empty_days,
                )
                break
            batch_end = batch_start
            continue
        # BUGFIX: `except UpdateFailed, WFException:` is Python 2 syntax and a
        # SyntaxError in Python 3; multiple exception types must be a tuple.
        except (UpdateFailed, WFException):
            _LOGGER.exception("Error fetching energy data during backfill")
            break
        _LOGGER.debug(
            "Fetched %d readings for backfill batch %s to %s",
            len(parsed),
            start_str,
            end_str,
        )
        all_readings.extend(parsed)
        consecutive_empty_days = 0
        batch_end = batch_start
        if batch_end > start_dt:
            # Jitter between batches so we don't hammer the vendor API.
            await asyncio.sleep(
                random.uniform(
                    BACKFILL_DELAY_MIN_SECONDS, BACKFILL_DELAY_MAX_SECONDS
                )
            )
    if all_readings:
        # BUGFIX: batches were fetched newest-first, so the collected list is
        # in reverse chronological order. _build_statistics accumulates the
        # running sum in iteration order, so sort chronologically first.
        all_readings.sort(key=lambda reading: reading[0])
        # Exclude the incomplete current hour. Use local timezone so
        # the hour boundary is correct for partial-offset timezones
        # (e.g. UTC+5:30).
        current_hour_ts = (
            end_dt.astimezone(local_tz)
            .replace(minute=0, second=0, microsecond=0)
            .timestamp()
        )
        statistics = self._build_statistics(
            all_readings, last_ts, initial_sum, current_hour_ts
        )
        if statistics:
            async_add_external_statistics(
                self.hass, self._statistic_metadata, statistics
            )
def _backfill_done_callback(self, task: asyncio.Task[None]) -> None:
"""Log any exception from a completed backfill task."""
if task.cancelled():
return
if exc := task.exception():
_LOGGER.error("Backfill task failed", exc_info=exc)
async def async_wait_backfill(self) -> None:
    """Block until the in-progress backfill task, if any, has completed."""
    task = self._backfill_task
    if not task:
        return
    await task
async def _async_update_data(self) -> None:
"""Fetch energy data and insert statistics."""
"""Fetch energy data and insert statistics.
Handles three scenarios:
1. No statistics exist → first-load backfill (background task)
2. Last stat is older than gap threshold → gap backfill (background task)
3. Last stat is recent → normal poll for recent data
"""
if self._backfill_task and not self._backfill_task.done():
_LOGGER.debug("Backfill already in progress, skipping update")
return
last = await self._async_get_last_stat()
now = dt_util.utcnow()
if last is None:
_LOGGER.info("No prior statistics found, fetching recent energy data")
last_ts = 0.0
last_sum = 0.0
start_dt = now - timedelta(days=1)
else:
last_ts, last_sum = last
start_dt = dt_util.utc_from_timestamp(last_ts)
_LOGGER.debug("Last stat: ts=%s, sum=%s", start_dt.isoformat(), last_sum)
# First load: backfill walking backwards from today
start = now - timedelta(days=BACKFILL_LOOKBACK_DAYS)
self._backfill_task = self.config_entry.async_create_background_task(
self.hass,
self._async_backfill(start, now),
f"waterfurnace_backfill_{self.gwid}",
)
self._backfill_task.add_done_callback(self._backfill_done_callback)
return
last_ts, last_sum = last
last_dt = dt_util.utc_from_timestamp(last_ts)
if now - last_dt > BACKFILL_GAP_THRESHOLD:
# Large gap detected, backfill using batches
self._backfill_task = self.config_entry.async_create_background_task(
self.hass,
self._async_backfill(last_dt, now, last_sum, last_ts),
f"waterfurnace_backfill_{self.gwid}",
)
self._backfill_task.add_done_callback(self._backfill_done_callback)
return
# Normal poll: fetch recent data (up to BACKFILL_GAP_THRESHOLD) and insert any missing hours
_LOGGER.debug("Last stat: ts=%s, sum=%s", last_dt.isoformat(), last_sum)
local_tz = dt_util.DEFAULT_TIME_ZONE
start_date = start_dt.astimezone(local_tz).strftime("%Y-%m-%d")
start_date = last_dt.astimezone(local_tz).strftime("%Y-%m-%d")
end_date = (now.astimezone(local_tz) + timedelta(days=1)).strftime("%Y-%m-%d")
try:
@@ -239,7 +386,16 @@ class WaterFurnaceEnergyCoordinator(DataUpdateCoordinator[None]):
_LOGGER.debug("Fetched %s readings", len(readings))
statistics = self._build_statistics(readings, last_ts, last_sum, now)
# Use local timezone so the hour boundary is correct for
# partial-offset timezones (e.g. UTC+5:30).
current_hour_ts = (
now.astimezone(local_tz)
.replace(minute=0, second=0, microsecond=0)
.timestamp()
)
statistics = self._build_statistics(
readings, last_ts, last_sum, current_hour_ts
)
_LOGGER.debug("Built %s statistics to insert", len(statistics))

View File

@@ -181,7 +181,19 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
def on_pipeline_event(self, event: PipelineEvent) -> None:
"""Set state based on pipeline stage."""
assert self._client is not None
if event.type == assist_pipeline.PipelineEventType.RUN_END:
# Pipeline run is complete — always update bookkeeping state
# even after a disconnect so follow-up reconnects don't retain
# stale _is_pipeline_running / _pipeline_ended_event state.
self._is_pipeline_running = False
self._pipeline_ended_event.set()
self.device.set_is_active(False)
self._tts_stream_token = None
self._is_tts_streaming = False
if self._client is None:
# Satellite disconnected, don't try to write to the client
return
if event.type == assist_pipeline.PipelineEventType.RUN_START:
if event.data and (tts_output := event.data["tts_output"]):
@@ -190,13 +202,6 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
# can start streaming TTS before the TTS_END event.
self._tts_stream_token = tts_output["token"]
self._is_tts_streaming = False
elif event.type == assist_pipeline.PipelineEventType.RUN_END:
# Pipeline run is complete
self._is_pipeline_running = False
self._pipeline_ended_event.set()
self.device.set_is_active(False)
self._tts_stream_token = None
self._is_tts_streaming = False
elif event.type == assist_pipeline.PipelineEventType.WAKE_WORD_START:
self.config_entry.async_create_background_task(
self.hass,
@@ -321,7 +326,8 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
Should block until the announcement is done playing.
"""
assert self._client is not None
if self._client is None:
raise ConnectionError("Satellite is not connected")
if self._ffmpeg_manager is None:
self._ffmpeg_manager = ffmpeg.get_ffmpeg_manager(self.hass)
@@ -441,6 +447,11 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
# Stop any existing pipeline
self._audio_queue.put_nowait(None)
# Cancel any pipeline still running so its background
# tasks and audio buffers can be released instead of
# being orphaned across the reconnect.
await self._cancel_running_pipeline()
# Ensure sensor is off (before restart)
self.device.set_is_active(False)
@@ -449,6 +460,9 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
finally:
unregister_timer_handler()
# Cancel any pipeline still running on final teardown.
await self._cancel_running_pipeline()
# Ensure sensor is off (before stop)
self.device.set_is_active(False)
@@ -699,10 +713,10 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
async def _send_delayed_ping(self) -> None:
"""Send ping to satellite after a delay."""
assert self._client is not None
try:
await asyncio.sleep(_PING_SEND_DELAY)
if self._client is None:
return
await self._client.write_event(Ping().event())
except ConnectionError:
pass # handled with timeout
@@ -728,7 +742,10 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
async def _stream_tts(self, tts_result: tts.ResultStream) -> None:
"""Stream TTS WAV audio to satellite in chunks."""
assert self._client is not None
client = self._client
if client is None:
# Satellite disconnected, cannot stream
return
if tts_result.extension != "wav":
raise ValueError(
@@ -760,7 +777,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
sample_rate, sample_width, sample_channels, data_chunk = (
audio_info
)
await self._client.write_event(
await client.write_event(
AudioStart(
rate=sample_rate,
width=sample_width,
@@ -794,12 +811,12 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
timestamp=timestamp,
)
await self._client.write_event(audio_chunk.event())
await client.write_event(audio_chunk.event())
timestamp += audio_chunk.milliseconds
total_seconds += audio_chunk.seconds
data_chunk_idx += _AUDIO_CHUNK_BYTES
await self._client.write_event(AudioStop(timestamp=timestamp).event())
await client.write_event(AudioStop(timestamp=timestamp).event())
_LOGGER.debug("TTS streaming complete")
finally:
send_duration = time.monotonic() - start_time
@@ -840,7 +857,9 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
self, event_type: intent.TimerEventType, timer: intent.TimerInfo
) -> None:
"""Forward timer events to satellite."""
assert self._client is not None
if self._client is None:
# Satellite disconnected, drop timer event
return
_LOGGER.debug("Timer event: type=%s, info=%s", event_type, timer)
event: Event | None = None

View File

@@ -23,7 +23,7 @@
"universal_silabs_flasher",
"serialx"
],
"requirements": ["zha==1.1.2", "serialx==1.1.1"],
"requirements": ["zha==1.1.2", "serialx==1.2.2"],
"usb": [
{
"description": "*2652*",

View File

@@ -831,8 +831,8 @@ async def entity_service_call(
if len(entities) == 1:
# Single entity case avoids creating task
entity = entities[0]
single_response = await _handle_entity_call(
hass, entity, func, data, call.context
single_response = await entity.async_request_call(
_handle_entity_call(hass, entity, func, data, call.context)
)
if entity.should_poll:
# Context expires if the turn on commands took a long time.

8
requirements_all.txt generated
View File

@@ -1247,7 +1247,7 @@ homekit-audio-proxy==1.2.1
homelink-integration-api==0.0.1
# homeassistant.components.homematicip_cloud
homematicip==2.7.0
homematicip==2.8.0
# homeassistant.components.homevolt
homevolt==0.5.0
@@ -1928,7 +1928,7 @@ pyRFXtrx==0.31.1
pySDCP==1
# homeassistant.components.tibber
pyTibber==0.37.0
pyTibber==0.37.1
# homeassistant.components.dlink
pyW215==0.8.0
@@ -2092,7 +2092,7 @@ pyegps==0.2.5
pyemoncms==0.1.3
# homeassistant.components.enphase_envoy
pyenphase==2.4.6
pyenphase==2.4.8
# homeassistant.components.envisalink
pyenvisalink==4.7
@@ -2930,7 +2930,7 @@ sentry-sdk==2.48.0
# homeassistant.components.homeassistant_hardware
# homeassistant.components.zha
serialx==1.1.1
serialx==1.2.2
# homeassistant.components.sfr_box
sfrbox-api==0.1.1

View File

@@ -1111,7 +1111,7 @@ homekit-audio-proxy==1.2.1
homelink-integration-api==0.0.1
# homeassistant.components.homematicip_cloud
homematicip==2.7.0
homematicip==2.8.0
# homeassistant.components.homevolt
homevolt==0.5.0
@@ -1671,7 +1671,7 @@ pyHomee==1.3.8
pyRFXtrx==0.31.1
# homeassistant.components.tibber
pyTibber==0.37.0
pyTibber==0.37.1
# homeassistant.components.dlink
pyW215==0.8.0
@@ -1799,7 +1799,7 @@ pyegps==0.2.5
pyemoncms==0.1.3
# homeassistant.components.enphase_envoy
pyenphase==2.4.6
pyenphase==2.4.8
# homeassistant.components.everlights
pyeverlights==0.1.0
@@ -2487,7 +2487,7 @@ sentry-sdk==2.48.0
# homeassistant.components.homeassistant_hardware
# homeassistant.components.zha
serialx==1.1.1
serialx==1.2.2
# homeassistant.components.sfr_box
sfrbox-api==0.1.1

View File

@@ -53,7 +53,7 @@
'state': '76357',
})
# ---
# name: test_all_entities[sensor.homeassistant_analytics_hacs_community-entry]
# name: test_all_entities[sensor.homeassistant_analytics_hacs_custom-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
@@ -69,7 +69,7 @@
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.homeassistant_analytics_hacs_community',
'entity_id': 'sensor.homeassistant_analytics_hacs_custom',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -77,12 +77,12 @@
'labels': set({
}),
'name': None,
'object_id_base': 'hacs (community)',
'object_id_base': 'hacs (custom)',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'hacs (community)',
'original_name': 'hacs (custom)',
'platform': 'analytics_insights',
'previous_unique_id': None,
'suggested_object_id': None,
@@ -92,15 +92,15 @@
'unit_of_measurement': 'active installations',
})
# ---
# name: test_all_entities[sensor.homeassistant_analytics_hacs_community-state]
# name: test_all_entities[sensor.homeassistant_analytics_hacs_custom-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Homeassistant Analytics hacs (community)',
'friendly_name': 'Homeassistant Analytics hacs (custom)',
'state_class': <SensorStateClass.TOTAL: 'total'>,
'unit_of_measurement': 'active installations',
}),
'context': <ANY>,
'entity_id': 'sensor.homeassistant_analytics_hacs_community',
'entity_id': 'sensor.homeassistant_analytics_hacs_custom',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,

View File

@@ -1,5 +1,6 @@
"""Websocket tests for Voice Assistant integration."""
import asyncio
from collections.abc import AsyncGenerator, Generator
from pathlib import Path
from typing import Any
@@ -35,6 +36,7 @@ from homeassistant.components.assist_pipeline.pipeline import (
PipelineStorageCollection,
PipelineStore,
_async_local_fallback_intent_filter,
_close_async_generators,
async_create_default_pipeline,
async_get_pipeline,
async_get_pipelines,
@@ -2153,3 +2155,138 @@ async def test_acknowledge_other_agents(
text_to_speech.assert_not_called()
async_converse.assert_called_once()
get_all_targets_in_satellite_area.assert_not_called()
async def test_close_async_generators_closes_generators() -> None:
    """Test the _close_async_generators helper closes every generator."""
    closed: list[str] = []

    async def tracked(name: str) -> AsyncGenerator[bytes]:
        try:
            yield b""
        finally:
            closed.append(name)

    first = tracked("a")
    second = tracked("b")
    # Advance both generators so their finally blocks are armed.
    await anext(first)
    await anext(second)

    await _close_async_generators(first, second)

    assert closed == ["a", "b"]
async def test_close_async_generators_handles_none() -> None:
    """Test the helper skips None and non-generator objects."""
    # Neither None nor a plain object has aclose(); the helper must tolerate both.
    await _close_async_generators(None, "not a generator", None)  # type: ignore[arg-type]
async def test_close_async_generators_suppresses_errors() -> None:
    """Test the helper suppresses errors raised during aclose()."""

    async def exploding() -> AsyncGenerator[bytes]:
        try:
            yield b""
        finally:
            raise RuntimeError("boom")

    agen = exploding()
    await anext(agen)

    # The RuntimeError from the generator's finally must not escape.
    await _close_async_generators(agen)
async def test_close_async_generators_closes_all_on_cancellation() -> None:
    """Test all generators get a chance to close even on cancellation.

    Regression guard for the leak scenario: if one generator's aclose()
    raises CancelledError, the remaining generators must still be
    closed so no audio buffers or VAD state are orphaned.
    """
    closed: list[str] = []

    async def cancelling() -> AsyncGenerator[bytes]:
        try:
            yield b""
        finally:
            closed.append("cancel")
            raise asyncio.CancelledError

    async def plain() -> AsyncGenerator[bytes]:
        try:
            yield b""
        finally:
            closed.append("normal")

    first = cancelling()
    second = plain()
    await anext(first)
    await anext(second)

    with pytest.raises(asyncio.CancelledError):
        await _close_async_generators(first, second)

    # Both generators must have been attempted, not just the first.
    assert closed == ["cancel", "normal"]
async def test_pipeline_execute_closes_stt_generators(
    hass: HomeAssistant,
    mock_wake_word_provider_entity: MockWakeWordEntity,
    init_components,
    pipeline_data: assist_pipeline.pipeline.PipelineData,
    mock_chat_session: chat_session.ChatSession,
) -> None:
    """Test that PipelineInput.execute closes the STT audio generators.

    Regression coverage for a leak where early exits of the pipeline (here:
    no wake word detected) left the upstream audio generator un-closed,
    keeping audio buffers and the audio enhancer's VAD state alive.
    """
    closed = asyncio.Event()

    async def audio_data() -> AsyncGenerator[bytes]:
        try:
            yield make_10ms_chunk(b"silence!")
            yield b""
        finally:
            # Set when the generator is closed (by execute()'s cleanup
            # if the fix holds, or only by GC if it regresses).
            closed.set()

    pipeline_store = pipeline_data.pipeline_store
    pipeline_id = pipeline_store.async_get_preferred_item()
    pipeline = assist_pipeline.pipeline.async_get_pipeline(hass, pipeline_id)
    events: list[assist_pipeline.PipelineEvent] = []
    pipeline_input = assist_pipeline.pipeline.PipelineInput(
        session=mock_chat_session,
        device_id=None,
        stt_metadata=stt.SpeechMetadata(
            language="",
            format=stt.AudioFormats.WAV,
            codec=stt.AudioCodecs.PCM,
            bit_rate=stt.AudioBitRates.BITRATE_16,
            sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,
            channel=stt.AudioChannels.CHANNEL_MONO,
        ),
        stt_stream=audio_data(),
        run=assist_pipeline.pipeline.PipelineRun(
            hass,
            context=Context(),
            pipeline=pipeline,
            start_stage=assist_pipeline.PipelineStage.WAKE_WORD,
            end_stage=assist_pipeline.PipelineStage.TTS,
            event_callback=events.append,
            tts_audio_output=None,
            # VAD disabled so the short silent stream runs through
            # wake-word detection without being filtered out.
            audio_settings=assist_pipeline.AudioSettings(is_vad_enabled=False),
        ),
    )
    await pipeline_input.validate()
    await pipeline_input.execute()

    # Pipeline aborted (no wake word) — generator must have been closed.
    assert closed.is_set()

View File

@@ -319,9 +319,9 @@ async def test_decrypted_backup_streamer(
expected_padding = b"\0" * padding_size
async def send_backup() -> AsyncIterator[bytes]:
f = encrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with encrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -367,10 +367,10 @@ async def test_decrypted_backup_streamer_interrupt_stuck_reader(
stuck = asyncio.Event()
async def send_backup() -> AsyncIterator[bytes]:
f = encrypted_backup_path.open("rb")
while chunk := f.read(1024):
await stuck.wait()
yield chunk
with encrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
await stuck.wait()
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -403,9 +403,9 @@ async def test_decrypted_backup_streamer_interrupt_stuck_writer(
)
async def send_backup() -> AsyncIterator[bytes]:
f = encrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with encrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -436,9 +436,9 @@ async def test_decrypted_backup_streamer_wrong_password(hass: HomeAssistant) ->
)
async def send_backup() -> AsyncIterator[bytes]:
f = encrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with encrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -499,9 +499,9 @@ async def test_encrypted_backup_streamer(
expected_padding = b"\0" * padding_size
async def send_backup() -> AsyncIterator[bytes]:
f = decrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with decrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -562,10 +562,10 @@ async def test_encrypted_backup_streamer_interrupt_stuck_reader(
stuck = asyncio.Event()
async def send_backup() -> AsyncIterator[bytes]:
f = decrypted_backup_path.open("rb")
while chunk := f.read(1024):
await stuck.wait()
yield chunk
with decrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
await stuck.wait()
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -600,9 +600,9 @@ async def test_encrypted_backup_streamer_interrupt_stuck_writer(
)
async def send_backup() -> AsyncIterator[bytes]:
f = decrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with decrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -638,9 +638,9 @@ async def test_encrypted_backup_streamer_random_nonce(hass: HomeAssistant) -> No
)
async def send_backup() -> AsyncIterator[bytes]:
f = decrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with decrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -702,9 +702,9 @@ async def test_encrypted_backup_streamer_error(hass: HomeAssistant) -> None:
)
async def send_backup() -> AsyncIterator[bytes]:
f = decrypted_backup_path.open("rb")
while chunk := f.read(1024):
yield chunk
with decrypted_backup_path.open("rb") as f:
while chunk := f.read(1024):
yield chunk
async def open_backup() -> AsyncIterator[bytes]:
return send_backup()

View File

@@ -46,13 +46,20 @@ from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry, mock_device_registry
async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[str]]:
async def target_entities(
hass: HomeAssistant, domain: str, *, domain_excluded: str | None = None
) -> dict[str, list[str]]:
"""Create multiple entities associated with different targets.
If `domain_excluded` is provided, entities in excluded_entities will have this
domain, otherwise they will have the same domain as included_entities.
Returns a dict with the following keys:
- included_entities: List of entity_ids meant to be targeted.
- excluded_entities: List of entity_ids not meant to be targeted.
"""
domain_excluded = domain_excluded or domain
config_entry = MockConfigEntry(domain="test")
config_entry.add_to_hass(hass)
@@ -84,10 +91,10 @@ async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[st
)
entity_reg.async_update_entity(entity_area.entity_id, area_id=area.id)
entity_area_excluded = entity_reg.async_get_or_create(
domain=domain,
domain=domain_excluded,
platform="test",
unique_id=f"{domain}_area_excluded",
suggested_object_id=f"area_{domain}_excluded",
unique_id=f"{domain_excluded}_area_excluded",
suggested_object_id=f"area_{domain_excluded}_excluded",
)
entity_reg.async_update_entity(entity_area_excluded.entity_id, area_id=area.id)
@@ -107,10 +114,10 @@ async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[st
device_id=device.id,
)
entity_reg.async_get_or_create(
domain=domain,
domain=domain_excluded,
platform="test",
unique_id=f"{domain}_device_excluded",
suggested_object_id=f"device_{domain}_excluded",
unique_id=f"{domain_excluded}_device_excluded",
suggested_object_id=f"device_{domain_excluded}_excluded",
device_id=device.id,
)
@@ -123,10 +130,10 @@ async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[st
)
entity_reg.async_update_entity(entity_label.entity_id, labels={label.label_id})
entity_label_excluded = entity_reg.async_get_or_create(
domain=domain,
domain=domain_excluded,
platform="test",
unique_id=f"{domain}_label_excluded",
suggested_object_id=f"label_{domain}_excluded",
unique_id=f"{domain_excluded}_label_excluded",
suggested_object_id=f"label_{domain_excluded}_excluded",
)
entity_reg.async_update_entity(
entity_label_excluded.entity_id, labels={label.label_id}
@@ -143,10 +150,10 @@ async def target_entities(hass: HomeAssistant, domain: str) -> dict[str, list[st
f"{domain}.device2_{domain}",
],
"excluded_entities": [
f"{domain}.standalone_{domain}_excluded",
f"{domain}.label_{domain}_excluded",
f"{domain}.area_{domain}_excluded",
f"{domain}.device_{domain}_excluded",
f"{domain_excluded}.standalone_{domain_excluded}_excluded",
f"{domain_excluded}.label_{domain_excluded}_excluded",
f"{domain_excluded}.area_{domain_excluded}_excluded",
f"{domain_excluded}.device_{domain_excluded}_excluded",
],
}
@@ -215,6 +222,7 @@ def _parametrize_condition_states(
other_states: list[str | None | tuple[str | None, dict]],
required_filter_attributes: dict | None,
condition_true_if_invalid: bool,
excluded_entities_from_other_domain: bool,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
"""Parametrize states and expected condition evaluations.
@@ -227,7 +235,9 @@ def _parametrize_condition_states(
required_filter_attributes = required_filter_attributes or {}
condition_options = condition_options or {}
has_required_filter_attributes = bool(required_filter_attributes)
add_excluded_state = excluded_entities_from_other_domain or bool(
required_filter_attributes
)
def state_with_attributes(
state: str | None | tuple[str | None, dict],
@@ -242,7 +252,7 @@ def _parametrize_condition_states(
"attributes": required_filter_attributes,
},
"excluded_state": {
"state": state if has_required_filter_attributes else None,
"state": state if add_excluded_state else None,
"attributes": {},
},
"condition_true": condition_true,
@@ -254,8 +264,8 @@ def _parametrize_condition_states(
"attributes": state[1] | required_filter_attributes,
},
"excluded_state": {
"state": state[0] if has_required_filter_attributes else None,
"attributes": state[1],
"state": state[0] if add_excluded_state else None,
"attributes": state[1] if add_excluded_state else {},
},
"condition_true": condition_true,
"condition_true_first_entity": condition_true_first_entity,
@@ -307,6 +317,7 @@ def parametrize_condition_states_any(
target_states: list[str | None | tuple[str | None, dict]],
other_states: list[str | None | tuple[str | None, dict]],
required_filter_attributes: dict | None = None,
excluded_entities_from_other_domain: bool = False,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
"""Parametrize states and expected condition evaluations.
@@ -324,6 +335,7 @@ def parametrize_condition_states_any(
other_states=other_states,
required_filter_attributes=required_filter_attributes,
condition_true_if_invalid=False,
excluded_entities_from_other_domain=excluded_entities_from_other_domain,
)
@@ -334,6 +346,7 @@ def parametrize_condition_states_all(
target_states: list[str | None | tuple[str | None, dict]],
other_states: list[str | None | tuple[str | None, dict]],
required_filter_attributes: dict | None = None,
excluded_entities_from_other_domain: bool = False,
) -> list[tuple[str, dict[str, Any], list[ConditionStateDescription]]]:
"""Parametrize states and expected condition evaluations.
@@ -351,6 +364,7 @@ def parametrize_condition_states_all(
other_states=other_states,
required_filter_attributes=required_filter_attributes,
condition_true_if_invalid=True,
excluded_entities_from_other_domain=excluded_entities_from_other_domain,
)

View File

@@ -1,22 +1,11 @@
"""Test device tracker conditions."""
from contextlib import AbstractContextManager, nullcontext as does_not_raise
from typing import Any
import pytest
import voluptuous as vol
from homeassistant.components.device_tracker.const import ATTR_IN_ZONES
from homeassistant.const import (
CONF_ENTITY_ID,
CONF_OPTIONS,
CONF_TARGET,
CONF_ZONE,
STATE_HOME,
STATE_NOT_HOME,
)
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.condition import async_validate_condition_config
from tests.components.common import (
ConditionStateDescription,
@@ -29,13 +18,6 @@ from tests.components.common import (
target_entities,
)
STATE_WORK_ZONE = "work"
def _gps_state(state: str, in_zones: list[str]) -> tuple[str, dict[str, list[str]]]:
"""Create a GPS-based device tracker state with in_zones attribute."""
return (state, {ATTR_IN_ZONES: in_zones})
@pytest.fixture
async def target_device_trackers(hass: HomeAssistant) -> dict[str, list[str]]:
@@ -46,10 +28,8 @@ async def target_device_trackers(hass: HomeAssistant) -> dict[str, list[str]]:
@pytest.mark.parametrize(
"condition",
[
"device_tracker.in_zone",
"device_tracker.is_home",
"device_tracker.is_not_home",
"device_tracker.not_in_zone",
],
)
async def test_device_tracker_conditions_gated_by_labs_flag(
@@ -143,214 +123,3 @@ async def test_device_tracker_state_condition_behavior_all(
condition_options=condition_options,
states=states,
)
# Zone conditions for GPS-based trackers (have in_zones attribute)
# Each parametrize helper call expands one condition + options into
# (condition, condition_options, states) cases for the "any" behavior
# tests below; target_states should satisfy the condition, other_states
# should not.
GPS_ZONE_CONDITIONS_ANY = [
    *parametrize_condition_states_any(
        condition="device_tracker.in_zone",
        # Condition is configured with two zones: zone.home and zone.work.
        condition_options={CONF_ZONE: ["zone.home", "zone.work"]},
        target_states=[
            _gps_state(STATE_HOME, ["zone.home"]),
            _gps_state(STATE_WORK_ZONE, ["zone.work"]),
            # In both configured zones at once.
            _gps_state(STATE_HOME, ["zone.home", "zone.work"]),
        ],
        other_states=[
            # Not in any zone, or only in an unconfigured zone.
            _gps_state(STATE_NOT_HOME, []),
            _gps_state("school", ["zone.school"]),
        ],
    ),
    *parametrize_condition_states_any(
        condition="device_tracker.not_in_zone",
        condition_options={CONF_ZONE: ["zone.home", "zone.work"]},
        # not_in_zone mirrors in_zone: target and other states are swapped.
        target_states=[
            _gps_state(STATE_NOT_HOME, []),
            _gps_state("school", ["zone.school"]),
        ],
        other_states=[
            _gps_state(STATE_HOME, ["zone.home"]),
            _gps_state(STATE_WORK_ZONE, ["zone.work"]),
            _gps_state(STATE_HOME, ["zone.home", "zone.work"]),
        ],
    ),
]
# Same GPS zone cases as GPS_ZONE_CONDITIONS_ANY, expanded for the "all"
# condition behavior (used by test_device_tracker_zone_condition_behavior_all).
GPS_ZONE_CONDITIONS_ALL = [
    *parametrize_condition_states_all(
        condition="device_tracker.in_zone",
        condition_options={CONF_ZONE: ["zone.home", "zone.work"]},
        target_states=[
            _gps_state(STATE_HOME, ["zone.home"]),
            _gps_state(STATE_WORK_ZONE, ["zone.work"]),
            # In both configured zones at once.
            _gps_state(STATE_HOME, ["zone.home", "zone.work"]),
        ],
        other_states=[
            _gps_state(STATE_NOT_HOME, []),
            _gps_state("school", ["zone.school"]),
        ],
    ),
    *parametrize_condition_states_all(
        condition="device_tracker.not_in_zone",
        condition_options={CONF_ZONE: ["zone.home", "zone.work"]},
        # Mirror of in_zone: target and other states are swapped.
        target_states=[
            _gps_state(STATE_NOT_HOME, []),
            _gps_state("school", ["zone.school"]),
        ],
        other_states=[
            _gps_state(STATE_HOME, ["zone.home"]),
            _gps_state(STATE_WORK_ZONE, ["zone.work"]),
            _gps_state(STATE_HOME, ["zone.home", "zone.work"]),
        ],
    ),
]
# Zone conditions for scanner-based trackers (no in_zones attribute)
# NOTE(review): these states are plain strings with no attributes, so they
# exercise the home/not_home path rather than in_zones matching — confirm
# against the condition implementation.
SCANNER_ZONE_CONDITIONS_ANY = [
    *parametrize_condition_states_any(
        condition="device_tracker.in_zone",
        condition_options={CONF_ZONE: ["zone.home"]},
        target_states=[STATE_HOME],
        other_states=[STATE_NOT_HOME],
    ),
    *parametrize_condition_states_any(
        condition="device_tracker.not_in_zone",
        condition_options={CONF_ZONE: ["zone.home"]},
        # Mirror of in_zone: target and other states are swapped.
        target_states=[STATE_NOT_HOME],
        other_states=[STATE_HOME],
    ),
]
# Scanner-based zone cases (plain home/not_home states, no in_zones
# attribute) expanded for the "all" condition behavior.
SCANNER_ZONE_CONDITIONS_ALL = [
    *parametrize_condition_states_all(
        condition="device_tracker.in_zone",
        condition_options={CONF_ZONE: ["zone.home"]},
        target_states=[STATE_HOME],
        other_states=[STATE_NOT_HOME],
    ),
    *parametrize_condition_states_all(
        condition="device_tracker.not_in_zone",
        condition_options={CONF_ZONE: ["zone.home"]},
        # Mirror of in_zone: target and other states are swapped.
        target_states=[STATE_NOT_HOME],
        other_states=[STATE_HOME],
    ),
]
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    # Renamed from "trigger"/"trigger_options": this file tests conditions,
    # and sibling tests here use "condition"/"condition_options".
    ("condition", "condition_options", "expected_result"),
    [
        # Valid configurations
        (
            "device_tracker.in_zone",
            {CONF_ZONE: ["zone.home", "zone.work"]},
            does_not_raise(),
        ),
        (
            "device_tracker.in_zone",
            {CONF_ZONE: "zone.home"},
            does_not_raise(),
        ),
        (
            "device_tracker.not_in_zone",
            {CONF_ZONE: ["zone.home"]},
            does_not_raise(),
        ),
        # Invalid configurations
        (
            "device_tracker.in_zone",
            {CONF_ZONE: []},
            pytest.raises(vol.Invalid),
        ),
        (
            "device_tracker.in_zone",
            {},
            pytest.raises(vol.Invalid),
        ),
        (
            "device_tracker.in_zone",
            {CONF_ZONE: ["light.living_room"]},
            pytest.raises(vol.Invalid),
        ),
    ],
)
async def test_device_tracker_zone_condition_validation(
    hass: HomeAssistant,
    condition: str,
    condition_options: dict[str, Any],
    expected_result: AbstractContextManager,
) -> None:
    """Test device_tracker zone condition config validation.

    Valid zone options (single id or non-empty list) must validate; an
    empty or missing zone list, or a non-zone entity id, must raise
    ``vol.Invalid``.
    """
    with expected_result:
        await async_validate_condition_config(
            hass,
            {
                "condition": condition,
                CONF_TARGET: {CONF_ENTITY_ID: "device_tracker.test"},
                CONF_OPTIONS: condition_options,
            },
        )
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    ("condition_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("device_tracker"),
)
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [*GPS_ZONE_CONDITIONS_ANY, *SCANNER_ZONE_CONDITIONS_ANY],
)
async def test_device_tracker_zone_condition_behavior_any(
    hass: HomeAssistant,
    target_device_trackers: dict[str, list[str]],
    condition_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    condition: str,
    condition_options: dict[str, Any],
    states: list[ConditionStateDescription],
) -> None:
    """Test the device tracker zone condition with the 'any' behavior."""
    # Forward the parametrized inputs unchanged to the shared helper.
    helper_kwargs = {
        "target_entities": target_device_trackers,
        "condition_target_config": condition_target_config,
        "entity_id": entity_id,
        "entities_in_target": entities_in_target,
        "condition": condition,
        "condition_options": condition_options,
        "states": states,
    }
    await assert_condition_behavior_any(hass, **helper_kwargs)
@pytest.mark.usefixtures("enable_labs_preview_features")
@pytest.mark.parametrize(
    ("condition_target_config", "entity_id", "entities_in_target"),
    parametrize_target_entities("device_tracker"),
)
@pytest.mark.parametrize(
    ("condition", "condition_options", "states"),
    [*GPS_ZONE_CONDITIONS_ALL, *SCANNER_ZONE_CONDITIONS_ALL],
)
async def test_device_tracker_zone_condition_behavior_all(
    hass: HomeAssistant,
    target_device_trackers: dict[str, list[str]],
    condition_target_config: dict,
    entity_id: str,
    entities_in_target: int,
    condition: str,
    condition_options: dict[str, Any],
    states: list[ConditionStateDescription],
) -> None:
    """Test the device tracker zone condition with the 'all' behavior."""
    # Forward the parametrized inputs unchanged to the shared helper.
    helper_kwargs = {
        "target_entities": target_device_trackers,
        "condition_target_config": condition_target_config,
        "entity_id": entity_id,
        "entities_in_target": entities_in_target,
        "condition": condition,
        "condition_options": condition_options,
        "states": states,
    }
    await assert_condition_behavior_all(hass, **helper_kwargs)

View File

@@ -5,7 +5,14 @@ from __future__ import annotations
from collections.abc import Generator
from unittest.mock import AsyncMock, patch
from duco.models import BoardInfo, LanInfo, Node, NodeGeneralInfo, NodeVentilationInfo
from duco.models import (
BoardInfo,
LanInfo,
Node,
NodeGeneralInfo,
NodeSensorInfo,
NodeVentilationInfo,
)
import pytest
from homeassistant.components.duco.const import DOMAIN
@@ -62,7 +69,7 @@ def mock_lan_info() -> LanInfo:
@pytest.fixture
def mock_nodes() -> list[Node]:
"""Return a list with a single BOX node."""
"""Return a list of nodes covering all supported types."""
return [
Node(
node_id=1,
@@ -82,7 +89,63 @@ def mock_nodes() -> list[Node]:
mode="AUTO",
flow_lvl_tgt=0,
),
)
sensor=NodeSensorInfo(
co2=None,
iaq_co2=None,
rh=None,
iaq_rh=None,
),
),
Node(
node_id=2,
general=NodeGeneralInfo(
node_type="UCCO2",
sub_type=0,
network_type="RF",
parent=1,
asso=1,
name="Office CO2",
identify=0,
),
ventilation=NodeVentilationInfo(
state="AUTO",
time_state_remain=0,
time_state_end=0,
mode="-",
flow_lvl_tgt=None,
),
sensor=NodeSensorInfo(
co2=405,
iaq_co2=80,
rh=None,
iaq_rh=None,
),
),
Node(
node_id=113,
general=NodeGeneralInfo(
node_type="BSRH",
sub_type=0,
network_type="RF",
parent=1,
asso=1,
name="Bathroom RH",
identify=0,
),
ventilation=NodeVentilationInfo(
state="AUTO",
time_state_remain=0,
time_state_end=0,
mode="-",
flow_lvl_tgt=None,
),
sensor=NodeSensorInfo(
co2=None,
iaq_co2=None,
rh=42.0,
iaq_rh=85,
),
),
]

View File

@@ -0,0 +1,309 @@
# serializer version: 1
# name: test_sensor_entities_state[sensor.bathroom_rh_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.bathroom_rh_humidity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Humidity',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.HUMIDITY: 'humidity'>,
'original_icon': None,
'original_name': 'Humidity',
'platform': 'duco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': 'aa:bb:cc:dd:ee:ff_113_humidity',
'unit_of_measurement': '%',
})
# ---
# name: test_sensor_entities_state[sensor.bathroom_rh_humidity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'humidity',
'friendly_name': 'Bathroom RH Humidity',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.bathroom_rh_humidity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '42.0',
})
# ---
# name: test_sensor_entities_state[sensor.bathroom_rh_humidity_air_quality_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.bathroom_rh_humidity_air_quality_index',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Humidity air quality index',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Humidity air quality index',
'platform': 'duco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'iaq_rh',
'unique_id': 'aa:bb:cc:dd:ee:ff_113_iaq_rh',
'unit_of_measurement': '%',
})
# ---
# name: test_sensor_entities_state[sensor.bathroom_rh_humidity_air_quality_index-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Bathroom RH Humidity air quality index',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.bathroom_rh_humidity_air_quality_index',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '85',
})
# ---
# name: test_sensor_entities_state[sensor.living_ventilation_state-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'area_id': None,
'capabilities': dict({
'options': list([
'auto',
'aut1',
'aut2',
'aut3',
'man1',
'man2',
'man3',
'empt',
'cnt1',
'cnt2',
'cnt3',
'man1x2',
'man2x2',
'man3x2',
'man1x3',
'man2x3',
'man3x3',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.living_ventilation_state',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Ventilation state',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.ENUM: 'enum'>,
'original_icon': None,
'original_name': 'Ventilation state',
'platform': 'duco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'ventilation_state',
'unique_id': 'aa:bb:cc:dd:ee:ff_1_ventilation_state',
'unit_of_measurement': None,
})
# ---
# name: test_sensor_entities_state[sensor.living_ventilation_state-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'enum',
'friendly_name': 'Living Ventilation state',
'options': list([
'auto',
'aut1',
'aut2',
'aut3',
'man1',
'man2',
'man3',
'empt',
'cnt1',
'cnt2',
'cnt3',
'man1x2',
'man2x2',
'man3x2',
'man1x3',
'man2x3',
'man3x3',
]),
}),
'context': <ANY>,
'entity_id': 'sensor.living_ventilation_state',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'auto',
})
# ---
# name: test_sensor_entities_state[sensor.office_co2_carbon_dioxide-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.office_co2_carbon_dioxide',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Carbon dioxide',
'options': dict({
}),
'original_device_class': <SensorDeviceClass.CO2: 'carbon_dioxide'>,
'original_icon': None,
'original_name': 'Carbon dioxide',
'platform': 'duco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': 'aa:bb:cc:dd:ee:ff_2_co2',
'unit_of_measurement': 'ppm',
})
# ---
# name: test_sensor_entities_state[sensor.office_co2_carbon_dioxide-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'carbon_dioxide',
'friendly_name': 'Office CO2 Carbon dioxide',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': 'ppm',
}),
'context': <ANY>,
'entity_id': 'sensor.office_co2_carbon_dioxide',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '405',
})
# ---
# name: test_sensor_entities_state[sensor.office_co2_co2_air_quality_index-entry]
EntityRegistryEntrySnapshot({
'aliases': list([
None,
]),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.office_co2_co2_air_quality_index',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'CO2 air quality index',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'CO2 air quality index',
'platform': 'duco',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'iaq_co2',
'unique_id': 'aa:bb:cc:dd:ee:ff_2_iaq_co2',
'unit_of_measurement': '%',
})
# ---
# name: test_sensor_entities_state[sensor.office_co2_co2_air_quality_index-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Office CO2 CO2 air quality index',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.office_co2_co2_air_quality_index',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '80',
})
# ---

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, patch
from duco.exceptions import DucoConnectionError, DucoError
from freezegun.api import FrozenDateTimeFactory
@@ -17,7 +17,7 @@ from homeassistant.components.fan import (
SERVICE_SET_PERCENTAGE,
SERVICE_SET_PRESET_MODE,
)
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNAVAILABLE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
@@ -27,6 +27,20 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat
_FAN_ENTITY = "fan.living"
@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_duco_client: AsyncMock,
) -> MockConfigEntry:
    """Set up only the fan platform for testing."""
    entry = mock_config_entry
    entry.add_to_hass(hass)
    # Restrict the integration to the fan platform so no unrelated
    # entities are created during these tests.
    with patch("homeassistant.components.duco.PLATFORMS", [Platform.FAN]):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
    return entry
@pytest.mark.usefixtures("init_integration")
async def test_fan_entity_state(
hass: HomeAssistant,

View File

@@ -0,0 +1,77 @@
"""Tests for the Duco sensor platform."""
from __future__ import annotations
from unittest.mock import AsyncMock, patch
from duco.exceptions import DucoConnectionError
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.duco.const import SCAN_INTERVAL
from homeassistant.const import STATE_UNAVAILABLE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_duco_client: AsyncMock,
) -> MockConfigEntry:
    """Set up only the sensor platform for testing."""
    entry = mock_config_entry
    entry.add_to_hass(hass)
    # Restrict the integration to the sensor platform so no unrelated
    # entities are created during these tests.
    with patch("homeassistant.components.duco.PLATFORMS", [Platform.SENSOR]):
        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
    return entry
@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_sensor_entities_state(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    mock_config_entry: MockConfigEntry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test that sensor entities are created with the correct state."""
    # Compare every sensor entity (registry entry + state) against the
    # stored snapshot.
    entry_id = mock_config_entry.entry_id
    await snapshot_platform(hass, entity_registry, snapshot, entry_id)
@pytest.mark.usefixtures("init_integration")
async def test_iaq_sensor_entities_disabled_by_default(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test that IAQ sensor entities are disabled by default."""
    iaq_entity_ids = [
        "sensor.bathroom_rh_humidity_air_quality_index",
        "sensor.office_co2_co2_air_quality_index",
    ]
    for iaq_entity_id in iaq_entity_ids:
        registry_entry = entity_registry.async_get(iaq_entity_id)
        assert registry_entry is not None
        # Disabled by the integration itself, not by the user or config entry.
        assert registry_entry.disabled_by == er.RegistryEntryDisabler.INTEGRATION
@pytest.mark.usefixtures("init_integration")
async def test_coordinator_update_marks_unavailable(
    hass: HomeAssistant,
    mock_duco_client: AsyncMock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test that sensor entities become unavailable when the coordinator fails.

    Simulates a connection failure on the next scheduled refresh and
    verifies an existing sensor entity is marked unavailable.
    """
    # Configure the existing mock rather than replacing it with a fresh
    # AsyncMock, so any spec and recorded call history are preserved.
    mock_duco_client.async_get_nodes.side_effect = DucoConnectionError("offline")

    # Advance past the poll interval and fire the scheduled refresh.
    freezer.tick(SCAN_INTERVAL)
    async_fire_time_changed(hass)
    await hass.async_block_till_done(wait_background_tasks=True)

    state = hass.states.get("sensor.office_co2_carbon_dioxide")
    assert state is not None
    assert state.state == STATE_UNAVAILABLE

Some files were not shown because too many files have changed in this diff Show More