Compare commits

..

101 Commits

Author SHA1 Message Date
Franck Nijhof
6a3051718a Add reconfiguration flow to Elgato (#168036) 2026-04-13 13:06:33 +02:00
Retha Runolfsson
95c3624b01 Bump PySwitchbot to 2.0.1 (#168090) 2026-04-13 12:43:14 +02:00
Tom Matheussen
f53b629dfd Bump satel-integra to 1.1.1 (#168091) 2026-04-13 12:41:56 +02:00
Giga77
d901541f48 Add hacf/reviewers as codeowners to Freebox (#168050) 2026-04-13 12:13:14 +02:00
Giga77
cdcf810506 Remove hacf-fr from Epic Games Store (#168038) 2026-04-13 12:02:47 +02:00
Giga77
274146cbb2 Remove hacf-fr from Synology DSM (#168039) 2026-04-13 11:55:10 +02:00
Giga77
b8cdd8dccc Remove hacf-fr (#168054) 2026-04-13 11:53:43 +02:00
Raphael Hehl
5abaa2ae72 Bump python-melcloud to 0.1.3 (#168086) 2026-04-13 11:34:05 +02:00
Simone Chemelli
4a511a3e53 Bump aioamazondevices to 13.4.0 (#167984) 2026-04-13 11:27:12 +02:00
Andrew Jackson
81a657ab2c Bump mastodon.py to 2.2.1 (#168084) 2026-04-13 11:11:30 +02:00
Giga77
e9a79ee0e5 Replace hacf-fr by hacf-fr reviewers team (#168056) 2026-04-13 11:06:40 +02:00
Fabian Neundorf
ffd439abc5 Add support for KM7576 in Miele integration (#168069) 2026-04-13 10:30:33 +02:00
Niracler
982a2b8af7 Bump PySrDaliGateway to 0.20.4 (#168078) 2026-04-13 10:28:14 +02:00
Raphael Hehl
ef589f9b46 Add unifi_discovery integration, migrate unifiprotect discovery (#168030)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-04-13 09:50:39 +02:00
Denis Shulyaka
81f8319af4 Fix llm tool results mutation (#167485)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-13 09:33:37 +02:00
Richard Kroegel
a061e47bec Improve eurotronic_cometblue tests (#168046) 2026-04-13 07:16:22 +02:00
Franck Nijhof
e5c49b6455 Set parallel updates to 0 for Sensor.Community (#168063) 2026-04-13 06:11:16 +02:00
Christian Lackas
5c51820869 Add Heatbox3 to ViCare unsupported devices list (#168067) 2026-04-13 05:49:12 +02:00
Franck Nijhof
eb64589115 Translate coordinator exceptions for Tailwind (#168027) 2026-04-12 18:45:37 +02:00
Franck Nijhof
4ebf0bf0b6 Fix untranslated button error in Tailwind (#168031) 2026-04-12 12:20:12 +02:00
Franck Nijhof
f521838bf1 Add reconfiguration flow to Tailwind (#168033) 2026-04-12 11:50:52 +02:00
Franck Nijhof
efb0162c6f Set parallel updates for Tailwind platforms (#168025) 2026-04-12 11:13:13 +02:00
Franck Nijhof
ba62b6cbda Handle connection errors in Peblar zeroconf confirm step (#167998) 2026-04-12 10:11:13 +02:00
Franck Nijhof
4e13731838 Extract entity template functions into an entity Jinja2 extension (#167992) 2026-04-12 10:00:53 +02:00
Franck Nijhof
4f255c23dd Translate coordinator exceptions for Twente Milieu (#168005) 2026-04-11 23:05:55 +02:00
Franck Nijhof
af69e9b5de Translate exceptions raised by Elgato (#168004) 2026-04-11 23:05:47 +02:00
Franck Nijhof
df734655f6 Remove unused service constants from Twente Milieu (#168000) 2026-04-11 22:27:04 +02:00
Franck Nijhof
4926ea9ef0 Set parallel updates to 0 for RDW platforms (#168003) 2026-04-11 22:26:36 +02:00
Franck Nijhof
322dc2adeb Add DHCP discovery for known Elgato devices (#168002) 2026-04-11 22:26:22 +02:00
Franck Nijhof
2e648aca8b Mark exception-translations rule as done for Peblar (#167997) 2026-04-11 21:56:24 +02:00
Franck Nijhof
dac2777729 Mark entity-translations rule as done for Twente Milieu (#168001) 2026-04-11 21:56:01 +02:00
Willem-Jan van Rootselaar
1e1e37637f Bump python-bsblan to version 5.1.4 (#167987) 2026-04-11 18:45:56 +02:00
Joakim Plate
d695250507 Fix gardena entity categories and percentage values (#167986) 2026-04-11 18:44:03 +02:00
Richard Kroegel
ab7b257785 Add eurotronic cometblue integration (#165626)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-04-11 16:35:04 +02:00
J. Nick Koston
3b1fa609f7 Bump aioesphomeapi to 44.13.3 (#167966) 2026-04-11 16:28:29 +02:00
Kevin Stillhammer
822fae227a Add base_coords for OptionsFlow and action call in waze_travel_time (#166642) 2026-04-11 16:28:02 +02:00
J. Nick Koston
2fa0bdb2dc Fix ESPHome cold/warm white color temperature read-back (#167972) 2026-04-11 16:24:50 +02:00
Andres Ruiz
8a43d1a12c Add remote start/stop button for supported Subaru vehicles (#167100)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-04-11 16:22:19 +02:00
Erwin Douna
483265a707 Portainer fix fetching swarm stacks (#167979) 2026-04-11 16:21:16 +02:00
Raphael Hehl
84f5cd8a12 Bump uiprotect to 10.2.6 (#167978)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-04-11 14:32:01 +01:00
J. Diego Rodríguez Royo
e23da7a5f0 Bump aiohomeconnect to 0.36.0 (#167973) 2026-04-11 13:55:29 +02:00
Florent Thoumie
fe1e12a298 Improve iaqualink reauthentication flow (#167931)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-11 13:06:49 +02:00
tronikos
938eacd777 Bump opower to 0.18.1 (#167967) 2026-04-11 13:04:25 +02:00
David Bonnes
ba7a959727 Remove unused constant from Evohome's const.py (#167969) 2026-04-11 12:23:33 +02:00
Fabian Munkes
966eadad69 Follow up to adding support for sound modes to Music Assistant (#167929) 2026-04-11 11:07:52 +02:00
Fabian Munkes
f34ed8f8ba Follow-up to player options: switch entities in Music Assistant (#167964) 2026-04-11 10:46:20 +02:00
Brett Adams
ac4b253a2f Add LoginRequired exception handling to Teslemetry coordinators (#167959) 2026-04-11 10:37:37 +02:00
Fabian Munkes
640fea89e0 Follow-up to player options: number entities in Music Assistant (#167963) 2026-04-11 10:33:27 +02:00
Fabian Munkes
fdf1b6536a Follow-up to player options: text entities in Music Assistant (#167962) 2026-04-11 10:27:43 +02:00
Raphael Hehl
974047664c Bump unifi-discovery to version 1.4.0 (#167958)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-04-10 22:26:15 -10:00
Franck Nijhof
03d6f5a756 Update cryptography to 46.0.7 (#167960) 2026-04-11 10:00:37 +02:00
epenet
9f1c396407 Unlink tomorrowio coordinator from config entry (#167901) 2026-04-11 09:49:54 +02:00
J. Nick Koston
054b8ad534 Bump aioesphomeapi to 44.13.2 (#167952) 2026-04-10 16:48:34 -10:00
J. Nick Koston
b93cdc64f3 Bump bleak-esphome to 3.7.3 (#167953) 2026-04-10 16:27:52 -10:00
Fabian Munkes
59248e5414 Bump music-assistant-client to 1.3.5 (#167947) 2026-04-11 01:07:18 +02:00
Michael
a5b830cc34 Don't create cpu temperature sensor when not supported in FRITZ!Box Tools (#167905) 2026-04-11 00:04:23 +02:00
James
299562d6ee Set integer display precision for Yardian duration sensors (#165896)
Co-authored-by: barneyonline <barneyonline@users.noreply.github.com>
2026-04-10 23:58:57 +02:00
Denis Shulyaka
47cc31067c Check if model exists in Anthropic config flow (#167844) 2026-04-11 00:06:17 +03:00
Martin Hjelmare
f050407bfa Fix tibber price sensor first state update (#167938)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-04-10 22:50:36 +02:00
Tomeamis
a202742fc6 Z-Wave.me: Make Light support the Transition feature (#167840) 2026-04-10 22:45:36 +02:00
Ronald van der Meer
63a0b5d2ff Bump python-duco-client to 0.3.0 (#167936) 2026-04-10 22:19:39 +02:00
Raman Gupta
53ed4b2c77 Refactor Vizio tests: shared fixtures, snapshot_platform, reduced parametrize (#167935)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 22:19:25 +02:00
Tomer
2f91c6b050 Promote victron_gx integration to silver quality scale (#167789)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-10 20:24:54 +02:00
Raj Laud
d17cb0e096 Fix Victron BLE storage errors caused by non-serializable value_fn callable in sensor entity description (#167819)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-10 20:22:55 +02:00
mettolen
c9ee533916 Update Liebherr to platinum (#167836) 2026-04-10 20:17:04 +02:00
Raman Gupta
e88022c2cc Add remote platform to Vizio integration (#165820)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-10 20:13:11 +02:00
Nick Haghiri
d633ac8120 Improve error logging for Backblaze B2 upload failures (#167721) 2026-04-10 20:12:24 +02:00
potelux
fb90237ae3 Proxy Jellyfin artwork through HA so thumbnails work over HTTPS (#167238)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-10 20:08:09 +02:00
Artur Pragacz
4658f4246d Allow frontend-handled issues to omit description in strings (#167928) 2026-04-10 19:59:05 +02:00
tronikos
99e4c87f5e Add reauthentication and reconfiguration flows in Google Weather to reach platinum (#166106) 2026-04-10 19:55:33 +02:00
Abílio Costa
7690d9570c Narrow log check on ring event test (#167927) 2026-04-10 18:26:31 +01:00
On Freund
00560abd9c Bump pyrisco to 0.6.8 (#167924) 2026-04-10 18:47:51 +02:00
Bram Kragten
b6d4fca477 Update frontend to 20260325.7 (#167922) 2026-04-10 18:46:06 +02:00
Nathan Spencer
44e51c1103 Bump pylitterbot to 2025.2.1 (#167921) 2026-04-10 18:21:45 +02:00
Erik Montnemery
3ad2c5e574 Fix config validation in trigger and condition tests (#167683) 2026-04-10 18:20:04 +02:00
Marcello
212c9b1a94 Bump fluss-api to 0.2.4 (#167680)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-04-10 18:03:49 +02:00
epenet
b670172867 Bump tuya-device-handlers to 0.0.17 (#167904) 2026-04-10 18:01:28 +02:00
David Bonnes
23bcde09b0 Add Buttons to natively reset the mode of Evohome entities (#167550) 2026-04-10 18:00:30 +02:00
Tom Matheussen
62717fd3f5 Add support for encrypted connection to Satel Integra (#167372) 2026-04-10 17:57:26 +02:00
Simone Chemelli
86b72501ad Add faulty/anomaly binary sensors to Comelit (#167201) 2026-04-10 17:51:49 +02:00
Stef Coene
59827967e6 Velbus reconfigure fix (#167471)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-10 17:50:56 +02:00
Thomas D
fe5d45ed57 Fix light on action for qbus integration (#167917) 2026-04-10 17:24:00 +02:00
Florent Thoumie
cf87e9ab72 iaqualink: move custom update logic to DataUpdateCoordinator (#167816)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-10 17:15:11 +02:00
Alex Merkel
64907ad7e2 [LG Soundbar] Fix incorrect state for some models (#167094) 2026-04-10 17:10:18 +02:00
Noah Husby
9e111b2418 Bump aiorussound to 5.0.0 (#167914) 2026-04-10 17:07:07 +02:00
Joost Lekkerkerker
97d64ab37c Bump zinvolt to 0.4.3 (#167908) 2026-04-10 17:02:51 +02:00
Tomer
547830b450 Victron GX switch platform (#167859)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-10 17:00:39 +02:00
Andrew Brainwood
f2f605b425 Add Preset button support for Bond cover devices (#167881)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Erwin Douna <e.douna@gmail.com>
2026-04-10 17:00:25 +02:00
Thomas D
781b5e1c0e Bump qbusmqttapi to 1.4.3 (#167909) 2026-04-10 16:57:05 +02:00
panosmz
68a7cbb620 Bump oasatelematics to 0.4 (#167911) 2026-04-10 16:48:11 +02:00
epenet
a6a716571d Use runtime_data in tesla_wall_connector (#167893)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 15:38:04 +02:00
epenet
ba09a54a37 Use runtime_data in tradfri integration (#167896)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 14:19:02 +02:00
puddly
7125796aac Temporarily stop the Z2M app when installing firmwares (#163958)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-04-10 14:16:47 +02:00
Robert Resch
ce9875806d Use runtime_data in launch_library integration (#167887)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-04-10 13:47:37 +02:00
Pierre Hauweele
7cf422361b Make the scaffold script ask for the integration type (#167725) 2026-04-10 12:49:40 +02:00
Robert Resch
9a97f1e8d2 Use runtime_data in soma integration (#167890)
Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
2026-04-10 12:49:39 +02:00
Robert Resch
777f78f74d Use runtime_data in litejet integration (#167888) 2026-04-10 12:35:31 +02:00
Joost Lekkerkerker
10c922b21f Support Chess.com accounts with no name (#167824) 2026-04-10 12:34:05 +02:00
epenet
aa293ba2f4 Add ability to load custom Tuya quirks (#166952) 2026-04-10 12:31:36 +02:00
Tomer
5edcfdf621 Mark docs-examples and docs-known-limitations as done for victron_gx (#167866)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-04-10 12:13:06 +02:00
Andre v d Walt
244ed14019 smartthings: add Samsung OCF AC purify switch (#167705) 2026-04-10 12:12:36 +02:00
347 changed files with 12328 additions and 2532 deletions

View File

@@ -332,6 +332,7 @@ homeassistant.components.letpot.*
homeassistant.components.lg_infrared.*
homeassistant.components.libre_hardware_monitor.*
homeassistant.components.lidarr.*
homeassistant.components.liebherr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linkplay.*

24
CODEOWNERS generated
View File

@@ -489,8 +489,8 @@ CLAUDE.md @home-assistant/core
/homeassistant/components/environment_canada/ @gwww @michaeldavie
/tests/components/environment_canada/ @gwww @michaeldavie
/homeassistant/components/ephember/ @ttroy50 @roberty99
/homeassistant/components/epic_games_store/ @hacf-fr @Quentame
/tests/components/epic_games_store/ @hacf-fr @Quentame
/homeassistant/components/epic_games_store/ @Quentame
/tests/components/epic_games_store/ @Quentame
/homeassistant/components/epion/ @lhgravendeel
/tests/components/epion/ @lhgravendeel
/homeassistant/components/epson/ @pszafer
@@ -505,6 +505,8 @@ CLAUDE.md @home-assistant/core
/tests/components/essent/ @jaapp
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/eurotronic_cometblue/ @rikroe
/tests/components/eurotronic_cometblue/ @rikroe
/homeassistant/components/event/ @home-assistant/core
/tests/components/event/ @home-assistant/core
/homeassistant/components/evohome/ @zxdavb
@@ -564,8 +566,8 @@ CLAUDE.md @home-assistant/core
/homeassistant/components/fortios/ @kimfrellsen
/homeassistant/components/foscam/ @Foscam-wangzhengyu
/tests/components/foscam/ @Foscam-wangzhengyu
/homeassistant/components/freebox/ @hacf-fr @Quentame
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freebox/ @hacf-fr/reviewers @Quentame
/tests/components/freebox/ @hacf-fr/reviewers @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/freshr/ @SierraNL
@@ -1055,8 +1057,8 @@ CLAUDE.md @home-assistant/core
/tests/components/met/ @danielhiversen
/homeassistant/components/met_eireann/ @DylanGore
/tests/components/met_eireann/ @DylanGore
/homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
/homeassistant/components/meteo_france/ @hacf-fr/reviewers @oncleben31 @Quentame
/tests/components/meteo_france/ @hacf-fr/reviewers @oncleben31 @Quentame
/homeassistant/components/meteo_lt/ @xE1H
/tests/components/meteo_lt/ @xE1H
/homeassistant/components/meteoalarm/ @rolfberkenbosch
@@ -1148,8 +1150,8 @@ CLAUDE.md @home-assistant/core
/homeassistant/components/netatmo/ @cgtobi
/tests/components/netatmo/ @cgtobi
/homeassistant/components/netdata/ @fabaff
/homeassistant/components/netgear/ @hacf-fr @Quentame @starkillerOG
/tests/components/netgear/ @hacf-fr @Quentame @starkillerOG
/homeassistant/components/netgear/ @Quentame @starkillerOG
/tests/components/netgear/ @Quentame @starkillerOG
/homeassistant/components/netgear_lte/ @tkdrob
/tests/components/netgear_lte/ @tkdrob
/homeassistant/components/network/ @home-assistant/core
@@ -1692,8 +1694,8 @@ CLAUDE.md @home-assistant/core
/tests/components/syncthing/ @zhulik
/homeassistant/components/syncthru/ @nielstron
/tests/components/syncthru/ @nielstron
/homeassistant/components/synology_dsm/ @hacf-fr @Quentame @mib1185
/tests/components/synology_dsm/ @hacf-fr @Quentame @mib1185
/homeassistant/components/synology_dsm/ @Quentame @mib1185
/tests/components/synology_dsm/ @Quentame @mib1185
/homeassistant/components/synology_srm/ @aerialls
/homeassistant/components/system_bridge/ @timmo001
/tests/components/system_bridge/ @timmo001
@@ -1826,6 +1828,8 @@ CLAUDE.md @home-assistant/core
/homeassistant/components/unifi_access/ @imhotep @RaHehl
/tests/components/unifi_access/ @imhotep @RaHehl
/homeassistant/components/unifi_direct/ @tofuSCHNITZEL
/homeassistant/components/unifi_discovery/ @RaHehl
/tests/components/unifi_discovery/ @RaHehl
/homeassistant/components/unifiled/ @florisvdk
/homeassistant/components/unifiprotect/ @RaHehl
/tests/components/unifiprotect/ @RaHehl

View File

@@ -6,6 +6,7 @@
"unifi",
"unifi_access",
"unifi_direct",
"unifi_discovery",
"unifiled",
"unifiprotect"
]

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==13.3.2"]
"requirements": ["aioamazondevices==13.4.0"]
}

View File

@@ -105,22 +105,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
await client.models.list(timeout=10.0)
async def get_model_list(client: anthropic.AsyncAnthropic) -> list[SelectOptionDict]:
"""Get list of available models."""
try:
models = (await client.models.list()).data
except anthropic.AnthropicError:
models = []
_LOGGER.debug("Available models: %s", models)
return [
SelectOptionDict(
label=model_info.display_name,
value=model_alias(model_info.id),
)
for model_info in models
]
class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anthropic."""
@@ -217,6 +201,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
"""Flow for managing conversation subentries."""
options: dict[str, Any]
model_info: anthropic.types.ModelInfo
@property
def _is_new(self) -> bool:
@@ -330,15 +315,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
) -> SubentryFlowResult:
"""Manage advanced options."""
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
step_schema: VolDictType = {
vol.Optional(
CONF_CHAT_MODEL,
default=DEFAULT[CONF_CHAT_MODEL],
): SelectSelector(
SelectSelectorConfig(
options=await self._get_model_list(), custom_value=True
)
SelectSelectorConfig(options=self._get_model_list(), custom_value=True)
),
vol.Optional(
CONF_MAX_TOKENS,
@@ -363,6 +347,25 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
if user_input is not None:
self.options.update(user_input)
coordinator = self._get_entry().runtime_data
self.model_info, status = coordinator.get_model_info(
self.options[CONF_CHAT_MODEL]
)
if not status:
# Couldn't find the model in the cached list, try to fetch it directly
client = coordinator.client
try:
self.model_info = await client.models.retrieve(
self.options[CONF_CHAT_MODEL], timeout=10.0
)
except anthropic.NotFoundError:
errors[CONF_CHAT_MODEL] = "model_not_found"
except anthropic.AnthropicError as err:
errors[CONF_CHAT_MODEL] = "api_error"
description_placeholders["message"] = (
err.message if isinstance(err, anthropic.APIError) else str(err)
)
if not errors:
return await self.async_step_model()
@@ -372,6 +375,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
vol.Schema(step_schema), self.options
),
errors=errors,
description_placeholders=description_placeholders,
)
async def async_step_model(
@@ -501,13 +505,16 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
last_step=True,
)
async def _get_model_list(self) -> list[SelectOptionDict]:
def _get_model_list(self) -> list[SelectOptionDict]:
"""Get list of available models."""
client = anthropic.AsyncAnthropic(
api_key=self._get_entry().data[CONF_API_KEY],
http_client=get_async_client(self.hass),
)
return await get_model_list(client)
coordinator = self._get_entry().runtime_data
return [
SelectOptionDict(
label=model_info.display_name,
value=model_alias(model_info.id),
)
for model_info in coordinator.data or []
]
async def _get_location_data(self) -> dict[str, str]:
"""Get approximate location data of the user."""

View File

@@ -95,21 +95,21 @@ class AnthropicCoordinator(DataUpdateCoordinator[list[anthropic.types.ModelInfo]
self._schedule_refresh()
@callback
def get_model_info(self, model_id: str) -> anthropic.types.ModelInfo:
def get_model_info(self, model_id: str) -> tuple[anthropic.types.ModelInfo, bool]:
"""Get model info for a given model ID."""
# First try: exact name match
for model in self.data or []:
if model.id == model_id:
return model
return model, True
# Second try: match by alias
alias = model_alias(model_id)
for model in self.data or []:
if model_alias(model.id) == alias:
return model
return model, True
# Model not found, return safe defaults
return anthropic.types.ModelInfo(
type="model",
id=model_id,
created_at=datetime.datetime(1970, 1, 1, tzinfo=datetime.UTC),
display_name=model_id,
)
display_name=alias,
), False

View File

@@ -690,7 +690,7 @@ class AnthropicBaseLLMEntity(CoordinatorEntity[AnthropicCoordinator]):
self.entry = entry
self.subentry = subentry
coordinator = entry.runtime_data
self.model_info = coordinator.get_model_info(
self.model_info, _ = coordinator.get_model_info(
subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
)
self._attr_unique_id = subentry.subentry_id

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from collections.abc import Iterator
from typing import TYPE_CHECKING
import anthropic
import voluptuous as vol
from homeassistant import data_entry_flow
@@ -18,8 +19,8 @@ from homeassistant.helpers.selector import (
SelectSelectorConfig,
)
from .config_flow import get_model_list
from .const import CONF_CHAT_MODEL, DEPRECATED_MODELS, DOMAIN
from .coordinator import model_alias
if TYPE_CHECKING:
from . import AnthropicConfigEntry
@@ -61,7 +62,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
client = entry.runtime_data.client
model_list = [
model_option
for model_option in await get_model_list(client)
for model_option in await self.get_model_list(client)
if not model_option["value"].startswith(tuple(DEPRECATED_MODELS))
]
self._model_list_cache[entry.entry_id] = model_list
@@ -107,6 +108,22 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
},
)
async def get_model_list(
self, client: anthropic.AsyncAnthropic
) -> list[SelectOptionDict]:
"""Get list of available models."""
try:
models = (await client.models.list(timeout=10.0)).data
except anthropic.AnthropicError:
models = []
return [
SelectOptionDict(
label=model_info.display_name,
value=model_alias(model_info.id),
)
for model_info in models
]
def _iter_deprecated_subentries(self) -> Iterator[tuple[str, str]]:
"""Yield entry/subentry pairs that use deprecated models."""
for entry in self.hass.config_entries.async_entries(DOMAIN):

View File

@@ -38,6 +38,10 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "AI task",
"error": {
"api_error": "[%key:component::anthropic::config_subentries::conversation::error::api_error%]",
"model_not_found": "[%key:component::anthropic::config_subentries::conversation::error::model_not_found%]"
},
"initiate_flow": {
"reconfigure": "Reconfigure AI task",
"user": "Add AI task"
@@ -98,6 +102,10 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "Conversation agent",
"error": {
"api_error": "Unable to get model info: {message}",
"model_not_found": "Model not found"
},
"initiate_flow": {
"reconfigure": "Reconfigure conversation agent",
"user": "Add conversation agent"

View File

@@ -101,8 +101,7 @@ def handle_b2_errors[T](
try:
return await func(*args, **kwargs)
except B2Error as err:
error_msg = f"Failed during {func.__name__}"
raise BackupAgentError(error_msg) from err
raise BackupAgentError(f"Failed during {func.__name__}: {err}") from err
return wrapper
@@ -170,8 +169,7 @@ class BackblazeBackupAgent(BackupAgent):
async def _cleanup_failed_upload(self, filename: str) -> None:
"""Clean up a partially uploaded file after upload failure."""
_LOGGER.warning(
"Attempting to delete partially uploaded main backup file %s "
"due to metadata upload failure",
"Attempting to delete partially uploaded backup file %s",
filename,
)
try:
@@ -180,11 +178,10 @@ class BackblazeBackupAgent(BackupAgent):
)
await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
except B2Error:
_LOGGER.debug(
"Failed to clean up partially uploaded main backup file %s. "
"Manual intervention may be required to delete it from Backblaze B2",
_LOGGER.warning(
"Failed to clean up partially uploaded backup file %s;"
" manual deletion from Backblaze B2 may be required",
filename,
exc_info=True,
)
else:
_LOGGER.debug(
@@ -256,9 +253,10 @@ class BackblazeBackupAgent(BackupAgent):
prefixed_metadata_filename,
)
upload_successful = False
tar_uploaded = False
try:
await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
tar_uploaded = True
_LOGGER.debug(
"Main backup file upload finished for %s", prefixed_tar_filename
)
@@ -270,15 +268,14 @@ class BackblazeBackupAgent(BackupAgent):
_LOGGER.debug(
"Metadata file upload finished for %s", prefixed_metadata_filename
)
upload_successful = True
finally:
if upload_successful:
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
self._invalidate_caches(
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
)
else:
_LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
self._invalidate_caches(
backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
)
except B2Error:
if tar_uploaded:
await self._cleanup_failed_upload(prefixed_tar_filename)
raise
def _upload_metadata_file_sync(
self, metadata_content: bytes, filename: str

View File

@@ -260,6 +260,14 @@ BUTTONS: tuple[BondButtonEntityDescription, ...] = (
),
)
PRESET_BUTTON = BondButtonEntityDescription(
key=Action.PRESET,
name="Preset",
translation_key="preset",
mutually_exclusive=None,
argument=None,
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -285,6 +293,8 @@ async def async_setup_entry(
# we only add the stop action button if we add actions
# since its not so useful if there are no actions to stop
device_entities.append(BondButtonEntity(data, device, STOP_BUTTON))
if device.has_action(PRESET_BUTTON.key):
device_entities.append(BondButtonEntity(data, device, PRESET_BUTTON))
entities.extend(device_entities)
async_add_entities(entities)

View File

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["bsblan"],
"quality_scale": "silver",
"requirements": ["python-bsblan==5.1.3"],
"requirements": ["python-bsblan==5.1.4"],
"zeroconf": [
{
"name": "bsb-lan*",

View File

@@ -39,7 +39,9 @@ class ChessConfigFlow(ConfigFlow, domain=DOMAIN):
else:
await self.async_set_unique_id(str(user.player_id))
self._abort_if_unique_id_configured()
return self.async_create_entry(title=user.name, data=user_input)
return self.async_create_entry(
title=user.name or user.username, data=user_input
)
return self.async_show_form(
step_id="user",

View File

@@ -1,15 +1,18 @@
"""Support for sensors."""
"""Support for binary sensors."""
from __future__ import annotations
from typing import TYPE_CHECKING, cast
from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Final, cast
from aiocomelit.api import ComelitVedoZoneObject
from aiocomelit.const import ALARM_ZONE, AlarmZoneState
from aiocomelit.api import ComelitVedoAreaObject, ComelitVedoZoneObject
from aiocomelit.const import ALARM_AREA, ALARM_ZONE, AlarmAreaState, AlarmZoneState
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -23,12 +26,68 @@ from .utils import new_device_listener
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class ComelitBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Comelit binary sensor entity description."""
object_type: str
is_on_fn: Callable[[ComelitVedoAreaObject | ComelitVedoZoneObject], bool]
available_fn: Callable[[ComelitVedoAreaObject | ComelitVedoZoneObject], bool] = (
lambda obj: True
)
BINARY_SENSOR_TYPES: Final[tuple[ComelitBinarySensorEntityDescription, ...]] = (
ComelitBinarySensorEntityDescription(
key="anomaly",
translation_key="anomaly",
object_type=ALARM_AREA,
device_class=BinarySensorDeviceClass.PROBLEM,
is_on_fn=lambda obj: cast(ComelitVedoAreaObject, obj).anomaly,
available_fn=lambda obj: (
cast(ComelitVedoAreaObject, obj).human_status != AlarmAreaState.UNKNOWN
),
),
ComelitBinarySensorEntityDescription(
key="presence",
translation_key="motion",
object_type=ALARM_ZONE,
device_class=BinarySensorDeviceClass.MOTION,
is_on_fn=lambda obj: cast(ComelitVedoZoneObject, obj).status_api == "0001",
available_fn=lambda obj: (
cast(ComelitVedoZoneObject, obj).human_status
not in {
AlarmZoneState.FAULTY,
AlarmZoneState.UNAVAILABLE,
AlarmZoneState.UNKNOWN,
}
),
),
ComelitBinarySensorEntityDescription(
key="faulty",
translation_key="faulty",
object_type=ALARM_ZONE,
device_class=BinarySensorDeviceClass.PROBLEM,
is_on_fn=lambda obj: (
cast(ComelitVedoZoneObject, obj).human_status == AlarmZoneState.FAULTY
),
available_fn=lambda obj: (
cast(ComelitVedoZoneObject, obj).human_status
not in {
AlarmZoneState.UNAVAILABLE,
AlarmZoneState.UNKNOWN,
}
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ComelitConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Comelit VEDO presence sensors."""
"""Set up Comelit VEDO binary sensors."""
coordinator = config_entry.runtime_data
is_bridge = isinstance(coordinator, ComelitSerialBridge)
@@ -42,13 +101,23 @@ async def async_setup_entry(
def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
ComelitVedoBinarySensorEntity(
coordinator,
device,
config_entry.entry_id,
description,
)
for description in BINARY_SENSOR_TYPES
for device in coordinator.data[dev_type].values()
if description.object_type == dev_type
if device in new_devices
]
if entities:
async_add_entities(entities)
config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, ALARM_AREA)
)
config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, ALARM_ZONE)
)
@@ -59,42 +128,47 @@ class ComelitVedoBinarySensorEntity(
):
"""Sensor device."""
entity_description: ComelitBinarySensorEntityDescription
_attr_has_entity_name = True
_attr_device_class = BinarySensorDeviceClass.MOTION
def __init__(
self,
coordinator: ComelitVedoSystem | ComelitSerialBridge,
zone: ComelitVedoZoneObject,
object_data: ComelitVedoAreaObject | ComelitVedoZoneObject,
config_entry_entry_id: str,
description: ComelitBinarySensorEntityDescription,
) -> None:
"""Init sensor entity."""
self._zone_index = zone.index
self.entity_description = description
self._object_index = object_data.index
self._object_type = description.object_type
super().__init__(coordinator)
# Use config_entry.entry_id as base for unique_id
# because no serial number or mac is available
self._attr_unique_id = f"{config_entry_entry_id}-presence-{zone.index}"
self._attr_device_info = coordinator.platform_device_info(zone, "zone")
self._attr_unique_id = (
f"{config_entry_entry_id}-{description.key}-{self._object_index}"
)
self._attr_device_info = coordinator.platform_device_info(
object_data, "area" if self._object_type == ALARM_AREA else "zone"
)
@property
def _zone(self) -> ComelitVedoZoneObject:
"""Return zone object."""
def _object(self) -> ComelitVedoAreaObject | ComelitVedoZoneObject:
"""Return alarm object."""
return cast(
ComelitVedoZoneObject, self.coordinator.data[ALARM_ZONE][self._zone_index]
ComelitVedoAreaObject | ComelitVedoZoneObject,
self.coordinator.data[self._object_type][self._object_index],
)
@property
def available(self) -> bool:
"""Return True if alarm is available."""
if self._zone.human_status in [
AlarmZoneState.FAULTY,
AlarmZoneState.UNAVAILABLE,
AlarmZoneState.UNKNOWN,
]:
"""Return True if object is available."""
if not self.entity_description.available_fn(self._object):
return False
return super().available
@property
def is_on(self) -> bool:
"""Presence detected."""
return self._zone.status_api == "0001"
"""Return object binary sensor state."""
return self.entity_description.is_on_fn(self._object)

View File

@@ -64,6 +64,17 @@
}
},
"entity": {
"binary_sensor": {
"anomaly": {
"name": "Anomaly"
},
"faulty": {
"name": "Faulty"
},
"motion": {
"name": "Motion"
}
},
"climate": {
"thermostat": {
"state_attributes": {

View File

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["duco"],
"quality_scale": "bronze",
"requirements": ["python-duco-client==0.2.0"]
"requirements": ["python-duco-client==0.3.0"]
}

View File

@@ -6,7 +6,7 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any
from elgato import Elgato, ElgatoError
from elgato import Elgato
from homeassistant.components.button import (
ButtonDeviceClass,
@@ -15,11 +15,11 @@ from homeassistant.components.button import (
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator
from .entity import ElgatoEntity
from .helpers import elgato_exception_handler
PARALLEL_UPDATES = 1
@@ -80,11 +80,7 @@ class ElgatoButtonEntity(ElgatoEntity, ButtonEntity):
f"{coordinator.data.info.serial_number}_{description.key}"
)
@elgato_exception_handler
async def async_press(self) -> None:
"""Trigger button press on the Elgato device."""
try:
await self.entity_description.press_fn(self.coordinator.client)
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while communicating with the Elgato Light"
) from error
await self.entity_description.press_fn(self.coordinator.client)

View File

@@ -12,6 +12,8 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_MAC
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN
@@ -23,7 +25,6 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN):
VERSION = 1
host: str
port: int
serial_number: str
mac: str | None = None
@@ -70,6 +71,69 @@ class ElgatoFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a flow initiated by zeroconf."""
return self._async_create_entry()
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of an existing Elgato device."""
errors: dict[str, str] = {}
if user_input is not None:
elgato = Elgato(
host=user_input[CONF_HOST],
session=async_get_clientsession(self.hass),
)
try:
info = await elgato.info()
except ElgatoError:
errors["base"] = "cannot_connect"
else:
await self.async_set_unique_id(info.serial_number)
self._abort_if_unique_id_mismatch(reason="different_device")
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data_updates={CONF_HOST: user_input[CONF_HOST]},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=vol.Schema(
{
vol.Required(
CONF_HOST,
default=self._get_reconfigure_entry().data[CONF_HOST],
): str,
}
),
errors=errors,
)
async def async_step_dhcp(
self, discovery_info: DhcpServiceInfo
) -> ConfigFlowResult:
"""Handle DHCP discovery of a known Elgato device.
Only devices already configured (matched via ``registered_devices``)
reach this step. It is used to keep the stored host in sync with the
current IP address of the device.
"""
mac = format_mac(discovery_info.macaddress)
for entry in self._async_current_entries():
if (entry_mac := entry.data.get(CONF_MAC)) is None or format_mac(
entry_mac
) != mac:
continue
if entry.data[CONF_HOST] != discovery_info.ip:
self.hass.config_entries.async_update_entry(
entry,
data=entry.data | {CONF_HOST: discovery_info.ip},
)
self.hass.config_entries.async_schedule_reload(entry.entry_id)
return self.async_abort(reason="already_configured")
return self.async_abort(reason="no_devices_found")
@callback
def _async_show_setup_form(
self, errors: dict[str, str] | None = None

View File

@@ -2,7 +2,15 @@
from dataclasses import dataclass
from elgato import BatteryInfo, Elgato, ElgatoConnectionError, Info, Settings, State
from elgato import (
BatteryInfo,
Elgato,
ElgatoConnectionError,
ElgatoError,
Info,
Settings,
State,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST
@@ -59,4 +67,12 @@ class ElgatoDataUpdateCoordinator(DataUpdateCoordinator[ElgatoData]):
state=await self.client.state(),
)
except ElgatoConnectionError as err:
raise UpdateFailed(err) from err
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="communication_error",
) from err
except ElgatoError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="unknown_error",
) from err

View File

@@ -0,0 +1,43 @@
"""Helpers for Elgato."""
from __future__ import annotations
from collections.abc import Callable, Coroutine
from typing import Any, Concatenate
from elgato import ElgatoConnectionError, ElgatoError
from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
from .entity import ElgatoEntity
def elgato_exception_handler[_ElgatoEntityT: ElgatoEntity, **_P](
func: Callable[Concatenate[_ElgatoEntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_ElgatoEntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate Elgato calls to handle Elgato exceptions.
A decorator that wraps the passed in function, catches Elgato errors,
and raises a translated ``HomeAssistantError``.
"""
async def handler(
self: _ElgatoEntityT, *args: _P.args, **kwargs: _P.kwargs
) -> None:
try:
await func(self, *args, **kwargs)
except ElgatoConnectionError as error:
self.coordinator.last_update_success = False
self.coordinator.async_update_listeners()
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="communication_error",
) from error
except ElgatoError as error:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unknown_error",
) from error
return handler

View File

@@ -4,8 +4,6 @@ from __future__ import annotations
from typing import Any
from elgato import ElgatoError
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP_KELVIN,
@@ -14,12 +12,12 @@ from homeassistant.components.light import (
LightEntity,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import color as color_util
from .coordinator import ElgatoConfigEntry, ElgatoDataUpdateCoordinator
from .entity import ElgatoEntity
from .helpers import elgato_exception_handler
PARALLEL_UPDATES = 1
@@ -94,17 +92,13 @@ class ElgatoLight(ElgatoEntity, LightEntity):
"""Return the state of the light."""
return self.coordinator.data.state.on
@elgato_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""
try:
await self.coordinator.client.light(on=False)
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while updating the Elgato Light"
) from error
finally:
await self.coordinator.async_refresh()
await self.coordinator.client.light(on=False)
await self.coordinator.async_request_refresh()
@elgato_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the light."""
temperature_kelvin = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
@@ -137,26 +131,16 @@ class ElgatoLight(ElgatoEntity, LightEntity):
else color_util.color_temperature_kelvin_to_mired(temperature_kelvin)
)
try:
await self.coordinator.client.light(
on=True,
brightness=brightness,
hue=hue,
saturation=saturation,
temperature=temperature,
)
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while updating the Elgato Light"
) from error
finally:
await self.coordinator.async_refresh()
await self.coordinator.client.light(
on=True,
brightness=brightness,
hue=hue,
saturation=saturation,
temperature=temperature,
)
await self.coordinator.async_request_refresh()
@elgato_exception_handler
async def async_identify(self) -> None:
"""Identify the light, will make it blink."""
try:
await self.coordinator.client.identify()
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while identifying the Elgato Light"
) from error
await self.coordinator.client.identify()

View File

@@ -3,6 +3,11 @@
"name": "Elgato Light",
"codeowners": ["@frenck"],
"config_flow": true,
"dhcp": [
{
"registered_devices": true
}
],
"documentation": "https://www.home-assistant.io/integrations/elgato",
"integration_type": "device",
"iot_class": "local_polling",

View File

@@ -39,11 +39,7 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: todo
comment: |
The integration doesn't update the device info based on DHCP discovery
of known existing devices.
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
@@ -64,9 +60,9 @@ rules:
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: |

View File

@@ -2,13 +2,24 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"different_device": "The configured Elgato device is not the same as the one at this address.",
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"flow_title": "{serial_number}",
"step": {
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::elgato::config::step::user::data_description::host%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
@@ -48,6 +59,14 @@
}
}
},
"exceptions": {
"communication_error": {
"message": "An error occurred while communicating with the Elgato device."
},
"unknown_error": {
"message": "An unknown error occurred while communicating with the Elgato device."
}
},
"services": {
"identify": {
"description": "Identifies an Elgato Light. Blinks the light, which can be useful for, e.g., a visual notification.",

View File

@@ -6,16 +6,16 @@ from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any
from elgato import Elgato, ElgatoError
from elgato import Elgato
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ElgatoConfigEntry, ElgatoData, ElgatoDataUpdateCoordinator
from .entity import ElgatoEntity
from .helpers import elgato_exception_handler
PARALLEL_UPDATES = 1
@@ -92,24 +92,14 @@ class ElgatoSwitchEntity(ElgatoEntity, SwitchEntity):
"""Return state of the switch."""
return self.entity_description.is_on_fn(self.coordinator.data)
@elgato_exception_handler
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
try:
await self.entity_description.set_fn(self.coordinator.client, True)
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while updating the Elgato Light"
) from error
finally:
await self.coordinator.async_refresh()
await self.entity_description.set_fn(self.coordinator.client, True)
await self.coordinator.async_request_refresh()
@elgato_exception_handler
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
try:
await self.entity_description.set_fn(self.coordinator.client, False)
except ElgatoError as error:
raise HomeAssistantError(
"An error occurred while updating the Elgato Light"
) from error
finally:
await self.coordinator.async_refresh()
await self.entity_description.set_fn(self.coordinator.client, False)
await self.coordinator.async_request_refresh()

View File

@@ -1,7 +1,7 @@
{
"domain": "epic_games_store",
"name": "Epic Games Store",
"codeowners": ["@hacf-fr", "@Quentame"],
"codeowners": ["@Quentame"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/epic_games_store",
"integration_type": "service",

View File

@@ -259,15 +259,18 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):
if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
# Do not use kelvin_to_mired here to prevent precision loss
color_temp_mired = 1_000_000.0 / color_temp_k
data["color_temperature"] = color_temp_mired
if color_temp_modes := _filter_color_modes(
color_modes, LightColorCapability.COLOR_TEMPERATURE
):
data["color_temperature"] = color_temp_mired
color_modes = color_temp_modes
else:
# Convert color temperature to explicit cold/warm white
# values to avoid ESPHome applying brightness to both
# master brightness and white channels (b² effect).
# Also send explicit cold/warm white values to avoid
# ESPHome applying brightness to both master brightness
# and white channels (b² effect). The firmware skips
# deriving cwww from color_temperature when the channels
# are already set explicitly, but still stores
# color_temperature so HA can read it back.
data["cold_white"], data["warm_white"] = self._color_temp_to_cold_warm(
color_temp_mired
)

View File

@@ -17,9 +17,9 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==44.13.1",
"aioesphomeapi==44.13.3",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.7.1"
"bleak-esphome==3.7.3"
],
"zeroconf": ["_esphomelib._tcp.local."]
}

View File

@@ -0,0 +1,80 @@
"""Comet Blue Bluetooth integration."""
from __future__ import annotations
from bleak.exc import BleakError
from eurotronic_cometblue_ha import AsyncCometBlue
from homeassistant.components.bluetooth import async_ble_device_from_address
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS, CONF_PIN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .coordinator import CometBlueConfigEntry, CometBlueDataUpdateCoordinator
PLATFORMS: list[Platform] = [
Platform.CLIMATE,
]
async def async_setup_entry(hass: HomeAssistant, entry: CometBlueConfigEntry) -> bool:
    """Set up Eurotronic Comet Blue from a config entry.

    Resolves the BLE device, validates the configured PIN by reading the
    battery level, registers the device, and starts the update coordinator.

    Raises:
        ConfigEntryNotReady: If the device is currently unreachable;
            Home Assistant will retry setup later.
        ConfigEntryError: If the PIN appears to be incorrect.
    """
    address = entry.data[CONF_ADDRESS]
    ble_device = async_ble_device_from_address(hass, address)
    if not ble_device:
        # Transient: the device may simply be out of Bluetooth range.
        raise ConfigEntryNotReady(
            f"Couldn't find a nearby device for address: {address}"
        )

    cometblue_device = AsyncCometBlue(
        device=ble_device,
        pin=int(entry.data[CONF_PIN]),
    )
    try:
        async with cometblue_device:
            ble_device_info = await cometblue_device.get_device_info_async()
            try:
                # Device only returns battery level if PIN is correct
                await cometblue_device.get_battery_async()
            except TimeoutError as ex:
                # This likely means PIN was incorrect on Linux and ESPHome backends
                raise ConfigEntryError(
                    "Failed to read battery level, likely due to incorrect PIN"
                ) from ex
    except BleakError as ex:
        raise ConfigEntryNotReady(
            f"Failed to get device info from '{cometblue_device.device.address}'"
        ) from ex

    # Register the device eagerly with full info; entities only reference it
    # via the (DOMAIN, address) identifier.
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, address)},
        name=f"{ble_device_info['model']} {cometblue_device.device.address}",
        manufacturer=ble_device_info["manufacturer"],
        model=ble_device_info["model"],
        sw_version=ble_device_info["version"],
    )

    coordinator = CometBlueDataUpdateCoordinator(
        hass,
        entry,
        cometblue_device,
    )
    # First refresh raises ConfigEntryNotReady itself if polling fails.
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Unload the platforms that were set up for this config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    return unload_ok

View File

@@ -0,0 +1,185 @@
"""Comet Blue climate integration."""
from __future__ import annotations
from typing import Any
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
PRESET_AWAY,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
PRESET_NONE,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import CometBlueConfigEntry, CometBlueDataUpdateCoordinator
from .entity import CometBlueBluetoothEntity
PARALLEL_UPDATES = 1
MIN_TEMP = 7.5
MAX_TEMP = 28.5
async def async_setup_entry(
    hass: HomeAssistant,
    entry: CometBlueConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Add the single Comet Blue climate entity for this config entry."""
    entities = [CometBlueClimateEntity(entry.runtime_data)]
    async_add_entities(entities)
class CometBlueClimateEntity(CometBlueBluetoothEntity, ClimateEntity):
    """A Comet Blue Climate climate entity.

    The TRV exposes no explicit mode register: HVAC mode and preset are
    derived purely from the currently set target temperature (see
    ``hvac_mode`` and ``preset_mode``), and mode/preset changes are applied
    by writing the corresponding temperature back to the device.
    """

    _attr_min_temp = MIN_TEMP
    _attr_max_temp = MAX_TEMP
    # Entity takes the device name (single entity per device).
    _attr_name = None
    _attr_hvac_modes = [HVACMode.AUTO, HVACMode.HEAT, HVACMode.OFF]
    _attr_preset_modes = [
        PRESET_COMFORT,
        PRESET_ECO,
        PRESET_BOOST,
        PRESET_AWAY,
        PRESET_NONE,
    ]
    _attr_supported_features: ClimateEntityFeature = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
        | ClimateEntityFeature.PRESET_MODE
        | ClimateEntityFeature.TURN_ON
        | ClimateEntityFeature.TURN_OFF
    )
    _attr_target_temperature_step = PRECISION_HALVES
    _attr_temperature_unit = UnitOfTemperature.CELSIUS

    def __init__(self, coordinator: CometBlueDataUpdateCoordinator) -> None:
        """Initialize CometBlueClimateEntity."""
        super().__init__(coordinator)
        # The BLE address is the only stable identifier available.
        self._attr_unique_id = coordinator.address

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        return self.coordinator.data.temperatures["currentTemp"]

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature currently set to be reached."""
        return self.coordinator.data.temperatures["manualTemp"]

    @property
    def target_temperature_high(self) -> float | None:
        """Return the upper bound target temperature."""
        return self.coordinator.data.temperatures["targetTempHigh"]

    @property
    def target_temperature_low(self) -> float | None:
        """Return the lower bound target temperature."""
        return self.coordinator.data.temperatures["targetTempLow"]

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return hvac operation mode.

        Derived from the manual target temperature: the device minimum means
        OFF, the device maximum means HEAT (full on), anything else AUTO.
        """
        if self.target_temperature == MIN_TEMP:
            return HVACMode.OFF
        if self.target_temperature == MAX_TEMP:
            return HVACMode.HEAT
        return HVACMode.AUTO

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode, e.g., home, away, temp."""
        # presets have an order in which they are displayed on TRV:
        # away, boost, comfort, eco, none (manual)
        # NOTE(review): away is detected when holiday "start" is unset but
        # "end" is set and the target matches the holiday temperature —
        # confirm this matches how the TRV reports an *active* holiday window.
        if (
            self.coordinator.data.holiday.get("start") is None
            and self.coordinator.data.holiday.get("end") is not None
            and self.target_temperature
            == self.coordinator.data.holiday.get("temperature")
        ):
            return PRESET_AWAY
        if self.target_temperature == MAX_TEMP:
            return PRESET_BOOST
        if self.target_temperature == self.target_temperature_high:
            return PRESET_COMFORT
        if self.target_temperature == self.target_temperature_low:
            return PRESET_ECO
        return PRESET_NONE

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperatures.

        Raises:
            ServiceValidationError: If the TRV is in holiday (away) mode,
                which cannot be changed remotely.
        """
        if self.preset_mode == PRESET_AWAY:
            raise ServiceValidationError(
                "Cannot adjust TRV remotely, manually disable 'holiday' mode on TRV first"
            )
        await self.coordinator.send_command(
            self.coordinator.device.set_temperature_async,
            {
                "values": {
                    # manual temperature always needs to be set, otherwise TRV will turn OFF
                    # (falling back via `or` is safe: valid temps are >= MIN_TEMP > 0)
                    "manualTemp": kwargs.get(ATTR_TEMPERATURE)
                    or self.target_temperature,
                    # other temperatures can be left unchanged by setting them to None
                    "targetTempLow": kwargs.get(ATTR_TARGET_TEMP_LOW),
                    "targetTempHigh": kwargs.get(ATTR_TARGET_TEMP_HIGH),
                }
            },
        )
        await self.coordinator.async_request_refresh()

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new target preset mode.

        Only ECO/COMFORT/BOOST can be set remotely; AWAY and NONE are
        display-only states derived from device data.
        """
        if self.preset_modes and preset_mode not in self.preset_modes:
            raise ServiceValidationError(f"Unsupported preset_mode '{preset_mode}'")
        if preset_mode in [PRESET_NONE, PRESET_AWAY]:
            raise ServiceValidationError(
                f"Unable to set preset '{preset_mode}', display only."
            )
        if preset_mode == PRESET_ECO:
            return await self.async_set_temperature(
                temperature=self.target_temperature_low
            )
        if preset_mode == PRESET_COMFORT:
            return await self.async_set_temperature(
                temperature=self.target_temperature_high
            )
        if preset_mode == PRESET_BOOST:
            return await self.async_set_temperature(temperature=MAX_TEMP)
        return None

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new target hvac mode by writing the matching temperature."""
        if hvac_mode == HVACMode.OFF:
            return await self.async_set_temperature(temperature=MIN_TEMP)
        if hvac_mode == HVACMode.HEAT:
            return await self.async_set_temperature(temperature=MAX_TEMP)
        if hvac_mode == HVACMode.AUTO:
            return await self.async_set_temperature(
                temperature=self.target_temperature_low
            )
        raise ServiceValidationError(f"Unknown HVAC mode '{hvac_mode}'")

    async def async_turn_on(self) -> None:
        """Turn the entity on."""
        await self.async_set_hvac_mode(HVACMode.AUTO)

    async def async_turn_off(self) -> None:
        """Turn the entity off."""
        await self.async_set_hvac_mode(HVACMode.OFF)

View File

@@ -0,0 +1,186 @@
"""Config flow for CometBlue."""
from __future__ import annotations
import logging
from typing import Any
from bleak.exc import BleakError
from eurotronic_cometblue_ha import AsyncCometBlue
from eurotronic_cometblue_ha.const import SERVICE
from habluetooth import BluetoothServiceInfoBleak
import voluptuous as vol
from homeassistant.components.bluetooth import (
async_ble_device_from_address,
async_discovered_service_info,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ADDRESS, CONF_PIN
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DOMAIN
LOGGER = logging.getLogger(__name__)
# PIN entry form shared by the user and Bluetooth-discovery confirmation
# steps. The PIN is entered as text (numeric keyboard) and must be exactly
# six characters; the library later converts it with int().
DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PIN, default="000000"): vol.All(
            TextSelector(TextSelectorConfig(type=TextSelectorType.NUMBER)),
            vol.Length(min=6, max=6),
        ),
    }
)
def name_from_discovery(discovery: BluetoothServiceInfoBleak | None) -> str:
    """Build a human-readable title for a (possibly missing) discovery."""
    if discovery is None:
        # No discovery info at all: fall back to the generic product name.
        return "Comet Blue"
    if discovery.name == str(discovery.address):
        # Advertised name is just the address; avoid repeating it twice.
        return discovery.address
    return f"{discovery.name} {discovery.address}"
class CometBlueConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for CometBlue.

    Supports two entry paths: automatic Bluetooth discovery
    (``async_step_bluetooth``) and a manual user flow that lets the user pick
    from currently discovered devices (``async_step_pick_device``). Both
    converge on ``async_step_bluetooth_confirm`` where the PIN is validated.
    """

    VERSION = 1

    def __init__(self) -> None:
        """Initialize the config flow."""
        # Discovery selected for this flow (set by bluetooth/pick_device steps).
        self._discovery_info: BluetoothServiceInfoBleak | None = None
        # Candidates offered in the pick_device step, keyed by BLE address.
        self._discovered_devices: dict[str, BluetoothServiceInfoBleak] = {}

    async def _try_connect(self, user_input: dict[str, Any]) -> dict[str, str]:
        """Verify connection to the device with the provided PIN and read initial data.

        Returns a form-errors dict ({"base": <error key>}) on failure, or an
        empty dict on success.
        """
        # Empty address (no discovery info) makes the BLE lookup fail below.
        device_address = self._discovery_info.address if self._discovery_info else ""
        try:
            ble_device = async_ble_device_from_address(self.hass, device_address)
            LOGGER.info("Testing connection for device at address %s", device_address)
            if not ble_device:
                return {"base": "cannot_connect"}
            cometblue_device = AsyncCometBlue(
                device=ble_device,
                pin=int(user_input[CONF_PIN]),
            )
            async with cometblue_device:
                try:
                    # Device only returns battery level if PIN is correct
                    await cometblue_device.get_battery_async()
                except TimeoutError:
                    # This likely means PIN was incorrect on Linux and ESPHome backends
                    LOGGER.debug(
                        "Failed to read battery level, likely due to incorrect PIN",
                        exc_info=True,
                    )
                    return {"base": "invalid_pin"}
        except TimeoutError:
            # Outer timeout: connecting itself timed out (vs. the inner
            # PIN-check timeout handled above).
            LOGGER.debug("Connection to device timed out", exc_info=True)
            return {"base": "timeout_connect"}
        except BleakError:
            LOGGER.debug("Failed to connect to device", exc_info=True)
            return {"base": "cannot_connect"}
        except Exception:  # noqa: BLE001
            LOGGER.debug("Unknown error", exc_info=True)
            return {"base": "unknown"}
        return {}

    def _create_entry(
        self,
        pin: str,
    ) -> ConfigFlowResult:
        """Create an entry for a discovered device."""
        entry_data = {
            CONF_ADDRESS: self._discovery_info.address
            if self._discovery_info
            else None,
            CONF_PIN: pin,
        }
        return self.async_create_entry(
            title=name_from_discovery(self._discovery_info), data=entry_data
        )

    async def async_step_bluetooth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle user-confirmation of discovered device.

        Shows the PIN form; on submit, validates the PIN against the device
        and creates the entry on success.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            errors = await self._try_connect(user_input)
            if not errors:
                return self._create_entry(user_input[CONF_PIN])
        return self.async_show_form(
            step_id="bluetooth_confirm",
            data_schema=DATA_SCHEMA,
            errors=errors,
        )

    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> ConfigFlowResult:
        """Handle a flow initialized by Bluetooth discovery."""
        address = discovery_info.address
        # MAC is the unique id; update stored address if the entry exists.
        await self.async_set_unique_id(format_mac(address))
        self._abort_if_unique_id_configured(updates={CONF_ADDRESS: address})
        self._discovery_info = discovery_info
        self.context["title_placeholders"] = {
            "name": name_from_discovery(self._discovery_info)
        }
        return await self.async_step_bluetooth_confirm()

    async def async_step_pick_device(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the step to pick discovered device."""
        current_addresses = self._async_current_ids()
        # Offer only connectable devices advertising the Comet Blue service
        # that are not already configured.
        self._discovered_devices = {
            discovery_info.address: discovery_info
            for discovery_info in async_discovered_service_info(
                self.hass, connectable=True
            )
            if SERVICE in discovery_info.service_uuids
            and discovery_info.address not in current_addresses
        }
        if user_input is not None:
            address = user_input[CONF_ADDRESS]
            await self.async_set_unique_id(format_mac(address))
            self._abort_if_unique_id_configured()
            self._discovery_info = self._discovered_devices.get(address)
            return await self.async_step_bluetooth_confirm()

        # Check if there is at least one device
        if not self._discovered_devices:
            return self.async_abort(reason="no_devices_found")

        return self.async_show_form(
            step_id="pick_device",
            data_schema=vol.Schema(
                {vol.Required(CONF_ADDRESS): vol.In(list(self._discovered_devices))}
            ),
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        return await self.async_step_pick_device()

View File

@@ -0,0 +1,7 @@
"""Constants for Cometblue BLE thermostats."""
from typing import Final

# Integration domain; used for config entries and device identifiers.
DOMAIN: Final = "eurotronic_cometblue"
# Maximum BLE attempts for polling and commands before giving up.
MAX_RETRIES: Final = 3

View File

@@ -0,0 +1,132 @@
"""Provides the DataUpdateCoordinator for Comet Blue."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass, field
from datetime import timedelta
import logging
from typing import Any
from bleak.exc import BleakError
from eurotronic_cometblue_ha import AsyncCometBlue, InvalidByteValueError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import MAX_RETRIES
SCAN_INTERVAL = timedelta(minutes=5)
LOGGER = logging.getLogger(__name__)
COMMAND_RETRY_INTERVAL = 2.5
type CometBlueConfigEntry = ConfigEntry[CometBlueDataUpdateCoordinator]
@dataclass
class CometBlueCoordinatorData:
    """Data stored by the coordinator."""

    # Temperature readings keyed by the library's field names
    # (e.g. "currentTemp", "manualTemp", "targetTempLow"/"targetTempHigh").
    temperatures: dict[str, float | int] = field(default_factory=dict)
    # Holiday-mode data from get_holiday_async(); consumers read the keys
    # "start", "end" and "temperature". Empty dict when none was retrieved.
    holiday: dict[str, Any] = field(default_factory=dict)
class CometBlueDataUpdateCoordinator(DataUpdateCoordinator[CometBlueCoordinatorData]):
    """Class to manage fetching data.

    Every BLE interaction (polling and commands) is retried up to
    MAX_RETRIES times with COMMAND_RETRY_INTERVAL seconds between attempts,
    since Bluetooth connections to the TRV are intermittently unreliable.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        entry: CometBlueConfigEntry,
        cometblue: AsyncCometBlue,
    ) -> None:
        """Initialize global data updater."""
        super().__init__(
            hass=hass,
            config_entry=entry,
            logger=LOGGER,
            name=f"Comet Blue {cometblue.client.address}",
            update_interval=SCAN_INTERVAL,
        )
        # Handle to the BLE client wrapper; the address doubles as the
        # device's unique identifier throughout the integration.
        self.device = cometblue
        self.address = cometblue.client.address

    async def send_command(
        self,
        function: Callable[..., Awaitable[dict[str, Any] | None]],
        payload: dict[str, Any],
    ) -> dict[str, Any] | None:
        """Send command to device.

        Args:
            function: Bound async method of the device to invoke.
            payload: Keyword arguments passed to ``function``.

        Raises:
            HomeAssistantError: If all retries are exhausted.
            ServiceValidationError: If the payload is rejected (ValueError).
        """
        LOGGER.debug("Updating device %s with '%s'", self.name, payload)
        retry_count = 0
        while retry_count < MAX_RETRIES:
            try:
                async with self.device:
                    return await function(**payload)
            except (InvalidByteValueError, TimeoutError, BleakError) as ex:
                retry_count += 1
                if retry_count >= MAX_RETRIES:
                    raise HomeAssistantError(
                        f"Error sending command to '{self.name}': {ex}"
                    ) from ex
                LOGGER.info(
                    "Retry sending command to %s after %s (%s)",
                    self.name,
                    type(ex).__name__,
                    ex,
                )
                await asyncio.sleep(COMMAND_RETRY_INTERVAL)
            except ValueError as ex:
                # A ValueError indicates a bad payload (caller error), not a
                # transient BLE problem — do not retry.
                raise ServiceValidationError(
                    f"Invalid payload '{payload}' for '{self.name}': {ex}"
                ) from ex
        # Unreachable in practice: the loop either returns or raises above.
        return None

    async def _async_update_data(self) -> CometBlueCoordinatorData:
        """Poll the device."""
        data: CometBlueCoordinatorData = CometBlueCoordinatorData()
        retry_count = 0
        # Retry until the mandatory temperature block has been read or the
        # retry budget is exhausted.
        while retry_count < MAX_RETRIES and not data.temperatures:
            try:
                async with self.device:
                    # temperatures are required and must trigger a retry if not available
                    if not data.temperatures:
                        data.temperatures = await self.device.get_temperature_async()
                    # holiday is optional and should not trigger a retry
                    try:
                        if not data.holiday:
                            data.holiday = await self.device.get_holiday_async(1) or {}
                    except InvalidByteValueError as ex:
                        LOGGER.warning(
                            "Failed to retrieve optional data for %s: %s (%s)",
                            self.name,
                            type(ex).__name__,
                            ex,
                        )
            except (InvalidByteValueError, TimeoutError, BleakError) as ex:
                retry_count += 1
                if retry_count >= MAX_RETRIES:
                    raise UpdateFailed(
                        f"Error retrieving data: {ex}", retry_after=30
                    ) from ex
                LOGGER.info(
                    "Retry updating %s after error: %s (%s)",
                    self.name,
                    type(ex).__name__,
                    ex,
                )
                await asyncio.sleep(COMMAND_RETRY_INTERVAL)
            except Exception as ex:
                # Unexpected errors are surfaced as UpdateFailed so the
                # coordinator keeps polling on schedule instead of dying.
                raise UpdateFailed(
                    f"({type(ex).__name__}) {ex}", retry_after=30
                ) from ex
        # If one value was not retrieved correctly, keep the old value
        LOGGER.debug("Received data for %s: %s", self.name, data)
        return data

View File

@@ -0,0 +1,33 @@
"""Coordinator entity base class for CometBlue."""
from homeassistant.components import bluetooth
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import DOMAIN
from .coordinator import CometBlueDataUpdateCoordinator
class CometBlueBluetoothEntity(CoordinatorEntity[CometBlueDataUpdateCoordinator]):
    """Base coordinator entity for CometBlue devices."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: CometBlueDataUpdateCoordinator) -> None:
        """Attach this entity to its coordinator and device registry entry."""
        super().__init__(coordinator)
        # Full DeviceInfo is registered in __init__.py; only the identifier
        # is needed here to link the entity to that device.
        device_identifiers = {(DOMAIN, self.coordinator.address)}
        self._attr_device_info = DeviceInfo(identifiers=device_identifiers)

    @property
    def available(self) -> bool:
        """Return if entity is available.

        The entity stays available while the device is connectable via
        Bluetooth, even after a failed update: BLE connectivity can be
        intermittent, and a single failed refresh does not mean the device
        is gone. The BluetoothManager re-checks presence every 300s (the
        same interval as the DataUpdateCoordinator).
        """
        present = bluetooth.async_address_present(
            self.hass, address=self.coordinator.address, connectable=True
        )
        return present

View File

@@ -0,0 +1,19 @@
{
"domain": "eurotronic_cometblue",
"name": "Eurotronic Comet Blue",
"bluetooth": [
{
"connectable": true,
"service_uuid": "47e9ee00-47e9-11e4-8939-164230d1df67"
}
],
"codeowners": ["@rikroe"],
"config_flow": true,
"dependencies": ["bluetooth"],
"documentation": "https://www.home-assistant.io/integrations/eurotronic_cometblue",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eurotronic_cometblue_ha"],
"quality_scale": "bronze",
"requirements": ["eurotronic-cometblue-ha==1.4.0"]
}

View File

@@ -0,0 +1,88 @@
rules:
# Bronze
action-setup:
status: exempt
comment: This integration does not provide actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: This integration does not provide actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: This integration does not subscribe to any events.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: This integration does not provide actions.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: This integration does not login to any device or service.
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: This integration relies on MAC-based BLE connections.
discovery: done
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices: done
entity-category:
status: exempt
comment: This integration only provides one primary entity.
entity-device-class:
status: exempt
comment: This integration does not provide sensors.
entity-disabled-by-default:
status: exempt
comment: This integration only provides one primary entity.
entity-translations:
status: exempt
comment: This integration only provides one primary entity.
exception-translations: todo
icon-translations:
status: exempt
comment: Not required.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: Not required.
stale-devices:
status: exempt
comment: Only single device per config entry.
# Platinum
async-dependency: done
inject-websession:
status: exempt
comment: This integration does not make any HTTP requests.
strict-typing: todo

View File

@@ -0,0 +1,33 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"no_devices_found": "No Comet Blue Bluetooth TRVs discovered.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_pin": "Invalid device PIN",
"timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"bluetooth_confirm": {
"data": {
"pin": "[%key:common::config_flow::data::pin%]"
},
"data_description": {
"pin": "6-digit device PIN"
}
},
"pick_device": {
"data": {
"address": "Discovered devices"
},
"data_description": {
"address": "Select device to continue."
}
}
}
}
}

View File

@@ -104,6 +104,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
hass.async_create_task(
async_load_platform(hass, Platform.CLIMATE, DOMAIN, {}, config)
)
hass.async_create_task(
async_load_platform(hass, Platform.BUTTON, DOMAIN, {}, config)
)
if coordinator.tcs.hotwater:
hass.async_create_task(
async_load_platform(hass, Platform.WATER_HEATER, DOMAIN, {}, config)

View File

@@ -0,0 +1,116 @@
"""Support for Button entities of the Evohome integration."""
from __future__ import annotations
import evohomeasync2 as evo
from homeassistant.components.button import ButtonEntity
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import EVOHOME_DATA
from .coordinator import EvoDataUpdateCoordinator
from .entity import EvoEntity, is_valid_zone
async def async_setup_platform(
    hass: HomeAssistant,
    _: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the button platform for Evohome."""
    # This platform is only set up via discovery.
    if discovery_info is None:
        return

    evo_data = hass.data[EVOHOME_DATA]
    coordinator = evo_data.coordinator
    tcs = evo_data.tcs

    # One reset button for the system, one per valid zone, one for DHW if present.
    buttons: list[EvoResetButtonBase] = [EvoResetSystemButton(coordinator, tcs)]
    for zone in tcs.zones:
        if is_valid_zone(zone):
            buttons.append(EvoResetZoneButton(coordinator, zone))
    if tcs.hotwater:
        buttons.append(EvoResetDhwButton(coordinator, tcs.hotwater))

    async_add_entities(buttons)
    # Populate the extra state attributes once entities are added.
    for button in buttons:
        await button.update_attrs()
class EvoResetButtonBase(EvoEntity, ButtonEntity):
    """Common base class for the Evohome reset buttons."""

    _attr_entity_category = EntityCategory.CONFIG
    _evo_state_attr_names = ()

    def __init__(
        self,
        coordinator: EvoDataUpdateCoordinator,
        evo_device: evo.ControlSystem | evo.HotWater | evo.Zone,
    ) -> None:
        """Initialize the reset button for the given Evohome device."""
        super().__init__(coordinator, evo_device)
        # zones can be renamed, so set name in their property method
        if isinstance(evo_device, evo.ControlSystem):
            self._attr_name = f"Reset {evo_device.location.name}"
        elif not isinstance(evo_device, evo.Zone):
            self._attr_name = f"Reset {evo_device.name}"
        self._attr_unique_id = f"{evo_device.id}_reset"

    async def async_press(self) -> None:
        """Reset the Evohome entity to its base operating mode."""
        reset_request = self._evo_device.reset()
        await self.coordinator.call_client_api(reset_request)
class EvoResetSystemButton(EvoResetButtonBase):
    """Button entity that resets the whole control system (TCS)."""

    _attr_translation_key = "reset_system_mode"
    # Type-narrowing annotation: this button always wraps a ControlSystem.
    _evo_device: evo.ControlSystem
    # NOTE(review): presumably the attribute name used by the base entity
    # when exposing the device ID in state attributes -- confirm in EvoEntity.
    _evo_id_attr = "system_id"
class EvoResetDhwButton(EvoResetButtonBase):
    """Button entity for DHW override reset."""

    _attr_translation_key = "clear_dhw_override"
    # Type-narrowing annotation: this button always wraps a HotWater device.
    _evo_device: evo.HotWater
    # NOTE(review): presumably the attribute name used by the base entity
    # when exposing the device ID in state attributes -- confirm in EvoEntity.
    _evo_id_attr = "dhw_id"
class EvoResetZoneButton(EvoResetButtonBase):
    """Button entity that clears a zone's setpoint override."""

    _attr_translation_key = "clear_zone_override"
    _evo_device: evo.Zone
    _evo_id_attr = "zone_id"

    def __init__(
        self,
        coordinator: EvoDataUpdateCoordinator,
        evo_device: evo.Zone,
    ) -> None:
        """Initialize the zone reset button."""
        super().__init__(coordinator, evo_device)
        # this system does not have a distinct ID for the zone
        if evo_device.id == evo_device.tcs.id:
            self._attr_unique_id = f"{evo_device.id}z_reset"

    @property
    def name(self) -> str:
        """Return the name of the evohome entity."""
        # Zones can be renamed, so the name is computed on every access.
        return f"Reset {self._evo_device.name}"

View File

@@ -16,8 +16,6 @@ from evohomeasync2.const import (
from evohomeasync2.schemas.const import (
SystemMode as EvoSystemMode,
ZoneMode as EvoZoneMode,
ZoneModelType as EvoZoneModelType,
ZoneType as EvoZoneType,
)
from homeassistant.components.climate import (
@@ -43,7 +41,7 @@ from homeassistant.util import dt as dt_util
from .const import ATTR_DURATION, ATTR_PERIOD, DOMAIN, EVOHOME_DATA, EvoService
from .coordinator import EvoDataUpdateCoordinator
from .entity import EvoChild, EvoEntity
from .entity import EvoChild, EvoEntity, is_valid_zone
_LOGGER = logging.getLogger(__name__)
@@ -70,16 +68,16 @@ HA_PRESET_TO_EVO = {v: k for k, v in EVO_PRESET_TO_HA.items()}
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
_: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Create the evohome Controller, and its Zones, if any."""
"""Set up the climate platform for Evohome."""
if discovery_info is None:
return
coordinator = hass.data[EVOHOME_DATA].coordinator
loc_idx = hass.data[EVOHOME_DATA].loc_idx
tcs = hass.data[EVOHOME_DATA].tcs
_LOGGER.debug(
@@ -87,16 +85,13 @@ async def async_setup_platform(
tcs.model,
tcs.id,
tcs.location.name,
loc_idx,
coordinator.loc_idx,
)
entities: list[EvoController | EvoZone] = [EvoController(coordinator, tcs)]
for zone in tcs.zones:
if (
zone.model == EvoZoneModelType.HEATING_ZONE
or zone.type == EvoZoneType.THERMOSTAT
):
if is_valid_zone(zone):
_LOGGER.debug(
"Adding: %s (%s), id=%s, name=%s",
zone.type,
@@ -213,9 +208,9 @@ class EvoZone(EvoChild, EvoClimateEntity):
)
@property
def name(self) -> str | None:
def name(self) -> str:
"""Return the name of the evohome entity."""
return self._evo_device.name # zones can be easily renamed
return self._evo_device.name # zones can be renamed
@property
def hvac_mode(self) -> HVACMode | None:
@@ -330,7 +325,7 @@ class EvoController(EvoClimateEntity):
It is assumed there is only one TCS per location, and they are thus synonymous.
"""
_attr_icon = "mdi:thermostat"
_attr_icon = "mdi:thermostat-box"
_attr_precision = PRECISION_TENTHS
_evo_device: evo.ControlSystem

View File

@@ -19,8 +19,6 @@ STORAGE_KEY: Final = DOMAIN
CONF_LOCATION_IDX: Final = "location_idx"
USER_DATA: Final = "user_data"
SCAN_INTERVAL_DEFAULT: Final = timedelta(seconds=300)
SCAN_INTERVAL_MINIMUM: Final = timedelta(seconds=60)

View File

@@ -1,4 +1,4 @@
"""Base for evohome entity."""
"""Support for entities of the Evohome integration."""
from collections.abc import Mapping
from datetime import UTC, datetime
@@ -6,6 +6,10 @@ import logging
from typing import Any
import evohomeasync2 as evo
from evohomeasync2.schemas.const import (
ZoneModelType as EvoZoneModelType,
ZoneType as EvoZoneType,
)
from evohomeasync2.schemas.typedefs import DayOfWeekDhwT
from homeassistant.core import callback
@@ -18,6 +22,14 @@ from .coordinator import EvoDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
def is_valid_zone(zone: evo.Zone) -> bool:
"""Check if an Evohome zone should have climate and button entities."""
return (
zone.model == EvoZoneModelType.HEATING_ZONE
or zone.type == EvoZoneType.THERMOSTAT
)
class EvoEntity(CoordinatorEntity[EvoDataUpdateCoordinator]):
"""Base for any evohome-compatible entity (controller, DHW, zone).
@@ -75,6 +87,10 @@ class EvoEntity(CoordinatorEntity[EvoDataUpdateCoordinator]):
super()._handle_coordinator_update()
async def update_attrs(self) -> None:
"""Update the entity's extra state attrs."""
self._handle_coordinator_update()
class EvoChild(EvoEntity):
"""Base for any evohome-compatible child entity (DHW, zone).
@@ -179,4 +195,4 @@ class EvoChild(EvoEntity):
async def update_attrs(self) -> None:
"""Update the entity's extra state attrs."""
await self._update_schedule()
self._handle_coordinator_update()
await super().update_attrs()

View File

@@ -1,4 +1,17 @@
{
"entity": {
"button": {
"clear_dhw_override": {
"default": "mdi:water-boiler-auto"
},
"clear_zone_override": {
"default": "mdi:thermostat-auto"
},
"reset_system_mode": {
"default": "mdi:thermostat-box-auto"
}
}
},
"services": {
"clear_zone_override": {
"service": "mdi:motion-sensor-off"

View File

@@ -39,11 +39,12 @@ EVO_STATE_TO_HA = {v: k for k, v in HA_STATE_TO_EVO.items() if k != ""}
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
_: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Create a DHW controller."""
"""Set up the water heater platform for Evohome."""
if discovery_info is None:
return

View File

@@ -7,5 +7,5 @@
"iot_class": "cloud_polling",
"loggers": ["fluss-api"],
"quality_scale": "bronze",
"requirements": ["fluss-api==0.1.9.20"]
"requirements": ["fluss-api==0.2.4"]
}

View File

@@ -1,7 +1,7 @@
{
"domain": "freebox",
"name": "Freebox",
"codeowners": ["@hacf-fr", "@Quentame"],
"codeowners": ["@hacf-fr/reviewers", "@Quentame"],
"config_flow": true,
"dependencies": ["ffmpeg"],
"documentation": "https://www.home-assistant.io/integrations/freebox",

View File

@@ -8,6 +8,7 @@ from datetime import datetime, timedelta
import logging
from fritzconnection.lib.fritzstatus import FritzStatus
from requests.exceptions import RequestException
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -145,46 +146,65 @@ def _retrieve_link_attenuation_received_state(
def _retrieve_cpu_temperature_state(
status: FritzStatus, last_value: float | None
) -> float:
) -> float | None:
"""Return the first CPU temperature value."""
return status.get_cpu_temperatures()[0] # type: ignore[no-any-return]
try:
return status.get_cpu_temperatures()[0] # type: ignore[no-any-return]
except RequestException:
return None
def _is_suitable_cpu_temperature(status: FritzStatus) -> bool:
"""Return whether the CPU temperature sensor is suitable."""
try:
cpu_temp = status.get_cpu_temperatures()[0]
except RequestException, IndexError:
_LOGGER.debug("CPU temperature not supported by the device")
return False
if cpu_temp == 0:
_LOGGER.debug("CPU temperature returns 0°C, treating as not supported")
return False
return True
@dataclass(frozen=True, kw_only=True)
class FritzSensorEntityDescription(SensorEntityDescription, FritzEntityDescription):
"""Describes Fritz sensor entity."""
class FritzConnectionSensorEntityDescription(
SensorEntityDescription, FritzEntityDescription
):
"""Describes Fritz connection sensor entity."""
is_suitable: Callable[[ConnectionInfo], bool] = lambda info: info.wan_enabled
SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
FritzSensorEntityDescription(
@dataclass(frozen=True, kw_only=True)
class FritzDeviceSensorEntityDescription(
SensorEntityDescription, FritzEntityDescription
):
"""Describes Fritz device sensor entity."""
is_suitable: Callable[[FritzStatus], bool] = lambda status: True
CONNECTION_SENSOR_TYPES: tuple[FritzConnectionSensorEntityDescription, ...] = (
FritzConnectionSensorEntityDescription(
key="external_ip",
translation_key="external_ip",
value_fn=_retrieve_external_ip_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="external_ipv6",
translation_key="external_ipv6",
value_fn=_retrieve_external_ipv6_state,
is_suitable=lambda info: info.ipv6_active,
),
FritzSensorEntityDescription(
key="device_uptime",
translation_key="device_uptime",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_device_uptime_state,
is_suitable=lambda info: True,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="connection_uptime",
translation_key="connection_uptime",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_connection_uptime_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="kb_s_sent",
translation_key="kb_s_sent",
state_class=SensorStateClass.MEASUREMENT,
@@ -192,7 +212,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.DATA_RATE,
value_fn=_retrieve_kb_s_sent_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="kb_s_received",
translation_key="kb_s_received",
state_class=SensorStateClass.MEASUREMENT,
@@ -200,21 +220,21 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.DATA_RATE,
value_fn=_retrieve_kb_s_received_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="max_kb_s_sent",
translation_key="max_kb_s_sent",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
value_fn=_retrieve_max_kb_s_sent_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="max_kb_s_received",
translation_key="max_kb_s_received",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
device_class=SensorDeviceClass.DATA_RATE,
value_fn=_retrieve_max_kb_s_received_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="gb_sent",
translation_key="gb_sent",
state_class=SensorStateClass.TOTAL_INCREASING,
@@ -222,7 +242,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.DATA_SIZE,
value_fn=_retrieve_gb_sent_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="gb_received",
translation_key="gb_received",
state_class=SensorStateClass.TOTAL_INCREASING,
@@ -230,7 +250,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
device_class=SensorDeviceClass.DATA_SIZE,
value_fn=_retrieve_gb_received_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_kb_s_sent",
translation_key="link_kb_s_sent",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
@@ -238,7 +258,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_link_kb_s_sent_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_kb_s_received",
translation_key="link_kb_s_received",
native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
@@ -246,7 +266,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_link_kb_s_received_state,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_noise_margin_sent",
translation_key="link_noise_margin_sent",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
@@ -255,7 +275,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
value_fn=_retrieve_link_noise_margin_sent_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_noise_margin_received",
translation_key="link_noise_margin_received",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
@@ -264,7 +284,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
value_fn=_retrieve_link_noise_margin_received_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_attenuation_sent",
translation_key="link_attenuation_sent",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
@@ -273,7 +293,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
value_fn=_retrieve_link_attenuation_sent_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),
FritzSensorEntityDescription(
FritzConnectionSensorEntityDescription(
key="link_attenuation_received",
translation_key="link_attenuation_received",
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
@@ -282,7 +302,17 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
value_fn=_retrieve_link_attenuation_received_state,
is_suitable=lambda info: info.wan_enabled and info.connection == DSL_CONNECTION,
),
FritzSensorEntityDescription(
)
DEVICE_SENSOR_TYPES: tuple[FritzDeviceSensorEntityDescription, ...] = (
FritzDeviceSensorEntityDescription(
key="device_uptime",
translation_key="device_uptime",
device_class=SensorDeviceClass.TIMESTAMP,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=_retrieve_device_uptime_state,
),
FritzDeviceSensorEntityDescription(
key="cpu_temperature",
translation_key="cpu_temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
@@ -290,7 +320,7 @@ SENSOR_TYPES: tuple[FritzSensorEntityDescription, ...] = (
entity_category=EntityCategory.DIAGNOSTIC,
state_class=SensorStateClass.MEASUREMENT,
value_fn=_retrieve_cpu_temperature_state,
is_suitable=lambda info: True,
is_suitable=_is_suitable_cpu_temperature,
),
)
@@ -305,20 +335,32 @@ async def async_setup_entry(
avm_wrapper = entry.runtime_data
connection_info = await avm_wrapper.async_get_connection_info()
entities = [
FritzBoxSensor(avm_wrapper, entry.title, description)
for description in SENSOR_TYPES
for description in CONNECTION_SENSOR_TYPES
if description.is_suitable(connection_info)
]
fritz_status = avm_wrapper.fritz_status
def _generate_device_sensors() -> list[FritzBoxSensor]:
return [
FritzBoxSensor(avm_wrapper, entry.title, description)
for description in DEVICE_SENSOR_TYPES
if description.is_suitable(fritz_status)
]
entities += await hass.async_add_executor_job(_generate_device_sensors)
async_add_entities(entities)
class FritzBoxSensor(FritzBoxBaseCoordinatorEntity, SensorEntity):
"""Define FRITZ!Box connectivity class."""
entity_description: FritzSensorEntityDescription
entity_description: (
FritzConnectionSensorEntityDescription | FritzDeviceSensorEntityDescription
)
@property
def native_value(self) -> StateType:

View File

@@ -21,5 +21,5 @@
"integration_type": "system",
"preview_features": { "winter_mode": {} },
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260325.6"]
"requirements": ["home-assistant-frontend==20260325.7"]
}

View File

@@ -142,6 +142,7 @@ DESCRIPTIONS = (
native_min_value=0.0,
native_max_value=359.0,
native_step=1.0,
entity_category=EntityCategory.CONFIG,
char=Spray.sector,
),
GardenaBluetoothNumberEntityDescription(
@@ -153,6 +154,7 @@ DESCRIPTIONS = (
native_max_value=100.0,
native_step=0.1,
char=Spray.distance,
entity_category=EntityCategory.CONFIG,
scale=10.0,
),
)

View File

@@ -13,6 +13,7 @@ from gardena_bluetooth.const import (
from gardena_bluetooth.parse import CharacteristicInt
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -61,6 +62,7 @@ DESCRIPTIONS = (
translation_key="operation_mode",
char=AquaContour.operation_mode,
option_to_number=_enum_to_int(AquaContour.operation_mode.enum),
entity_category=EntityCategory.CONFIG,
),
GardenaBluetoothSelectEntityDescription(
translation_key="active_position",

View File

@@ -47,10 +47,10 @@ def _get_timestamp(value: datetime | None):
return value.replace(tzinfo=dt_util.get_default_time_zone())
def _get_distance_ratio(value: int | None):
def _get_distance_percentage(value: int | None) -> float | None:
if value is None:
return None
return value / 1000
return value / 10
@dataclass(frozen=True)
@@ -169,7 +169,7 @@ DESCRIPTIONS = (
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=PERCENTAGE,
char=Spray.current_distance,
get=_get_distance_ratio,
get=_get_distance_percentage,
),
GardenaBluetoothSensorEntityDescription(
key=Spray.current_sector.unique_id,

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
@@ -9,6 +10,9 @@ from google_weather_api import GoogleWeatherApi, GoogleWeatherApiError
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_REAUTH,
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigEntry,
ConfigEntryState,
ConfigFlow,
@@ -81,11 +85,16 @@ def _get_location_schema(hass: HomeAssistant) -> vol.Schema:
def _is_location_already_configured(
hass: HomeAssistant, new_data: dict[str, float], epsilon: float = 1e-4
hass: HomeAssistant,
new_data: dict[str, float],
epsilon: float = 1e-4,
exclude_subentry_id: str | None = None,
) -> bool:
"""Check if the location is already configured."""
for entry in hass.config_entries.async_entries(DOMAIN):
for subentry in entry.subentries.values():
if exclude_subentry_id and subentry.subentry_id == exclude_subentry_id:
continue
# A more accurate way is to use the haversine formula, but for simplicity
# we use a simple distance check. The epsilon value is small anyway.
# This is mostly to capture cases where the user has slightly moved the location pin.
@@ -106,7 +115,7 @@ class GoogleWeatherConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
"""Handle a flow initialized by the user, reauth or reconfigure."""
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {
"api_key_url": "https://developers.google.com/maps/documentation/weather/get-api-key",
@@ -116,21 +125,45 @@ class GoogleWeatherConfigFlow(ConfigFlow, domain=DOMAIN):
api_key = user_input[CONF_API_KEY]
referrer = user_input.get(SECTION_API_KEY_OPTIONS, {}).get(CONF_REFERRER)
self._async_abort_entries_match({CONF_API_KEY: api_key})
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
return self.async_abort(reason="already_configured")
if self.source in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
entry = (
self._get_reauth_entry()
if self.source == SOURCE_REAUTH
else self._get_reconfigure_entry()
)
subentry = next(iter(entry.subentries.values()), None)
if subentry:
latitude = subentry.data[CONF_LATITUDE]
longitude = subentry.data[CONF_LONGITUDE]
else:
latitude = self.hass.config.latitude
longitude = self.hass.config.longitude
validation_input = {
CONF_LOCATION: {CONF_LATITUDE: latitude, CONF_LONGITUDE: longitude}
}
else:
if _is_location_already_configured(
self.hass, user_input[CONF_LOCATION]
):
return self.async_abort(reason="already_configured")
validation_input = user_input
api = GoogleWeatherApi(
session=async_get_clientsession(self.hass),
api_key=api_key,
referrer=referrer,
language_code=self.hass.config.language,
)
if await _validate_input(user_input, api, errors, description_placeholders):
if await _validate_input(
validation_input, api, errors, description_placeholders
):
data = {CONF_API_KEY: api_key, CONF_REFERRER: referrer}
if self.source in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
return self.async_update_reload_and_abort(entry, data=data)
return self.async_create_entry(
title="Google Weather",
data={
CONF_API_KEY: api_key,
CONF_REFERRER: referrer,
},
data=data,
subentries=[
{
"subentry_type": "location",
@@ -140,19 +173,47 @@ class GoogleWeatherConfigFlow(ConfigFlow, domain=DOMAIN):
},
],
)
if self.source in (SOURCE_REAUTH, SOURCE_RECONFIGURE):
entry = (
self._get_reauth_entry()
if self.source == SOURCE_REAUTH
else self._get_reconfigure_entry()
)
if user_input is None:
user_input = {
CONF_API_KEY: entry.data.get(CONF_API_KEY),
SECTION_API_KEY_OPTIONS: {
CONF_REFERRER: entry.data.get(CONF_REFERRER)
},
}
schema = STEP_USER_DATA_SCHEMA
else:
user_input = {}
schema = STEP_USER_DATA_SCHEMA.schema.copy()
schema.update(_get_location_schema(self.hass).schema)
if user_input is None:
user_input = {}
schema_dict = STEP_USER_DATA_SCHEMA.schema.copy()
schema_dict.update(_get_location_schema(self.hass).schema)
schema = vol.Schema(schema_dict)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
vol.Schema(schema), user_input
),
data_schema=self.add_suggested_values_to_schema(schema, user_input),
errors=errors,
description_placeholders=description_placeholders,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle reauth flow."""
return await self.async_step_user()
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a reconfiguration flow."""
return await self.async_step_user(user_input)
@classmethod
@callback
def async_get_supported_subentry_types(
@@ -165,6 +226,11 @@ class GoogleWeatherConfigFlow(ConfigFlow, domain=DOMAIN):
class LocationSubentryFlowHandler(ConfigSubentryFlow):
"""Handle a subentry flow for location."""
@property
def _is_new(self) -> bool:
"""Return if this is a new subentry."""
return self.source == SOURCE_USER
async def async_step_location(
self,
user_input: dict[str, Any] | None = None,
@@ -176,16 +242,35 @@ class LocationSubentryFlowHandler(ConfigSubentryFlow):
errors: dict[str, str] = {}
description_placeholders: dict[str, str] = {}
if user_input is not None:
if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
exclude_id = (
None if self._is_new else self._get_reconfigure_subentry().subentry_id
)
if _is_location_already_configured(
self.hass, user_input[CONF_LOCATION], exclude_subentry_id=exclude_id
):
return self.async_abort(reason="already_configured")
api: GoogleWeatherApi = self._get_entry().runtime_data.api
if await _validate_input(user_input, api, errors, description_placeholders):
return self.async_create_entry(
if self._is_new:
return self.async_create_entry(
title=user_input[CONF_NAME],
data=user_input[CONF_LOCATION],
)
return self.async_update_and_abort(
self._get_entry(),
self._get_reconfigure_subentry(),
title=user_input[CONF_NAME],
data=user_input[CONF_LOCATION],
)
else:
elif self._is_new:
user_input = {}
else:
subentry = self._get_reconfigure_subentry()
user_input = {
CONF_NAME: subentry.title,
CONF_LOCATION: dict(subentry.data),
}
return self.async_show_form(
step_id="location",
data_schema=self.add_suggested_values_to_schema(
@@ -196,3 +281,4 @@ class LocationSubentryFlowHandler(ConfigSubentryFlow):
)
async_step_user = async_step_location
async_step_reconfigure = async_step_location

View File

@@ -12,6 +12,7 @@ from google_weather_api import (
CurrentConditionsResponse,
DailyForecastResponse,
GoogleWeatherApi,
GoogleWeatherApiAuthError,
GoogleWeatherApiError,
HourlyForecastResponse,
)
@@ -19,6 +20,7 @@ from google_weather_api import (
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import (
TimestampDataUpdateCoordinator,
UpdateFailed,
@@ -92,6 +94,14 @@ class GoogleWeatherBaseCoordinator(TimestampDataUpdateCoordinator[T]):
self.subentry.data[CONF_LATITUDE],
self.subentry.data[CONF_LONGITUDE],
)
except GoogleWeatherApiAuthError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_error",
translation_placeholders={
"error": str(err),
},
) from err
except GoogleWeatherApiError as err:
_LOGGER.error(
"Error fetching %s for %s: %s",

View File

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["google_weather_api"],
"quality_scale": "bronze",
"quality_scale": "platinum",
"requirements": ["python-google-weather-api==0.0.6"]
}

View File

@@ -38,7 +38,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done
# Gold
@@ -68,7 +68,7 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: No repairs.

View File

@@ -1,7 +1,9 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "Unable to connect to the Google Weather API:\n\n{error_message}",
@@ -38,7 +40,8 @@
"location": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_location%]",
"entry_not_loaded": "Cannot add things while the configuration is disabled."
"entry_not_loaded": "Cannot add things while the configuration is disabled.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "Location",
"error": {
@@ -46,6 +49,7 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"initiate_flow": {
"reconfigure": "Reconfigure location",
"user": "Add location"
},
"step": {
@@ -100,6 +104,9 @@
}
},
"exceptions": {
"auth_error": {
"message": "Authentication failed: {error}"
},
"update_error": {
"message": "Error fetching weather data: {error}"
}

View File

@@ -79,7 +79,6 @@ from .config import HassioConfig
from .const import (
ADDONS_COORDINATOR,
ATTR_REPOSITORIES,
COORDINATOR,
DATA_ADDONS_LIST,
DATA_COMPONENT,
DATA_CONFIG_STORE,
@@ -93,12 +92,9 @@ from .const import (
DATA_SUPERVISOR_INFO,
DOMAIN,
HASSIO_UPDATE_INTERVAL,
STATS_COORDINATOR,
)
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
get_addons_info,
get_addons_list,
get_addons_stats,
@@ -388,6 +384,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
]
hass.data[DATA_SUPERVISOR_INFO]["addons"] = hass.data[DATA_ADDONS_LIST]
async_call_later(
hass,
HASSIO_UPDATE_INTERVAL,
HassJob(update_info_data, cancel_on_shutdown=True),
)
# Fetch data
update_info_task = hass.async_create_task(update_info_data(), eager_start=True)
@@ -460,20 +462,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
dev_reg = dr.async_get(hass)
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
await coordinator.async_config_entry_first_refresh()
hass.data[COORDINATOR] = coordinator
addon_coordinator = HassioAddOnDataUpdateCoordinator(
hass, entry, dev_reg, coordinator.jobs
)
await addon_coordinator.async_config_entry_first_refresh()
hass.data[ADDONS_COORDINATOR] = addon_coordinator
stats_coordinator = HassioStatsDataUpdateCoordinator(hass, entry)
await stats_coordinator.async_config_entry_first_refresh()
hass.data[STATS_COORDINATOR] = stats_coordinator
hass.data[ADDONS_COORDINATOR] = coordinator
def deprecated_setup_issue() -> None:
os_info = get_os_info(hass)
@@ -540,12 +531,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
# Unload coordinator
coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR]
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
coordinator.unload()
# Pop coordinators
hass.data.pop(COORDINATOR, None)
# Pop coordinator
hass.data.pop(ADDONS_COORDINATOR, None)
hass.data.pop(STATS_COORDINATOR, None)
return unload_ok

View File

@@ -20,7 +20,6 @@ from .const import (
ADDONS_COORDINATOR,
ATTR_STARTED,
ATTR_STATE,
COORDINATOR,
DATA_KEY_ADDONS,
DATA_KEY_MOUNTS,
)
@@ -61,18 +60,17 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Binary sensor set up for Hass.io config entry."""
addons_coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[COORDINATOR]
coordinator = hass.data[ADDONS_COORDINATOR]
async_add_entities(
itertools.chain(
[
HassioAddonBinarySensor(
addon=addon,
coordinator=addons_coordinator,
coordinator=coordinator,
entity_description=entity_description,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
],
[

View File

@@ -77,9 +77,7 @@ EVENT_JOB = "job"
UPDATE_KEY_SUPERVISOR = "supervisor"
STARTUP_COMPLETE = "complete"
COORDINATOR = "hassio_coordinator"
ADDONS_COORDINATOR = "hassio_addons_coordinator"
STATS_COORDINATOR = "hassio_stats_coordinator"
DATA_COMPONENT: HassKey[HassIO] = HassKey(DOMAIN)
@@ -97,8 +95,6 @@ DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_ADDONS_LIST = "hassio_addons_list"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)
HASSIO_STATS_UPDATE_INTERVAL = timedelta(seconds=60)
ATTR_AUTO_UPDATE = "auto_update"
ATTR_VERSION = "version"

View File

@@ -7,7 +7,7 @@ from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
import logging
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING, Any, cast
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import (
@@ -15,9 +15,9 @@ from aiohasupervisor.models import (
CIFSMountResponse,
InstalledAddon,
NFSMountResponse,
ResponseData,
StoreInfo,
)
from aiohasupervisor.models.base import ResponseData
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
@@ -35,6 +35,7 @@ from .const import (
ATTR_SLUG,
ATTR_URL,
ATTR_VERSION,
CONTAINER_INFO,
CONTAINER_STATS,
CORE_CONTAINER,
DATA_ADDONS_INFO,
@@ -58,8 +59,6 @@ from .const import (
DATA_SUPERVISOR_INFO,
DATA_SUPERVISOR_STATS,
DOMAIN,
HASSIO_ADDON_UPDATE_INTERVAL,
HASSIO_STATS_UPDATE_INTERVAL,
HASSIO_UPDATE_INTERVAL,
REQUEST_REFRESH_DELAY,
SUPERVISOR_CONTAINER,
@@ -319,315 +318,7 @@ def async_remove_devices_from_dev_reg(
dev_reg.async_remove_device(dev.id)
class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to retrieve Hass.io container stats."""
config_entry: ConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_STATS_UPDATE_INTERVAL,
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.supervisor_client = get_supervisor_client(hass)
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
async def _async_update_data(self) -> dict[str, Any]:
"""Update stats data via library."""
try:
await self._fetch_stats()
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
new_data: dict[str, Any] = {}
new_data[DATA_KEY_CORE] = get_core_stats(self.hass)
new_data[DATA_KEY_SUPERVISOR] = get_supervisor_stats(self.hass)
new_data[DATA_KEY_ADDONS] = get_addons_stats(self.hass)
return new_data
async def _fetch_stats(self) -> None:
"""Fetch container stats for subscribed entities."""
container_updates = self._container_updates
data = self.hass.data
client = self.supervisor_client
# Fetch core and supervisor stats
updates: dict[str, Awaitable] = {}
if container_updates.get(CORE_CONTAINER, {}).get(CONTAINER_STATS):
updates[DATA_CORE_STATS] = client.homeassistant.stats()
if container_updates.get(SUPERVISOR_CONTAINER, {}).get(CONTAINER_STATS):
updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()
if updates:
api_results: list[ResponseData] = await asyncio.gather(*updates.values())
for key, result in zip(updates, api_results, strict=True):
data[key] = result.to_dict()
# Fetch addon stats
addons_list = get_addons_list(self.hass) or []
started_addons = {
addon[ATTR_SLUG]
for addon in addons_list
if addon.get("state") in {AddonState.STARTED, AddonState.STARTUP}
}
addons_stats: dict[str, Any] = data.setdefault(DATA_ADDONS_STATS, {})
# Clean up cache for stopped/removed addons
for slug in addons_stats.keys() - started_addons:
del addons_stats[slug]
# Fetch stats for addons with subscribed entities
addon_stats_results = dict(
await asyncio.gather(
*[
self._update_addon_stats(slug)
for slug in started_addons
if container_updates.get(slug, {}).get(CONTAINER_STATS)
]
)
)
addons_stats.update(addon_stats_results)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Update single addon stats."""
try:
stats = await self.supervisor_client.addons.addon_stats(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
return (slug, stats.to_dict())
@callback
def async_enable_container_updates(
self, slug: str, entity_id: str, types: set[str]
) -> CALLBACK_TYPE:
"""Enable stats updates for a container."""
enabled_updates = self._container_updates[slug]
for key in types:
enabled_updates[key].add(entity_id)
@callback
def _remove() -> None:
for key in types:
enabled_updates[key].discard(entity_id)
if not enabled_updates[key]:
del enabled_updates[key]
if not enabled_updates:
self._container_updates.pop(slug, None)
return _remove
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to retrieve Hass.io Add-on status."""
config_entry: ConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
dev_reg: dr.DeviceRegistry,
jobs: SupervisorJobs,
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# hammering the Supervisor API on startup
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.hassio = hass.data[DATA_COMPONENT]
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self._addon_info_subscriptions: defaultdict[str, set[str]] = defaultdict(set)
self.supervisor_client = get_supervisor_client(hass)
self.jobs = jobs
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
try:
installed_addons: list[InstalledAddon] = await client.addons.list()
all_addons = {addon.slug for addon in installed_addons}
# Fetch addon info for all addons on first update, or only
# for addons with subscribed entities on subsequent updates.
addon_info_results = dict(
await asyncio.gather(
*[
self._update_addon_info(slug)
for slug in all_addons
if is_first_update or self._addon_info_subscriptions.get(slug)
]
)
)
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
# Update hass.data for legacy accessor functions
data = self.hass.data
addons_list_dicts = [addon.to_dict() for addon in installed_addons]
data[DATA_ADDONS_LIST] = addons_list_dicts
# Update addon info cache in hass.data
addon_info_cache: dict[str, Any] = data.setdefault(DATA_ADDONS_INFO, {})
for slug in addon_info_cache.keys() - all_addons:
del addon_info_cache[slug]
addon_info_cache.update(addon_info_results)
# Deprecated 2026.4.0: Folding addons.list results into supervisor_info
# for compatibility. Written to hass.data only, not coordinator data.
if DATA_SUPERVISOR_INFO in data:
data[DATA_SUPERVISOR_INFO]["addons"] = addons_list_dicts
# Build clean coordinator data
store_data = get_store(self.hass)
if store_data:
repositories = {
repo.slug: repo.name
for repo in StoreInfo.from_dict(store_data).repositories
}
else:
repositories = {}
new_data: dict[str, Any] = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
**addon,
ATTR_AUTO_UPDATE: (addon_info_cache.get(slug) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
),
}
for addon in addons_list_dicts
}
# If this is the initial refresh, register all addons
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
# Remove add-ons that are no longer installed from device registry
supervisor_addon_devices = {
list(device.identifiers)[0][1]
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
self.entry_id
)
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# If there are new add-ons, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
)
return {}
return new_data
async def get_changelog(self, addon_slug: str) -> str | None:
"""Get the changelog for an add-on."""
try:
return await self.supervisor_client.store.addon_changelog(addon_slug)
except SupervisorNotFoundError:
return None
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Return the info for an addon."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)
# Translate to legacy hassio names for compatibility
info_dict = info.to_dict()
info_dict["hassio_api"] = info_dict.pop("supervisor_api")
info_dict["hassio_role"] = info_dict.pop("supervisor_role")
return (slug, info_dict)
@callback
def async_enable_addon_info_updates(
self, slug: str, entity_id: str
) -> CALLBACK_TYPE:
"""Enable info updates for an add-on."""
self._addon_info_subscriptions[slug].add(entity_id)
@callback
def _remove() -> None:
self._addon_info_subscriptions[slug].discard(entity_id)
if not self._addon_info_subscriptions[slug]:
del self._addon_info_subscriptions[slug]
return _remove
async def _async_refresh(
self,
log_failures: bool = True,
raise_on_auth_failed: bool = False,
scheduled: bool = False,
raise_on_entry_error: bool = False,
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force reloading add-on updates for non-scheduled
# updates.
#
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.store.reload()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)
await super()._async_refresh(
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to retrieve Hass.io status."""
config_entry: ConfigEntry
@@ -643,72 +334,80 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
name=DOMAIN,
update_interval=HASSIO_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# hammering the Supervisor API on startup
# fetching the container stats right away and avoid hammering
# the Supervisor API on startup
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.hassio = hass.data[DATA_COMPONENT]
self.data = {}
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
self.supervisor_client = get_supervisor_client(hass)
self.jobs = SupervisorJobs(hass)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
try:
(
info,
core_info,
supervisor_info,
os_info,
host_info,
store_info,
network_info,
) = await asyncio.gather(
client.info(),
client.homeassistant.info(),
client.supervisor.info(),
client.os.info(),
client.host.info(),
client.store.info(),
client.network.info(),
)
mounts_info = await client.mounts.info()
await self.jobs.refresh_data(is_first_update)
await self.force_data_refresh(is_first_update)
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
# Build clean coordinator data
new_data: dict[str, Any] = {}
new_data[DATA_KEY_CORE] = core_info.to_dict()
new_data[DATA_KEY_SUPERVISOR] = supervisor_info.to_dict()
new_data[DATA_KEY_HOST] = host_info.to_dict()
new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
supervisor_info = get_supervisor_info(self.hass) or {}
addons_info = get_addons_info(self.hass) or {}
addons_stats = get_addons_stats(self.hass)
store_data = get_store(self.hass)
mounts_info = await self.supervisor_client.mounts.info()
addons_list = get_addons_list(self.hass) or []
if store_data:
repositories = {
repo.slug: repo.name
for repo in StoreInfo.from_dict(store_data).repositories
}
else:
repositories = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
**addon,
**(addons_stats.get(slug) or {}),
ATTR_AUTO_UPDATE: (addons_info.get(slug) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
),
}
for addon in addons_list
}
if self.is_hass_os:
new_data[DATA_KEY_OS] = os_info.to_dict()
new_data[DATA_KEY_OS] = get_os_info(self.hass)
# Update hass.data for legacy accessor functions
data = self.hass.data
data[DATA_INFO] = info.to_dict()
data[DATA_CORE_INFO] = new_data[DATA_KEY_CORE]
data[DATA_OS_INFO] = new_data.get(DATA_KEY_OS, os_info.to_dict())
data[DATA_HOST_INFO] = new_data[DATA_KEY_HOST]
data[DATA_STORE] = store_info.to_dict()
data[DATA_NETWORK_INFO] = network_info.to_dict()
# Separate dict for hass.data supervisor info since we add deprecated
# compat keys that should not be in coordinator data
data[DATA_SUPERVISOR_INFO] = supervisor_info.to_dict()
# Deprecated 2026.4.0: Folding repositories into supervisor_info for
# compatibility. Written to hass.data only, not coordinator data.
data[DATA_SUPERVISOR_INFO]["repositories"] = data[DATA_STORE][ATTR_REPOSITORIES]
new_data[DATA_KEY_CORE] = {
**(get_core_info(self.hass) or {}),
**get_core_stats(self.hass),
}
new_data[DATA_KEY_SUPERVISOR] = {
**supervisor_info,
**get_supervisor_stats(self.hass),
}
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
# If this is the initial refresh, register all main components
# If this is the initial refresh, register all addons and return the dict
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
async_register_mounts_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
)
@@ -724,6 +423,17 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
)
# Remove add-ons that are no longer installed from device registry
supervisor_addon_devices = {
list(device.identifiers)[0][1]
for device in self.dev_reg.devices.get_devices_for_config_entry_id(
self.entry_id
)
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# Remove mounts that no longer exists from device registry
supervisor_mount_devices = {
device.name
@@ -743,11 +453,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
# If there are new mounts, we should reload the config entry so we can
# If there are new add-ons or mounts, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_MOUNTS]) - set(self.data.get(DATA_KEY_MOUNTS, {}))
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
or set(new_data[DATA_KEY_MOUNTS]) - set(self.data[DATA_KEY_MOUNTS])
):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
@@ -756,6 +467,146 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return new_data
async def get_changelog(self, addon_slug: str) -> str | None:
"""Get the changelog for an add-on."""
try:
return await self.supervisor_client.store.addon_changelog(addon_slug)
except SupervisorNotFoundError:
return None
async def force_data_refresh(self, first_update: bool) -> None:
"""Force update of the addon info."""
container_updates = self._container_updates
data = self.hass.data
client = self.supervisor_client
updates: dict[str, Awaitable[ResponseData]] = {
DATA_INFO: client.info(),
DATA_CORE_INFO: client.homeassistant.info(),
DATA_SUPERVISOR_INFO: client.supervisor.info(),
DATA_OS_INFO: client.os.info(),
DATA_STORE: client.store.info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = client.homeassistant.stats()
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
updates[DATA_SUPERVISOR_STATS] = client.supervisor.stats()
# Pull off addons.list results for further processing before caching
addons_list, *results = await asyncio.gather(
client.addons.list(), *updates.values()
)
for key, result in zip(updates, cast(list[ResponseData], results), strict=True):
data[key] = result.to_dict()
installed_addons = cast(list[InstalledAddon], addons_list)
data[DATA_ADDONS_LIST] = [addon.to_dict() for addon in installed_addons]
# Deprecated 2026.4.0: Folding repositories and addons.list results into supervisor_info for compatibility
# Can drop this after removal period
data[DATA_SUPERVISOR_INFO].update(
{
"repositories": data[DATA_STORE][ATTR_REPOSITORIES],
"addons": [addon.to_dict() for addon in installed_addons],
}
)
all_addons = {addon.slug for addon in installed_addons}
started_addons = {
addon.slug
for addon in installed_addons
if addon.state in {AddonState.STARTED, AddonState.STARTUP}
}
#
# Update addon info if its the first update or
# there is at least one entity that needs the data.
#
# When entities are added they call async_enable_container_updates
# to enable updates for the endpoints they need via
# async_added_to_hass. This ensures that we only update
# the data for the endpoints that are needed to avoid unnecessary
# API calls since otherwise we would fetch stats for all containers
# and throw them away.
#
for data_key, update_func, enabled_key, wanted_addons, needs_first_update in (
(
DATA_ADDONS_STATS,
self._update_addon_stats,
CONTAINER_STATS,
started_addons,
False,
),
(
DATA_ADDONS_INFO,
self._update_addon_info,
CONTAINER_INFO,
all_addons,
True,
),
):
container_data: dict[str, Any] = data.setdefault(data_key, {})
# Clean up cache
for slug in container_data.keys() - wanted_addons:
del container_data[slug]
# Update cache from API
container_data.update(
dict(
await asyncio.gather(
*[
update_func(slug)
for slug in wanted_addons
if (first_update and needs_first_update)
or enabled_key in container_updates[slug]
]
)
)
)
# Refresh jobs data
await self.jobs.refresh_data(first_update)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Update single addon stats."""
try:
stats = await self.supervisor_client.addons.addon_stats(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
return (slug, stats.to_dict())
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
"""Return the info for an addon."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)
# Translate to legacy hassio names for compatibility
info_dict = info.to_dict()
info_dict["hassio_api"] = info_dict.pop("supervisor_api")
info_dict["hassio_role"] = info_dict.pop("supervisor_role")
return (slug, info_dict)
@callback
def async_enable_container_updates(
self, slug: str, entity_id: str, types: set[str]
) -> CALLBACK_TYPE:
"""Enable updates for an add-on."""
enabled_updates = self._container_updates[slug]
for key in types:
enabled_updates[key].add(entity_id)
@callback
def _remove() -> None:
for key in types:
enabled_updates[key].remove(entity_id)
return _remove
async def _async_refresh(
self,
log_failures: bool = True,
@@ -765,16 +616,14 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force reloading updates of main components for
# non-scheduled updates.
#
# Force refreshing updates for non-scheduled updates
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.reload_updates()
await self.supervisor_client.refresh_updates()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)
@@ -782,6 +631,18 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
async def force_addon_info_data_refresh(self, addon_slug: str) -> None:
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
@callback
def unload(self) -> None:
"""Clean up when config entry unloaded."""

View File

@@ -11,12 +11,8 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import ADDONS_COORDINATOR, COORDINATOR, STATS_COORDINATOR
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
)
from .const import ADDONS_COORDINATOR
from .coordinator import HassioDataUpdateCoordinator
async def async_get_config_entry_diagnostics(
@@ -24,9 +20,7 @@ async def async_get_config_entry_diagnostics(
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR]
addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
stats_coordinator: HassioStatsDataUpdateCoordinator = hass.data[STATS_COORDINATOR]
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
@@ -59,7 +53,5 @@ async def async_get_config_entry_diagnostics(
return {
"coordinator_data": coordinator.data,
"addons_coordinator_data": addons_coordinator.data,
"stats_coordinator_data": stats_coordinator.data,
"devices": devices,
}

View File

@@ -13,6 +13,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_SLUG,
CONTAINER_STATS,
CORE_CONTAINER,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
@@ -20,79 +21,20 @@ from .const import (
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DOMAIN,
KEY_TO_UPDATE_TYPES,
SUPERVISOR_CONTAINER,
)
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioDataUpdateCoordinator,
HassioStatsDataUpdateCoordinator,
)
from .coordinator import HassioDataUpdateCoordinator
class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]):
"""Base entity for container stats (CPU, memory)."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioStatsDataUpdateCoordinator,
entity_description: EntityDescription,
*,
container_id: str,
data_key: str,
device_id: str,
unique_id_prefix: str,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._container_id = container_id
self._data_key = data_key
self._attr_unique_id = f"{unique_id_prefix}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
@property
def available(self) -> bool:
"""Return True if entity is available."""
if self._data_key == DATA_KEY_ADDONS:
return (
super().available
and DATA_KEY_ADDONS in self.coordinator.data
and self.entity_description.key
in (
self.coordinator.data[DATA_KEY_ADDONS].get(self._container_id) or {}
)
)
return (
super().available
and self._data_key in self.coordinator.data
and self.entity_description.key in self.coordinator.data[self._data_key]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to stats updates."""
await super().async_added_to_hass()
self.async_on_remove(
self.coordinator.async_enable_container_updates(
self._container_id, self.entity_id, {CONTAINER_STATS}
)
)
# Stats are only fetched for containers with subscribed entities.
# The first coordinator refresh (before entities exist) has no
# subscribers, so no stats are fetched. Schedule a debounced
# refresh so that all stats entities registering during platform
# setup are batched into a single API call.
await self.coordinator.async_request_refresh()
class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base entity for a Hass.io add-on."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioAddOnDataUpdateCoordinator,
coordinator: HassioDataUpdateCoordinator,
entity_description: EntityDescription,
addon: dict[str, Any],
) -> None:
@@ -114,13 +56,16 @@ class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
)
async def async_added_to_hass(self) -> None:
"""Subscribe to addon info updates."""
"""Subscribe to updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_addon_info_updates(
self._addon_slug, self.entity_id
self.coordinator.async_enable_container_updates(
self._addon_slug, self.entity_id, update_types
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioOSEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
@@ -201,6 +146,18 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
in self.coordinator.data[DATA_KEY_SUPERVISOR]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_container_updates(
SUPERVISOR_CONTAINER, self.entity_id, update_types
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Core."""
@@ -227,6 +184,18 @@ class HassioCoreEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE]
)
async def async_added_to_hass(self) -> None:
"""Subscribe to updates."""
await super().async_added_to_hass()
update_types = KEY_TO_UPDATE_TYPES[self.entity_description.key]
self.async_on_remove(
self.coordinator.async_enable_container_updates(
CORE_CONTAINER, self.entity_id, update_types
)
)
if CONTAINER_STATS in update_types:
await self.coordinator.async_request_refresh()
class HassioMountEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
"""Base Entity for Mount."""

View File

@@ -28,6 +28,7 @@ from homeassistant.helpers.issue_registry import (
)
from .const import (
ADDONS_COORDINATOR,
ATTR_DATA,
ATTR_HEALTHY,
ATTR_SLUG,
@@ -37,7 +38,6 @@ from .const import (
ATTR_UNSUPPORTED_REASONS,
ATTR_UPDATE_KEY,
ATTR_WS_EVENT,
COORDINATOR,
DOMAIN,
EVENT_HEALTH_CHANGED,
EVENT_ISSUE_CHANGED,
@@ -418,7 +418,7 @@ class SupervisorIssues:
def _async_coordinator_refresh(self) -> None:
"""Refresh coordinator to update latest data in entities."""
coordinator: HassioDataUpdateCoordinator | None
if coordinator := self._hass.data.get(COORDINATOR):
if coordinator := self._hass.data.get(ADDONS_COORDINATOR):
coordinator.config_entry.async_create_task(
self._hass, coordinator.async_refresh()
)

View File

@@ -17,24 +17,20 @@ from .const import (
ADDONS_COORDINATOR,
ATTR_CPU_PERCENT,
ATTR_MEMORY_PERCENT,
ATTR_SLUG,
ATTR_VERSION,
ATTR_VERSION_LATEST,
COORDINATOR,
CORE_CONTAINER,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
STATS_COORDINATOR,
SUPERVISOR_CONTAINER,
)
from .entity import (
HassioAddonEntity,
HassioCoreEntity,
HassioHostEntity,
HassioOSEntity,
HassioStatsEntity,
HassioSupervisorEntity,
)
COMMON_ENTITY_DESCRIPTIONS = (
@@ -67,7 +63,10 @@ STATS_ENTITY_DESCRIPTIONS = (
),
)
ADDON_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS + STATS_ENTITY_DESCRIPTIONS
CORE_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS
OS_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS
SUPERVISOR_ENTITY_DESCRIPTIONS = STATS_ENTITY_DESCRIPTIONS
HOST_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
@@ -115,64 +114,36 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Sensor set up for Hass.io config entry."""
addons_coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[COORDINATOR]
stats_coordinator = hass.data[STATS_COORDINATOR]
coordinator = hass.data[ADDONS_COORDINATOR]
entities: list[SensorEntity] = []
# Add-on non-stats sensors (version, version_latest)
entities.extend(
entities: list[
HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
] = [
HassioAddonSensor(
addon=addon,
coordinator=addons_coordinator,
coordinator=coordinator,
entity_description=entity_description,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in COMMON_ENTITY_DESCRIPTIONS
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
]
# Add-on stats sensors (cpu_percent, memory_percent)
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
CoreSensor(
coordinator=coordinator,
entity_description=entity_description,
container_id=addon[ATTR_SLUG],
data_key=DATA_KEY_ADDONS,
device_id=addon[ATTR_SLUG],
unique_id_prefix=addon[ATTR_SLUG],
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in STATS_ENTITY_DESCRIPTIONS
for entity_description in CORE_ENTITY_DESCRIPTIONS
)
# Core stats sensors
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
SupervisorSensor(
coordinator=coordinator,
entity_description=entity_description,
container_id=CORE_CONTAINER,
data_key=DATA_KEY_CORE,
device_id="core",
unique_id_prefix="home_assistant_core",
)
for entity_description in STATS_ENTITY_DESCRIPTIONS
for entity_description in SUPERVISOR_ENTITY_DESCRIPTIONS
)
# Supervisor stats sensors
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=SUPERVISOR_CONTAINER,
data_key=DATA_KEY_SUPERVISOR,
device_id="supervisor",
unique_id_prefix="home_assistant_supervisor",
)
for entity_description in STATS_ENTITY_DESCRIPTIONS
)
# Host sensors
entities.extend(
HostSensor(
coordinator=coordinator,
@@ -181,7 +152,6 @@ async def async_setup_entry(
for entity_description in HOST_ENTITY_DESCRIPTIONS
)
# OS sensors
if coordinator.is_hass_os:
entities.extend(
HassioOSSensor(
@@ -205,21 +175,8 @@ class HassioAddonSensor(HassioAddonEntity, SensorEntity):
]
class HassioStatsSensor(HassioStatsEntity, SensorEntity):
"""Sensor to track container stats."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
if self._data_key == DATA_KEY_ADDONS:
return self.coordinator.data[DATA_KEY_ADDONS][self._container_id][
self.entity_description.key
]
return self.coordinator.data[self._data_key][self.entity_description.key]
class HassioOSSensor(HassioOSEntity, SensorEntity):
"""Sensor to track a Hass.io OS attribute."""
"""Sensor to track a Hass.io add-on attribute."""
@property
def native_value(self) -> str:
@@ -227,6 +184,24 @@ class HassioOSSensor(HassioOSEntity, SensorEntity):
return self.coordinator.data[DATA_KEY_OS][self.entity_description.key]
class CoreSensor(HassioCoreEntity, SensorEntity):
"""Sensor to track a core attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_CORE][self.entity_description.key]
class SupervisorSensor(HassioSupervisorEntity, SensorEntity):
"""Sensor to track a supervisor attribute."""
@property
def native_value(self) -> str:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_SUPERVISOR][self.entity_description.key]
class HostSensor(HassioHostEntity, SensorEntity):
"""Sensor to track a host attribute."""

View File

@@ -32,6 +32,7 @@ from homeassistant.helpers import (
from homeassistant.util.dt import now
from .const import (
ADDONS_COORDINATOR,
ATTR_ADDON,
ATTR_ADDONS,
ATTR_APP,
@@ -44,7 +45,6 @@ from .const import (
ATTR_LOCATION,
ATTR_PASSWORD,
ATTR_SLUG,
COORDINATOR,
DOMAIN,
SupervisorEntityModel,
)
@@ -417,7 +417,7 @@ def async_register_network_storage_services(
if (
device.name is None
or device.model != SupervisorEntityModel.MOUNT
or (coordinator := hass.data.get(COORDINATOR)) is None
or (coordinator := hass.data.get(ADDONS_COORDINATOR)) is None
or coordinator.entry_id not in device.config_entries
):
raise ServiceValidationError(

View File

@@ -25,7 +25,6 @@ from .const import (
ATTR_AUTO_UPDATE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
COORDINATOR,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_OS,
@@ -52,9 +51,9 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Supervisor update based on a config entry."""
coordinator = hass.data[COORDINATOR]
coordinator = hass.data[ADDONS_COORDINATOR]
entities: list[UpdateEntity] = [
entities = [
SupervisorSupervisorUpdateEntity(
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
@@ -65,6 +64,15 @@ async def async_setup_entry(
),
]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
)
if coordinator.is_hass_os:
entities.append(
SupervisorOSUpdateEntity(
@@ -73,16 +81,6 @@ async def async_setup_entry(
)
)
addons_coordinator = hass.data[ADDONS_COORDINATOR]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=addons_coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
)
async_add_entities(entities)

View File

@@ -179,13 +179,13 @@ class HomeConnectAirConditioningEntity(HomeConnectEntity, ClimateEntity):
self.async_on_remove(
self.coordinator.async_add_listener(
self._handle_coordinator_update_fan_mode,
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_FAN_SPEED_MODE,
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_OPTION_FAN_SPEED_MODE,
)
)
self.async_on_remove(
self.coordinator.async_add_listener(
self._handle_coordinator_update,
EventKey(SettingKey.BSH_COMMON_POWER_STATE),
EventKey.BSH_COMMON_SETTING_POWER_STATE,
)
)
@@ -215,9 +215,7 @@ class HomeConnectAirConditioningEntity(HomeConnectEntity, ClimateEntity):
"""Return the fan setting."""
option_value = None
if event := self.appliance.events.get(
EventKey(
OptionKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_FAN_SPEED_MODE
)
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_OPTION_FAN_SPEED_MODE
):
option_value = event.value
return (

View File

@@ -84,7 +84,7 @@ class HomeConnectAirConditioningFanEntity(HomeConnectEntity, FanEntity):
coordinator,
AIR_CONDITIONER_ENTITY_DESCRIPTION,
context_override=(
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_FAN_SPEED_PERCENTAGE
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_OPTION_FAN_SPEED_PERCENTAGE
),
)
self.update_preset_mode()
@@ -104,7 +104,7 @@ class HomeConnectAirConditioningFanEntity(HomeConnectEntity, FanEntity):
self.async_on_remove(
self.coordinator.async_add_listener(
self._handle_coordinator_update_preset_mode,
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_FAN_SPEED_MODE,
EventKey.HEATING_VENTILATION_AIR_CONDITIONING_AIR_CONDITIONER_OPTION_FAN_SPEED_MODE,
)
)

View File

@@ -23,6 +23,6 @@
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"quality_scale": "platinum",
"requirements": ["aiohomeconnect==0.34.0"],
"requirements": ["aiohomeconnect==0.36.0"],
"zeroconf": ["_homeconnect._tcp.local."]
}

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import logging
import re
from typing import TYPE_CHECKING
from homeassistant.util.hass_dict import HassKey
@@ -37,3 +38,7 @@ SILABS_MULTIPROTOCOL_ADDON_SLUG = "core_silabs_multiprotocol"
SILABS_FLASHER_ADDON_SLUG = "core_silabs_flasher"
Z2M_EMBER_DOCS_URL = "https://www.zigbee2mqtt.io/guide/adapters/emberznet.html"
# Community add-ons use an 8-char repository hash prefix in their slug
Z2M_ADDON_NAME = "Zigbee2MQTT"
Z2M_ADDON_SLUG_REGEX = re.compile(r"^[0-9a-f]{8}_zigbee2mqtt(?:_edge)?$")

View File

@@ -14,7 +14,12 @@ from universal_silabs_flasher.const import ApplicationType as FlasherApplication
from universal_silabs_flasher.firmware import parse_firmware_image
from universal_silabs_flasher.flasher import BaseFlasher, DeviceSpecificFlasher, Flasher
from homeassistant.components.hassio import AddonError, AddonManager, AddonState
from homeassistant.components.hassio import (
AddonError,
AddonManager,
AddonState,
get_apps_list,
)
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
@@ -26,6 +31,8 @@ from .const import (
OTBR_ADDON_MANAGER_DATA,
OTBR_ADDON_NAME,
OTBR_ADDON_SLUG,
Z2M_ADDON_NAME,
Z2M_ADDON_SLUG_REGEX,
ZIGBEE_FLASHER_ADDON_MANAGER_DATA,
ZIGBEE_FLASHER_ADDON_NAME,
ZIGBEE_FLASHER_ADDON_SLUG,
@@ -84,6 +91,17 @@ def get_zigbee_flasher_addon_manager(hass: HomeAssistant) -> WaitingAddonManager
)
@callback
def get_z2m_addon_manager(hass: HomeAssistant, slug: str) -> WaitingAddonManager:
    """Return a waiting add-on manager for the given Zigbee2MQTT add-on slug."""
    # Z2M is distributed via community repositories, so the slug varies and
    # must be supplied by the caller.
    manager = WaitingAddonManager(hass, _LOGGER, Z2M_ADDON_NAME, slug)
    return manager
@dataclass(kw_only=True)
class OwningAddon:
"""Owning add-on."""
@@ -212,6 +230,32 @@ async def get_otbr_addon_firmware_info(
)
async def get_z2m_addon_firmware_info(
    hass: HomeAssistant, z2m_addon_manager: AddonManager
) -> FirmwareInfo | None:
    """Get firmware info from a Z2M add-on.

    Returns None when the add-on cannot be queried, is not installed,
    or has no serial port configured.
    """
    # Any supervisor error means we simply have no information.
    try:
        info = await z2m_addon_manager.async_get_addon_info()
    except AddonError:
        return None

    if info.state == AddonState.NOT_INSTALLED:
        return None

    # The serial port lives under the "serial" mapping in the add-on
    # options; bail out unless it is present and well-formed.
    serial_options = info.options.get("serial")
    if not isinstance(serial_options, dict):
        return None
    port = serial_options.get("port")
    if port is None:
        return None

    return FirmwareInfo(
        device=port,
        firmware_type=ApplicationType.EZSP,
        firmware_version=None,
        source=f"zigbee2mqtt ({z2m_addon_manager.addon_slug})",
        owners=[OwningAddon(slug=z2m_addon_manager.addon_slug)],
    )
async def guess_hardware_owners(
hass: HomeAssistant, device_path: str
) -> list[FirmwareInfo]:
@@ -221,46 +265,54 @@ async def guess_hardware_owners(
async for firmware_info in hass.data[DATA_COMPONENT].iter_firmware_info():
device_guesses[firmware_info.device].append(firmware_info)
if not is_hassio(hass):
return device_guesses.get(device_path, [])
# It may be possible for the OTBR addon to be present without the integration
if is_hassio(hass):
otbr_addon_manager = get_otbr_addon_manager(hass)
otbr_addon_fw_info = await get_otbr_addon_firmware_info(
hass, otbr_addon_manager
)
otbr_path = (
otbr_addon_fw_info.device if otbr_addon_fw_info is not None else None
)
otbr_addon_manager = get_otbr_addon_manager(hass)
otbr_addon_fw_info = await get_otbr_addon_firmware_info(hass, otbr_addon_manager)
otbr_path = otbr_addon_fw_info.device if otbr_addon_fw_info is not None else None
# Only create a new entry if there are no existing OTBR ones
if otbr_path is not None and not any(
info.source == "otbr" for info in device_guesses[otbr_path]
):
assert otbr_addon_fw_info is not None
device_guesses[otbr_path].append(otbr_addon_fw_info)
# Only create a new entry if there are no existing OTBR ones
if otbr_path is not None and not any(
info.source == "otbr" for info in device_guesses[otbr_path]
):
assert otbr_addon_fw_info is not None
device_guesses[otbr_path].append(otbr_addon_fw_info)
if is_hassio(hass):
multipan_addon_manager = await get_multiprotocol_addon_manager(hass)
multipan_addon_manager = await get_multiprotocol_addon_manager(hass)
try:
multipan_addon_info = await multipan_addon_manager.async_get_addon_info()
except AddonError:
pass
else:
if multipan_addon_info.state != AddonState.NOT_INSTALLED:
multipan_path = multipan_addon_info.options.get("device")
try:
multipan_addon_info = await multipan_addon_manager.async_get_addon_info()
except AddonError:
pass
else:
if multipan_addon_info.state != AddonState.NOT_INSTALLED:
multipan_path = multipan_addon_info.options.get("device")
if multipan_path is not None:
device_guesses[multipan_path].append(
FirmwareInfo(
device=multipan_path,
firmware_type=ApplicationType.CPC,
firmware_version=None,
source="multiprotocol",
owners=[
OwningAddon(slug=multipan_addon_manager.addon_slug)
],
)
if multipan_path is not None:
device_guesses[multipan_path].append(
FirmwareInfo(
device=multipan_path,
firmware_type=ApplicationType.CPC,
firmware_version=None,
source="multiprotocol",
owners=[OwningAddon(slug=multipan_addon_manager.addon_slug)],
)
)
# Z2M can be provided by one of many add-ons, we match them by name
for app_info in get_apps_list(hass) or []:
slug = app_info.get("slug")
if not isinstance(slug, str) or Z2M_ADDON_SLUG_REGEX.fullmatch(slug) is None:
continue
z2m_addon_manager = get_z2m_addon_manager(hass, slug)
z2m_fw_info = await get_z2m_addon_firmware_info(hass, z2m_addon_manager)
if z2m_fw_info is not None:
device_guesses[z2m_fw_info.device].append(z2m_fw_info)
return device_guesses.get(device_path, [])

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
from collections.abc import Awaitable, Callable, Coroutine
from dataclasses import dataclass
from datetime import datetime
from functools import wraps
import logging
from typing import Any, Concatenate
@@ -32,12 +31,11 @@ from homeassistant.exceptions import (
ConfigEntryNotReady,
)
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN, UPDATE_INTERVAL
from .const import DOMAIN
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
_LOGGER = logging.getLogger(__name__)
@@ -61,6 +59,7 @@ class AqualinkRuntimeData:
"""Runtime data for Aqualink."""
client: AqualinkClient
coordinators: dict[str, AqualinkDataUpdateCoordinator]
# These will contain the initialized devices
binary_sensors: list[AqualinkBinarySensor]
lights: list[AqualinkLight]
@@ -94,23 +93,47 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
try:
systems = await aqualink.get_systems()
except AqualinkServiceUnauthorizedException as auth_exception:
await aqualink.close()
raise ConfigEntryAuthFailed(
"Invalid credentials for iAqualink"
) from auth_exception
except AqualinkServiceException as svc_exception:
await aqualink.close()
raise ConfigEntryNotReady(
f"Error while attempting to retrieve systems list: {svc_exception}"
) from svc_exception
systems = list(systems.values())
if not systems:
systems_list = list(systems.values())
if not systems_list:
await aqualink.close()
raise ConfigEntryError("No systems detected or supported")
runtime_data = AqualinkRuntimeData(
aqualink, binary_sensors=[], lights=[], sensors=[], switches=[], thermostats=[]
aqualink,
coordinators={},
binary_sensors=[],
lights=[],
sensors=[],
switches=[],
thermostats=[],
)
for system in systems:
for system in systems_list:
coordinator = AqualinkDataUpdateCoordinator(hass, entry, system)
runtime_data.coordinators[system.serial] = coordinator
try:
await coordinator.async_config_entry_first_refresh()
except ConfigEntryAuthFailed:
await aqualink.close()
raise
try:
devices = await system.get_devices()
except AqualinkServiceUnauthorizedException as auth_exception:
await aqualink.close()
raise ConfigEntryAuthFailed(
"Invalid credentials for iAqualink"
) from auth_exception
except AqualinkServiceException as svc_exception:
await aqualink.close()
raise ConfigEntryNotReady(
@@ -158,32 +181,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: AqualinkConfigEntry) ->
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
async def _async_systems_update(_: datetime) -> None:
"""Refresh internal state for all systems."""
for system in systems:
prev = system.online
try:
await system.update()
except (AqualinkServiceException, httpx.HTTPError) as svc_exception:
if prev is not None:
_LOGGER.warning(
"Failed to refresh system %s state: %s",
system.serial,
svc_exception,
)
await system.aqualink.close()
else:
cur = system.online
if cur and not prev:
_LOGGER.warning("System %s reconnected to iAqualink", system.serial)
async_dispatcher_send(hass, DOMAIN)
entry.async_on_unload(
async_track_time_interval(hass, _async_systems_update, UPDATE_INTERVAL)
)
return True
@@ -204,6 +201,6 @@ def refresh_system[_AqualinkEntityT: AqualinkEntity, **_P](
) -> None:
"""Call decorated function and send update signal to all entities."""
await func(self, *args, **kwargs)
async_dispatcher_send(self.hass, DOMAIN)
self.coordinator.async_update_listeners()
return wrapper

View File

@@ -12,6 +12,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AqualinkConfigEntry
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
PARALLEL_UPDATES = 0
@@ -24,11 +25,10 @@ async def async_setup_entry(
) -> None:
"""Set up discovered binary sensors."""
async_add_entities(
(
HassAqualinkBinarySensor(dev)
for dev in config_entry.runtime_data.binary_sensors
),
True,
HassAqualinkBinarySensor(
config_entry.runtime_data.coordinators[dev.system.serial], dev
)
for dev in config_entry.runtime_data.binary_sensors
)
@@ -37,9 +37,11 @@ class HassAqualinkBinarySensor(
):
"""Representation of a binary sensor."""
def __init__(self, dev: AqualinkBinarySensor) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkBinarySensor
) -> None:
"""Initialize AquaLink binary sensor."""
super().__init__(dev)
super().__init__(coordinator, dev)
self._attr_name = dev.label
if dev.label == "Freeze Protection":
self._attr_device_class = BinarySensorDeviceClass.COLD

View File

@@ -19,6 +19,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AqualinkConfigEntry, refresh_system
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
from .utils import await_or_reraise
@@ -34,8 +35,10 @@ async def async_setup_entry(
) -> None:
"""Set up discovered switches."""
async_add_entities(
(HassAqualinkThermostat(dev) for dev in config_entry.runtime_data.thermostats),
True,
HassAqualinkThermostat(
config_entry.runtime_data.coordinators[dev.system.serial], dev
)
for dev in config_entry.runtime_data.thermostats
)
@@ -49,9 +52,11 @@ class HassAqualinkThermostat(AqualinkEntity[AqualinkThermostat], ClimateEntity):
| ClimateEntityFeature.TURN_ON
)
def __init__(self, dev: AqualinkThermostat) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkThermostat
) -> None:
"""Initialize AquaLink thermostat."""
super().__init__(dev)
super().__init__(coordinator, dev)
self._attr_name = dev.label.split(" ")[0]
self._attr_temperature_unit = (
UnitOfTemperature.FAHRENHEIT

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
import httpx
@@ -19,12 +20,39 @@ from homeassistant.util.ssl import SSL_ALPN_HTTP11_HTTP2
from .const import DOMAIN
CREDENTIALS_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)
class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
"""Aqualink config flow."""
VERSION = 1
async def _async_test_credentials(
    self, user_input: dict[str, Any]
) -> dict[str, str]:
    """Validate credentials against iAqualink.

    Returns an empty dict on success, otherwise a form-errors mapping
    with a "base" key of "invalid_auth" or "cannot_connect".
    """
    try:
        # Entering (and immediately exiting) a client session performs a
        # login, which is sufficient to validate the credentials.
        async with AqualinkClient(
            user_input[CONF_USERNAME],
            user_input[CONF_PASSWORD],
            httpx_client=get_async_client(
                self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
            ),
        ):
            pass
    except AqualinkServiceUnauthorizedException:
        return {"base": "invalid_auth"}
    # Multiple exception types must be caught as a parenthesized tuple;
    # the bare comma form (`except A, B:`) is a SyntaxError in Python 3.
    except (AqualinkServiceException, httpx.HTTPError):
        return {"base": "cannot_connect"}
    return {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -32,32 +60,45 @@ class AqualinkFlowHandler(ConfigFlow, domain=DOMAIN):
errors = {}
if user_input is not None:
username = user_input[CONF_USERNAME]
password = user_input[CONF_PASSWORD]
try:
async with AqualinkClient(
username,
password,
httpx_client=get_async_client(
self.hass, alpn_protocols=SSL_ALPN_HTTP11_HTTP2
),
):
pass
except AqualinkServiceUnauthorizedException:
errors["base"] = "invalid_auth"
except AqualinkServiceException, httpx.HTTPError:
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(title=username, data=user_input)
errors = await self._async_test_credentials(user_input)
if not errors:
return self.async_create_entry(
title=user_input[CONF_USERNAME], data=user_input
)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
),
data_schema=CREDENTIALS_DATA_SCHEMA,
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Handle flow triggered by an authentication failure."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle confirmation of reauthentication."""
errors = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
errors = await self._async_test_credentials(user_input)
if not errors:
return self.async_update_reload_and_abort(
reauth_entry,
title=user_input[CONF_USERNAME],
data_updates={
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=CREDENTIALS_DATA_SCHEMA,
errors=errors,
)

View File

@@ -0,0 +1,51 @@
"""Data update coordinator for iaqualink."""
from __future__ import annotations
import logging
from typing import Any
import httpx
from iaqualink.exception import (
AqualinkServiceException,
AqualinkServiceUnauthorizedException,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
class AqualinkDataUpdateCoordinator(DataUpdateCoordinator[None]):
    """Data coordinator for Aqualink systems."""

    def __init__(
        self, hass: HomeAssistant, config_entry: ConfigEntry, system: Any
    ) -> None:
        """Initialize the coordinator for one Aqualink system."""
        # One coordinator per system; the serial keeps the name unique.
        coordinator_name = f"{DOMAIN}_{system.serial}"
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=coordinator_name,
            update_interval=UPDATE_INTERVAL,
        )
        self.system = system

    async def _async_update_data(self) -> None:
        """Refresh internal state for a system, raising on failure."""
        system = self.system
        try:
            await system.update()
        except AqualinkServiceUnauthorizedException as err:
            # Credential problems should trigger a reauth flow.
            raise ConfigEntryAuthFailed("Invalid credentials for iAqualink") from err
        except (AqualinkServiceException, httpx.HTTPError) as err:
            raise UpdateFailed(
                f"Unable to update iAqualink system {system.serial}: {err}"
            ) from err
        if system.online is not True:
            raise UpdateFailed(f"iAqualink system {system.serial} is offline")

View File

@@ -5,26 +5,28 @@ from __future__ import annotations
from iaqualink.device import AqualinkDevice
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AqualinkDataUpdateCoordinator
class AqualinkEntity[AqualinkDeviceT: AqualinkDevice](Entity):
class AqualinkEntity[AqualinkDeviceT: AqualinkDevice](
CoordinatorEntity[AqualinkDataUpdateCoordinator]
):
"""Abstract class for all Aqualink platforms.
Entity state is updated via the interval timer within the integration.
Any entity state change via the iaqualink library triggers an internal
state refresh which is then propagated to all the entities in the system
via the refresh_system decorator above to the _update_callback in this
class.
Entity availability and periodic refreshes are driven by the per-system
DataUpdateCoordinator. State changes initiated through the iaqualink
library are propagated back to Home Assistant through the coordinator-aware
entity update flow.
"""
_attr_should_poll = False
def __init__(self, dev: AqualinkDeviceT) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkDeviceT
) -> None:
"""Initialize the entity."""
super().__init__(coordinator)
self.dev = dev
self._attr_unique_id = f"{dev.system.serial}_{dev.name}"
self._attr_device_info = DeviceInfo(
@@ -35,18 +37,7 @@ class AqualinkEntity[AqualinkDeviceT: AqualinkDevice](Entity):
name=dev.label,
)
async def async_added_to_hass(self) -> None:
"""Set up a listener when this entity is added to HA."""
self.async_on_remove(
async_dispatcher_connect(self.hass, DOMAIN, self.async_write_ha_state)
)
@property
def assumed_state(self) -> bool:
"""Return whether the state is based on actual reading from the device."""
return self.dev.system.online in [False, None]
@property
def available(self) -> bool:
"""Return whether the device is available or not."""
return self.dev.system.online is True

View File

@@ -17,6 +17,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AqualinkConfigEntry, refresh_system
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
from .utils import await_or_reraise
@@ -30,17 +31,21 @@ async def async_setup_entry(
) -> None:
"""Set up discovered lights."""
async_add_entities(
(HassAqualinkLight(dev) for dev in config_entry.runtime_data.lights),
True,
HassAqualinkLight(
config_entry.runtime_data.coordinators[dev.system.serial], dev
)
for dev in config_entry.runtime_data.lights
)
class HassAqualinkLight(AqualinkEntity[AqualinkLight], LightEntity):
"""Representation of a light."""
def __init__(self, dev: AqualinkLight) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkLight
) -> None:
"""Initialize AquaLink light."""
super().__init__(dev)
super().__init__(coordinator, dev)
self._attr_name = dev.label
if dev.supports_effect:
self._attr_effect_list = list(dev.supported_effects)

View File

@@ -10,6 +10,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AqualinkConfigEntry
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
PARALLEL_UPDATES = 0
@@ -22,17 +23,21 @@ async def async_setup_entry(
) -> None:
"""Set up discovered sensors."""
async_add_entities(
(HassAqualinkSensor(dev) for dev in config_entry.runtime_data.sensors),
True,
HassAqualinkSensor(
config_entry.runtime_data.coordinators[dev.system.serial], dev
)
for dev in config_entry.runtime_data.sensors
)
class HassAqualinkSensor(AqualinkEntity[AqualinkSensor], SensorEntity):
"""Representation of a sensor."""
def __init__(self, dev: AqualinkSensor) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkSensor
) -> None:
"""Initialize AquaLink sensor."""
super().__init__(dev)
super().__init__(coordinator, dev)
self._attr_name = dev.label
if not dev.name.endswith("_temp"):
return

View File

@@ -1,10 +1,21 @@
{
"config": {
"abort": {
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"step": {
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"description": "Please enter the username and password for your iAqualink account.",
"title": "Reauthenticate iAqualink"
},
"user": {
"data": {
"password": "[%key:common::config_flow::data::password%]",

View File

@@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import AqualinkConfigEntry, refresh_system
from .coordinator import AqualinkDataUpdateCoordinator
from .entity import AqualinkEntity
from .utils import await_or_reraise
@@ -24,17 +25,21 @@ async def async_setup_entry(
) -> None:
"""Set up discovered switches."""
async_add_entities(
(HassAqualinkSwitch(dev) for dev in config_entry.runtime_data.switches),
True,
HassAqualinkSwitch(
config_entry.runtime_data.coordinators[dev.system.serial], dev
)
for dev in config_entry.runtime_data.switches
)
class HassAqualinkSwitch(AqualinkEntity[AqualinkSwitch], SwitchEntity):
"""Representation of a switch."""
def __init__(self, dev: AqualinkSwitch) -> None:
def __init__(
self, coordinator: AqualinkDataUpdateCoordinator, dev: AqualinkSwitch
) -> None:
"""Initialize AquaLink switch."""
super().__init__(dev)
super().__init__(coordinator, dev)
name = self._attr_name = dev.label
if name == "Cleaner":
self._attr_icon = "mdi:robot-vacuum"

View File

@@ -57,6 +57,8 @@ async def async_setup_entry(
class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
"""Represents a Jellyfin Player device."""
_attr_media_image_remotely_accessible = False
def __init__(
self,
coordinator: JellyfinDataUpdateCoordinator,
@@ -168,7 +170,6 @@ class JellyfinMediaPlayer(JellyfinClientEntity, MediaPlayerEntity):
self._attr_media_duration = media_duration
self._attr_media_position = media_position
self._attr_media_position_updated_at = media_position_updated
self._attr_media_image_remotely_accessible = True
@property
def media_image_url(self) -> str | None:

View File

@@ -2,31 +2,31 @@
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .coordinator import LaunchLibraryCoordinator
from .coordinator import LaunchLibraryConfigEntry, LaunchLibraryCoordinator
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(
hass: HomeAssistant, entry: LaunchLibraryConfigEntry
) -> bool:
"""Set up this integration using UI."""
coordinator = LaunchLibraryCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
hass.data[DOMAIN] = coordinator
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(
hass: HomeAssistant, entry: LaunchLibraryConfigEntry
) -> bool:
"""Handle removal of an entry."""
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
del hass.data[DOMAIN]
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -16,6 +16,9 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import DOMAIN
type LaunchLibraryConfigEntry = ConfigEntry[LaunchLibraryCoordinator]
_LOGGER = logging.getLogger(__name__)
@@ -29,12 +32,12 @@ class LaunchLibraryData(TypedDict):
class LaunchLibraryCoordinator(DataUpdateCoordinator[LaunchLibraryData]):
"""Class to manage fetching Launch Library data."""
config_entry: ConfigEntry
config_entry: LaunchLibraryConfigEntry
def __init__(
self,
hass: HomeAssistant,
entry: ConfigEntry,
entry: LaunchLibraryConfigEntry,
) -> None:
"""Initialize the coordinator."""
super().__init__(

View File

@@ -6,20 +6,18 @@ from typing import Any
from pylaunches.types import Event, Launch
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from .coordinator import LaunchLibraryCoordinator
from .coordinator import LaunchLibraryConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant,
entry: ConfigEntry,
entry: LaunchLibraryConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: LaunchLibraryCoordinator = hass.data[DOMAIN]
coordinator = entry.runtime_data
if coordinator.data is None:
return {}

View File

@@ -14,7 +14,6 @@ from homeassistant.components.sensor import (
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_NAME, PERCENTAGE
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
@@ -23,7 +22,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.dt import parse_datetime
from .const import DOMAIN
from .coordinator import LaunchLibraryCoordinator
from .coordinator import LaunchLibraryConfigEntry, LaunchLibraryCoordinator
DEFAULT_NEXT_LAUNCH_NAME = "Next launch"
@@ -118,12 +117,12 @@ SENSOR_DESCRIPTIONS: tuple[LaunchLibrarySensorEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
entry: ConfigEntry,
entry: LaunchLibraryConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
name = entry.data.get(CONF_NAME, DEFAULT_NEXT_LAUNCH_NAME)
coordinator: LaunchLibraryCoordinator = hass.data[DOMAIN]
coordinator = entry.runtime_data
async_add_entities(
LaunchLibrarySensor(

View File

@@ -41,7 +41,7 @@ class LGDevice(MediaPlayerEntity):
"""Representation of an LG soundbar device."""
_attr_should_poll = False
_attr_state = MediaPlayerState.OFF
_attr_state = MediaPlayerState.ON # Default to ON to ensure compatibility with models that don't send a powerstatus message
_attr_supported_features = (
MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.VOLUME_MUTE

View File

@@ -6,7 +6,7 @@ from collections.abc import Mapping
import logging
from typing import Any
from pyliebherrhomeapi import LiebherrClient
from pyliebherrhomeapi import Device, LiebherrClient
from pyliebherrhomeapi.exceptions import (
LiebherrAuthenticationError,
LiebherrConnectionError,
@@ -31,10 +31,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
class LiebherrConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for liebherr."""
async def _validate_api_key(self, api_key: str) -> tuple[list, dict[str, str]]:
async def _validate_api_key(
self, api_key: str
) -> tuple[list[Device], dict[str, str]]:
"""Validate the API key and return devices and errors."""
errors: dict[str, str] = {}
devices: list = []
devices: list[Device] = []
client = LiebherrClient(
api_key=api_key,
session=async_get_clientsession(self.hass),

View File

@@ -6,7 +6,10 @@ import math
from typing import TYPE_CHECKING, Any
from pyliebherrhomeapi import PresentationLightControl
from pyliebherrhomeapi.const import CONTROL_PRESENTATION_LIGHT
from pyliebherrhomeapi.const import (
CONTROL_PRESENTATION_LIGHT,
DEFAULT_PRESENTATION_LIGHT_MAX_BRIGHTNESS,
)
from homeassistant.components.light import ATTR_BRIGHTNESS, ColorMode, LightEntity
from homeassistant.core import HomeAssistant, callback
@@ -17,8 +20,6 @@ from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import LiebherrEntity
DEFAULT_MAX_BRIGHTNESS_LEVEL = 5
PARALLEL_UPDATES = 1
@@ -108,7 +109,7 @@ class LiebherrPresentationLight(LiebherrEntity, LightEntity):
control = self._light_control
if TYPE_CHECKING:
assert control is not None
max_level = control.max or DEFAULT_MAX_BRIGHTNESS_LEVEL
max_level = control.max or DEFAULT_PRESENTATION_LIGHT_MAX_BRIGHTNESS
if ATTR_BRIGHTNESS in kwargs:
target = max(1, round(kwargs[ATTR_BRIGHTNESS] * max_level / 255))

View File

@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["pyliebherrhomeapi"],
"quality_scale": "gold",
"quality_scale": "platinum",
"requirements": ["pyliebherrhomeapi==0.4.1"],
"zeroconf": [
{

View File

@@ -73,4 +73,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
strict-typing: done

View File

@@ -9,12 +9,14 @@ from homeassistant.const import CONF_PORT, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from .const import DOMAIN, PLATFORMS
from .const import PLATFORMS
type LiteJetConfigEntry = ConfigEntry[pylitejet.LiteJet]
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: LiteJetConfigEntry) -> bool:
"""Set up LiteJet via a config entry."""
port = entry.data[CONF_PORT]
@@ -38,19 +40,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, handle_stop)
)
hass.data[DOMAIN] = system
entry.runtime_data = system
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: LiteJetConfigEntry) -> bool:
"""Unload a LiteJet config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
await hass.data[DOMAIN].close()
hass.data.pop(DOMAIN)
await entry.runtime_data.close()
return unload_ok

View File

@@ -8,16 +8,12 @@ import pylitejet
from serial import SerialException
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from . import LiteJetConfigEntry
from .const import CONF_DEFAULT_TRANSITION, DOMAIN
@@ -77,7 +73,7 @@ class LiteJetConfigFlow(ConfigFlow, domain=DOMAIN):
@staticmethod
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
config_entry: LiteJetConfigEntry,
) -> LiteJetOptionsFlow:
"""Get the options flow for this handler."""
return LiteJetOptionsFlow()

View File

@@ -2,19 +2,16 @@
from typing import Any
from pylitejet import LiteJet
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import DOMAIN
from . import LiteJetConfigEntry
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
hass: HomeAssistant, entry: LiteJetConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for LiteJet config entry."""
system: LiteJet = hass.data[DOMAIN]
system = entry.runtime_data
return {
"model": system.model_name,
"loads": list(system.loads()),

View File

@@ -13,12 +13,12 @@ from homeassistant.components.light import (
LightEntity,
LightEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import LiteJetConfigEntry
from .const import CONF_DEFAULT_TRANSITION, DOMAIN
ATTR_NUMBER = "number"
@@ -26,12 +26,12 @@ ATTR_NUMBER = "number"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: LiteJetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up entry."""
system: LiteJet = hass.data[DOMAIN]
system = config_entry.runtime_data
entities = []
for index in system.loads():
@@ -52,7 +52,7 @@ class LiteJetLight(LightEntity):
_attr_name = None
def __init__(
self, config_entry: ConfigEntry, system: LiteJet, index: int, name: str
self, config_entry: LiteJetConfigEntry, system: LiteJet, index: int, name: str
) -> None:
"""Initialize a LiteJet light."""
self._config_entry = config_entry

View File

@@ -6,12 +6,12 @@ from typing import Any
from pylitejet import LiteJet, LiteJetError
from homeassistant.components.scene import Scene
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import LiteJetConfigEntry
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -21,12 +21,12 @@ ATTR_NUMBER = "number"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: LiteJetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up entry."""
system: LiteJet = hass.data[DOMAIN]
system = config_entry.runtime_data
entities = []
for i in system.scenes():

View File

@@ -5,12 +5,12 @@ from typing import Any
from pylitejet import LiteJet, LiteJetError
from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import LiteJetConfigEntry
from .const import DOMAIN
ATTR_NUMBER = "number"
@@ -18,12 +18,12 @@ ATTR_NUMBER = "number"
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: LiteJetConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up entry."""
system: LiteJet = hass.data[DOMAIN]
system = config_entry.runtime_data
entities = []
for i in system.button_switches():

View File

@@ -6,7 +6,6 @@ from collections.abc import Callable
from datetime import datetime
from typing import cast
from pylitejet import LiteJet
import voluptuous as vol
from homeassistant.const import CONF_PLATFORM
@@ -109,7 +108,7 @@ async def async_attach_trigger(
):
hass.add_job(call_action)
system: LiteJet = hass.data[DOMAIN]
system = hass.config_entries.async_loaded_entries(DOMAIN)[0].runtime_data
system.on_switch_pressed(number, pressed)
system.on_switch_released(number, released)

View File

@@ -16,5 +16,5 @@
"iot_class": "cloud_push",
"loggers": ["pylitterbot"],
"quality_scale": "platinum",
"requirements": ["pylitterbot==2025.2.0"]
"requirements": ["pylitterbot==2025.2.1"]
}

View File

@@ -27,6 +27,8 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import ATTR_SENSOR_ID, CONF_SENSOR_ID, DOMAIN
from .coordinator import LuftdatenConfigEntry, LuftdatenDataUpdateCoordinator
PARALLEL_UPDATES = 0
SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="temperature",

Some files were not shown because too many files have changed in this diff Show More