Compare commits

...

99 Commits

Author SHA1 Message Date
Erik Montnemery 10084c8c0c Add trigger timer.time_remaining (#169763) 2026-05-05 23:54:49 -04:00
Erik Montnemery 7e8f5365ce Add method _should_include to EntityTriggerBase (#169837) 2026-05-06 00:50:22 +02:00
Erik Montnemery 65f9dcd7bf Improve condition test helper docstrings (#169871) 2026-05-06 00:32:37 +02:00
epenet 4c8f37fef6 Bump tuya-device-handlers to 0.0.19 (#169848) 2026-05-05 22:23:14 +02:00
Erik Montnemery d1295fa260 Validate yaml matches implementation in automation options_supported tests (#169798) 2026-05-05 22:20:28 +02:00
Diogo Gomes 9b2eea920f Add V2C LED lights (#169778)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 22:19:59 +02:00
Petro31 c81c1cbb14 Remove legacy weather template entities (#169734) 2026-05-05 22:18:46 +02:00
Erik Montnemery 11ee05874a Improve trigger test helper docstrings (#169869) 2026-05-05 22:11:08 +02:00
puddly 7d7c47b56e Bump serialx to 1.7.0 (#169867) 2026-05-05 21:06:30 +02:00
epenet dc4210595f Fix flaky test_set_scan_interval_via_platform (#169856)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-05-05 20:49:15 +02:00
Freekers 7430366d9b Enable web search support for gpt-5-nano (#169710) 2026-05-05 20:47:52 +03:00
Crocmagnon ae3bd54ca7 switchbot: remove unwanted future annotations import preventing build on all new PRs (#169863) 2026-05-05 19:40:27 +02:00
Glenn Waters e3ce7fb000 Bump elkm1-lib to 2.2.15 (#169843)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 18:50:17 +02:00
epenet 9286b517d3 Add ruff rule to prevent __future__ annotations (#169852)
Co-authored-by: Robert Resch <robert@resch.dev>
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 18:42:10 +02:00
elgris 4d62e4765d Add a number entity to set display time offset (in minutes) for Switchbot Meter CO2 devices. (#169603)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 17:45:47 +02:00
Michael Hansen ea55ef90a6 Bump intents to 2026.5.5 (#169855) 2026-05-05 18:22:22 +03:00
epenet 751765b97b Cleanup from __future__ import annotations (#169850) 2026-05-05 16:35:21 +02:00
Denis Shulyaka 11ed1fe20f Return the requested format for OpenAI TTS (#169839)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 10:28:20 -04:00
Joost Lekkerkerker 9b5166769a Add Sensereo matter brand (#169836) 2026-05-05 10:18:01 -04:00
Joost Lekkerkerker 70c2a323ce Add Zunzunbee Zigbee brand (#169838) 2026-05-05 10:17:49 -04:00
Ronald van der Meer 0ec5d6b273 Add API version to Duco diagnostics for support triage (#169802) 2026-05-05 15:48:43 +02:00
Robert Resch b1e8dc2ebb Remove show_advanced_options in Ecovacs and always show all options (#169831) 2026-05-05 15:42:08 +02:00
Artur Pragacz e144804d28 Fix async_unload teardown race in scripts (#169562) 2026-05-05 15:03:37 +02:00
cengelen 8521a49986 Bump growatt server to 2.1.0 (#169495)
Co-authored-by: Copilot <copilot@github.com>
2026-05-05 14:11:50 +02:00
Raj Laud 3587f9613f Bump victron-ble-ha-parser to 0.7.0 (#169736)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
2026-05-05 13:57:19 +02:00
Jan Bouwhuis 2f1dd3a817 Deprecate MQTT protocol versions 3.x and migrate to version 5 (#169759)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-05 13:43:18 +02:00
wollew 2c2e8db19f Remove deprecated reboot service for Velux gateway (#169796) 2026-05-05 11:08:00 +02:00
Erik Montnemery 64a3f91132 Improve template reload (#169480) 2026-05-05 10:16:22 +02:00
dependabot[bot] bd61c893e4 Bump dawidd6/action-download-artifact from 20 to 21 (#169793)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-05-05 10:12:07 +02:00
renovate[bot] 6bb759b887 Update infrared-protocols to 2.1.0 (#169785)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-05-05 10:11:52 +02:00
Matthias Alphart 280b5ef388 Update xknxproject to 3.9.0 (#169775) 2026-05-05 10:09:24 +02:00
Erik Montnemery 416d4e02a0 Add trigger media_player.unmuted (#169797) 2026-05-05 09:45:45 +02:00
kw6423 c99f261a2d Restore OwnTracks custom device tracker attributes (#169753)
Co-authored-by: Ariel Ebersberger <ariel@ebersberger.io>
2026-05-05 09:44:53 +02:00
Thomas D 9c9a058eb0 Add missing initialization charging power status option to Volvo (#169727) 2026-05-05 09:10:13 +02:00
Nathan Spencer 7b51b929ef Bump pylitterbot to 2025.4.0 (#169652) 2026-05-05 09:05:16 +02:00
Ronald van der Meer 74971ebcd1 Bump python-duco-client to 0.4.0 (#169776) 2026-05-05 08:55:22 +02:00
Åke Strandberg 1f5d80ca44 Add missing code for miele washing machine (#169795) 2026-05-05 08:54:12 +02:00
Erik Montnemery 9075c6a5cb Add trigger media_player.muted (#156736) 2026-05-05 08:22:03 +02:00
Manu ab4162601f Remove YAML import from Duck DNS integration (#169769) 2026-05-05 07:45:40 +02:00
HoffmanEl 38de48ac9d Add data_description to airnow config flow strings (#169783) 2026-05-05 07:43:18 +02:00
Nikolai Rahimi 597d9a2ada Add Mitsubishi Comfort integration (#167472)
Co-authored-by: Nikolai Rahimi <nikolairahimi@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-05-05 00:16:00 +02:00
optimusbasti 71494b6c97 Bump aioautomower to 2.7.5 (#169758) 2026-05-04 22:27:46 +01:00
A. Gideonse 57e66baf53 Update Indevolt integration quality scale to silver (#167843)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-05-04 23:05:11 +02:00
Nathan Spencer 63dfc97346 Limit power status binary sensor to non-LR5 devices (#169659) 2026-05-04 22:51:17 +02:00
shbatm 1b4a7d55c0 Add precipitation device class to WeatherFlow Cloud accumulation sensors (#169638)
Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-04 22:29:12 +02:00
Matthew Gibson 8c8a863867 Add ptdevices Integration (#156307)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-05-04 22:15:52 +02:00
Keilin Bickar 28d65e987c bump sense-energy to 0.14.1 (#169761) 2026-05-04 21:22:45 +02:00
Daniel Hjelseth Høyer d0c0f02311 Bump pyTibber to 0.37.3 (#169762) 2026-05-04 21:21:57 +02:00
kernelpanic85 f90e9ceb6c Add Celsius and Fahrenheit to Smartthings UNITS mapping (#169686) 2026-05-04 21:20:04 +02:00
G Johansson 553ba5e7ab Add binary sensor to Nord Pool (#169684) 2026-05-04 21:10:06 +02:00
Erwin Douna 6633f16d13 Add system health to Portainer (#169698) 2026-05-04 21:07:16 +02:00
Kamil Breguła 1beeecdf04 Use SensorDeviceClass.UPTIME in WLED (#169708)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2026-05-04 21:02:15 +02:00
G Johansson 6319b3b4ef Raise repairs on platform setup for command_line (#153565)
Co-authored-by: Copilot <copilot@github.com>
2026-05-04 20:59:28 +02:00
Steve Syrell 2ed550c2c9 Bump Insteon-panel to 0.6.2 (#169757) 2026-05-04 20:55:41 +02:00
Mike Degatano 6f28902a4f Refactor hassio coordinators to use typed dataclasses instead of dicts (#168847)
Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-05-04 20:24:33 +02:00
optimusbasti fcd23353f2 Add set_cover_position_and_tilt service to Overkiz (#169275)
Co-authored-by: optimusbasti <optimusbasti@users.noreply.github.com>
Co-authored-by: ThomasCZ <noreply@users.github.com>
2026-05-04 20:23:26 +02:00
Leonardo Rivera 2846dcc035 Add delete service action to OneDrive integration (#168064)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-05-04 20:17:41 +02:00
Christian Lackas 5858db1cda Use all_devices in ViCare diagnostics for completeness (#169429) 2026-05-04 19:56:36 +02:00
Diogo Gomes 1140d52735 Bump pytrydan to 1.0.0 (#169742) 2026-05-04 19:39:46 +02:00
G Johansson 664354c4fe Fix config flow validation in Nord Pool (#169751) 2026-05-04 19:34:17 +02:00
Petro31 dfb8c7edb8 Fix uptime template sensor (#169743) 2026-05-04 18:09:46 +01:00
Cristoforo Cervino c22edbec30 Add opening/closing state icons to valve domain (#169644) 2026-05-04 18:42:21 +02:00
kw6423 86415c1906 OwnTracks: expose message tst as update_timestamp in device_tracker attribute (#165203)
Co-authored-by: Ariel Ebersberger <31776703+justanotherariel@users.noreply.github.com>
2026-05-04 18:35:18 +02:00
Paul Bottein e4f8d1ac64 Update frontend to 20260429.2 (#169748) 2026-05-04 12:22:51 -04:00
Tom 3f97230c25 Improve ProxmoxVE config flow preparing bug fixing (#169682)
Co-authored-by: Erwin Douna <e.douna@gmail.com>
2026-05-04 17:20:25 +02:00
Simone Chemelli 14aa87f026 Bump pyuptimerobot to 25.0.0 (#169572) 2026-05-04 16:13:55 +01:00
Erik Montnemery 2521f6d825 Adjust mobile_app unknown location test (#169746) 2026-05-04 16:54:24 +02:00
Phil-Rad c80677f86e Add data_description blocks to dnsip strings (#169744) 2026-05-04 16:47:30 +02:00
Erik Montnemery a421a8ab9c Add ExtraStoredData container for mobile_app device_tracker (#169731) 2026-05-04 16:36:03 +02:00
bkobus-bbx bfb85949ff Remove stateclass from blebox powerConsumption sensor (#169435) 2026-05-04 16:26:52 +02:00
Manu 960855eb58 Bump bring-api to 1.1.2 (#169729) 2026-05-04 15:49:53 +02:00
Mike O'Driscoll 395341b9a7 Casper Glow: bump quality to platinum (#167519) 2026-05-04 15:47:01 +02:00
Michael c80e421492 Fix detection of CPU temperature sensor support on older FRITZ!Box models (#169620) 2026-05-04 14:38:39 +01:00
Allen Porter 05c006038f Update Nest doorbell event to use standard DoorbellEventType.RING (#169691) 2026-05-04 14:34:16 +01:00
HoffmanEl d1b2f69713 Add reconfiguration flow to actron_air integration (#169712)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-04 15:32:00 +02:00
Thomas D bff4aef233 Ignore location FORBIDDEN response for the Volvo integration (#169713) 2026-05-04 15:31:34 +02:00
Khole 88c716901a Check device registration before completing Hive reauth flow (#168035)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Erwin Douna <e.douna@gmail.com>
2026-05-04 15:05:42 +02:00
Marcello fa7ecddb66 Improve availability in Fluss (#168154)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-05-04 14:59:05 +02:00
A. Gideonse db2dfbbc41 Refactored generation variable for indevolt (#169396) 2026-05-04 13:53:02 +01:00
Erik Montnemery bfcf663784 Improve mobile_app device tracker tests (#169724) 2026-05-04 13:49:33 +02:00
A. Gideonse a54b188789 Refactor exceptions to align on library (#169622)
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
2026-05-04 08:42:29 +02:00
Colin 9bea2d149a openevse: Add reauthentication flow (#169632) 2026-05-04 08:38:31 +02:00
Matthieu Bauny 77bd6a720d Fix mode description typo in homekit/strings.json (#169605) 2026-05-03 18:29:00 -05:00
Manu 967c374a48 Change uptime sensor to SensorDeviceClass.UPTIME in IronOS integration (#169699) 2026-05-03 22:45:21 +02:00
Midori Kochiya c1c62e6313 Fix M1S-T500 update error (#169651) 2026-05-03 19:00:51 +02:00
karwosts 744e8ae324 Use uptime device_class for Uptime sensor (#169692) 2026-05-03 18:58:53 +02:00
shbatm 11969c69fa Bump pyisy to 3.5.1 (#169663)
Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
2026-05-03 14:20:02 +02:00
Ronald van der Meer 4c5f09a3e0 Bump python-duco-client to 0.3.10 (#169677) 2026-05-03 10:39:09 +02:00
OMEGA_RAZER c2f783ed76 Updated prowlpy to 1.1.5 (#169671) 2026-05-03 09:56:23 +02:00
Tom Matheussen a4f16eb68d Bump satel_integra to 1.3.0 (#169668) 2026-05-02 23:56:42 +02:00
SeifEddineMezned 98b28ebb85 Fix grammar in mqtt/strings.json: "Minimal one" → "At least one" (#169666) 2026-05-02 22:08:19 +02:00
Andres Ruiz 849bc03728 Fix race condition in waterfurnace login_credential_error test (#169645) 2026-05-02 14:07:54 +02:00
Andres Ruiz b3e1674852 Catch additional errors as potentially retryable errors during energy data updates (#169646) 2026-05-02 13:52:14 +02:00
jftkcs 4ecd47f533 Fix reasoning summary handling for OpenAI o-models (#168093)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
Co-authored-by: Denis Shulyaka <Shulyaka@gmail.com>
2026-05-02 12:02:51 +03:00
Michael 400f75739f Pass None config entry to schluter coordinator (#169621) 2026-05-02 00:32:30 +02:00
Marc Mueller 01c4ce4278 Improve uv cache save (#169612) 2026-05-02 00:31:50 +02:00
A. Gideonse 73f4be8bb1 Bump indevolt-api to 1.7.1 (#169623) 2026-05-02 00:21:26 +02:00
Dan Raper d177d55cb8 Add state of charge input number to Ohme (#169557) 2026-05-02 00:11:41 +02:00
SeifEddineMezned f5031692e8 Add PARALLEL_UPDATES to goodwe sensor platform (#169624) 2026-05-01 23:57:40 +02:00
319 changed files with 12644 additions and 2841 deletions
+2 -2
View File
@@ -108,7 +108,7 @@ jobs:
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
uses: dawidd6/action-download-artifact@b6e2e70617bc3265edd6dab6c906732b2f1ae151 # v21
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
@@ -119,7 +119,7 @@ jobs:
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@8305c0f1062bb0d184d09ef4493ecb9288447732 # v20
uses: dawidd6/action-download-artifact@b6e2e70617bc3265edd6dab6c906732b2f1ae151 # v21
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: OHF-Voice/intents-package
+3 -8
View File
@@ -372,14 +372,13 @@ jobs:
RUNNER_OS: ${{ runner.os }}
RUNNER_ARCH: ${{ runner.arch }}
PYTHON_VERSION: ${{ steps.python.outputs.python-version }}
HASH_FILES: ${{ hashFiles('requirements.txt', 'requirements_all.txt', 'requirements_test.txt') }}
HASH_FILES: ${{ hashFiles('requirements.txt', 'requirements_all.txt', 'requirements_test.txt', 'homeassistant/package_constraints.txt') }}
run: |
partial_key="${RUNNER_OS}-${RUNNER_ARCH}-${PYTHON_VERSION}-uv-"
echo "partial_key=${partial_key}" >> $GITHUB_OUTPUT
echo "full_key=${partial_key}${HASH_FILES}" >> $GITHUB_OUTPUT
- name: Restore uv wheel cache
if: steps.cache-venv.outputs.cache-hit != 'true'
id: cache-uv
uses: actions/cache/restore@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: ${{ env.UV_CACHE_DIR }}
@@ -474,17 +473,13 @@ jobs:
run: |
./script/check_dirty
- name: Prune uv cache
if: |
steps.cache-uv.outputs.cache-hit != 'true'
&& (
success()
|| (always() && steps.create-venv.outcome == 'success'))
if: steps.cache-venv.outputs.cache-hit != 'true'
id: prune-uv-cache
run: |
. venv/bin/activate
uv cache prune --ci
- name: Save uv wheel cache
if: steps.prune-uv-cache.outcome == 'success'
if: steps.cache-venv.outputs.cache-hit != 'true'
uses: actions/cache/save@27d5ce7f107fe9357f9df03efb73ab90386fccae # v5.0.5
with:
path: ${{ env.UV_CACHE_DIR }}
+1
View File
@@ -442,6 +442,7 @@ homeassistant.components.private_ble_device.*
homeassistant.components.prometheus.*
homeassistant.components.proximity.*
homeassistant.components.prusalink.*
homeassistant.components.ptdevices.*
homeassistant.components.pure_energie.*
homeassistant.components.purpleair.*
homeassistant.components.pushbullet.*
Generated
+6 -2
View File
@@ -851,8 +851,8 @@ CLAUDE.md @home-assistant/core
/tests/components/input_select/ @home-assistant/core
/homeassistant/components/input_text/ @home-assistant/core
/tests/components/input_text/ @home-assistant/core
/homeassistant/components/insteon/ @teharris1
/tests/components/insteon/ @teharris1
/homeassistant/components/insteon/ @teharris1 @ssyrell
/tests/components/insteon/ @teharris1 @ssyrell
/homeassistant/components/integration/ @dgomes
/tests/components/integration/ @dgomes
/homeassistant/components/intelliclima/ @dvdinth
@@ -1092,6 +1092,8 @@ CLAUDE.md @home-assistant/core
/tests/components/minecraft_server/ @elmurato @zachdeibert
/homeassistant/components/minio/ @tkislan
/tests/components/minio/ @tkislan
/homeassistant/components/mitsubishi_comfort/ @nikolairahimi
/tests/components/mitsubishi_comfort/ @nikolairahimi
/homeassistant/components/moat/ @bdraco
/tests/components/moat/ @bdraco
/homeassistant/components/mobile_app/ @home-assistant/core
@@ -1378,6 +1380,8 @@ CLAUDE.md @home-assistant/core
/tests/components/proxmoxve/ @Corbeno @erwindouna @CoMPaTech
/homeassistant/components/ps4/ @ktnrg45
/tests/components/ps4/ @ktnrg45
/homeassistant/components/ptdevices/ @ParemTech-Inc @frogman85978
/tests/components/ptdevices/ @ParemTech-Inc @frogman85978
/homeassistant/components/pterodactyl/ @elmurato
/tests/components/pterodactyl/ @elmurato
/homeassistant/components/pure_energie/ @klaasnicolaas
+5
View File
@@ -0,0 +1,5 @@
{
"domain": "sensereo",
"name": "Sensereo",
"iot_standards": ["matter"]
}
+5
View File
@@ -0,0 +1,5 @@
{
"domain": "zunzunbee",
"name": "Zunzunbee",
"iot_standards": ["zigbee"]
}
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/acer_projector",
"iot_class": "local_polling",
"quality_scale": "legacy",
"requirements": ["serialx==1.4.1"]
"requirements": ["serialx==1.7.0"]
}
@@ -6,7 +6,12 @@ from typing import Any
from actron_neo_api import ActronAirAPI, ActronAirAuthError
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_REAUTH,
SOURCE_RECONFIGURE,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import CONF_API_TOKEN
from homeassistant.exceptions import HomeAssistantError
@@ -105,6 +110,14 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
data_updates={CONF_API_TOKEN: self._api.refresh_token_value},
)
# Check if this is a reconfigure flow
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data_updates={CONF_API_TOKEN: self._api.refresh_token_value},
)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_data.email,
@@ -138,6 +151,20 @@ class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_show_form(step_id="reauth_confirm")
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration request."""
return await self.async_step_reconfigure_confirm()
async def async_step_reconfigure_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reconfiguration dialog."""
if user_input is not None:
return await self.async_step_user()
return self.async_show_form(step_id="reconfigure_confirm")
async def async_step_connection_error(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -60,7 +60,7 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: This integration does not have any known issues that require repair.
@@ -4,7 +4,8 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
"oauth2_error": "Failed to start authentication flow",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"wrong_account": "You must reauthenticate with the same Actron Air account that was originally configured."
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "You must authenticate with the same Actron Air account that was originally configured."
},
"error": {
"oauth2_error": "Failed to start authentication flow. Please try again later."
@@ -22,6 +23,10 @@
"description": "Your Actron Air authentication has expired. Select continue to reauthenticate with your Actron Air account. You will be prompted to log in again to restore the connection.",
"title": "Authentication expired"
},
"reconfigure_confirm": {
"description": "Reconfigure your Actron Air account. You will be prompted to log in again. Note: you must use the same account that was originally configured.",
"title": "Reconfigure Actron Air"
},
"timeout": {
"data": {},
"description": "The authentication process timed out. Please try again.",
+7 -1
View File
@@ -17,7 +17,13 @@
"longitude": "[%key:common::config_flow::data::longitude%]",
"radius": "Station radius (miles; optional)"
},
"description": "To generate API key go to {api_key_url}"
"data_description": {
"api_key": "To generate an API key, go to {api_key_url}.",
"latitude": "The latitude of your location.",
"longitude": "The longitude of your location.",
"radius": "The radius in miles around your location to search for reporting stations."
},
"description": "To generate an API key, go to {api_key_url}."
}
}
},
@@ -899,12 +899,13 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
async def async_will_remove_from_hass(self) -> None:
"""Remove listeners when removing automation from Home Assistant."""
await super().async_will_remove_from_hass()
await self._async_disable()
if self.registry_entry and self.registry_entry.entity_id != self.entity_id:
# Entity ID change, do not unload the script or conditions as they will
# be reused.
await self._async_disable()
return
self.action_script.async_unload()
await self._async_disable(stop_actions=False)
await self.action_script.async_unload()
if self._condition is not None:
self._condition.async_unload()
+8 -4
View File
@@ -1,6 +1,6 @@
"""BleBox sensor entities."""
from datetime import datetime
from datetime import datetime, timedelta
import blebox_uniapi.sensor
@@ -30,6 +30,9 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BleBoxConfigEntry
from .entity import BleBoxEntity
SCAN_INTERVAL = timedelta(seconds=5)
SENSOR_TYPES = (
SensorEntityDescription(
key="pm1",
@@ -53,9 +56,9 @@ SENSOR_TYPES = (
),
SensorEntityDescription(
key="powerConsumption",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=2,
icon="mdi:lightning-bolt",
),
SensorEntityDescription(
key="humidity",
@@ -150,6 +153,7 @@ class BleBoxSensorEntity(BleBoxEntity[blebox_uniapi.sensor.BaseSensor], SensorEn
@property
def last_reset(self) -> datetime | None:
"""Return the time when the sensor was last reset, if implemented."""
if self.state_class != SensorStateClass.TOTAL:
return None
native_implementation = getattr(self._feature, "last_reset", None)
return native_implementation or super().last_reset
+1 -1
View File
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"quality_scale": "platinum",
"requirements": ["bring-api==1.1.1"]
"requirements": ["bring-api==1.1.2"]
}
@@ -14,6 +14,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pycasperglow"],
"quality_scale": "silver",
"quality_scale": "platinum",
"requirements": ["pycasperglow==1.2.0"]
}
@@ -45,12 +45,12 @@ rules:
comment: No network discovery.
discovery: done
docs-data-update: done
docs-examples: todo
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Each config entry represents a single device.
+31 -10
View File
@@ -8,14 +8,15 @@ from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.trigger import (
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
EntityNumericalStateChangedTriggerBase,
EntityNumericalStateChangedTriggerWithUnitBase,
EntityNumericalStateCrossedThresholdTriggerBase,
EntityNumericalStateCrossedThresholdTriggerWithUnitBase,
EntityNumericalStateTriggerBase,
EntityNumericalStateTriggerWithUnitBase,
EntityTargetStateTriggerBase,
Trigger,
TriggerConfig,
make_entity_numerical_state_changed_trigger,
make_entity_numerical_state_crossed_threshold_trigger,
make_entity_target_state_trigger,
make_entity_transition_trigger,
)
@@ -75,6 +76,32 @@ class ClimateTargetTemperatureCrossedThresholdTrigger(
"""Trigger for climate target temperature value crossing a threshold."""
class _ClimateTargetHumidityTriggerMixin(EntityNumericalStateTriggerBase):
"""Mixin for climate target humidity triggers."""
_domain_specs = {DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)}
_valid_unit = "%"
def _should_include(self, state: State) -> bool:
"""Skip climate entities that do not expose a target humidity."""
return (
super()._should_include(state)
and state.attributes.get(ATTR_HUMIDITY) is not None
)
class ClimateTargetHumidityChangedTrigger(
_ClimateTargetHumidityTriggerMixin, EntityNumericalStateChangedTriggerBase
):
"""Trigger for climate target humidity value changes."""
class ClimateTargetHumidityCrossedThresholdTrigger(
_ClimateTargetHumidityTriggerMixin, EntityNumericalStateCrossedThresholdTriggerBase
):
"""Trigger for climate target humidity value crossing a threshold."""
TRIGGERS: dict[str, type[Trigger]] = {
"hvac_mode_changed": HVACModeChangedTrigger,
"started_cooling": make_entity_target_state_trigger(
@@ -83,14 +110,8 @@ TRIGGERS: dict[str, type[Trigger]] = {
"started_drying": make_entity_target_state_trigger(
{DOMAIN: DomainSpec(value_source=ATTR_HVAC_ACTION)}, HVACAction.DRYING
),
"target_humidity_changed": make_entity_numerical_state_changed_trigger(
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
valid_unit="%",
),
"target_humidity_crossed_threshold": make_entity_numerical_state_crossed_threshold_trigger(
{DOMAIN: DomainSpec(value_source=ATTR_HUMIDITY)},
valid_unit="%",
),
"target_humidity_changed": ClimateTargetHumidityChangedTrigger,
"target_humidity_crossed_threshold": ClimateTargetHumidityCrossedThresholdTrigger,
"target_temperature_changed": ClimateTargetTemperatureChangedTrigger,
"target_temperature_crossed_threshold": ClimateTargetTemperatureCrossedThresholdTrigger,
"turned_off": make_entity_target_state_trigger(DOMAIN, HVACMode.OFF),
@@ -3,7 +3,10 @@
import asyncio
from datetime import datetime, timedelta
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.components.binary_sensor import (
DOMAIN as BINARY_SENSOR_DOMAIN,
BinarySensorEntity,
)
from homeassistant.const import (
CONF_COMMAND,
CONF_NAME,
@@ -25,6 +28,7 @@ from homeassistant.util import dt as dt_util
from .const import CONF_COMMAND_TIMEOUT, LOGGER, TRIGGER_ENTITY_OPTIONS
from .sensor import CommandSensorData
from .utils import create_platform_yaml_not_supported_issue
DEFAULT_NAME = "Binary Command Sensor"
DEFAULT_PAYLOAD_ON = "ON"
@@ -41,6 +45,7 @@ async def async_setup_platform(
) -> None:
"""Set up the Command line Binary Sensor."""
if not discovery_info:
create_platform_yaml_not_supported_issue(hass, BINARY_SENSOR_DOMAIN)
return
binary_sensor_config = discovery_info
@@ -4,7 +4,7 @@ import asyncio
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.components.cover import CoverEntity
from homeassistant.components.cover import DOMAIN as COVER_DOMAIN, CoverEntity
from homeassistant.const import (
CONF_COMMAND_CLOSE,
CONF_COMMAND_OPEN,
@@ -26,7 +26,11 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util, slugify
from .const import CONF_COMMAND_TIMEOUT, LOGGER, TRIGGER_ENTITY_OPTIONS
from .utils import async_call_shell_with_timeout, async_check_output_or_log
from .utils import (
async_call_shell_with_timeout,
async_check_output_or_log,
create_platform_yaml_not_supported_issue,
)
SCAN_INTERVAL = timedelta(seconds=15)
@@ -39,6 +43,7 @@ async def async_setup_platform(
) -> None:
"""Set up cover controlled by shell commands."""
if not discovery_info:
create_platform_yaml_not_supported_issue(hass, COVER_DOMAIN)
return
covers = []
@@ -4,25 +4,29 @@ import logging
import subprocess
from typing import Any
from homeassistant.components.notify import BaseNotificationService
from homeassistant.components.notify import (
DOMAIN as NOTIFY_DOMAIN,
BaseNotificationService,
)
from homeassistant.const import CONF_COMMAND
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util.process import kill_subprocess
from .const import CONF_COMMAND_TIMEOUT, LOGGER
from .utils import render_template_args
from .utils import create_platform_yaml_not_supported_issue, render_template_args
_LOGGER = logging.getLogger(__name__)
def get_service(
async def async_get_service(
hass: HomeAssistant,
config: ConfigType,
discovery_info: DiscoveryInfoType | None = None,
) -> CommandLineNotificationService | None:
"""Get the Command Line notification service."""
if not discovery_info:
create_platform_yaml_not_supported_issue(hass, NOTIFY_DOMAIN)
return None
notify_config = discovery_info
@@ -8,6 +8,7 @@ from typing import Any
from jsonpath import jsonpath
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.const import (
CONF_COMMAND,
CONF_NAME,
@@ -32,7 +33,11 @@ from .const import (
LOGGER,
TRIGGER_ENTITY_OPTIONS,
)
from .utils import async_check_output_or_log, render_template_args
from .utils import (
async_check_output_or_log,
create_platform_yaml_not_supported_issue,
render_template_args,
)
DEFAULT_NAME = "Command Sensor"
@@ -47,6 +52,7 @@ async def async_setup_platform(
) -> None:
"""Set up the Command Sensor."""
if not discovery_info:
create_platform_yaml_not_supported_issue(hass, SENSOR_DOMAIN)
return
sensor_config = discovery_info
@@ -1,4 +1,10 @@
{
"issues": {
"platform_yaml_not_supported": {
"description": "Platform YAML setup is not supported.\nChange from configuring it using the `{platform}:` key to using the `command_line:` key directly in configuration.yaml and restart Home Assistant to resolve the issue.\nTo see the detailed documentation, select Learn more.",
"title": "Platform YAML is not supported in Command Line"
}
},
"services": {
"reload": {
"description": "Reloads command line configuration from the YAML-configuration.",
@@ -4,7 +4,11 @@ import asyncio
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, Any
from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
ENTITY_ID_FORMAT,
SwitchEntity,
)
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
@@ -25,7 +29,11 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util, slugify
from .const import CONF_COMMAND_TIMEOUT, LOGGER, TRIGGER_ENTITY_OPTIONS
from .utils import async_call_shell_with_timeout, async_check_output_or_log
from .utils import (
async_call_shell_with_timeout,
async_check_output_or_log,
create_platform_yaml_not_supported_issue,
)
SCAN_INTERVAL = timedelta(seconds=30)
@@ -38,6 +46,7 @@ async def async_setup_platform(
) -> None:
"""Find and return switches controlled by shell commands."""
if not discovery_info:
create_platform_yaml_not_supported_issue(hass, SWITCH_DOMAIN)
return
switches = []
+18 -1
View File
@@ -4,9 +4,10 @@ import asyncio
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.template import Template
from .const import LOGGER
from .const import DOMAIN, LOGGER
_EXEC_FAILED_CODE = 127
@@ -91,3 +92,19 @@ def render_template_args(hass: HomeAssistant, command: str) -> str | None:
LOGGER.debug("Running command: %s", command)
return command
def create_platform_yaml_not_supported_issue(
hass: HomeAssistant, platform_domain: str
) -> None:
"""Create an issue when platform yaml is used."""
async_create_issue(
hass,
DOMAIN,
f"{platform_domain}_platform_yaml_not_supported",
is_fixable=False,
severity=IssueSeverity.ERROR,
translation_key="platform_yaml_not_supported",
translation_placeholders={"platform": platform_domain},
learn_more_url="https://www.home-assistant.io/integrations/command_line/",
)
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.3.24"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.5.5"]
}
+18 -5
View File
@@ -9,11 +9,18 @@
"step": {
"user": {
"data": {
"hostname": "The hostname for which to perform the DNS query",
"port": "Port for IPV4 lookup",
"port_ipv6": "Port for IPV6 lookup",
"resolver": "Resolver for IPV4 lookup",
"resolver_ipv6": "Resolver for IPV6 lookup"
"hostname": "Hostname",
"port": "IPv4 port",
"port_ipv6": "IPv6 port",
"resolver": "IPv4 resolver",
"resolver_ipv6": "IPv6 resolver"
},
"data_description": {
"hostname": "The hostname for which to perform the DNS query.",
"port": "Port used for the IPv4 lookup.",
"port_ipv6": "Port used for the IPv6 lookup.",
"resolver": "Resolver used for the IPv4 lookup.",
"resolver_ipv6": "Resolver used for the IPv6 lookup."
}
}
}
@@ -50,6 +57,12 @@
"port_ipv6": "[%key:component::dnsip::config::step::user::data::port_ipv6%]",
"resolver": "[%key:component::dnsip::config::step::user::data::resolver%]",
"resolver_ipv6": "[%key:component::dnsip::config::step::user::data::resolver_ipv6%]"
},
"data_description": {
"port": "[%key:component::dnsip::config::step::user::data_description::port%]",
"port_ipv6": "[%key:component::dnsip::config::step::user::data_description::port_ipv6%]",
"resolver": "[%key:component::dnsip::config::step::user::data_description::resolver%]",
"resolver_ipv6": "[%key:component::dnsip::config::step::user::data_description::resolver_ipv6%]"
}
}
}
+1 -25
View File
@@ -2,10 +2,6 @@
import logging
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
@@ -16,18 +12,7 @@ from .services import async_setup_services
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_ACCESS_TOKEN): cv.string,
}
)
},
extra=vol.ALLOW_EXTRA,
)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
@@ -35,15 +20,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_setup_services(hass)
if DOMAIN not in config:
return True
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
@@ -16,7 +16,6 @@ from homeassistant.helpers.selector import (
from .const import DOMAIN
from .helpers import update_duckdns
from .issue import deprecate_yaml_issue
_LOGGER = logging.getLogger(__name__)
@@ -68,18 +67,6 @@ class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
description_placeholders={"url": "https://www.duckdns.org/"},
)
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
"""Import config from yaml."""
self._async_abort_entries_match({CONF_DOMAIN: import_info[CONF_DOMAIN]})
result = await self.async_step_user(import_info)
if errors := result.get("errors"):
deprecate_yaml_issue(self.hass, import_success=False)
return self.async_abort(reason=errors["base"])
deprecate_yaml_issue(self.hass, import_success=True)
return result
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
+1 -35
View File
@@ -1,45 +1,11 @@
"""Issues for Duck DNS integration."""
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from .const import DOMAIN
@callback
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
"""Deprecate yaml issue."""
if import_success:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
is_fixable=False,
issue_domain=DOMAIN,
breaks_in_ha_version="2026.6.0",
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Duck DNS",
},
)
else:
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_error",
breaks_in_ha_version="2026.6.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_error",
translation_placeholders={
"url": "/config/integrations/dashboard/add?domain=duckdns"
},
)
def action_called_without_config_entry(hass: HomeAssistant) -> None:
"""Deprecate the use of action without config entry."""
@@ -49,10 +49,6 @@
"deprecated_call_without_config_entry": {
"description": "Calling the `duckdns.set_txt` action without specifying a config entry is deprecated.\n\nThe `config_entry_id` field will be required in a future release.\n\nPlease update your automations and scripts to include the `config_entry_id` parameter.",
"title": "Detected deprecated use of action without config entry"
},
"deprecated_yaml_import_issue_error": {
"description": "Configuring Duck DNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Duck DNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
"title": "The Duck DNS YAML configuration import failed"
}
},
"services": {
@@ -13,6 +13,9 @@ from homeassistant.exceptions import HomeAssistantError
from .const import DOMAIN
from .coordinator import DucoConfigEntry
# MAC addresses and serial numbers are redacted because a Duco installer or
# manufacturer could cross-reference them against an installation registry to
# identify the physical location of the device.
TO_REDACT = {
CONF_HOST,
"mac",
@@ -31,9 +34,15 @@ async def async_get_config_entry_diagnostics(
coordinator = entry.runtime_data
board = asdict(coordinator.board_info)
# `time` is a Unix epoch timestamp of the last board info fetch; not useful for support triage.
board.pop("time")
if board["public_api_version"] is None:
board.pop("public_api_version")
if board["software_version"] is None:
board.pop("software_version")
try:
api_info_obj = await coordinator.client.async_get_api_info()
lan_info = await coordinator.client.async_get_lan_info()
duco_diags = await coordinator.client.async_get_diagnostics()
write_remaining = await coordinator.client.async_get_write_req_remaining()
@@ -43,10 +52,15 @@ async def async_get_config_entry_diagnostics(
translation_key="connection_error",
) from err
api_info: dict[str, Any] = {"public_api_version": api_info_obj.public_api_version}
if api_info_obj.reported_api_version is not None:
api_info["reported_api_version"] = api_info_obj.reported_api_version
return async_redact_data(
{
"entry_data": entry.data,
"board_info": board,
"api_info": api_info,
"lan_info": asdict(lan_info),
"nodes": {
str(node_id): asdict(node)
+1 -1
View File
@@ -13,7 +13,7 @@
"iot_class": "local_polling",
"loggers": ["duco"],
"quality_scale": "platinum",
"requirements": ["python-duco-client==0.3.9"],
"requirements": ["python-duco-client==0.4.0"],
"zeroconf": [
{
"name": "duco [[][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][]].*",
@@ -137,10 +137,6 @@ class EcovacsConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if not self.show_advanced_options:
return await self.async_step_auth()
if user_input:
self._mode = user_input[CONF_MODE]
return await self.async_step_auth()
+1 -1
View File
@@ -16,5 +16,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["elkm1_lib"],
"requirements": ["elkm1-lib==2.2.13"]
"requirements": ["elkm1-lib==2.2.15"]
}
+3 -1
View File
@@ -199,7 +199,9 @@ class ElkSetting(ElkSensor):
_element: Setting
def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
self._attr_native_value = self._element.value
self._attr_native_value = (
None if self._element.value is None else str(self._element.value)
)
@property
def extra_state_attributes(self) -> dict[str, Any]:
@@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.14.0"]
"requirements": ["sense-energy==0.14.1"]
}
+5
View File
@@ -29,6 +29,11 @@ class FlussButton(FlussEntity, ButtonEntity):
_attr_name = None
@property
def available(self) -> bool:
"""Return True only when the device is online."""
return super().available and self.device["internetConnected"]
async def async_press(self) -> None:
"""Handle the button press."""
try:
+1 -2
View File
@@ -5,5 +5,4 @@ import logging
DOMAIN = "fluss"
LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = 60 # seconds
UPDATE_INTERVAL_TIMEDELTA = timedelta(seconds=UPDATE_INTERVAL)
UPDATE_INTERVAL = timedelta(minutes=30)
+25 -5
View File
@@ -1,5 +1,6 @@
"""DataUpdateCoordinator for Fluss+ integration."""
import asyncio
from typing import Any
from fluss_api import (
@@ -15,12 +16,12 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify
from .const import LOGGER, UPDATE_INTERVAL_TIMEDELTA
from .const import LOGGER, UPDATE_INTERVAL
type FlussConfigEntry = ConfigEntry[FlussDataUpdateCoordinator]
class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, dict[str, Any]]]):
"""Manages fetching Fluss device data on a schedule."""
def __init__(
@@ -33,11 +34,19 @@ class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
LOGGER,
name=f"Fluss+ ({slugify(api_key[:8])})",
config_entry=config_entry,
update_interval=UPDATE_INTERVAL_TIMEDELTA,
update_interval=UPDATE_INTERVAL,
)
async def _async_get_connectivity(self, device_id: str) -> bool:
"""Return connectivity for a device; False if the status call fails."""
try:
status = await self.api.async_get_device_status(device_id)
except FlussApiClientError:
return False
return status["status"]["internetConnected"]
async def _async_update_data(self) -> dict[str, dict[str, Any]]:
"""Fetch data from the Fluss API and return as a dictionary keyed by deviceId."""
"""Fetch Fluss+ devices and merge per-device connectivity status."""
try:
devices = await self.api.async_get_devices()
except FlussApiClientAuthenticationError as err:
@@ -45,4 +54,15 @@ class FlussDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
except FlussApiClientError as err:
raise UpdateFailed(f"Error fetching Fluss devices: {err}") from err
return {device["deviceId"]: device for device in devices.get("devices", [])}
device_list = [
device
for device in devices["devices"]
if device["userPermissions"]["canUseWiFi"]
]
connectivity = await asyncio.gather(
*(self._async_get_connectivity(d["deviceId"]) for d in device_list)
)
return {
device["deviceId"]: {**device, "internetConnected": connected}
for device, connected in zip(device_list, connectivity, strict=False)
}
+2 -1
View File
@@ -5,6 +5,7 @@ from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from fritzconnection.core.exceptions import FritzConnectionException
from fritzconnection.lib.fritzstatus import FritzStatus
from requests.exceptions import RequestException
@@ -143,7 +144,7 @@ def _is_suitable_cpu_temperature(status: FritzStatus) -> bool:
"""Return whether the CPU temperature sensor is suitable."""
try:
cpu_temp = status.get_cpu_temperatures()[0]
except RequestException, IndexError:
except RequestException, IndexError, FritzConnectionException:
_LOGGER.debug("CPU temperature not supported by the device")
return False
if cpu_temp == 0:
@@ -21,5 +21,5 @@
"integration_type": "system",
"preview_features": { "winter_mode": {} },
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20260429.1"]
"requirements": ["home-assistant-frontend==20260429.2"]
}
@@ -39,6 +39,9 @@ from homeassistant.util import dt as dt_util
from .const import DOMAIN
from .coordinator import GoodweConfigEntry, GoodweUpdateCoordinator
# Coordinator handles all data updates, so parallel updates are not needed
PARALLEL_UPDATES = 0
_LOGGER = logging.getLogger(__name__)
# Sensor name of battery SoC
@@ -596,7 +596,9 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
if not self.data:
await self.async_refresh()
return self.api.sph_read_ac_charge_times(settings_data=self.data)
return self.api.sph_read_ac_charge_times(
self.device_id, settings_data=self.data
)
async def read_ac_discharge_times(self) -> dict:
"""Read AC discharge time settings from SPH device cache."""
@@ -609,4 +611,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
if not self.data:
await self.async_refresh()
return self.api.sph_read_ac_discharge_times(settings_data=self.data)
return self.api.sph_read_ac_discharge_times(
self.device_id, settings_data=self.data
)
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["growattServer"],
"quality_scale": "silver",
"requirements": ["growattServer==1.9.0"]
"requirements": ["growattServer==2.1.0"]
}
@@ -1,8 +1,9 @@
"""Binary sensor platform for Hass.io addons."""
from collections.abc import Callable
from dataclasses import dataclass
import itertools
from aiohasupervisor.models import AddonState
from aiohasupervisor.models.mounts import MountState
from homeassistant.components.binary_sensor import (
@@ -14,41 +15,46 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
ADDONS_COORDINATOR,
ATTR_STARTED,
ATTR_STATE,
DATA_KEY_ADDONS,
DATA_KEY_MOUNTS,
MAIN_COORDINATOR,
)
from .const import ADDONS_COORDINATOR, MAIN_COORDINATOR
from .entity import HassioAddonEntity, HassioMountEntity
@dataclass(frozen=True)
class HassioBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Hassio binary sensor entity description."""
@dataclass(frozen=True, kw_only=True)
class HassioAddonBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Hass.io add-on binary sensor entity description."""
target: str | None = None
value_fn: Callable[[HassioAddonBinarySensor], bool]
@dataclass(frozen=True, kw_only=True)
class HassioMountBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Hass.io mount binary sensor entity description."""
value_fn: Callable[[HassioMountBinarySensor], bool]
ADDON_ENTITY_DESCRIPTIONS = (
HassioBinarySensorEntityDescription(
HassioAddonBinarySensorEntityDescription(
device_class=BinarySensorDeviceClass.RUNNING,
entity_registry_enabled_default=False,
key=ATTR_STATE,
key="state",
translation_key="state",
target=ATTR_STARTED,
value_fn=lambda entity: (
entity.coordinator.data.addons[entity.addon_slug].addon.state
== AddonState.STARTED
),
),
)
MOUNT_ENTITY_DESCRIPTIONS = (
HassioBinarySensorEntityDescription(
HassioMountBinarySensorEntityDescription(
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_registry_enabled_default=False,
key=ATTR_STATE,
key="state",
translation_key="mount",
target=MountState.ACTIVE.value,
value_fn=lambda entity: (
entity.coordinator.data.mounts[entity.mount_name].state == MountState.ACTIVE
),
),
)
@@ -63,57 +69,46 @@ async def async_setup_entry(
coordinator = hass.data[MAIN_COORDINATOR]
async_add_entities(
itertools.chain(
[
[
*[
HassioAddonBinarySensor(
addon=addon,
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data.addons.values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
],
[
*[
HassioMountBinarySensor(
mount=mount,
coordinator=coordinator,
entity_description=entity_description,
)
for mount in coordinator.data[DATA_KEY_MOUNTS].values()
for mount in coordinator.data.mounts.values()
for entity_description in MOUNT_ENTITY_DESCRIPTIONS
],
)
]
)
class HassioAddonBinarySensor(HassioAddonEntity, BinarySensorEntity):
"""Binary sensor for Hass.io add-ons."""
entity_description: HassioBinarySensorEntityDescription
entity_description: HassioAddonBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
value = self.coordinator.data[DATA_KEY_ADDONS][self._addon_slug][
self.entity_description.key
]
if self.entity_description.target is None:
return value
return value == self.entity_description.target
return self.entity_description.value_fn(self)
class HassioMountBinarySensor(HassioMountEntity, BinarySensorEntity):
"""Binary sensor for Hass.io mount."""
entity_description: HassioBinarySensorEntityDescription
entity_description: HassioMountBinarySensorEntityDescription
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
value = getattr(
self.coordinator.data[DATA_KEY_MOUNTS][self._mount.name],
self.entity_description.key,
)
if self.entity_description.target is None:
return value
return value == self.entity_description.target
return self.entity_description.value_fn(self)
+8 -2
View File
@@ -8,9 +8,11 @@ from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from aiohasupervisor.models import (
AddonsStats,
HomeAssistantInfo,
HostInfo,
InstalledAddon,
InstalledAddonComplete,
NetworkInfo,
OSInfo,
RootInfo,
@@ -112,8 +114,12 @@ DATA_OS_INFO: HassKey[OSInfo] = HassKey("hassio_os_info")
DATA_NETWORK_INFO: HassKey[NetworkInfo] = HassKey("hassio_network_info")
DATA_SUPERVISOR_INFO: HassKey[SupervisorInfo] = HassKey("hassio_supervisor_info")
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_ADDONS_INFO: HassKey[dict[str, InstalledAddonComplete | None]] = HassKey(
"hassio_addons_info"
)
DATA_ADDONS_STATS: HassKey[dict[str, AddonsStats | None]] = HassKey(
"hassio_addons_stats"
)
DATA_ADDONS_LIST: HassKey[list[InstalledAddon]] = HassKey("hassio_addons_list")
HASSIO_MAIN_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)
+223 -109
View File
@@ -3,17 +3,20 @@
import asyncio
from collections import defaultdict
from collections.abc import Awaitable
from copy import deepcopy
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING, Any, cast
from aiohasupervisor import SupervisorError, SupervisorNotFoundError
from aiohasupervisor.models import (
AddonsStats,
AddonState,
CIFSMountResponse,
HomeAssistantInfo,
HomeAssistantStats,
HostInfo,
InstalledAddon,
InstalledAddonComplete,
NetworkInfo,
NFSMountResponse,
OSInfo,
@@ -21,10 +24,11 @@ from aiohasupervisor.models import (
RootInfo,
StoreInfo,
SupervisorInfo,
SupervisorStats,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MANUFACTURER, ATTR_NAME
from homeassistant.const import ATTR_MANUFACTURER
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.debounce import Debouncer
@@ -34,15 +38,10 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import (
ATTR_ADDONS,
ATTR_AUTO_UPDATE,
ATTR_DATA,
ATTR_REPOSITORIES,
ATTR_REPOSITORY,
ATTR_SLUG,
ATTR_STARTUP,
ATTR_UPDATE_KEY,
ATTR_URL,
ATTR_VERSION,
ATTR_WS_EVENT,
CONTAINER_STATS,
CORE_CONTAINER,
@@ -53,12 +52,6 @@ from .const import (
DATA_CORE_STATS,
DATA_HOST_INFO,
DATA_INFO,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_MOUNTS,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DATA_KEY_SUPERVISOR_ISSUES,
DATA_NETWORK_INFO,
DATA_OS_INFO,
@@ -86,6 +79,106 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)
@dataclass
class HassioMainData:
"""Data class for HassioMainDataUpdateCoordinator."""
core: HomeAssistantInfo
supervisor: SupervisorInfo
host: HostInfo
mounts: dict[str, CIFSMountResponse | NFSMountResponse]
os: OSInfo | None
def to_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the data."""
return {
"core": self.core.to_dict(),
"supervisor": self.supervisor.to_dict(),
"host": self.host.to_dict(),
"mounts": {name: mount.to_dict() for name, mount in self.mounts.items()},
"os": self.os.to_dict() if self.os is not None else None,
}
@dataclass
class AddonData:
"""Data for a single installed addon."""
addon: InstalledAddon
auto_update: bool
repository: str
@dataclass
class HassioAddonData:
"""Data class for HassioAddOnDataUpdateCoordinator."""
addons: dict[str, AddonData]
def to_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the data."""
return {
"addons": {
slug: {
"addon": addon_data.addon.to_dict(),
"auto_update": addon_data.auto_update,
"repository": addon_data.repository,
}
for slug, addon_data in self.addons.items()
},
}
@dataclass
class HassioStatsData:
"""Data class for HassioStatsDataUpdateCoordinator."""
core: HomeAssistantStats | None
supervisor: SupervisorStats | None
addons: dict[str, AddonsStats | None]
def to_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the data."""
return {
"core": self.core.to_dict() if self.core is not None else None,
"supervisor": (
self.supervisor.to_dict() if self.supervisor is not None else None
),
"addons": {
slug: stats.to_dict() if stats is not None else None
for slug, stats in self.addons.items()
},
}
def _installed_addon_from_complete(info: InstalledAddonComplete) -> InstalledAddon:
"""Build an InstalledAddon from an InstalledAddonComplete object.
InstalledAddonComplete contains a superset of InstalledAddon fields.
This helper extracts only the fields needed for InstalledAddon so fresh
data from an addon_info call can be stored in AddonData.addon.
"""
return InstalledAddon(
advanced=info.advanced,
available=info.available,
build=info.build,
description=info.description,
homeassistant=info.homeassistant,
icon=info.icon,
logo=info.logo,
name=info.name,
repository=info.repository,
slug=info.slug,
stage=info.stage,
update_available=info.update_available,
url=info.url,
version_latest=info.version_latest,
version=info.version,
detached=info.detached,
state=info.state,
)
@callback
def get_info(hass: HomeAssistant) -> dict[str, Any] | None:
"""Return generic information from Supervisor.
@@ -151,7 +244,25 @@ def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any] | None] | N
Async friendly.
"""
return hass.data.get(DATA_ADDONS_INFO)
addons_info: dict[str, InstalledAddonComplete | None] | None = hass.data.get(
DATA_ADDONS_INFO
)
if addons_info is None:
return None
# Converting these fields for compatibility as that is what was returned here.
# We'll leave it this way as long as these component APIs continue to return
# dictionaries. If/when we switch to using the aiohasupervisor models for everything
# internally and externally that will be dropped.
return {
slug: dict(
hassio_api=info.supervisor_api,
hassio_role=info.supervisor_role,
**info.to_dict(),
)
if info is not None
else None
for slug, info in addons_info.items()
}
@callback
@@ -170,7 +281,11 @@ def get_addons_stats(hass: HomeAssistant) -> dict[str, dict[str, Any] | None]:
Async friendly.
"""
return hass.data.get(DATA_ADDONS_STATS) or {}
addons_stats: dict[str, AddonsStats | None] = hass.data.get(DATA_ADDONS_STATS) or {}
return {
slug: stats.to_dict() if stats is not None else None
for slug, stats in addons_stats.items()
}
@callback
@@ -179,7 +294,8 @@ def get_core_stats(hass: HomeAssistant) -> dict[str, Any]:
Async friendly.
"""
return hass.data.get(DATA_CORE_STATS) or {}
stats = hass.data.get(DATA_CORE_STATS)
return stats.to_dict() if stats is not None else {}
@callback
@@ -188,7 +304,8 @@ def get_supervisor_stats(hass: HomeAssistant) -> dict[str, Any]:
Async friendly.
"""
return hass.data.get(DATA_SUPERVISOR_STATS) or {}
stats = hass.data.get(DATA_SUPERVISOR_STATS)
return stats.to_dict() if stats is not None else {}
@callback
@@ -222,19 +339,20 @@ def get_issues_info(hass: HomeAssistant) -> SupervisorIssues | None:
@callback
def async_register_addons_in_dev_reg(
entry_id: str, dev_reg: dr.DeviceRegistry, addons: list[dict[str, Any]]
entry_id: str, dev_reg: dr.DeviceRegistry, addons: list[AddonData]
) -> None:
"""Register addons in the device registry."""
for addon in addons:
for addon_data in addons:
addon = addon_data.addon
params = DeviceInfo(
identifiers={(DOMAIN, addon[ATTR_SLUG])},
identifiers={(DOMAIN, addon.slug)},
model=SupervisorEntityModel.ADDON,
sw_version=addon[ATTR_VERSION],
name=addon[ATTR_NAME],
sw_version=addon.version,
name=addon.name,
entry_type=dr.DeviceEntryType.SERVICE,
configuration_url=f"homeassistant://hassio/addon/{addon[ATTR_SLUG]}",
configuration_url=f"homeassistant://hassio/addon/{addon.slug}",
)
if manufacturer := addon.get(ATTR_REPOSITORY) or addon.get(ATTR_URL):
if manufacturer := addon_data.repository or addon.url:
params[ATTR_MANUFACTURER] = manufacturer
dev_reg.async_get_or_create(config_entry_id=entry_id, **params)
@@ -260,14 +378,14 @@ def async_register_mounts_in_dev_reg(
@callback
def async_register_os_in_dev_reg(
entry_id: str, dev_reg: dr.DeviceRegistry, os_dict: dict[str, Any]
entry_id: str, dev_reg: dr.DeviceRegistry, os_info: OSInfo
) -> None:
"""Register OS in the device registry."""
params = DeviceInfo(
identifiers={(DOMAIN, "OS")},
manufacturer="Home Assistant",
model=SupervisorEntityModel.OS,
sw_version=os_dict[ATTR_VERSION],
sw_version=os_info.version,
name="Home Assistant Operating System",
entry_type=dr.DeviceEntryType.SERVICE,
)
@@ -294,14 +412,14 @@ def async_register_host_in_dev_reg(
def async_register_core_in_dev_reg(
entry_id: str,
dev_reg: dr.DeviceRegistry,
core_dict: dict[str, Any],
core_info: HomeAssistantInfo,
) -> None:
"""Register OS in the device registry."""
"""Register core in the device registry."""
params = DeviceInfo(
identifiers={(DOMAIN, "core")},
manufacturer="Home Assistant",
model=SupervisorEntityModel.CORE,
sw_version=core_dict[ATTR_VERSION],
sw_version=core_info.version,
name="Home Assistant Core",
entry_type=dr.DeviceEntryType.SERVICE,
)
@@ -312,14 +430,14 @@ def async_register_core_in_dev_reg(
def async_register_supervisor_in_dev_reg(
entry_id: str,
dev_reg: dr.DeviceRegistry,
supervisor_dict: dict[str, Any],
supervisor_info: SupervisorInfo,
) -> None:
"""Register OS in the device registry."""
"""Register supervisor in the device registry."""
params = DeviceInfo(
identifiers={(DOMAIN, "supervisor")},
manufacturer="Home Assistant",
model=SupervisorEntityModel.SUPERVISOR,
sw_version=supervisor_dict[ATTR_VERSION],
sw_version=supervisor_info.version,
name="Home Assistant Supervisor",
entry_type=dr.DeviceEntryType.SERVICE,
)
@@ -336,7 +454,7 @@ def async_remove_devices_from_dev_reg(
dev_reg.async_remove_device(dev.id)
class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[HassioStatsData]):
"""Class to retrieve Hass.io container stats."""
config_entry: ConfigEntry
@@ -358,18 +476,18 @@ class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
lambda: defaultdict(set)
)
async def _async_update_data(self) -> dict[str, Any]:
async def _async_update_data(self) -> HassioStatsData:
"""Update stats data via library."""
try:
await self._fetch_stats()
except SupervisorError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
new_data: dict[str, Any] = {}
new_data[DATA_KEY_CORE] = get_core_stats(self.hass)
new_data[DATA_KEY_SUPERVISOR] = get_supervisor_stats(self.hass)
new_data[DATA_KEY_ADDONS] = get_addons_stats(self.hass)
return new_data
return HassioStatsData(
core=self.hass.data.get(DATA_CORE_STATS),
supervisor=self.hass.data.get(DATA_SUPERVISOR_STATS),
addons=self.hass.data.get(DATA_ADDONS_STATS) or {},
)
async def _fetch_stats(self) -> None:
"""Fetch container stats for subscribed entities."""
@@ -387,7 +505,7 @@ class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
if updates:
api_results: list[ResponseData] = await asyncio.gather(*updates.values())
for key, result in zip(updates, api_results, strict=True):
data[key] = result.to_dict()
data[key] = result
# Fetch addon stats
addons_list: list[InstalledAddon] = self.hass.data.get(DATA_ADDONS_LIST) or []
@@ -397,7 +515,9 @@ class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
if addon.state in {AddonState.STARTED, AddonState.STARTUP}
}
addons_stats: dict[str, Any] = data.setdefault(DATA_ADDONS_STATS, {})
addons_stats: dict[str, AddonsStats | None] = data.setdefault(
DATA_ADDONS_STATS, {}
)
# Clean up cache for stopped/removed addons
for slug in addons_stats.keys() - started_addons:
@@ -415,14 +535,14 @@ class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
)
addons_stats.update(addon_stats_results)
async def _update_addon_stats(self, slug: str) -> tuple[str, dict[str, Any] | None]:
async def _update_addon_stats(self, slug: str) -> tuple[str, AddonsStats | None]:
"""Update single addon stats."""
try:
stats = await self.supervisor_client.addons.addon_stats(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch stats for %s: %s", slug, err)
return (slug, None)
return (slug, stats.to_dict())
return (slug, stats)
@callback
def async_enable_container_updates(
@@ -445,7 +565,7 @@ class HassioStatsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return _remove
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[HassioAddonData]):
"""Class to retrieve Hass.io Add-on status."""
config_entry: ConfigEntry
@@ -476,7 +596,7 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self.supervisor_client = get_supervisor_client(hass)
self.jobs = jobs
async def _async_update_data(self) -> dict[str, Any]:
async def _async_update_data(self) -> HassioAddonData:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
@@ -487,7 +607,7 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# Fetch addon info for all addons on first update, or only
# for addons with subscribed entities on subsequent updates.
addon_info_results = dict(
addon_info_results: dict[str, InstalledAddonComplete | None] = dict(
await asyncio.gather(
*[
self._update_addon_info(slug)
@@ -503,39 +623,35 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self.hass.data[DATA_ADDONS_LIST] = installed_addons
# Update addon info cache in hass.data
addon_info_cache: dict[str, Any] = self.hass.data.setdefault(
DATA_ADDONS_INFO, {}
)
addon_info_cache = self.hass.data.setdefault(DATA_ADDONS_INFO, {})
for slug in addon_info_cache.keys() - all_addons:
del addon_info_cache[slug]
addon_info_cache.update(addon_info_results)
# Build clean coordinator data
# Build repository name lookup from store data
store = self.hass.data.get(DATA_STORE)
if store:
repositories = {repo.slug: repo.name for repo in store.repositories}
else:
repositories = {}
repositories: dict[str, str] = (
{repo.slug: repo.name for repo in store.repositories} if store else {}
)
addons_list_dicts = [addon.to_dict() for addon in installed_addons]
new_data: dict[str, Any] = {}
new_data[DATA_KEY_ADDONS] = {
(slug := addon[ATTR_SLUG]): {
**addon,
ATTR_AUTO_UPDATE: (addon_info_cache.get(slug) or {}).get(
ATTR_AUTO_UPDATE, False
),
ATTR_REPOSITORY: repositories.get(
repo_slug := addon.get(ATTR_REPOSITORY, ""), repo_slug
),
}
for addon in addons_list_dicts
}
# Build clean coordinator data
new_addons: dict[str, AddonData] = {}
for addon in installed_addons:
addon_info = addon_info_cache.get(addon.slug)
auto_update = addon_info.auto_update if addon_info is not None else False
repo_slug = addon.repository
repository = repositories.get(repo_slug, repo_slug)
new_addons[addon.slug] = AddonData(
addon=addon,
auto_update=auto_update,
repository=repository,
)
new_data = HassioAddonData(addons=new_addons)
# If this is the initial refresh, register all addons
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
self.entry_id, self.dev_reg, list(new_data.addons.values())
)
# Remove add-ons that are no longer installed from device registry
@@ -546,19 +662,16 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
)
if device.model == SupervisorEntityModel.ADDON
}
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
if stale_addons := supervisor_addon_devices - set(new_data.addons):
async_remove_devices_from_dev_reg(self.dev_reg, stale_addons)
# If there are new add-ons, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# create new devices and entities. We can return the new data because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_ADDONS]) - set(self.data[DATA_KEY_ADDONS])
):
if self.data and (set(new_data.addons) - set(self.data.addons)):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
)
return {}
return new_data
@@ -569,18 +682,16 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
except SupervisorNotFoundError:
return None
async def _update_addon_info(self, slug: str) -> tuple[str, dict[str, Any] | None]:
async def _update_addon_info(
self, slug: str
) -> tuple[str, InstalledAddonComplete | None]:
"""Return the info for an addon."""
try:
info = await self.supervisor_client.addons.addon_info(slug)
except SupervisorError as err:
_LOGGER.warning("Could not fetch info for %s: %s", slug, err)
return (slug, None)
# Translate to legacy hassio names for compatibility
info_dict = info.to_dict()
info_dict["hassio_api"] = info_dict.pop("supervisor_api")
info_dict["hassio_role"] = info_dict.pop("supervisor_role")
return (slug, info_dict)
return (slug, info)
@callback
def async_enable_addon_info_updates(
@@ -627,16 +738,26 @@ class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Force refresh of addon info data for a specific addon."""
try:
slug, info = await self._update_addon_info(addon_slug)
if info is not None and DATA_KEY_ADDONS in self.data:
if slug in self.data[DATA_KEY_ADDONS]:
data = deepcopy(self.data)
data[DATA_KEY_ADDONS][slug].update(info)
self.async_set_updated_data(data)
except SupervisorError as err:
_LOGGER.warning("Could not refresh info for %s: %s", addon_slug, err)
return
if info is not None and self.data and slug in self.data.addons:
updated = AddonData(
addon=_installed_addon_from_complete(info),
auto_update=info.auto_update,
repository=self.data.addons[slug].repository,
)
self.async_set_updated_data(
HassioAddonData(addons={**self.data.addons, slug: updated})
)
# Update addon info cache in hass.data
addon_info_cache = self.hass.data.setdefault(DATA_ADDONS_INFO, {})
addon_info_cache[slug] = info
class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[HassioMainData]):
"""Class to retrieve Hass.io status."""
config_entry: ConfigEntry
@@ -679,7 +800,7 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
):
self.config_entry.async_create_task(self.hass, self.async_request_refresh())
async def _async_update_data(self) -> dict[str, Any]:
async def _async_update_data(self) -> HassioMainData:
"""Update data via library."""
is_first_update = not self.data
client = self.supervisor_client
@@ -722,13 +843,13 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
# Build clean coordinator data
new_data: dict[str, Any] = {}
new_data[DATA_KEY_CORE] = core_info.to_dict()
new_data[DATA_KEY_SUPERVISOR] = supervisor_info.to_dict()
new_data[DATA_KEY_HOST] = host_info.to_dict()
new_data[DATA_KEY_MOUNTS] = {mount.name: mount for mount in mounts_info.mounts}
if self.is_hass_os:
new_data[DATA_KEY_OS] = os_info.to_dict()
new_data = HassioMainData(
core=core_info,
supervisor=supervisor_info,
host=host_info,
mounts={mount.name: mount for mount in mounts_info.mounts},
os=os_info if self.is_hass_os else None,
)
# Update hass.data for legacy accessor functions
self.hass.data[DATA_INFO] = info
@@ -742,19 +863,15 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# If this is the initial refresh, register all main components
if is_first_update:
async_register_mounts_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_MOUNTS].values()
)
async_register_core_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
self.entry_id, self.dev_reg, list(new_data.mounts.values())
)
async_register_core_in_dev_reg(self.entry_id, self.dev_reg, new_data.core)
async_register_supervisor_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
self.entry_id, self.dev_reg, new_data.supervisor
)
async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
if self.is_hass_os:
async_register_os_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
)
async_register_os_in_dev_reg(self.entry_id, self.dev_reg, os_info)
# Remove mounts that no longer exists from device registry
supervisor_mount_devices = {
@@ -764,7 +881,7 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
)
if device.model == SupervisorEntityModel.MOUNT
}
if stale_mounts := supervisor_mount_devices - set(new_data[DATA_KEY_MOUNTS]):
if stale_mounts := supervisor_mount_devices - set(new_data.mounts):
async_remove_devices_from_dev_reg(
self.dev_reg, {f"mount_{stale_mount}" for stale_mount in stale_mounts}
)
@@ -776,15 +893,12 @@ class HassioMainDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self.dev_reg.async_remove_device(dev.id)
# If there are new mounts, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# create new devices and entities. We can return the new data because
# coordinator will be recreated.
if self.data and (
set(new_data[DATA_KEY_MOUNTS]) - set(self.data.get(DATA_KEY_MOUNTS, {}))
):
if self.data and (set(new_data.mounts) - set(self.data.mounts)):
self.hass.async_create_task(
self.hass.config_entries.async_reload(self.entry_id)
)
return {}
return new_data
@@ -56,8 +56,8 @@ async def async_get_config_entry_diagnostics(
devices.append({"device": asdict(device), "entities": entities})
return {
"coordinator_data": coordinator.data,
"addons_coordinator_data": addons_coordinator.data,
"stats_coordinator_data": stats_coordinator.data,
"coordinator_data": coordinator.data.to_dict(),
"addons_coordinator_data": addons_coordinator.data.to_dict(),
"stats_coordinator_data": stats_coordinator.data.to_dict(),
"devices": devices,
}
+54 -74
View File
@@ -1,27 +1,20 @@
"""Base for Hass.io entities."""
from typing import Any
from collections.abc import Callable
from aiohasupervisor.models.mounts import CIFSMountResponse, NFSMountResponse
from aiohasupervisor.models import CIFSMountResponse, HostInfo, NFSMountResponse, OSInfo
from aiohasupervisor.models.base import ContainerStats
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
ATTR_SLUG,
CONTAINER_STATS,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_MOUNTS,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
DOMAIN,
)
from .const import CONTAINER_STATS, DOMAIN
from .coordinator import (
AddonData,
HassioAddOnDataUpdateCoordinator,
HassioMainDataUpdateCoordinator,
HassioStatsData,
HassioStatsDataUpdateCoordinator,
)
@@ -37,7 +30,7 @@ class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]):
entity_description: EntityDescription,
*,
container_id: str,
data_key: str,
stats_fn: Callable[[HassioStatsData], ContainerStats | None],
device_id: str,
unique_id_prefix: str,
) -> None:
@@ -45,27 +38,25 @@ class HassioStatsEntity(CoordinatorEntity[HassioStatsDataUpdateCoordinator]):
super().__init__(coordinator)
self.entity_description = entity_description
self._container_id = container_id
self._data_key = data_key
self._stats_fn = stats_fn
self._attr_unique_id = f"{unique_id_prefix}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
@property
def _stats(self) -> ContainerStats | None:
"""Return the stats object for this entity's container."""
return self._stats_fn(self.coordinator.data)
@property
def stats(self) -> ContainerStats:
"""Return the stats object, asserting it is available."""
assert self._stats is not None
return self._stats
@property
def available(self) -> bool:
"""Return True if entity is available."""
if self._data_key == DATA_KEY_ADDONS:
return (
super().available
and DATA_KEY_ADDONS in self.coordinator.data
and self.entity_description.key
in (
self.coordinator.data[DATA_KEY_ADDONS].get(self._container_id) or {}
)
)
return (
super().available
and self._data_key in self.coordinator.data
and self.entity_description.key in self.coordinator.data[self._data_key]
)
return super().available and self._stats is not None
async def async_added_to_hass(self) -> None:
"""Subscribe to stats updates."""
@@ -92,24 +83,31 @@ class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
self,
coordinator: HassioAddOnDataUpdateCoordinator,
entity_description: EntityDescription,
addon: dict[str, Any],
addon: AddonData,
) -> None:
"""Initialize base entity."""
super().__init__(coordinator)
self.entity_description = entity_description
self._addon_slug = addon[ATTR_SLUG]
self._attr_unique_id = f"{addon[ATTR_SLUG]}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, addon[ATTR_SLUG])})
self._addon_slug = addon.addon.slug
self._attr_unique_id = f"{addon.addon.slug}_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, addon.addon.slug)})
@property
def addon_slug(self) -> str:
"""Return the add-on slug."""
return self._addon_slug
@property
def addon_data(self) -> AddonData:
"""Return the add-on data, asserting it is available."""
data = self.coordinator.data
assert self._addon_slug in data.addons
return data.addons[self._addon_slug]
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_ADDONS in self.coordinator.data
and self.entity_description.key
in self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
)
return super().available and self._addon_slug in self.coordinator.data.addons
async def async_added_to_hass(self) -> None:
"""Subscribe to addon info updates."""
@@ -140,11 +138,13 @@ class HassioOSEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_OS in self.coordinator.data
and self.entity_description.key in self.coordinator.data[DATA_KEY_OS]
)
return super().available and self.coordinator.data.os is not None
@property
def os(self) -> OSInfo:
"""Return the OS info object, asserting it is available."""
assert self.coordinator.data.os is not None
return self.coordinator.data.os
class HassioHostEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
@@ -164,13 +164,10 @@ class HassioHostEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "host")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_HOST in self.coordinator.data
and self.entity_description.key in self.coordinator.data[DATA_KEY_HOST]
)
def host(self) -> HostInfo:
"""Return the host info, asserting it is available."""
assert self.coordinator.data.host is not None
return self.coordinator.data.host
class HassioSupervisorEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
@@ -189,16 +186,6 @@ class HassioSupervisorEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator])
self._attr_unique_id = f"home_assistant_supervisor_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "supervisor")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_SUPERVISOR in self.coordinator.data
and self.entity_description.key
in self.coordinator.data[DATA_KEY_SUPERVISOR]
)
class HassioCoreEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Core."""
@@ -216,15 +203,6 @@ class HassioCoreEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
self._attr_unique_id = f"home_assistant_core_{entity_description.key}"
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, "core")})
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and DATA_KEY_CORE in self.coordinator.data
and self.entity_description.key in self.coordinator.data[DATA_KEY_CORE]
)
class HassioMountEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
"""Base Entity for Mount."""
@@ -248,10 +226,12 @@ class HassioMountEntity(CoordinatorEntity[HassioMainDataUpdateCoordinator]):
)
self._mount = mount
@property
def mount_name(self) -> str:
"""Return the mount name."""
return self._mount.name
@property
def available(self) -> bool:
"""Return True if entity is available."""
return (
super().available
and self._mount.name in self.coordinator.data[DATA_KEY_MOUNTS]
)
return super().available and self.mount_name in self.coordinator.data.mounts
+106 -44
View File
@@ -1,5 +1,10 @@
"""Sensor platform for Hass.io addons."""
from collections.abc import Callable
from dataclasses import dataclass
from aiohasupervisor.models.base import ContainerStats
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
@@ -15,19 +20,12 @@ from .const import (
ADDONS_COORDINATOR,
ATTR_CPU_PERCENT,
ATTR_MEMORY_PERCENT,
ATTR_SLUG,
ATTR_VERSION,
ATTR_VERSION_LATEST,
CORE_CONTAINER,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
MAIN_COORDINATOR,
STATS_COORDINATOR,
SUPERVISOR_CONTAINER,
)
from .coordinator import HassioStatsData
from .entity import (
HassioAddonEntity,
HassioHostEntity,
@@ -35,74 +33,125 @@ from .entity import (
HassioStatsEntity,
)
COMMON_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
@dataclass(frozen=True, kw_only=True)
class HassioAddonSensorEntityDescription(SensorEntityDescription):
"""Hass.io add-on sensor entity description."""
value_fn: Callable[[HassioAddonSensor], str | None]
@dataclass(frozen=True, kw_only=True)
class HassioStatsSensorEntityDescription(SensorEntityDescription):
"""Hass.io stats sensor entity description."""
value_fn: Callable[[HassioStatsSensor], float]
@dataclass(frozen=True, kw_only=True)
class HassioOSSensorEntityDescription(SensorEntityDescription):
"""Hass.io OS sensor entity description."""
value_fn: Callable[[HassioOSSensor], str | None]
@dataclass(frozen=True, kw_only=True)
class HassioHostSensorEntityDescription(SensorEntityDescription):
"""Hass.io host sensor entity description."""
value_fn: Callable[[HostSensor], str | float | None]
ADDON_ENTITY_DESCRIPTIONS = (
HassioAddonSensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_VERSION,
key="version",
translation_key="version",
value_fn=lambda entity: entity.addon_data.addon.version,
),
SensorEntityDescription(
HassioAddonSensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_VERSION_LATEST,
key="version_latest",
translation_key="version_latest",
value_fn=lambda entity: entity.addon_data.addon.version_latest,
),
)
STATS_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
HassioStatsSensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_CPU_PERCENT,
translation_key="cpu_percent",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda entity: entity.stats.cpu_percent,
),
SensorEntityDescription(
HassioStatsSensorEntityDescription(
entity_registry_enabled_default=False,
key=ATTR_MEMORY_PERCENT,
translation_key="memory_percent",
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda entity: entity.stats.memory_percent,
),
)
OS_ENTITY_DESCRIPTIONS = COMMON_ENTITY_DESCRIPTIONS
OS_ENTITY_DESCRIPTIONS = (
HassioOSSensorEntityDescription(
entity_registry_enabled_default=False,
key="version",
translation_key="version",
value_fn=lambda entity: entity.os.version,
),
HassioOSSensorEntityDescription(
entity_registry_enabled_default=False,
key="version_latest",
translation_key="version_latest",
value_fn=lambda entity: entity.os.version_latest,
),
)
HOST_ENTITY_DESCRIPTIONS = (
SensorEntityDescription(
HassioHostSensorEntityDescription(
entity_registry_enabled_default=False,
key="agent_version",
translation_key="agent_version",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda entity: entity.host.agent_version,
),
SensorEntityDescription(
HassioHostSensorEntityDescription(
entity_registry_enabled_default=False,
key="apparmor_version",
translation_key="apparmor_version",
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda entity: entity.host.apparmor_version,
),
SensorEntityDescription(
HassioHostSensorEntityDescription(
entity_registry_enabled_default=False,
key="disk_total",
translation_key="disk_total",
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda entity: entity.host.disk_total,
),
SensorEntityDescription(
HassioHostSensorEntityDescription(
entity_registry_enabled_default=False,
key="disk_used",
translation_key="disk_used",
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda entity: entity.host.disk_used,
),
SensorEntityDescription(
HassioHostSensorEntityDescription(
entity_registry_enabled_default=False,
key="disk_free",
translation_key="disk_free",
native_unit_of_measurement=UnitOfInformation.GIGABYTES,
device_class=SensorDeviceClass.DATA_SIZE,
entity_category=EntityCategory.DIAGNOSTIC,
value_fn=lambda entity: entity.host.disk_free,
),
)
@@ -126,21 +175,32 @@ async def async_setup_entry(
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in COMMON_ENTITY_DESCRIPTIONS
for addon in addons_coordinator.data.addons.values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
)
# Add-on stats sensors (cpu_percent, memory_percent)
def stats_fn_factory(
addon_slug: str,
) -> Callable[[HassioStatsData], ContainerStats | None]:
"""Return a stats_fn for the given add-on slug."""
def stats_fn(data: HassioStatsData) -> ContainerStats | None:
"""Return the stats for the given add-on."""
return data.addons.get(addon_slug)
return stats_fn
entities.extend(
HassioStatsSensor(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=addon[ATTR_SLUG],
data_key=DATA_KEY_ADDONS,
device_id=addon[ATTR_SLUG],
unique_id_prefix=addon[ATTR_SLUG],
container_id=addon.addon.slug,
stats_fn=stats_fn_factory(addon.addon.slug),
device_id=addon.addon.slug,
unique_id_prefix=addon.addon.slug,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data.addons.values()
for entity_description in STATS_ENTITY_DESCRIPTIONS
)
@@ -150,7 +210,7 @@ async def async_setup_entry(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=CORE_CONTAINER,
data_key=DATA_KEY_CORE,
stats_fn=lambda data: data.core,
device_id="core",
unique_id_prefix="home_assistant_core",
)
@@ -163,7 +223,7 @@ async def async_setup_entry(
coordinator=stats_coordinator,
entity_description=entity_description,
container_id=SUPERVISOR_CONTAINER,
data_key=DATA_KEY_SUPERVISOR,
stats_fn=lambda data: data.supervisor,
device_id="supervisor",
unique_id_prefix="home_assistant_supervisor",
)
@@ -195,40 +255,42 @@ async def async_setup_entry(
class HassioAddonSensor(HassioAddonEntity, SensorEntity):
"""Sensor to track a Hass.io add-on attribute."""
entity_description: HassioAddonSensorEntityDescription
@property
def native_value(self) -> str:
def native_value(self) -> str | None:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_ADDONS][self._addon_slug][
self.entity_description.key
]
return self.entity_description.value_fn(self)
class HassioStatsSensor(HassioStatsEntity, SensorEntity):
"""Sensor to track container stats."""
entity_description: HassioStatsSensorEntityDescription
@property
def native_value(self) -> str:
def native_value(self) -> float:
"""Return native value of entity."""
if self._data_key == DATA_KEY_ADDONS:
return self.coordinator.data[DATA_KEY_ADDONS][self._container_id][
self.entity_description.key
]
return self.coordinator.data[self._data_key][self.entity_description.key]
return self.entity_description.value_fn(self)
class HassioOSSensor(HassioOSEntity, SensorEntity):
"""Sensor to track a Hass.io OS attribute."""
entity_description: HassioOSSensorEntityDescription
@property
def native_value(self) -> str:
def native_value(self) -> str | None:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_OS][self.entity_description.key]
return self.entity_description.value_fn(self)
class HostSensor(HassioHostEntity, SensorEntity):
"""Sensor to track a host attribute."""
entity_description: HassioHostSensorEntityDescription
@property
def native_value(self) -> str:
def native_value(self) -> str | float | None:
"""Return native value of entity."""
return self.coordinator.data[DATA_KEY_HOST][self.entity_description.key]
return self.entity_description.value_fn(self)
+10 -10
View File
@@ -4,15 +4,15 @@ import logging
from typing import Any
from aiohasupervisor import SupervisorError
from aiohasupervisor.models import AddonState
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
from .const import ADDONS_COORDINATOR
from .entity import HassioAddonEntity
from .handler import get_supervisor_client
@@ -20,7 +20,7 @@ _LOGGER = logging.getLogger(__name__)
ENTITY_DESCRIPTION = SwitchEntityDescription(
key=ATTR_STATE,
key="state",
name=None,
icon="mdi:puzzle",
entity_registry_enabled_default=False,
@@ -41,7 +41,7 @@ async def async_setup_entry(
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for addon in coordinator.data.addons.values()
)
@@ -49,19 +49,19 @@ class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
"""Switch for Hass.io add-ons."""
@property
def is_on(self) -> bool | None:
def is_on(self) -> bool:
"""Return true if the add-on is on."""
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
state = addon_data.get(self.entity_description.key)
return state == ATTR_STARTED
return (
self.coordinator.data.addons[self._addon_slug].addon.state
== AddonState.STARTED
)
@property
def entity_picture(self) -> str | None:
"""Return the icon of the add-on if any."""
if not self.available:
return None
addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
if addon_data.get(ATTR_ICON):
if self.coordinator.data.addons[self._addon_slug].addon.icon:
return f"/api/hassio/addons/{self._addon_slug}/icon"
return None
+24 -32
View File
@@ -13,22 +13,12 @@ from homeassistant.components.update import (
UpdateEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON, ATTR_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
ADDONS_COORDINATOR,
ATTR_AUTO_UPDATE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_OS,
DATA_KEY_SUPERVISOR,
MAIN_COORDINATOR,
)
from .const import ADDONS_COORDINATOR, ATTR_VERSION_LATEST, MAIN_COORDINATOR
from .coordinator import AddonData
from .entity import (
HassioAddonEntity,
HassioCoreEntity,
@@ -78,7 +68,7 @@ async def async_setup_entry(
coordinator=addons_coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data.addons.values()
)
async_add_entities(entities)
@@ -108,29 +98,29 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
_version_before_update: str | None = None
@property
def _addon_data(self) -> dict:
def _addon_data(self) -> AddonData:
"""Return the add-on data."""
return self.coordinator.data[DATA_KEY_ADDONS][self._addon_slug]
return self.coordinator.data.addons[self._addon_slug]
@property
def auto_update(self) -> bool:
"""Return true if auto-update is enabled for the add-on."""
return self._addon_data[ATTR_AUTO_UPDATE]
return self._addon_data.auto_update
@property
def title(self) -> str | None:
"""Return the title of the update."""
return self._addon_data[ATTR_NAME]
return self._addon_data.addon.name
@property
def latest_version(self) -> str | None:
"""Latest version available for install."""
return self._addon_data[ATTR_VERSION_LATEST]
return self._addon_data.addon.version_latest
@property
def installed_version(self) -> str | None:
"""Version installed and in use."""
return self._addon_data[ATTR_VERSION]
return self._addon_data.addon.version
@property
def in_progress(self) -> bool | None:
@@ -144,7 +134,7 @@ class SupervisorAddonUpdateEntity(HassioAddonEntity, UpdateEntity):
"""Return the icon of the add-on if any."""
if not self.available:
return None
if self._addon_data[ATTR_ICON]:
if self._addon_data.addon.icon:
return f"/api/hassio/addons/{self._addon_slug}/icon"
return None
@@ -236,14 +226,16 @@ class SupervisorOSUpdateEntity(HassioOSEntity, UpdateEntity):
_attr_title = "Home Assistant Operating System"
@property
def latest_version(self) -> str:
def latest_version(self) -> str | None:
"""Return the latest version."""
return self.coordinator.data[DATA_KEY_OS][ATTR_VERSION_LATEST]
assert self.coordinator.data.os is not None
return self.coordinator.data.os.version_latest
@property
def installed_version(self) -> str:
def installed_version(self) -> str | None:
"""Return the installed version."""
return self.coordinator.data[DATA_KEY_OS][ATTR_VERSION]
assert self.coordinator.data.os is not None
return self.coordinator.data.os.version
@property
def entity_picture(self) -> str | None:
@@ -293,19 +285,19 @@ class SupervisorSupervisorUpdateEntity(HassioSupervisorEntity, UpdateEntity):
return self._attr_in_progress
@property
def latest_version(self) -> str:
def latest_version(self) -> str | None:
"""Return the latest version."""
return self.coordinator.data[DATA_KEY_SUPERVISOR][ATTR_VERSION_LATEST]
return self.coordinator.data.supervisor.version_latest
@property
def installed_version(self) -> str:
"""Return the installed version."""
return self.coordinator.data[DATA_KEY_SUPERVISOR][ATTR_VERSION]
return self.coordinator.data.supervisor.version
@property
def auto_update(self) -> bool:
"""Return true if auto-update is enabled for supervisor."""
return self.coordinator.data[DATA_KEY_SUPERVISOR][ATTR_AUTO_UPDATE]
return self.coordinator.data.supervisor.auto_update
@property
def release_url(self) -> str | None:
@@ -389,14 +381,14 @@ class SupervisorCoreUpdateEntity(HassioCoreEntity, UpdateEntity):
_attr_title = "Home Assistant Core"
@property
def latest_version(self) -> str:
def latest_version(self) -> str | None:
"""Return the latest version."""
return self.coordinator.data[DATA_KEY_CORE][ATTR_VERSION_LATEST]
return self.coordinator.data.core.version_latest
@property
def installed_version(self) -> str:
def installed_version(self) -> str | None:
"""Return the installed version."""
return self.coordinator.data[DATA_KEY_CORE][ATTR_VERSION]
return self.coordinator.data.core.version
@property
def entity_picture(self) -> str | None:
+17 -3
View File
@@ -117,9 +117,22 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
if not errors:
_LOGGER.debug("2FA successful")
if self.source == SOURCE_REAUTH:
return await self.async_setup_hive_entry()
self.device_registration = True
return await self.async_step_configuration()
try:
device_registered = await self.hive_auth.is_device_registered()
except HiveApiError as err:
_LOGGER.debug(
"Failed to check whether the Hive device is registered during reauthentication: %s",
err,
)
errors["base"] = "no_internet_available"
else:
if device_registered:
return await self.async_setup_hive_entry()
self.device_registration = True
return await self.async_step_configuration()
else:
self.device_registration = True
return await self.async_step_configuration()
schema = vol.Schema({vol.Required(CONF_CODE): str})
return self.async_show_form(step_id="2fa", data_schema=schema, errors=errors)
@@ -171,6 +184,7 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN):
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Re Authenticate a user."""
self.data = dict(entry_data)
data = {
CONF_USERNAME: entry_data[CONF_USERNAME],
CONF_PASSWORD: entry_data[CONF_PASSWORD],
@@ -60,7 +60,7 @@
"include_exclude_mode": "Inclusion mode",
"mode": "HomeKit mode"
},
"description": "HomeKit can be configured expose a bridge or a single accessory. In accessory mode, only a single entity can be used. Accessory mode is required for media players with the TV or RECEIVER device class to function properly. Entities in the \u201cDomains to include\u201d will be included to HomeKit. You will be able to select which entities to include or exclude from this list on the next screen.",
"description": "HomeKit can be configured to expose a bridge or a single accessory. In accessory mode, only a single entity can be used. Accessory mode is required for media players with the TV or RECEIVER device class to function properly. Entities in the \u201cDomains to include\u201d will be included to HomeKit. You will be able to select which entities to include or exclude from this list on the next screen.",
"title": "Select mode and domains."
},
"yaml": {
@@ -9,5 +9,5 @@
"iot_class": "cloud_push",
"loggers": ["aioautomower"],
"quality_scale": "silver",
"requirements": ["aioautomower==2.7.4"]
"requirements": ["aioautomower==2.7.5"]
}
+2 -2
View File
@@ -1,6 +1,6 @@
"""Button platform for Indevolt integration."""
from dataclasses import dataclass, field
from dataclasses import dataclass
from typing import Final
from indevolt_api import IndevoltRealtimeAction
@@ -20,7 +20,7 @@ PARALLEL_UPDATES = 0
class IndevoltButtonEntityDescription(ButtonEntityDescription):
"""Custom entity description class for Indevolt button entities."""
generation: list[int] = field(default_factory=lambda: [1, 2])
generation: tuple[int, ...] = (1, 2)
BUTTONS: Final = (
@@ -10,7 +10,6 @@ from indevolt_api import (
IndevoltConfig,
IndevoltEnergyMode,
IndevoltRealtimeAction,
TimeOutException,
)
from homeassistant.config_entries import ConfigEntry
@@ -78,10 +77,8 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Fetch device info once on boot."""
try:
config_data = await self.api.get_config()
except TimeOutException as err:
raise ConfigEntryNotReady(
f"Device config retrieval timed out: {err}"
) from err
except (ClientError, OSError) as err:
raise ConfigEntryNotReady(f"Device config retrieval failed: {err}") from err
# Cache device information
device_data = config_data.get("device", {})
@@ -94,16 +91,16 @@ class IndevoltCoordinator(DataUpdateCoordinator[dict[str, Any]]):
try:
return await self.api.fetch_data(sensor_keys)
except TimeOutException as err:
raise UpdateFailed(f"Device update timed out: {err}") from err
except (ClientError, OSError) as err:
raise UpdateFailed(f"Device update failed: {err}") from err
async def async_push_data(self, sensor_key: str, value: Any) -> bool:
"""Push/write data values to given key on the device."""
try:
return await self.api.set_data(sensor_key, value)
except TimeOutException as err:
except TimeoutError as err:
raise DeviceTimeoutError(f"Device push timed out: {err}") from err
except (ClientError, ConnectionError, OSError) as err:
except (ClientError, OSError) as err:
raise DeviceConnectionError(f"Device push failed: {err}") from err
async def async_switch_energy_mode(
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/indevolt",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["indevolt-api==1.6.5"]
"quality_scale": "silver",
"requirements": ["indevolt-api==1.7.1"]
}
+6 -6
View File
@@ -1,6 +1,6 @@
"""Number platform for Indevolt integration."""
from dataclasses import dataclass, field
from dataclasses import dataclass
from typing import Final
from indevolt_api import IndevoltConfig
@@ -27,15 +27,15 @@ PARALLEL_UPDATES = 0
class IndevoltNumberEntityDescription(NumberEntityDescription):
"""Custom entity description class for Indevolt number entities."""
generation: list[int] = field(default_factory=lambda: [1, 2])
read_key: str
write_key: str
generation: tuple[int, ...] = (1, 2)
NUMBERS: Final = (
IndevoltNumberEntityDescription(
key="discharge_limit",
generation=[2],
generation=(2,),
translation_key="discharge_limit",
read_key=IndevoltConfig.READ_DISCHARGE_LIMIT,
write_key=IndevoltConfig.WRITE_DISCHARGE_LIMIT,
@@ -46,7 +46,7 @@ NUMBERS: Final = (
),
IndevoltNumberEntityDescription(
key="max_ac_output_power",
generation=[2],
generation=(2,),
translation_key="max_ac_output_power",
read_key=IndevoltConfig.READ_MAX_AC_OUTPUT_POWER,
write_key=IndevoltConfig.WRITE_MAX_AC_OUTPUT_POWER,
@@ -58,7 +58,7 @@ NUMBERS: Final = (
),
IndevoltNumberEntityDescription(
key="inverter_input_limit",
generation=[2],
generation=(2,),
translation_key="inverter_input_limit",
read_key=IndevoltConfig.READ_INVERTER_INPUT_LIMIT,
write_key=IndevoltConfig.WRITE_INVERTER_INPUT_LIMIT,
@@ -70,7 +70,7 @@ NUMBERS: Final = (
),
IndevoltNumberEntityDescription(
key="feedin_power_limit",
generation=[2],
generation=(2,),
translation_key="feedin_power_limit",
read_key=IndevoltConfig.READ_FEEDIN_POWER_LIMIT,
write_key=IndevoltConfig.WRITE_FEEDIN_POWER_LIMIT,
+1 -1
View File
@@ -25,7 +25,7 @@ class IndevoltSelectEntityDescription(SelectEntityDescription):
write_key: str
value_to_option: dict[IndevoltEnergyMode, str]
unavailable_values: list[IndevoltEnergyMode] = field(default_factory=list)
generation: list[int] = field(default_factory=lambda: [1, 2])
generation: tuple[int, ...] = (1, 2)
SELECTS: Final = (
+5 -5
View File
@@ -1,6 +1,6 @@
"""Switch platform for Indevolt integration."""
from dataclasses import dataclass, field
from dataclasses import dataclass
from typing import Any, Final
from indevolt_api import IndevoltConfig
@@ -29,14 +29,14 @@ class IndevoltSwitchEntityDescription(SwitchEntityDescription):
write_key: str
read_on_value: int = 1
read_off_value: int = 0
generation: list[int] = field(default_factory=lambda: [1, 2])
generation: tuple[int, ...] = (1, 2)
SWITCHES: Final = (
IndevoltSwitchEntityDescription(
key="grid_charging",
translation_key="grid_charging",
generation=[2],
generation=(2,),
read_key=IndevoltConfig.READ_GRID_CHARGING,
write_key=IndevoltConfig.WRITE_GRID_CHARGING,
read_on_value=1001,
@@ -46,7 +46,7 @@ SWITCHES: Final = (
IndevoltSwitchEntityDescription(
key="light",
translation_key="light",
generation=[2],
generation=(2,),
read_key=IndevoltConfig.READ_LIGHT,
write_key=IndevoltConfig.WRITE_LIGHT,
device_class=SwitchDeviceClass.SWITCH,
@@ -54,7 +54,7 @@ SWITCHES: Final = (
IndevoltSwitchEntityDescription(
key="bypass",
translation_key="bypass",
generation=[2],
generation=(2,),
read_key=IndevoltConfig.READ_BYPASS,
write_key=IndevoltConfig.WRITE_BYPASS,
device_class=SwitchDeviceClass.SWITCH,
@@ -5,5 +5,5 @@
"documentation": "https://www.home-assistant.io/integrations/infrared",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["infrared-protocols==2.0.0"]
"requirements": ["infrared-protocols==2.1.0"]
}
@@ -2,7 +2,7 @@
"domain": "insteon",
"name": "Insteon",
"after_dependencies": ["panel_custom"],
"codeowners": ["@teharris1"],
"codeowners": ["@teharris1", "@ssyrell"],
"config_flow": true,
"dependencies": ["http", "usb", "websocket_api"],
"dhcp": [
@@ -19,7 +19,7 @@
"loggers": ["pyinsteon", "pypubsub"],
"requirements": [
"pyinsteon==1.6.4",
"insteon-frontend-home-assistant==0.6.1"
"insteon-frontend-home-assistant==0.6.2"
],
"single_config_entry": true,
"usb": [
@@ -77,10 +77,9 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None:
existing_intents = hass.data[DOMAIN]
for intent_type, conf in existing_intents.items():
if isinstance(conf.get(CONF_ACTION), script.Script):
await conf[CONF_ACTION].async_stop()
conf[CONF_ACTION].async_unload()
intent.async_remove(hass, intent_type)
if isinstance(conf.get(CONF_ACTION), script.Script):
await conf[CONF_ACTION].async_unload()
if not new_config or DOMAIN not in new_config:
hass.data[DOMAIN] = {}
+12 -6
View File
@@ -2,6 +2,7 @@
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
from enum import StrEnum
from pynecil import LiveDataResponse, OperatingMode, PowerSource
@@ -23,6 +24,7 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.util import dt as dt_util
from . import IronOSConfigEntry
from .const import OHM
@@ -56,7 +58,7 @@ class PinecilSensor(StrEnum):
class IronOSSensorEntityDescription(SensorEntityDescription):
"""IronOS sensor entity descriptions."""
value_fn: Callable[[LiveDataResponse, bool], StateType]
value_fn: Callable[[LiveDataResponse, bool], StateType | datetime]
PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = (
@@ -116,10 +118,14 @@ PINECIL_SENSOR_DESCRIPTIONS: tuple[IronOSSensorEntityDescription, ...] = (
IronOSSensorEntityDescription(
key=PinecilSensor.UPTIME,
translation_key=PinecilSensor.UPTIME,
native_unit_of_measurement=UnitOfTime.SECONDS,
device_class=SensorDeviceClass.DURATION,
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda data, _: data.uptime,
device_class=SensorDeviceClass.UPTIME,
value_fn=(
lambda data, _: (
(dt_util.utcnow() - timedelta(seconds=data.uptime))
if data.uptime is not None
else None
)
),
entity_category=EntityCategory.DIAGNOSTIC,
),
IronOSSensorEntityDescription(
@@ -200,7 +206,7 @@ class IronOSSensorEntity(IronOSBaseEntity, SensorEntity):
coordinator: IronOSLiveDataCoordinator
@property
def native_value(self) -> StateType:
def native_value(self) -> StateType | datetime:
"""Return sensor state."""
return self.entity_description.value_fn(
self.coordinator.data, self.coordinator.has_tip
@@ -24,7 +24,7 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyisy"],
"requirements": ["pyisy==3.4.1"],
"requirements": ["pyisy==3.5.1"],
"ssdp": [
{
"deviceType": "urn:udi-com:device:X_Insteon_Lighting_Device:1",
+1 -1
View File
@@ -12,7 +12,7 @@
"quality_scale": "platinum",
"requirements": [
"xknx==3.15.0",
"xknxproject==3.8.2",
"xknxproject==3.9.0",
"knx-frontend==2026.4.30.60856"
],
"single_config_entry": true
@@ -4,7 +4,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from typing import Generic
from pylitterbot import LitterRobot, LitterRobot4, Robot
from pylitterbot import FeederRobot, LitterRobot, LitterRobot3, LitterRobot4, Robot
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
@@ -30,8 +30,11 @@ class RobotBinarySensorEntityDescription(
is_on_fn: Callable[[_WhiskerEntityT], bool]
BINARY_SENSOR_MAP: dict[type[Robot], tuple[RobotBinarySensorEntityDescription, ...]] = {
LitterRobot: ( # type: ignore[type-abstract] # only used for isinstance check
BINARY_SENSOR_MAP: dict[
type[Robot] | tuple[type[Robot], ...],
tuple[RobotBinarySensorEntityDescription, ...],
] = {
LitterRobot: (
RobotBinarySensorEntityDescription[LitterRobot](
key="sleeping",
translation_key="sleeping",
@@ -56,14 +59,14 @@ BINARY_SENSOR_MAP: dict[type[Robot], tuple[RobotBinarySensorEntityDescription, .
is_on_fn=lambda robot: not robot.is_hopper_removed,
),
),
Robot: ( # type: ignore[type-abstract] # only used for isinstance check
RobotBinarySensorEntityDescription[Robot](
(FeederRobot, LitterRobot3, LitterRobot4): (
RobotBinarySensorEntityDescription[FeederRobot | LitterRobot3 | LitterRobot4](
key="power_status",
translation_key="power_status",
device_class=BinarySensorDeviceClass.PLUG,
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
is_on_fn=lambda robot: robot.power_status == "AC",
is_on_fn=lambda robot: robot.power_type == "AC",
),
),
}
@@ -16,5 +16,5 @@
"iot_class": "cloud_push",
"loggers": ["pylitterbot"],
"quality_scale": "platinum",
"requirements": ["pylitterbot==2025.3.2"]
"requirements": ["pylitterbot==2025.4.0"]
}
@@ -123,6 +123,9 @@
}
},
"triggers": {
"muted": {
"trigger": "mdi:volume-mute"
},
"paused_playing": {
"trigger": "mdi:pause"
},
@@ -137,6 +140,9 @@
},
"turned_on": {
"trigger": "mdi:power"
},
"unmuted": {
"trigger": "mdi:volume-high"
}
}
}
@@ -437,6 +437,18 @@
},
"title": "Media player",
"triggers": {
"muted": {
"description": "Triggers after one or more media players are muted.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
},
"for": {
"name": "[%key:component::media_player::common::trigger_for_name%]"
}
},
"name": "Media player muted"
},
"paused_playing": {
"description": "Triggers after one or more media players pause playing.",
"fields": {
@@ -496,6 +508,18 @@
}
},
"name": "Media player turned on"
},
"unmuted": {
"description": "Triggers after one or more media players are unmuted.",
"fields": {
"behavior": {
"name": "[%key:component::media_player::common::trigger_behavior_name%]"
},
"for": {
"name": "[%key:component::media_player::common::trigger_for_name%]"
}
},
"name": "Media player unmuted"
}
}
}
@@ -1,12 +1,79 @@
"""Provides triggers for media players."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_transition_trigger
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.automation import DomainSpec
from homeassistant.helpers.trigger import (
EntityTriggerBase,
Trigger,
make_entity_transition_trigger,
)
from . import MediaPlayerState
from . import ATTR_MEDIA_VOLUME_LEVEL, ATTR_MEDIA_VOLUME_MUTED, MediaPlayerState
from .const import DOMAIN
class _MediaPlayerMutedStateTriggerBase(EntityTriggerBase):
    """Base class for media player muted/unmuted triggers."""

    _domain_specs = {DOMAIN: DomainSpec()}
    # Mute state a concrete subclass fires on (True = muted, False = unmuted).
    _target_muted: bool

    def _has_volume_attributes(self, state: State) -> bool:
        """Check if the state has volume muted or volume level attributes."""
        return (
            state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) is not None
            or state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) is not None
        )

    def _should_include(self, state: State) -> bool:
        """Check if an entity should participate in all/count checks.

        Entities without volume attributes cannot be muted, so they are
        excluded from the check - otherwise an "all" check would never
        pass when there are media players without volume support.
        """
        return super()._should_include(state) and self._has_volume_attributes(state)

    def is_muted(self, state: State) -> bool:
        """Check if the media player is muted.

        A player counts as muted when the muted flag is set or the volume
        level is exactly 0.
        """
        return (
            state.attributes.get(ATTR_MEDIA_VOLUME_MUTED) is True
            or state.attributes.get(ATTR_MEDIA_VOLUME_LEVEL) == 0
        )

    def is_valid_transition(self, from_state: State, to_state: State) -> bool:
        """Check if the origin state is valid and the state has changed."""
        # Ignore transitions out of unavailable/unknown so the trigger does
        # not fire on startup or when a player reconnects.
        if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
            return False
        if not self._has_volume_attributes(to_state):
            return False
        # Only an actual muted <-> unmuted flip counts as a transition.
        return self.is_muted(from_state) != self.is_muted(to_state)

    def is_valid_state(self, state: State) -> bool:
        """Check if the new state matches the expected state."""
        if not self._has_volume_attributes(state):
            return False
        return self.is_muted(state) is self._target_muted
class MediaPlayerMutedTrigger(_MediaPlayerMutedStateTriggerBase):
    """Class for media player muted triggers."""

    # Fire when the player transitions into the muted state.
    _target_muted = True
class MediaPlayerUnmutedTrigger(_MediaPlayerMutedStateTriggerBase):
    """Class for media player unmuted triggers."""

    # Fire when the player transitions out of the muted state.
    _target_muted = False
TRIGGERS: dict[str, type[Trigger]] = {
"muted": MediaPlayerMutedTrigger,
"unmuted": MediaPlayerUnmutedTrigger,
"paused_playing": make_entity_transition_trigger(
DOMAIN,
from_states={
@@ -15,6 +15,8 @@
selector:
duration:
muted: *trigger_common
unmuted: *trigger_common
paused_playing: *trigger_common
started_playing: *trigger_common
stopped_playing: *trigger_common
+1
View File
@@ -479,6 +479,7 @@ class WashingMachineProgramId(MieleEnum, missing_to_none=True):
down_filled_items = 129
cottons_eco = 133
quick_power_wash = 146, 10031
quick_intense = 177
eco_40_60 = 190, 10007
bed_linen = 10047
easy_care = 10016
@@ -0,0 +1,95 @@
"""Mitsubishi Comfort integration for Home Assistant."""
import asyncio
import logging
from mitsubishi_comfort import (
DeviceInfo,
IndoorUnit,
KumoStation,
MitsubishiCloudAccount,
)
from mitsubishi_comfort.exceptions import AuthenticationError, DeviceConnectionError
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DEFAULT_CONNECT_TIMEOUT, DEFAULT_RESPONSE_TIMEOUT, DOMAIN, PLATFORMS
from .coordinator import MitsubishiComfortConfigEntry, MitsubishiComfortCoordinator
_LOGGER = logging.getLogger(__name__)
def _make_device(
    info: DeviceInfo,
    serial: str,
    session,
) -> IndoorUnit | KumoStation:
    """Build the device client matching the discovered device type.

    Indoor units and Kumo stations share the same constructor signature;
    only the class differs, selected from the discovery info.
    """
    if info.is_indoor_unit:
        device_cls = IndoorUnit
    else:
        device_cls = KumoStation
    return device_cls(
        name=info.label,
        address=info.address,
        password_b64=info.password,
        crypto_serial_hex=info.crypto_serial,
        serial=serial,
        connect_timeout=DEFAULT_CONNECT_TIMEOUT,
        response_timeout=DEFAULT_RESPONSE_TIMEOUT,
        session=session,
    )
async def async_setup_entry(
    hass: HomeAssistant, entry: MitsubishiComfortConfigEntry
) -> bool:
    """Set up Mitsubishi Comfort from a config entry.

    Logs in to the cloud account, discovers devices, creates one
    coordinator per usable device, performs the first refresh for all of
    them concurrently, then forwards platform setup.

    Raises:
        ConfigEntryError: If authentication fails or no usable devices
            are found on the account.
        ConfigEntryNotReady: If the cloud cannot be reached.
    """
    session = async_get_clientsession(hass)
    account = MitsubishiCloudAccount(
        entry.data[CONF_USERNAME], entry.data[CONF_PASSWORD], session=session
    )

    try:
        await account.login()
        devices = await account.discover_devices()
    except AuthenticationError as err:
        raise ConfigEntryError("Mitsubishi cloud authentication failed") from err
    except DeviceConnectionError as err:
        raise ConfigEntryNotReady("Cannot reach Mitsubishi cloud") from err

    if not devices:
        raise ConfigEntryError(
            translation_domain=DOMAIN,
            translation_key="no_devices",
        )

    coordinators: dict[str, MitsubishiComfortCoordinator] = {}
    for serial, info in devices.items():
        # Devices without full local credentials cannot be polled; skip them.
        if not info.address or not info.password or not info.crypto_serial:
            _LOGGER.warning("Device %s missing credentials, skipping", info.label)
            continue
        device = _make_device(info, serial, session)
        coordinators[serial] = MitsubishiComfortCoordinator(
            hass, entry, device, info.mac
        )

    # If every discovered device was skipped above, the entry would load
    # with no entities at all; treat that the same as "no devices found".
    if not coordinators:
        raise ConfigEntryError(
            translation_domain=DOMAIN,
            translation_key="no_devices",
        )

    await asyncio.gather(
        *(c.async_config_entry_first_refresh() for c in coordinators.values())
    )

    entry.runtime_data = coordinators
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
async def async_unload_entry(
    hass: HomeAssistant, entry: MitsubishiComfortConfigEntry
) -> bool:
    """Unload a config entry.

    Closes every device connection after the platforms have been
    unloaded; close failures are ignored (best effort).
    """
    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
    if unload_ok:
        close_calls = [
            coordinator.device.close()
            for coordinator in entry.runtime_data.values()
        ]
        await asyncio.gather(*close_calls, return_exceptions=True)
    return unload_ok
@@ -0,0 +1,287 @@
"""Climate entity for Mitsubishi Comfort integration."""
from typing import Any
from mitsubishi_comfort import FanSpeed, IndoorUnit, Mode, VaneDirection
from homeassistant.components.climate import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import MitsubishiComfortConfigEntry, MitsubishiComfortCoordinator
from .entity import MitsubishiComfortEntity
# Map raw device mode strings (as reported in device status) to HA HVAC
# modes. The three auto variants all present as HEAT_COOL to HA.
_MODE_TO_HVAC: dict[str, HVACMode] = {
    "off": HVACMode.OFF,
    "cool": HVACMode.COOL,
    "heat": HVACMode.HEAT,
    "dry": HVACMode.DRY,
    "vent": HVACMode.FAN_ONLY,
    "auto": HVACMode.HEAT_COOL,
    "autoCool": HVACMode.HEAT_COOL,
    "autoHeat": HVACMode.HEAT_COOL,
}
# Map HA HVAC modes to library Mode values used when writing to the device.
_HVAC_TO_MODE: dict[HVACMode, Mode] = {
    HVACMode.OFF: Mode.OFF,
    HVACMode.COOL: Mode.COOL,
    HVACMode.HEAT: Mode.HEAT,
    HVACMode.DRY: Mode.DRY,
    HVACMode.FAN_ONLY: Mode.FAN,
    HVACMode.HEAT_COOL: Mode.AUTO,
}
# Inverse of _HVAC_TO_MODE, used to translate device-supported modes back
# into the HA modes offered to the user.
_LIB_MODE_TO_HVAC: dict[Mode, HVACMode] = {v: k for k, v in _HVAC_TO_MODE.items()}
# Map raw device mode strings to the HVAC action currently in progress.
# Plain "auto" reports IDLE because the active direction is unknown until
# the device switches to autoCool/autoHeat.
_MODE_TO_ACTION: dict[str, HVACAction] = {
    "off": HVACAction.OFF,
    "cool": HVACAction.COOLING,
    "heat": HVACAction.HEATING,
    "dry": HVACAction.DRYING,
    "vent": HVACAction.FAN,
    "auto": HVACAction.IDLE,
    "autoCool": HVACAction.COOLING,
    "autoHeat": HVACAction.HEATING,
}
# Lookup tables from HA option strings back to library enum members.
_FAN_SPEED_MAP: dict[str, FanSpeed] = {s.value: s for s in FanSpeed}
_VANE_DIR_MAP: dict[str, VaneDirection] = {d.value: d for d in VaneDirection}

# Keys of the per-entity optimistic-state cache (see MitsubishiComfortClimate).
_OPT_MODE = "mode"
_OPT_COOL_SETPOINT = "cool_setpoint"
_OPT_HEAT_SETPOINT = "heat_setpoint"
_OPT_FAN_SPEED = "fan_speed"
_OPT_VANE_DIRECTION = "vane_direction"
async def async_setup_entry(
    hass: HomeAssistant,
    entry: MitsubishiComfortConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Mitsubishi Comfort climate entities.

    Only indoor units get a climate entity; Kumo stations are skipped.
    """
    entities = [
        MitsubishiComfortClimate(coord)
        for coord in entry.runtime_data.values()
        if isinstance(coord.device, IndoorUnit)
    ]
    async_add_entities(entities)
class MitsubishiComfortClimate(MitsubishiComfortEntity, ClimateEntity):
    """Climate entity for a Mitsubishi indoor unit.

    Writes are applied optimistically: after a successful set_* call the
    device-confirmed value is cached in ``self._optimistic`` and reported
    immediately; the cache is cleared when fresh coordinator data arrives.
    """

    _attr_name = None
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    # Opt out of the deprecated automatic TURN_ON/TURN_OFF feature shim.
    _enable_turn_on_off_backwards_compatibility = False

    def __init__(self, coordinator: MitsubishiComfortCoordinator) -> None:
        """Initialize."""
        super().__init__(coordinator)
        self._attr_unique_id = self._device.serial
        # Pending values accepted by the device but not yet reflected in a
        # coordinator poll, keyed by the module-level _OPT_* constants.
        self._optimistic: dict[str, Any] = {}

    def _handle_coordinator_update(self) -> None:
        """Clear optimistic state when real data arrives from device."""
        # NOTE(review): HA convention decorates this override with @callback;
        # confirm whether it should be added here.
        self._optimistic.clear()
        super()._handle_coordinator_update()

    @property
    def _effective_mode(self) -> str | None:
        # Raw device mode string, preferring an optimistic pending value.
        return self._optimistic.get(_OPT_MODE, self._device.status.mode)

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        mode = self._effective_mode
        return _MODE_TO_HVAC.get(mode) if mode else None

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the current HVAC action."""
        mode = self._effective_mode
        # NOTE(review): standby reports IDLE even when mode is "off" —
        # confirm a standby device in "off" mode should not report OFF.
        if mode and self._device.status.standby:
            return HVACAction.IDLE
        return _MODE_TO_ACTION.get(mode) if mode else None

    @property
    def hvac_modes(self) -> list[HVACMode]:
        """Return the list of available HVAC modes."""
        # Only expose modes the device supports and HA can represent.
        return [
            _LIB_MODE_TO_HVAC[m]
            for m in self._device.supported_modes
            if m in _LIB_MODE_TO_HVAC
        ]

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        return self._device.status.room_temperature

    @property
    def current_humidity(self) -> float | None:
        """Return the current humidity."""
        return self._device.status.current_humidity

    @property
    def target_temperature(self) -> float | None:
        """Return the target temperature.

        Only meaningful in single-setpoint modes; the auto modes use the
        high/low pair of properties below instead.
        """
        mode = self._effective_mode
        if mode in ("cool", "autoCool"):
            return self._optimistic.get(
                _OPT_COOL_SETPOINT, self._device.status.cool_setpoint
            )
        if mode in ("heat", "autoHeat"):
            return self._optimistic.get(
                _OPT_HEAT_SETPOINT, self._device.status.heat_setpoint
            )
        return None

    @property
    def target_temperature_high(self) -> float | None:
        """Return the upper bound target temperature."""
        if self._effective_mode in ("auto", "autoCool", "autoHeat"):
            return self._optimistic.get(
                _OPT_COOL_SETPOINT, self._device.status.cool_setpoint
            )
        return None

    @property
    def target_temperature_low(self) -> float | None:
        """Return the lower bound target temperature."""
        if self._effective_mode in ("auto", "autoCool", "autoHeat"):
            return self._optimistic.get(
                _OPT_HEAT_SETPOINT, self._device.status.heat_setpoint
            )
        return None

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        return self._optimistic.get(_OPT_FAN_SPEED, self._device.status.fan_speed)

    @property
    def fan_modes(self) -> list[str]:
        """Return the list of available fan modes."""
        return [s.value for s in self._device.supported_fan_speeds]

    @property
    def swing_mode(self) -> str | None:
        """Return the current swing mode."""
        return self._optimistic.get(
            _OPT_VANE_DIRECTION, self._device.status.vane_direction
        )

    @property
    def swing_modes(self) -> list[str]:
        """Return the list of available swing modes."""
        return [d.value for d in self._device.supported_vane_directions]

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature.

        Heating modes prefer the heat setpoint limit; otherwise fall back
        to the cooling limit, then to the ClimateEntity default.
        """
        if self._effective_mode in ("heat", "autoHeat"):
            if self._device.status.min_heat_setpoint is not None:
                return self._device.status.min_heat_setpoint
        if self._device.status.min_cool_setpoint is not None:
            return self._device.status.min_cool_setpoint
        return super().min_temp

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature.

        Mirrors min_temp: heat limit in heating modes, cool limit
        otherwise, ClimateEntity default as last resort.
        """
        if self._effective_mode in ("heat", "autoHeat"):
            if self._device.status.max_heat_setpoint is not None:
                return self._device.status.max_heat_setpoint
        if self._device.status.max_cool_setpoint is not None:
            return self._device.status.max_cool_setpoint
        return super().max_temp

    @property
    def supported_features(self) -> ClimateEntityFeature:
        """Return the list of supported features."""
        features = (
            ClimateEntityFeature.TARGET_TEMPERATURE
            | ClimateEntityFeature.FAN_MODE
            | ClimateEntityFeature.TURN_OFF
        )
        # Auto mode needs the dual-setpoint (high/low) UI.
        if Mode.AUTO in self._device.supported_modes:
            features |= ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
        if self._device.supported_vane_directions:
            features |= ClimateEntityFeature.SWING_MODE
        return features

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""
        lib_mode = _HVAC_TO_MODE.get(hvac_mode)
        if lib_mode is None:
            return
        result = await self._device.set_mode(lib_mode)
        if result.success:
            # Cache the device-confirmed value until the next poll.
            self._optimistic[_OPT_MODE] = result.value
            self.async_write_ha_state()

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the target temperature.

        Handles both the dual-setpoint (high/low) and single-setpoint
        forms; a single temperature maps to the cool or heat setpoint
        depending on the current mode.
        """
        mode = self._effective_mode
        wrote = False  # whether any setpoint write succeeded
        if ATTR_TARGET_TEMP_HIGH in kwargs:
            result = await self._device.set_cool_setpoint(kwargs[ATTR_TARGET_TEMP_HIGH])
            if result.success:
                self._optimistic[_OPT_COOL_SETPOINT] = result.value
                wrote = True
        if ATTR_TARGET_TEMP_LOW in kwargs:
            result = await self._device.set_heat_setpoint(kwargs[ATTR_TARGET_TEMP_LOW])
            if result.success:
                self._optimistic[_OPT_HEAT_SETPOINT] = result.value
                wrote = True
        temp = kwargs.get(ATTR_TEMPERATURE)
        if temp is not None:
            # Plain "auto" mode is intentionally not handled here: a single
            # temperature is ambiguous without a heat/cool direction.
            if mode in ("cool", "autoCool"):
                result = await self._device.set_cool_setpoint(temp)
                if result.success:
                    self._optimistic[_OPT_COOL_SETPOINT] = result.value
                    wrote = True
            elif mode in ("heat", "autoHeat"):
                result = await self._device.set_heat_setpoint(temp)
                if result.success:
                    self._optimistic[_OPT_HEAT_SETPOINT] = result.value
                    wrote = True
        if wrote:
            self.async_write_ha_state()

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set the fan mode."""
        speed = _FAN_SPEED_MAP.get(fan_mode)
        if speed is None:
            return
        result = await self._device.set_fan_speed(speed)
        if result.success:
            self._optimistic[_OPT_FAN_SPEED] = result.value
            self.async_write_ha_state()

    async def async_set_swing_mode(self, swing_mode: str) -> None:
        """Set the swing mode."""
        direction = _VANE_DIR_MAP.get(swing_mode)
        if direction is None:
            return
        result = await self._device.set_vane_direction(direction)
        if result.success:
            self._optimistic[_OPT_VANE_DIRECTION] = result.value
            self.async_write_ha_state()

    async def async_turn_off(self) -> None:
        """Turn the entity off."""
        # Delegates to set_hvac_mode so the optimistic cache is updated too.
        await self.async_set_hvac_mode(HVACMode.OFF)
@@ -0,0 +1,73 @@
"""Config flow for Mitsubishi Comfort integration."""
import logging
from typing import Any
from mitsubishi_comfort import MitsubishiCloudAccount
from mitsubishi_comfort.exceptions import AuthenticationError, DeviceConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
# Form schema for the initial (and only) config-flow step: cloud credentials.
USER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
    }
)
class MitsubishiComfortConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle config flow for Mitsubishi Comfort."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the user setup step.

        Validates the supplied credentials against the cloud, aborts if
        the account is already configured, and creates the entry when at
        least one device is found. On any error the form is shown again
        with the matching error key.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            account = MitsubishiCloudAccount(
                user_input[CONF_USERNAME],
                user_input[CONF_PASSWORD],
                session=async_get_clientsession(self.hass),
            )
            devices: dict = {}
            try:
                await account.login()
                devices = await account.discover_devices()
            except AuthenticationError:
                errors["base"] = "invalid_auth"
            except DeviceConnectionError:
                errors["base"] = "cannot_connect"
            except Exception:
                # Broad catch is the config-flow convention: surface any
                # unexpected error as "unknown" instead of crashing the flow.
                _LOGGER.exception("Unexpected error during setup")
                errors["base"] = "unknown"
            if not errors:
                # One config entry per cloud account, keyed by its user id.
                await self.async_set_unique_id(account.user_id)
                self._abort_if_unique_id_configured()
                if not devices:
                    errors["base"] = "no_devices"
                else:
                    return self.async_create_entry(
                        title=f"Mitsubishi Comfort ({user_input[CONF_USERNAME]})",
                        data={
                            CONF_USERNAME: user_input[CONF_USERNAME],
                            CONF_PASSWORD: user_input[CONF_PASSWORD],
                        },
                    )
        # First visit or validation failure: (re-)display the form.
        return self.async_show_form(
            step_id="user", data_schema=USER_SCHEMA, errors=errors
        )
@@ -0,0 +1,12 @@
"""Constants for the Mitsubishi Comfort integration."""
from datetime import timedelta
from typing import Final
from homeassistant.const import Platform
DOMAIN: Final = "mitsubishi_comfort"
# Entity platforms set up for each config entry.
PLATFORMS: Final = [Platform.CLIMATE]
# How often each device coordinator polls for status.
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
# Seconds allowed to establish a connection to a device.
DEFAULT_CONNECT_TIMEOUT: Final = 1.2
# Seconds allowed for a device to answer a request.
DEFAULT_RESPONSE_TIMEOUT: Final = 8.0
@@ -0,0 +1,56 @@
"""DataUpdateCoordinator for Mitsubishi Comfort devices."""
import logging
from mitsubishi_comfort import IndoorUnit, KumoStation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
type MitsubishiComfortConfigEntry = ConfigEntry[dict[str, MitsubishiComfortCoordinator]]
class MitsubishiComfortCoordinator(DataUpdateCoordinator[IndoorUnit | KumoStation]):
    """Coordinator to poll a single Mitsubishi device."""

    def __init__(
        self,
        hass: HomeAssistant,
        entry: MitsubishiComfortConfigEntry,
        device: IndoorUnit | KumoStation,
        mac: str,
    ) -> None:
        """Initialize.

        Args:
            hass: Home Assistant instance.
            entry: The config entry owning this coordinator.
            device: The indoor unit or Kumo station to poll.
            mac: MAC address used for the device registry connection.
        """
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=f"mitsubishi_comfort_{device.serial}",
            update_interval=DEFAULT_SCAN_INTERVAL,
        )
        self.device = device
        self.mac = mac
        # Seed coordinator data so entities can read device state even
        # before the first refresh has completed.
        self.data = device

    async def _async_update_data(self) -> IndoorUnit | KumoStation:
        """Poll the device and return it."""
        try:
            success = await self.device.update_status()
        # NOTE(review): broad catch — any library error is mapped to
        # UpdateFailed; consider narrowing to the library's exception types.
        except Exception as err:
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="communication_error",
                translation_placeholders={"device_name": self.device.name},
            ) from err
        if not success:
            # update_status() returned falsy without raising: no data came back.
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="update_failed",
                translation_placeholders={"device_name": self.device.name},
            )
        return self.device
@@ -0,0 +1,34 @@
"""Base entity for Mitsubishi Comfort integration."""
from mitsubishi_comfort import IndoorUnit, KumoStation
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import MitsubishiComfortCoordinator
class MitsubishiComfortEntity(CoordinatorEntity[MitsubishiComfortCoordinator]):
    """Base class for all Mitsubishi Comfort entities."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: MitsubishiComfortCoordinator) -> None:
        """Initialize the entity and attach its device registry info."""
        super().__init__(coordinator)
        dev = coordinator.device
        status = dev.status
        # One registry device per physical unit, linked by serial and MAC.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, dev.serial)},
            connections={(CONNECTION_NETWORK_MAC, coordinator.mac)},
            name=dev.name,
            manufacturer="Mitsubishi",
            serial_number=dev.serial,
            sw_version=status.firmware_version,
            hw_version=status.hardware_version,
        )

    @property
    def _device(self) -> IndoorUnit | KumoStation:
        """Return the underlying device from coordinator data."""
        return self.coordinator.data
@@ -0,0 +1,11 @@
{
"domain": "mitsubishi_comfort",
"name": "Mitsubishi Comfort",
"codeowners": ["@nikolairahimi"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/mitsubishi_comfort",
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["mitsubishi-comfort==0.3.0"]
}
@@ -0,0 +1,72 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No service actions registered.
appropriate-polling: done
brands: done
common-modules: done
config-flow: done
config-flow-test-coverage: done
dependency-transparency: done
docs-actions:
status: exempt
comment: No service actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
config-entry-unloading: done
log-when-unavailable: done
entity-unavailable: done
action-exceptions:
status: exempt
comment: No service actions registered.
reauthentication-flow: todo
parallel-updates: todo
test-coverage: todo
integration-owner: done
docs-installation-parameters: done
docs-configuration-parameters:
status: exempt
comment: No options flow.
# Gold
entity-translations: todo
entity-device-class: todo
devices: done
entity-category:
status: exempt
comment: Single climate entity per device, no diagnostic entities yet.
entity-disabled-by-default:
status: exempt
comment: Single climate entity per device, enabled by default.
discovery: todo
stale-devices: todo
diagnostics: todo
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
dynamic-devices: todo
discovery-update-info: todo
repair-issues: todo
docs-use-cases: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-data-update: done
docs-known-limitations: done
docs-examples: done
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
@@ -0,0 +1,36 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"no_devices": "No devices were found on this account",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"password": "The password for your Kumo Cloud account.",
"username": "The email address for your Kumo Cloud account."
}
}
}
},
"exceptions": {
"communication_error": {
"message": "Error communicating with {device_name}"
},
"no_devices": {
"message": "No devices were found in your Mitsubishi Comfort account"
},
"update_failed": {
"message": "{device_name} returned no data"
}
}
}
@@ -1,7 +1,11 @@
"""Device tracker for Mobile app."""
from collections.abc import Callable
from typing import Any
from dataclasses import dataclass
import logging
from typing import Any, Self
import voluptuous as vol
from homeassistant.components.device_tracker import (
ATTR_BATTERY,
@@ -23,10 +27,11 @@ from homeassistant.const import (
STATE_HOME,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.restore_state import ExtraStoredData, RestoreEntity
from .const import (
ATTR_ALTITUDE,
@@ -38,8 +43,49 @@ from .const import (
)
from .helpers import device_info
_LOGGER = logging.getLogger(__name__)
ATTR_KEYS = (ATTR_ALTITUDE, ATTR_COURSE, ATTR_SPEED, ATTR_VERTICAL_ACCURACY)
LOCATION_UPDATE_SCHEMA = vol.All(
cv.key_dependency(ATTR_GPS, ATTR_GPS_ACCURACY),
vol.Schema(
{
vol.Optional(ATTR_LOCATION_NAME): cv.string,
vol.Optional(ATTR_GPS): cv.gps,
vol.Optional(ATTR_GPS_ACCURACY): cv.positive_float,
vol.Optional(ATTR_BATTERY): cv.positive_int,
vol.Optional(ATTR_SPEED): cv.positive_int,
vol.Optional(ATTR_ALTITUDE): vol.Coerce(float),
vol.Optional(ATTR_COURSE): cv.positive_int,
vol.Optional(ATTR_VERTICAL_ACCURACY): cv.positive_int,
},
),
)
@dataclass
class MobileAppDeviceTrackerExtraStoredData(ExtraStoredData):
"""Object to hold mobile app device tracker data to be restored."""
data: dict[str, Any]
def as_dict(self) -> dict[str, Any]:
"""Return a dict representation of the stored data."""
return {"data": self.data}
@classmethod
def from_dict(cls, restored: dict[str, Any]) -> Self | None:
"""Initialize a stored mobile app entity data from a dict."""
if (data := restored.get("data")) is None:
return None
try:
validated = LOCATION_UPDATE_SCHEMA(data)
except vol.Invalid as err:
_LOGGER.debug("Discarding invalid restored device tracker data: %s", err)
return None
return cls(validated)
async def async_setup_entry(
hass: HomeAssistant,
@@ -133,6 +179,18 @@ class MobileAppEntity(TrackerEntity, RestoreEntity):
self.update_data,
)
if (extra_data := await self.async_get_last_extra_data()) is not None:
if (
restored := MobileAppDeviceTrackerExtraStoredData.from_dict(
extra_data.as_dict()
)
) is not None:
self._data = restored.data
return
# Fallback for entities saved before MobileAppDeviceTrackerExtraStoredData
# was introduced: reconstruct from the previous state's attributes.
# This can be removed in HA Core 2026.12.
if (state := await self.async_get_last_state()) is None:
return
@@ -145,6 +203,11 @@ class MobileAppEntity(TrackerEntity, RestoreEntity):
data.update({key: attr[key] for key in attr if key in ATTR_KEYS})
self._data = data
@property
def extra_restore_state_data(self) -> MobileAppDeviceTrackerExtraStoredData:
"""Return the entity data to be restored."""
return MobileAppDeviceTrackerExtraStoredData(self._data)
async def async_will_remove_from_hass(self) -> None:
"""Call when entity is being removed from hass."""
await super().async_will_remove_from_hass()
+2 -27
View File
@@ -24,11 +24,6 @@ from homeassistant.components import (
)
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.components.camera import CameraEntityFeature
from homeassistant.components.device_tracker import (
ATTR_BATTERY,
ATTR_GPS,
ATTR_LOCATION_NAME,
)
from homeassistant.components.frontend import MANIFEST_JSON
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
from homeassistant.components.zone import DOMAIN as ZONE_DOMAIN
@@ -36,7 +31,6 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_DEVICE_ID,
ATTR_DOMAIN,
ATTR_GPS_ACCURACY,
ATTR_SERVICE,
ATTR_SERVICE_DATA,
ATTR_SUPPORTED_FEATURES,
@@ -57,11 +51,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.util.decorator import Registry
from .const import (
ATTR_ALTITUDE,
ATTR_APP_DATA,
ATTR_APP_VERSION,
ATTR_CAMERA_ENTITY_ID,
ATTR_COURSE,
ATTR_DEVICE_NAME,
ATTR_EVENT_DATA,
ATTR_EVENT_TYPE,
@@ -81,11 +73,9 @@ from .const import (
ATTR_SENSOR_TYPE_SENSOR,
ATTR_SENSOR_UNIQUE_ID,
ATTR_SENSOR_UOM,
ATTR_SPEED,
ATTR_SUPPORTS_ENCRYPTION,
ATTR_TEMPLATE,
ATTR_TEMPLATE_VARIABLES,
ATTR_VERTICAL_ACCURACY,
ATTR_WEBHOOK_DATA,
ATTR_WEBHOOK_ENCRYPTED,
ATTR_WEBHOOK_ENCRYPTED_DATA,
@@ -108,6 +98,7 @@ from .const import (
SIGNAL_LOCATION_UPDATE,
SIGNAL_SENSOR_UPDATE,
)
from .device_tracker import LOCATION_UPDATE_SCHEMA
from .helpers import (
async_is_local_only_user,
decrypt_payload,
@@ -405,23 +396,7 @@ async def webhook_render_template(
@WEBHOOK_COMMANDS.register("update_location")
@validate_schema(
vol.All(
cv.key_dependency(ATTR_GPS, ATTR_GPS_ACCURACY),
vol.Schema(
{
vol.Optional(ATTR_LOCATION_NAME): cv.string,
vol.Optional(ATTR_GPS): cv.gps,
vol.Optional(ATTR_GPS_ACCURACY): cv.positive_float,
vol.Optional(ATTR_BATTERY): cv.positive_int,
vol.Optional(ATTR_SPEED): cv.positive_int,
vol.Optional(ATTR_ALTITUDE): vol.Coerce(float),
vol.Optional(ATTR_COURSE): cv.positive_int,
vol.Optional(ATTR_VERTICAL_ACCURACY): cv.positive_int,
},
),
)
)
@validate_schema(LOCATION_UPDATE_SCHEMA)
async def webhook_update_location(
hass: HomeAssistant, config_entry: ConfigEntry, data: dict[str, Any]
) -> Response:
+29 -1
View File
@@ -11,7 +11,12 @@ import voluptuous as vol
from homeassistant import config as conf_util
from homeassistant.components import websocket_api
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DISCOVERY, CONF_PLATFORM, SERVICE_RELOAD
from homeassistant.const import (
CONF_DISCOVERY,
CONF_PLATFORM,
CONF_PROTOCOL,
SERVICE_RELOAD,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import (
ConfigValidationError,
@@ -27,6 +32,7 @@ from homeassistant.helpers import (
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import async_get_platforms
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.typing import ConfigType
@@ -73,12 +79,14 @@ from .const import (
DEFAULT_DISCOVERY,
DEFAULT_ENCODING,
DEFAULT_PREFIX,
DEFAULT_PROTOCOL,
DEFAULT_QOS,
DEFAULT_RETAIN,
DOMAIN,
ENTITY_PLATFORMS,
ENTRY_OPTION_FIELDS,
MQTT_CONNECTION_STATE,
PROTOCOL_311,
TEMPLATE_ERRORS,
Platform,
)
@@ -424,6 +432,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Load a config entry."""
mqtt_data: MqttData
if (protocol := entry.data.get(CONF_PROTOCOL, PROTOCOL_311)) != DEFAULT_PROTOCOL:
broker: str = entry.data[CONF_BROKER]
async_create_issue(
hass,
DOMAIN,
"protocol_5_migration",
issue_domain=DOMAIN,
is_fixable=True,
breaks_in_ha_version="2027.1.0",
severity=IssueSeverity.WARNING,
learn_more_url="https://www.home-assistant.io/integrations/mqtt/#mqtt-protocol",
data={
"entry_id": entry.entry_id,
"broker": broker,
"protocol": protocol,
},
translation_placeholders={"broker": broker, "protocol": protocol},
translation_key="protocol_5_migration",
)
async def _setup_client() -> tuple[MqttData, dict[str, Any]]:
"""Set up the MQTT client."""
# Fetch configuration
+9 -3
View File
@@ -63,7 +63,6 @@ from .const import (
DEFAULT_ENCODING,
DEFAULT_KEEPALIVE,
DEFAULT_PORT,
DEFAULT_PROTOCOL,
DEFAULT_QOS,
DEFAULT_TRANSPORT,
DEFAULT_WILL,
@@ -74,6 +73,7 @@ from .const import (
MQTT_PROCESSED_SUBSCRIPTIONS,
PROTOCOL_5,
PROTOCOL_31,
PROTOCOL_311,
TRANSPORT_WEBSOCKETS,
)
from .models import (
@@ -331,7 +331,10 @@ class MqttClientSetup:
config = self._config
clean_session: bool | None = None
if (protocol := config.get(CONF_PROTOCOL, DEFAULT_PROTOCOL)) == PROTOCOL_31:
# If no protocol setting is set in the config entry data
# we assume the config was migrated from YAML, and the
# protocol version is defaulting to legacy version 3.1.1.
if (protocol := config.get(CONF_PROTOCOL, PROTOCOL_311)) == PROTOCOL_31:
proto = mqtt.MQTTv31
clean_session = True
elif protocol == PROTOCOL_5:
@@ -420,7 +423,10 @@ class MQTT:
self.loop = hass.loop
self.config_entry = config_entry
self.conf = conf
self.is_mqttv5 = conf.get(CONF_PROTOCOL, DEFAULT_PROTOCOL) == PROTOCOL_5
# If no protocol setting is set in the config entry data
# we assume the config was migrated from YAML, and the
# protocol version is defaulting to legacy version 3.1.1.
self.is_mqttv5 = conf.get(CONF_PROTOCOL, PROTOCOL_311) == PROTOCOL_5
self._simple_subscriptions: defaultdict[str, set[Subscription]] = defaultdict(
set
@@ -4073,6 +4073,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
config: dict[str, Any] = {
CONF_BROKER: addon_discovery_config[CONF_HOST],
CONF_PORT: addon_discovery_config[CONF_PORT],
CONF_PROTOCOL: DEFAULT_PROTOCOL,
CONF_USERNAME: addon_discovery_config.get(CONF_USERNAME),
CONF_PASSWORD: addon_discovery_config.get(CONF_PASSWORD),
CONF_DISCOVERY: DEFAULT_DISCOVERY,
@@ -4301,6 +4302,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
if user_input is not None:
data: dict[str, Any] = self._hassio_discovery.copy()
data[CONF_BROKER] = data.pop(CONF_HOST)
data[CONF_PROTOCOL] = DEFAULT_PROTOCOL
can_connect = await self.hass.async_add_executor_job(
try_connection,
data,
@@ -4312,6 +4314,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
data={
CONF_BROKER: data[CONF_BROKER],
CONF_PORT: data[CONF_PORT],
CONF_PROTOCOL: DEFAULT_PROTOCOL,
CONF_USERNAME: data.get(CONF_USERNAME),
CONF_PASSWORD: data.get(CONF_PASSWORD),
CONF_DISCOVERY: DEFAULT_DISCOVERY,
@@ -5178,6 +5181,8 @@ async def async_get_broker_settings( # noqa: C901
) -> bool:
"""Additional validation on broker settings for better error messages."""
if CONF_PROTOCOL not in validated_user_input:
validated_user_input[CONF_PROTOCOL] = DEFAULT_PROTOCOL
# Get current certificate settings from config entry
certificate: str | None = (
"auto"
+2 -2
View File
@@ -347,14 +347,14 @@ REMOTE_CODE_TEXT = "REMOTE_CODE_TEXT"
PROTOCOL_31 = "3.1"
PROTOCOL_311 = "3.1.1"
PROTOCOL_5 = "5"
SUPPORTED_PROTOCOLS = [PROTOCOL_31, PROTOCOL_311, PROTOCOL_5]
SUPPORTED_PROTOCOLS = [PROTOCOL_5, PROTOCOL_311, PROTOCOL_31]
TRANSPORT_TCP = "tcp"
TRANSPORT_WEBSOCKETS = "websockets"
DEFAULT_PORT = 1883
DEFAULT_KEEPALIVE = 60
DEFAULT_PROTOCOL = PROTOCOL_311
DEFAULT_PROTOCOL = PROTOCOL_5
DEFAULT_TRANSPORT = TRANSPORT_TCP
DEFAULT_BIRTH = {
+63 -8
View File
@@ -6,10 +6,16 @@ import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.const import CONF_PORT, CONF_PROTOCOL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
from .config_flow import try_connection
from .const import DEFAULT_PORT, DOMAIN, PROTOCOL_5
URL_MQTT_BROKER_CONFIGURATION = (
"https://www.home-assistant.io/integrations/mqtt/#broker-configuration"
)
class MQTTDeviceEntryMigration(RepairsFlow):
@@ -50,6 +56,55 @@ class MQTTDeviceEntryMigration(RepairsFlow):
)
class MQTTProtocolV5Migration(RepairsFlow):
"""Handler to migrate to MQTT protocol version 5."""
def __init__(self, entry_id: str, broker: str, protocol: str) -> None:
"""Initialize the flow."""
self.entry_id = entry_id
self.broker = broker
self.protocol = protocol
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the first step of a fix flow."""
return await self.async_step_confirm()
async def async_step_confirm(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Handle the confirm step of a fix flow."""
if user_input is not None:
entry = self.hass.config_entries.async_get_entry(self.entry_id)
if TYPE_CHECKING:
assert entry is not None
new_entry_data = entry.data.copy()
new_entry_data[CONF_PROTOCOL] = PROTOCOL_5
# Try the connection with protocol version 5
if await self.hass.async_add_executor_job(
try_connection,
{CONF_PORT: DEFAULT_PORT} | new_entry_data,
):
self.hass.config_entries.async_update_entry(entry, data=new_entry_data)
return self.async_create_entry(data={})
return self.async_abort(
reason="mqtt_broker_migration_to_v5_failed",
description_placeholders={
"broker": self.broker,
"protocol": self.protocol,
"url_mqtt_broker_configuration": URL_MQTT_BROKER_CONFIGURATION,
},
)
return self.async_show_form(
step_id="confirm",
data_schema=vol.Schema({}),
description_placeholders={"broker": self.broker, "protocol": self.protocol},
)
async def async_create_fix_flow(
hass: HomeAssistant,
issue_id: str,
@@ -58,13 +113,13 @@ async def async_create_fix_flow(
"""Create flow."""
if TYPE_CHECKING:
assert data is not None
entry_id = data["entry_id"]
subentry_id = data["subentry_id"]
name = data["name"]
if TYPE_CHECKING:
assert isinstance(entry_id, str)
assert isinstance(subentry_id, str)
assert isinstance(name, str)
entry_id: str = data["entry_id"] # type: ignore[assignment]
if issue_id == "protocol_5_migration":
broker: str = data["broker"] # type: ignore[assignment]
protocol: str = data["protocol"] # type: ignore[assignment]
return MQTTProtocolV5Migration(entry_id, broker, protocol)
subentry_id: str = data["subentry_id"] # type: ignore[assignment]
name: str = data["name"] # type: ignore[assignment]
return MQTTDeviceEntryMigration(
entry_id=entry_id,
subentry_id=subentry_id,
+15 -1
View File
@@ -162,7 +162,7 @@
"component": "Entity"
},
"data_description": {
"component": "Select the entity you want to delete. Minimal one entity is required."
"component": "Select the entity you want to delete. At least one entity is required."
},
"description": "Delete an entity. The entity will be removed from the device. Removing an entity will break any automations or scripts that depend on it.",
"title": "Delete entity"
@@ -1120,6 +1120,20 @@
"description": "Home Assistant detected an invalid config for a manually configured item.\n\nPlatform domain: **{domain}**\nConfiguration file: **{config_file}**\nNear line: **{line}**\nConfiguration found:\n```yaml\n{config}\n```\nError: **{error}**.\n\nMake sure the configuration is valid and [reload](/config/developer-tools/yaml) the manually configured MQTT items or restart Home Assistant to fix this issue.",
"title": "Invalid config found for MQTT {domain} item"
},
"protocol_5_migration": {
"fix_flow": {
"abort": {
"mqtt_broker_migration_to_v5_failed": "Migrating the broker ({broker}) protocol version from {protocol} to 5 failed, and the migration has been aborted.\n\nYour broker may not support MQTT protocol version 5.\n\nPlease [reconfigure your MQTT broker settings]({url_mqtt_broker_configuration}) or upgrade your broker to support MQTT protocol version 5 to fix this issue."
},
"step": {
"confirm": {
"description": "Home Assistant is migrating to MQTT protocol version 5. The currently configured protocol version for broker {broker} is {protocol}. This protocol version is deprecated, and support for it will be removed.\n\nSubmitting this form will try to migrate your MQTT broker configuration to use protocol version 5 to fix this issue.",
"title": "MQTT protocol change required"
}
}
},
"title": "Deprecated MQTT protocol {protocol} in use"
},
"subentry_migration_discovery": {
"fix_flow": {
"step": {
+6 -2
View File
@@ -8,6 +8,7 @@ from google_nest_sdm.event import EventMessage, EventType
from google_nest_sdm.traits import TraitType
from homeassistant.components.event import (
DoorbellEventType,
EventDeviceClass,
EventEntity,
EventEntityDescription,
@@ -42,7 +43,7 @@ ENTITY_DESCRIPTIONS = [
key=EVENT_DOORBELL_CHIME,
translation_key="chime",
device_class=EventDeviceClass.DOORBELL,
event_types=[EVENT_DOORBELL_CHIME],
event_types=[DoorbellEventType.RING],
trait_types=[TraitType.DOORBELL_CHIME],
api_event_types=[EventType.DOORBELL_CHIME],
),
@@ -80,7 +81,7 @@ async def async_setup_entry(
class NestTraitEventEntity(EventEntity):
"""Nest doorbell event entity."""
"""Nest event entity for event entity descriptions."""
entity_description: NestEventEntityDescription
_attr_has_entity_name = True
@@ -113,6 +114,9 @@ class NestTraitEventEntity(EventEntity):
# This event is a duplicate message in the same thread
return
if event_type == EVENT_DOORBELL_CHIME:
event_type = DoorbellEventType.RING
self._trigger_event(
event_type,
{"nest_event_id": nest_event_id},
+1 -1
View File
@@ -113,7 +113,7 @@
"state_attributes": {
"event_type": {
"state": {
"doorbell_chime": "[%key:component::nest::entity::event::chime::name%]"
"ring": "[%key:component::event::entity_component::doorbell::state_attributes::event_type::state::ring%]"
}
}
}
@@ -0,0 +1,81 @@
"""Binary sensor platform for Nord Pool integration."""
from collections.abc import Callable
from dataclasses import dataclass
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.components.sensor import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import NordPoolConfigEntry
from .const import CONF_AREAS
from .coordinator import NordPoolDataUpdateCoordinator
from .entity import NordpoolBaseEntity
PARALLEL_UPDATES = 0
def get_tomorrow_price_available(
entity: NordpoolPriceBinarySensor,
) -> bool:
"""Return tomorrow price availability."""
data = entity.coordinator.get_data_tomorrow()
return bool(data and data.entries and entity.area in data.entries[0].entry)
@dataclass(frozen=True, kw_only=True)
class NordpoolBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Nord Pool binary sensor entity."""
value_fn: Callable[[NordpoolPriceBinarySensor], bool | None]
BINARY_SENSOR_TYPES: tuple[NordpoolBinarySensorEntityDescription, ...] = (
NordpoolBinarySensorEntityDescription(
key="tomorrow_price_available",
translation_key="tomorrow_price_available",
value_fn=get_tomorrow_price_available,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: NordPoolConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Nord Pool binary sensor platform."""
coordinator = entry.runtime_data
areas = coordinator.config_entry.data[CONF_AREAS]
async_add_entities(
NordpoolPriceBinarySensor(coordinator, description, area)
for description in BINARY_SENSOR_TYPES
for area in areas
)
class NordpoolPriceBinarySensor(NordpoolBaseEntity, BinarySensorEntity):
"""Representation of a Nord Pool binary sensor."""
entity_description: NordpoolBinarySensorEntityDescription
def __init__(
self,
coordinator: NordPoolDataUpdateCoordinator,
entity_description: NordpoolBinarySensorEntityDescription,
area: str,
) -> None:
"""Initiate Nord Pool binary sensor."""
super().__init__(coordinator, entity_description, area)
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.value_fn(self)
@@ -54,6 +54,8 @@ DATA_SCHEMA = vol.Schema(
async def test_api(hass: HomeAssistant, user_input: dict[str, Any]) -> dict[str, str]:
"""Test fetch data from Nord Pool."""
if not user_input.get(CONF_AREAS):
return {CONF_AREAS: "no_areas"}
client = NordPoolClient(async_get_clientsession(hass))
try:
await client.async_get_delivery_period(
+1 -1
View File
@@ -8,7 +8,7 @@ LOGGER = logging.getLogger(__package__)
DEFAULT_SCAN_INTERVAL = 60
DOMAIN = "nordpool"
PLATFORMS = [Platform.SENSOR]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR]
DEFAULT_NAME = "Nord Pool"
CONF_AREAS = "areas"
@@ -164,3 +164,8 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
"""Return the current day data."""
current_day = dt_util.now().date()
return self.data.entries[current_day]
def get_data_tomorrow(self) -> DeliveryPeriodData | None:
"""Return tomorrow's day data if available."""
tomorrow = dt_util.now().date() + timedelta(days=1)
return self.data.entries.get(tomorrow)
@@ -5,6 +5,7 @@
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_areas": "No area(s) selected",
"no_data": "API connected but the response was empty"
},
"step": {
@@ -31,6 +32,11 @@
}
},
"entity": {
"binary_sensor": {
"tomorrow_price_available": {
"name": "Tomorrow price available"
}
},
"sensor": {
"block_average": {
"name": "{block} average"
+3
View File
@@ -9,6 +9,9 @@
"preconditioning_duration": {
"default": "mdi:fan-clock"
},
"state_of_charge_input": {
"default": "mdi:battery"
},
"target_percentage": {
"default": "mdi:battery-heart"
}
+12
View File
@@ -28,6 +28,18 @@ class OhmeNumberDescription(OhmeEntityDescription, NumberEntityDescription):
NUMBER_DESCRIPTION = [
OhmeNumberDescription(
key="state_of_charge_input",
translation_key="state_of_charge_input",
value_fn=lambda client: client.battery,
set_fn=lambda client, value: client.async_set_state_of_charge(int(value)),
native_min_value=0,
native_max_value=100,
native_step=1,
native_unit_of_measurement=PERCENTAGE,
entity_registry_enabled_default=False,
available_fn=lambda client: client.status.value != "unplugged",
),
OhmeNumberDescription(
key="target_percentage",
translation_key="target_percentage",

Some files were not shown because too many files have changed in this diff Show More