Compare commits

..

87 Commits

Author SHA1 Message Date
Bram Kragten
1f28fe9933 Bump version to 2026.1.0 2026-01-07 17:46:04 +01:00
Bram Kragten
4465aa264c Update frontend to 20260107.0 (#160434) 2026-01-07 17:45:41 +01:00
Robert Resch
2c1bc96161 Bump deebot-client to 17.0.1 (#160428) 2026-01-07 17:45:40 +01:00
Joost Lekkerkerker
7127159a5b Make Watts depend on the cloud integration (#160424) 2026-01-07 17:45:38 +01:00
Abílio Costa
9f0eb6f077 Support target triggers in automation relation extraction (#160369) 2026-01-07 17:45:37 +01:00
Paul Bottein
da19cc06e3 Fix hvac_mode validation in climate.hvac_mode_changed trigger (#160364) 2026-01-07 17:45:36 +01:00
Bram Kragten
fd92377cf2 Bump version to 2026.1.0b5 2026-01-07 14:53:13 +01:00
Robert Resch
c201938b8b Constraint aiomqtt>=2.5.0 to fix blocking call (#160410)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-07 14:51:49 +01:00
Luke Lashley
b3765204b1 Bump python-roborock to 4.2.1 (#160398) 2026-01-07 14:48:27 +01:00
Luke Lashley
786257e051 Remove q7 total cleaning time for Roborock (#160399) 2026-01-07 14:47:47 +01:00
Allen Porter
9559634151 Update roborock binary sensor tests with snapshots (#159981) 2026-01-07 14:47:41 +01:00
Allen Porter
cf12ed8f08 Improve roborock test accuracy/robustness (#160021) 2026-01-07 14:45:53 +01:00
Michael Hansen
e213f49c75 Bump intents to 2026.1.6 (#160389) 2026-01-07 14:42:00 +01:00
Raphael Hehl
09c7cc113a Bump uiprotect to 8.0.0 (#160384)
Co-authored-by: RaHehl <rahehl@users.noreply.github.com>
2026-01-07 14:41:59 +01:00
dontinelli
e1e7e039a9 Bump solarlog_cli to 0.7.0 (#160382) 2026-01-07 14:41:58 +01:00
Daniel Hjelseth Høyer
05a0f0d23f Bump pyTibber to 0.34.1 (#160380)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-01-07 14:41:57 +01:00
Artem Draft
d3853019eb Add SSL support in Bravia TV (#160373)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2026-01-07 14:41:55 +01:00
hanwg
ccbaac55b3 Fix schema validation error in Telegram (#160367) 2026-01-07 14:41:54 +01:00
Xiangxuan Qu
771292ced9 Fix IndexError in Israel Rail sensor when no departures available (#160351)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-07 14:41:53 +01:00
TheJulianJES
5d4262e8b3 Bump ZHA to 0.0.83 (#160342) 2026-01-07 14:41:52 +01:00
Paul Tarjan
d96da9a639 Fix Ring integration log flooding for accounts without subscription (#158012)
Co-authored-by: Robert Resch <robert@resch.dev>
2026-01-07 14:41:51 +01:00
Bram Kragten
288a805d0f Bump version to 2026.1.0b4 2026-01-06 17:56:49 +01:00
Bram Kragten
8e55ceea77 Update frontend to 20251229.1 (#160372) 2026-01-06 17:55:34 +01:00
Artem Draft
14f1d9fbad Bump pybravia to 0.4.1 (#160368) 2026-01-06 17:55:32 +01:00
Bram Kragten
eb6582bc24 Fix number or entity choose schema (#160358) 2026-01-06 17:55:32 +01:00
tronikos
4afe67f33d Bump opower to 0.16.0 (#160348) 2026-01-06 17:55:30 +01:00
Mika
5d7b10f569 Fix missing state class to solaredge (#160336) 2026-01-06 17:55:30 +01:00
Daniel Hjelseth Høyer
340c2e48df Bump pyTibber to 0.34.0 (#160333) 2026-01-06 17:55:29 +01:00
J. Nick Koston
86257b1865 Require service_uuid and service_data_uuid to match hue ble (#160321) 2026-01-06 17:55:27 +01:00
Daniel Hjelseth Høyer
eea1adccfd Fix unit for Tibber sensor (#160319) 2026-01-06 17:55:26 +01:00
Frédéric
242be14f88 Add Resideo X2S Smart Thermostat to Matter fan-only mode list (#160260) 2026-01-06 17:55:25 +01:00
Xidorn Quan
7e013b723d Fix rain count sensors' state class of Ecowitt (#158204) 2026-01-06 17:55:24 +01:00
Bram Kragten
4d55939f53 Bump version to 2026.1.0b3 2026-01-05 16:53:53 +01:00
Bram Kragten
e5e7546d49 Fix humidifier trigger turned on icon (#160297) 2026-01-05 16:52:56 +01:00
Joakim Sørensen
e560795d04 Add connection check before registering cloudhook URL (#160284) 2026-01-05 16:52:55 +01:00
epenet
15b0342bd7 Fix Tuya light color data wrapper (#160280) 2026-01-05 16:52:54 +01:00
Jan-Philipp Benecke
8d05a5f3d4 Bump aiowebdav2 to 0.5.0 (#160233) 2026-01-05 16:52:53 +01:00
Samuel Xiao
358ad29b59 Switchbot Cloud: Fixed Robot Vacuum Cleaner S20 had two device_model name (#160230) 2026-01-05 16:52:52 +01:00
J. Nick Koston
5c4f99b828 Bump aiohttp 3.13.3 (#160206) 2026-01-05 16:52:03 +01:00
Erik Montnemery
b3f123c715 Await writes in shopping_list action handlers (#157420) 2026-01-05 16:51:30 +01:00
J. Nick Koston
85c2351af2 Ensure Brotli >= 1.2.0 (#160229) 2026-01-05 16:45:49 +01:00
Josef Zweck
ec19529c99 Remove referral link from fish_audio (#160193) 2026-01-05 16:40:46 +01:00
Vincent Courcelle
d5ebd02afe Bump python-roborock to 4.2.0 (#160184) 2026-01-05 16:40:45 +01:00
wollew
37d82ab795 bump pyvlx version to 0.2.27 (#160139) 2026-01-05 16:40:44 +01:00
mettolen
5d08481137 Bump pyairobotrest to 0.2.0 (#160125) 2026-01-05 16:40:43 +01:00
Maikel Punie
0861b7541d Bump velbusaio to 2026.1.1 (#160116) 2026-01-05 16:40:42 +01:00
Jan Bouwhuis
abf7078842 Fix reolink brightness scaling (#160106) 2026-01-05 16:40:41 +01:00
Michael Hansen
c4012fae4e Bump intents to 2026.1.1 (#160099) 2026-01-05 16:40:40 +01:00
Maikel Punie
d6082ab6c3 Bump velbusaio to 2026.1.0 (#160087) 2026-01-05 16:40:39 +01:00
Austin Mroczek
77367e415f Bump total_connect_client to 2025.12.2 (#160075) 2026-01-05 16:40:38 +01:00
Miguel Camba
6c006c68c1 Update voluptuous and voluptuous-openapi (#160073) 2026-01-05 16:40:37 +01:00
Pete Sage
026fdeb4ce Improve Sonos wait to unjoin timeout (#160011) 2026-01-05 16:40:36 +01:00
cdnninja
1034218e6e add description to string vesync (#160003) 2026-01-05 16:40:35 +01:00
Willem-Jan van Rootselaar
a21062f502 Add schema validation for set_hot_water_schedule service (#159990) 2026-01-05 16:40:34 +01:00
Maikel Punie
2e157f1bc6 Velbus Exception translations (#159627)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-05 16:40:32 +01:00
Paul Tarjan
a697e63b8c Fix Tesla update showing scheduled updates as installing (#158681) 2026-01-05 16:40:31 +01:00
Ben Wolstencroft
d28d55c7db Add support for health_overview API endpoint to Tractive integration (#157960)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2026-01-05 16:40:30 +01:00
Brett Adams
8863488286 Handle export options when enrolled to VPP in Teslemetry (#157665) 2026-01-05 16:40:29 +01:00
Daniel Hjelseth Høyer
53cfdef1ac Move Tibber to OAuth (#156690)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-05 16:40:28 +01:00
Franck Nijhof
42ea7ecbd6 Bump version to 2026.1.0b2 2025-12-31 15:34:05 +00:00
tronikos
d58d08c350 Filter out duplicate voices without language code in Google Cloud (#160046) 2025-12-31 15:33:49 +00:00
Paul Tarjan
65a259b9df Fix Hikvision thread safety issue when calling async_write_ha_state (#160027) 2025-12-31 15:33:48 +00:00
Luke Lashley
cbfbfbee13 Don't prefer cache for Roborock device fetching (#160022) 2025-12-31 15:33:47 +00:00
David Knowles
e503b37ddc Use WATER device_class for Hydrawise sensors (#160018) 2025-12-31 15:33:45 +00:00
Simone Chemelli
217eef39f3 Bump aioamazondevices to 11.0.2 (#160016)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-12-31 15:33:44 +00:00
Manu
dcdbce9b21 Convert store image URLs to https in Xbox media resolver (#160015) 2025-12-31 15:33:42 +00:00
Erwin Douna
71db8fe185 Bump portainer 1.0.19 (#160014) 2025-12-31 15:33:41 +00:00
Anders Melchiorsen
9b96cb66d5 Fix netgear_lte unloading (#160008) 2025-12-31 15:33:39 +00:00
Anders Melchiorsen
78bccbbbc2 Move async_setup_services to async_setup for netgear_lte (#160007) 2025-12-31 15:33:38 +00:00
Anders Melchiorsen
b0a8f9575c Bump eternalegypt to 0.0.18 (#160006) 2025-12-31 15:33:36 +00:00
Matthias Alphart
61104a9970 Update knx-frontend to 2025.12.30.151231 (#159999) 2025-12-31 15:33:35 +00:00
Franck Nijhof
8d13dbdd0c Bump version to 2026.1.0b1 2025-12-30 09:14:36 +00:00
Erwin Douna
9afb41004e Portainer fix stopped container for stats (#159964) 2025-12-30 09:14:24 +00:00
Luke Lashley
cdd542f6e6 Bump Python-Roborock to 4.1.0 (#159963) 2025-12-30 09:14:22 +00:00
Joost Lekkerkerker
f520686002 Small cleanup in Feedreader (#159962) 2025-12-30 09:14:20 +00:00
J. Nick Koston
e4d09bb615 Bump aioesphomeapi to 43.9.1 (#159960) 2025-12-30 09:14:19 +00:00
Matthias Alphart
10f6ccf6cc Fix KNX translation references (#159959) 2025-12-30 09:14:17 +00:00
Ernst Klamer
d9fa67b16f bump xiaomi-ble to 1.4.1 (#159954) 2025-12-30 09:14:15 +00:00
Joost Lekkerkerker
cf228ae02b Inject session in Switchbot cloud (#159942) 2025-12-30 09:14:14 +00:00
Joost Lekkerkerker
cb4d62ab9a Add integration_type device to ps4 (#159892) 2025-12-30 09:14:12 +00:00
Joost Lekkerkerker
d2f75aec04 Add integration_type hub to poolsense (#159881) 2025-12-30 09:14:11 +00:00
Joost Lekkerkerker
a609fbc07b Add integration_type hub to pooldose (#159880) 2025-12-30 09:14:09 +00:00
Joost Lekkerkerker
1b9c7ae0ac Add integration_type hub to permobil (#159872) 2025-12-30 09:14:07 +00:00
Joost Lekkerkerker
492f2117fb Add integration_type service to nuheat (#159845) 2025-12-30 09:14:06 +00:00
Joost Lekkerkerker
2346f83635 Add integration_type device to netgear (#159816) 2025-12-30 09:14:04 +00:00
Kamil Breguła
8925bfb182 Add translation of exceptions in met (#155765)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-30 09:12:18 +00:00
Franck Nijhof
8f2b1f0eff Bump version to 2026.1.0b0 2025-12-29 19:01:17 +00:00
207 changed files with 2940 additions and 7780 deletions

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.2"
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
# 10.3 is the oldest supported version

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["airos==0.6.1"]
"requirements": ["airos==0.6.0"]
}

View File

@@ -7,7 +7,7 @@ import asyncio
from collections.abc import Callable, Mapping
from dataclasses import dataclass
import logging
from typing import Any, Protocol, cast
from typing import Any, Literal, Protocol, cast
from propcache.api import cached_property
import voluptuous as vol
@@ -16,7 +16,10 @@ from homeassistant.components import labs, websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.const import (
ATTR_AREA_ID,
ATTR_ENTITY_ID,
ATTR_FLOOR_ID,
ATTR_LABEL_ID,
ATTR_MODE,
ATTR_NAME,
CONF_ACTIONS,
@@ -30,6 +33,7 @@ from homeassistant.const import (
CONF_OPTIONS,
CONF_PATH,
CONF_PLATFORM,
CONF_TARGET,
CONF_TRIGGERS,
CONF_VARIABLES,
CONF_ZONE,
@@ -588,20 +592,32 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
         """Return True if entity is on."""
         return self._async_detach_triggers is not None or self._is_enabled

-    @property
+    @cached_property
     def referenced_labels(self) -> set[str]:
         """Return a set of referenced labels."""
-        return self.action_script.referenced_labels
+        referenced = self.action_script.referenced_labels
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
+        return referenced

-    @property
+    @cached_property
     def referenced_floors(self) -> set[str]:
         """Return a set of referenced floors."""
-        return self.action_script.referenced_floors
+        referenced = self.action_script.referenced_floors
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
+        return referenced

-    @property
+    @cached_property
     def referenced_areas(self) -> set[str]:
         """Return a set of referenced areas."""
-        return self.action_script.referenced_areas
+        referenced = self.action_script.referenced_areas
+
+        for conf in self._trigger_config:
+            referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
+        return referenced

     @property
     def referenced_blueprint(self) -> str | None:
@@ -1209,6 +1225,9 @@ def _trigger_extract_devices(trigger_conf: dict) -> list[str]:
if trigger_conf[CONF_PLATFORM] == "tag" and CONF_DEVICE_ID in trigger_conf:
return trigger_conf[CONF_DEVICE_ID] # type: ignore[no-any-return]
if target_devices := _get_targets_from_trigger_config(trigger_conf, CONF_DEVICE_ID):
return target_devices
return []
@@ -1239,9 +1258,28 @@ def _trigger_extract_entities(trigger_conf: dict) -> list[str]:
):
return [trigger_conf[CONF_EVENT_DATA][CONF_ENTITY_ID]]
if target_entities := _get_targets_from_trigger_config(
trigger_conf, CONF_ENTITY_ID
):
return target_entities
return []
@callback
def _get_targets_from_trigger_config(
config: dict,
target: Literal["entity_id", "device_id", "area_id", "floor_id", "label_id"],
) -> list[str]:
"""Extract targets from a target config."""
if not (target_conf := config.get(CONF_TARGET)):
return []
if not (targets := target_conf.get(target)):
return []
return [targets] if isinstance(targets, str) else targets
@websocket_api.websocket_command({"type": "automation/config", "entity_id": str})
def websocket_config(
hass: HomeAssistant,
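
A standalone sketch of how the _get_targets_from_trigger_config helper added above resolves targets, assuming CONF_TARGET is the plain string "target" (as in homeassistant.const); the trigger config values are invented for illustration.

# Copy of the helper shown above (decorator and Literal typing dropped), with made-up input.
def _get_targets_from_trigger_config(config: dict, target: str) -> list[str]:
    """Extract targets from a trigger's target config."""
    if not (target_conf := config.get("target")):  # CONF_TARGET == "target"
        return []
    if not (targets := target_conf.get(target)):
        return []
    return [targets] if isinstance(targets, str) else targets


trigger_conf = {
    "platform": "state",
    "target": {"entity_id": "light.kitchen", "label_id": ["outdoor", "garden"]},
}

print(_get_targets_from_trigger_config(trigger_conf, "entity_id"))  # ['light.kitchen']
print(_get_targets_from_trigger_config(trigger_conf, "label_id"))   # ['outdoor', 'garden']
print(_get_targets_from_trigger_config({"platform": "time"}, "area_id"))  # []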

View File

@@ -11,6 +11,7 @@ from homeassistant.const import CONF_HOST, CONF_MAC, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import CONF_USE_SSL
from .coordinator import BraviaTVConfigEntry, BraviaTVCoordinator
PLATFORMS: Final[list[Platform]] = [
@@ -26,11 +27,12 @@ async def async_setup_entry(
"""Set up a config entry."""
host = config_entry.data[CONF_HOST]
mac = config_entry.data[CONF_MAC]
ssl = config_entry.data.get(CONF_USE_SSL, False)
session = async_create_clientsession(
hass, cookie_jar=CookieJar(unsafe=True, quote_cookie=False)
)
client = BraviaClient(host, mac, session=session)
client = BraviaClient(host, mac, session=session, ssl=ssl)
coordinator = BraviaTVCoordinator(
hass=hass,
config_entry=config_entry,

View File

@@ -28,6 +28,7 @@ from .const import (
ATTR_MODEL,
CONF_NICKNAME,
CONF_USE_PSK,
CONF_USE_SSL,
DOMAIN,
NICKNAME_PREFIX,
)
@@ -46,11 +47,12 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
def create_client(self) -> None:
"""Create Bravia TV client from config."""
host = self.device_config[CONF_HOST]
ssl = self.device_config[CONF_USE_SSL]
session = async_create_clientsession(
self.hass,
cookie_jar=CookieJar(unsafe=True, quote_cookie=False),
)
self.client = BraviaClient(host=host, session=session)
self.client = BraviaClient(host=host, session=session, ssl=ssl)
async def gen_instance_ids(self) -> tuple[str, str]:
"""Generate client_id and nickname."""
@@ -123,10 +125,10 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle authorize step."""
-        self.create_client()
-
         if user_input is not None:
             self.device_config[CONF_USE_PSK] = user_input[CONF_USE_PSK]
+            self.device_config[CONF_USE_SSL] = user_input[CONF_USE_SSL]
+            self.create_client()
             if user_input[CONF_USE_PSK]:
                 return await self.async_step_psk()
             return await self.async_step_pin()
@@ -136,6 +138,7 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
data_schema=vol.Schema(
{
vol.Required(CONF_USE_PSK, default=False): bool,
vol.Required(CONF_USE_SSL, default=False): bool,
}
),
)

View File

@@ -12,6 +12,7 @@ ATTR_MODEL: Final = "model"
CONF_NICKNAME: Final = "nickname"
CONF_USE_PSK: Final = "use_psk"
CONF_USE_SSL: Final = "use_ssl"
DOMAIN: Final = "braviatv"
LEGACY_CLIENT_ID: Final = "HomeAssistant"

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pybravia"],
"requirements": ["pybravia==0.3.4"],
"requirements": ["pybravia==0.4.1"],
"ssdp": [
{
"manufacturer": "Sony Corporation",

View File

@@ -15,9 +15,10 @@
"step": {
"authorize": {
"data": {
"use_psk": "Use PSK authentication"
"use_psk": "Use PSK authentication",
"use_ssl": "Use SSL connection"
},
"description": "Make sure that «Control remotely» is enabled on your TV, go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended as more stable.",
"description": "Make sure that «Control remotely» is enabled on your TV. Go to: \nSettings -> Network -> Remote device settings -> Control remotely. \n\nThere are two authorization methods: PIN code or PSK (Pre-Shared Key). \nAuthorization via PSK is recommended, as it is more stable. \n\nUse an SSL connection only if your TV supports this connection type.",
"title": "Authorize Sony Bravia TV"
},
"confirm": {

View File

@@ -2,9 +2,6 @@
"services": {
"set_hot_water_schedule": {
"service": "mdi:calendar-clock"
},
"sync_time": {
"service": "mdi:timer-sync-outline"
}
}
}

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==3.1.6"],
"requirements": ["python-bsblan==3.1.4"],
"zeroconf": [
{
"name": "bsb-lan*",

View File

@@ -13,7 +13,6 @@ from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.util import dt as dt_util
from .const import DOMAIN
@@ -31,9 +30,8 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
ATTR_SATURDAY_SLOTS = "saturday_slots"
ATTR_SUNDAY_SLOTS = "sunday_slots"
# Service names
# Service name
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
SERVICE_SYNC_TIME = "sync_time"
# Schema for a single time slot
@@ -205,74 +203,6 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
await entry.runtime_data.slow_coordinator.async_request_refresh()
async def async_sync_time(service_call: ServiceCall) -> None:
"""Synchronize BSB-LAN device time with Home Assistant."""
device_id: str = service_call.data[ATTR_DEVICE_ID]
# Get the device and config entry
device_registry = dr.async_get(service_call.hass)
device_entry = device_registry.async_get(device_id)
if device_entry is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_device_id",
translation_placeholders={"device_id": device_id},
)
# Find the config entry for this device
matching_entries: list[BSBLanConfigEntry] = [
entry
for entry in service_call.hass.config_entries.async_entries(DOMAIN)
if entry.entry_id in device_entry.config_entries
]
if not matching_entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="no_config_entry_for_device",
translation_placeholders={"device_id": device_entry.name or device_id},
)
entry = matching_entries[0]
# Verify the config entry is loaded
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_loaded",
translation_placeholders={"device_name": device_entry.name or device_id},
)
client = entry.runtime_data.client
try:
# Get current device time
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_entry.name or device_id,
"error": str(err),
},
) from err
SYNC_TIME_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): cv.string,
}
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the BSB-Lan services."""
@@ -282,10 +212,3 @@ def async_setup_services(hass: HomeAssistant) -> None:
set_hot_water_schedule,
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_SYNC_TIME,
async_sync_time,
schema=SYNC_TIME_SCHEMA,
)

View File

@@ -1,12 +1,3 @@
sync_time:
fields:
device_id:
required: true
example: "abc123device456"
selector:
device:
integration: bsblan
set_hot_water_schedule:
fields:
device_id:

View File

@@ -79,6 +79,9 @@
"invalid_device_id": {
"message": "Invalid device ID: {device_id}"
},
"invalid_time_format": {
"message": "Invalid time format provided"
},
"no_config_entry_for_device": {
"message": "No configuration entry found for device: {device_id}"
},
@@ -105,9 +108,6 @@
},
"setup_general_error": {
"message": "An unknown error occurred while retrieving static device data"
},
"sync_time_failed": {
"message": "Failed to sync time for {device_name}: {error}"
}
},
"services": {
@@ -148,16 +148,6 @@
}
},
"name": "Set hot water schedule"
},
"sync_time": {
"description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
"fields": {
"device_id": {
"description": "The BSB-LAN device to sync time for.",
"name": "Device"
}
},
"name": "Sync time"
}
}
}

View File

@@ -33,7 +33,7 @@ HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend
{
vol.Required(CONF_OPTIONS): {
vol.Required(CONF_HVAC_MODE): vol.All(
cv.ensure_list, vol.Length(min=1), [HVACMode]
cv.ensure_list, vol.Length(min=1), [vol.Coerce(HVACMode)]
),
},
}
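
The one-token change above ([HVACMode] to [vol.Coerce(HVACMode)]) is the whole fix: a bare enum class in a voluptuous schema acts as an isinstance check, so the plain strings arriving from YAML or the frontend are rejected, while vol.Coerce(HVACMode) converts them into enum members. A minimal standalone sketch with a simplified stand-in enum (not the real homeassistant.components.climate.HVACMode):

# Illustration only: simplified stand-in for the HVACMode StrEnum (Python 3.11+).
from enum import StrEnum

import voluptuous as vol


class HVACMode(StrEnum):
    COOL = "cool"
    HEAT = "heat"


strict_schema = vol.Schema([HVACMode])               # isinstance check per item
coerced_schema = vol.Schema([vol.Coerce(HVACMode)])  # calls HVACMode(value) per item

print(coerced_schema(["cool", "heat"]))  # members, e.g. [<HVACMode.COOL: 'cool'>, ...]

try:
    strict_schema(["cool"])  # "cool" is a plain str, not an HVACMode instance
except vol.Invalid as err:
    print("rejected:", err)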

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.1"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.6"]
}

View File

@@ -4,6 +4,5 @@
"codeowners": ["@tr4nt0r"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/duckdns",
"integration_type": "service",
"iot_class": "cloud_polling"
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==17.0.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==17.0.1"]
}

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import dataclasses
from datetime import datetime
import logging
from typing import Final
from aioecowitt import EcoWittSensor, EcoWittSensorTypes
@@ -39,6 +40,9 @@ from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM
from . import EcowittConfigEntry
from .entity import EcowittEntity
_LOGGER = logging.getLogger(__name__)
_METRIC: Final = (
EcoWittSensorTypes.TEMPERATURE_C,
EcoWittSensorTypes.RAIN_COUNT_MM,
@@ -57,6 +61,40 @@ _IMPERIAL: Final = (
)
_RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING: Final = {
"eventrainin": SensorStateClass.TOTAL_INCREASING,
"hourlyrainin": None,
"totalrainin": SensorStateClass.TOTAL_INCREASING,
"dailyrainin": SensorStateClass.TOTAL_INCREASING,
"weeklyrainin": SensorStateClass.TOTAL_INCREASING,
"monthlyrainin": SensorStateClass.TOTAL_INCREASING,
"yearlyrainin": SensorStateClass.TOTAL_INCREASING,
"last24hrainin": None,
"eventrainmm": SensorStateClass.TOTAL_INCREASING,
"hourlyrainmm": None,
"totalrainmm": SensorStateClass.TOTAL_INCREASING,
"dailyrainmm": SensorStateClass.TOTAL_INCREASING,
"weeklyrainmm": SensorStateClass.TOTAL_INCREASING,
"monthlyrainmm": SensorStateClass.TOTAL_INCREASING,
"yearlyrainmm": SensorStateClass.TOTAL_INCREASING,
"last24hrainmm": None,
"erain_piezo": SensorStateClass.TOTAL_INCREASING,
"hrain_piezo": None,
"drain_piezo": SensorStateClass.TOTAL_INCREASING,
"wrain_piezo": SensorStateClass.TOTAL_INCREASING,
"mrain_piezo": SensorStateClass.TOTAL_INCREASING,
"yrain_piezo": SensorStateClass.TOTAL_INCREASING,
"last24hrain_piezo": None,
"erain_piezomm": SensorStateClass.TOTAL_INCREASING,
"hrain_piezomm": None,
"drain_piezomm": SensorStateClass.TOTAL_INCREASING,
"wrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"mrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"yrain_piezomm": SensorStateClass.TOTAL_INCREASING,
"last24hrain_piezomm": None,
}
ECOWITT_SENSORS_MAPPING: Final = {
EcoWittSensorTypes.HUMIDITY: SensorEntityDescription(
key="HUMIDITY",
@@ -285,15 +323,15 @@ async def async_setup_entry(
                 name=sensor.name,
             )

-            # Only total rain needs state class for long-term statistics
-            if sensor.key in (
-                "totalrainin",
-                "totalrainmm",
+            if sensor.stype in (
+                EcoWittSensorTypes.RAIN_COUNT_INCHES,
+                EcoWittSensorTypes.RAIN_COUNT_MM,
             ):
-                description = dataclasses.replace(
-                    description,
-                    state_class=SensorStateClass.TOTAL_INCREASING,
-                )
+                if sensor.key not in _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING:
+                    _LOGGER.warning("Unknown rain count sensor: %s", sensor.key)
+                    return
+                state_class = _RAIN_COUNT_SENSORS_STATE_CLASS_MAPPING[sensor.key]
+                description = dataclasses.replace(description, state_class=state_class)

             async_add_entities([EcowittSensorEntity(sensor, description)])
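
A compact stand-in for the lookup pattern introduced above: unknown rain-count keys are skipped with a warning, known keys get their state class via dataclasses.replace, and a state class of None keeps that sensor out of long-term statistics. The Description dataclass and the two mapping entries below are invented for illustration.

# Sketch of the mapping + dataclasses.replace pattern used in the hunk above.
import dataclasses
import logging

_LOGGER = logging.getLogger(__name__)


@dataclasses.dataclass(frozen=True)
class Description:  # stand-in for SensorEntityDescription
    key: str
    state_class: str | None = None


_STATE_CLASS_MAPPING = {
    "totalrainmm": "total_increasing",  # cumulative counter: keep statistics
    "hourlyrainmm": None,               # rolling window: no long-term statistics
}


def apply_state_class(description: Description) -> Description | None:
    """Return a copy with the mapped state class, or None for unknown keys."""
    if description.key not in _STATE_CLASS_MAPPING:
        _LOGGER.warning("Unknown rain count sensor: %s", description.key)
        return None
    return dataclasses.replace(
        description, state_class=_STATE_CLASS_MAPPING[description.key]
    )


print(apply_state_class(Description("totalrainmm")))   # state_class='total_increasing'
print(apply_state_class(Description("hourlyrainmm")))  # state_class=None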

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.10.1",
"aioesphomeapi==43.9.1",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

View File

@@ -19,9 +19,6 @@ from .coordinator import FeedReaderCoordinator
LOGGER = logging.getLogger(__name__)
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
ATTR_CONTENT = "content"
ATTR_DESCRIPTION = "description"
ATTR_LINK = "link"

View File

@@ -1,94 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: missing test for uniqueness of feed URL.
config-flow:
status: todo
comment: missing data descriptions
dependency-transparency: done
docs-actions:
status: exempt
comment: No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: No authentication support.
test-coverage:
status: done
comment: Can use freezer for skipping time instead
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: No discovery support.
discovery:
status: exempt
comment: No discovery support.
docs-data-update: done
docs-examples: done
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Each config entry, represents one service.
entity-category: done
entity-device-class:
status: exempt
comment: Matches no available event entity class.
entity-disabled-by-default:
status: exempt
comment: Only one entity per config entry.
entity-translations: todo
exception-translations: todo
icon-translations: done
reconfiguration-flow: done
repair-issues:
status: done
comment: Only one repair-issue for yaml-import defined.
stale-devices:
status: exempt
comment: Each config entry, represents one service.
# Platinum
async-dependency:
status: todo
comment: feedparser lib is not async.
inject-websession:
status: todo
comment: feedparser lib doesn't take a session as argument.
strict-typing:
status: todo
comment: feedparser lib is not fully typed.

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.10"]
"requirements": ["pyfirefly==0.1.8"]
}

View File

@@ -2,8 +2,6 @@
from typing import TYPE_CHECKING, Any
from fressnapftracker import FressnapfTrackerError
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ColorMode,
@@ -18,7 +16,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .const import DOMAIN
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception
PARALLEL_UPDATES = 1
@@ -64,18 +61,12 @@ class FressnapfTrackerLight(FressnapfTrackerEntity, LightEntity):
self.raise_if_not_activatable()
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
brightness = int((brightness / 255) * 100)
try:
await self.coordinator.client.set_led_brightness(brightness)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_led_brightness(brightness)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
try:
await self.coordinator.client.set_led_brightness(0)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_led_brightness(0)
await self.coordinator.async_request_refresh()
def raise_if_not_activatable(self) -> None:

View File

@@ -26,7 +26,7 @@ rules:
unique-config-entry: done
# Silver
action-exceptions: done
action-exceptions: todo
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done

View File

@@ -1,21 +0,0 @@
"""Services and service helpers for fressnapf_tracker."""
from fressnapftracker import FressnapfTrackerError, FressnapfTrackerInvalidTokenError
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from .const import DOMAIN
def handle_fressnapf_tracker_exception(exception: FressnapfTrackerError):
"""Handle the different FressnapfTracker errors."""
if isinstance(exception, FressnapfTrackerInvalidTokenError):
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
) from exception
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error_message": str(exception)},
) from exception

View File

@@ -77,9 +77,6 @@
}
},
"exceptions": {
"api_error": {
"message": "An error occurred while communicating with the Fressnapf Tracker API: {error_message}"
},
"charging": {
"message": "The flashlight cannot be activated while charging."
},

View File

@@ -2,8 +2,6 @@
from typing import TYPE_CHECKING, Any
from fressnapftracker import FressnapfTrackerError
from homeassistant.components.switch import (
SwitchDeviceClass,
SwitchEntity,
@@ -15,7 +13,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .entity import FressnapfTrackerEntity
from .services import handle_fressnapf_tracker_exception
PARALLEL_UPDATES = 1
@@ -46,18 +43,12 @@ class FressnapfTrackerSwitch(FressnapfTrackerEntity, SwitchEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the device."""
try:
await self.coordinator.client.set_energy_saving(True)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_energy_saving(True)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
try:
await self.coordinator.client.set_energy_saving(False)
except FressnapfTrackerError as e:
handle_fressnapf_tracker_exception(e)
await self.coordinator.client.set_energy_saving(False)
await self.coordinator.async_request_refresh()
@property

View File

@@ -23,5 +23,5 @@
"winter_mode": {}
},
"quality_scale": "internal",
"requirements": ["home-assistant-frontend==20251229.0"]
"requirements": ["home-assistant-frontend==20260107.0"]
}

View File

@@ -4,7 +4,8 @@
"bluetooth": [
{
"connectable": true,
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
"service_data_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb",
"service_uuid": "0000fe0f-0000-1000-8000-00805f9b34fb"
}
],
"codeowners": ["@flip-dots"],

View File

@@ -66,7 +66,7 @@
"trigger": "mdi:air-humidifier-off"
},
"turned_on": {
"trigger": "mdi:air-humidifier-on"
"trigger": "mdi:air-humidifier"
}
}
}

View File

@@ -116,6 +116,8 @@ class IsraelRailEntitySensor(
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the sensor."""
if self.entity_description.index >= len(self.coordinator.data):
return None
return self.entity_description.value_fn(
self.coordinator.data[self.entity_description.index]
)

View File

@@ -114,26 +114,24 @@ class KnxYamlBinarySensor(_KnxBinarySensor, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX binary sensor."""
self._device = XknxBinarySensor(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_state=config[CONF_STATE_ADDRESS],
invert=config[CONF_INVERT],
sync_state=config[CONF_SYNC_STATE],
ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
reset_after=config.get(CONF_RESET_AFTER),
always_callback=True,
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address_state),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxBinarySensor(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[CONF_STATE_ADDRESS],
invert=config[CONF_INVERT],
sync_state=config[CONF_SYNC_STATE],
ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
reset_after=config.get(CONF_RESET_AFTER),
always_callback=True,
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_force_update = self._device.ignore_internal_state
self._attr_unique_id = str(self._device.remote_value.group_address_state)
class KnxUiBinarySensor(_KnxBinarySensor, KnxUiEntity):

View File

@@ -35,18 +35,19 @@ class KNXButton(KnxYamlEntity, ButtonEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX button."""
self._device = XknxRawValue(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
)
self._payload = config[CONF_PAYLOAD]
super().__init__(
knx_module=knx_module,
unique_id=f"{self._device.remote_value.group_address}_{self._payload}",
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxRawValue(
xknx=knx_module.xknx,
name=config[CONF_NAME],
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
),
)
self._payload = config[CONF_PAYLOAD]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.remote_value.group_address}_{self._payload}"
)
async def async_press(self) -> None:

View File

@@ -119,11 +119,11 @@ async def async_setup_entry(
async_add_entities(entities)
def _create_climate_yaml(xknx: XKNX, config: ConfigType) -> XknxClimate:
def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate:
"""Return a KNX Climate device to be used within XKNX."""
climate_mode = XknxClimateMode(
xknx,
name=f"{config.get(CONF_NAME, '')} Mode",
name=f"{config[CONF_NAME]} Mode",
group_address_operation_mode=config.get(
ClimateSchema.CONF_OPERATION_MODE_ADDRESS
),
@@ -164,7 +164,7 @@ def _create_climate_yaml(xknx: XKNX, config: ConfigType) -> XknxClimate:
return XknxClimate(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
group_address_temperature=config[ClimateSchema.CONF_TEMPERATURE_ADDRESS],
group_address_target_temperature=config.get(
ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS
@@ -647,17 +647,9 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX climate device."""
self._device = _create_climate_yaml(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=(
f"{self._device.temperature.group_address_state}_"
f"{self._device.target_temperature.group_address_state}_"
f"{self._device.target_temperature.group_address}_"
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_climate(knx_module.xknx, config),
)
default_hvac_mode: HVACMode = config[ClimateConf.DEFAULT_CONTROLLER_MODE]
fan_max_step = config[ClimateConf.FAN_MAX_STEP]
@@ -669,6 +661,14 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):
fan_zero_mode=fan_zero_mode,
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.temperature.group_address_state}_"
f"{self._device.target_temperature.group_address_state}_"
f"{self._device.target_temperature.group_address}_"
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
)
class KnxUiClimate(_KnxClimate, KnxUiEntity):
"""Representation of a KNX climate device configured from the UI."""

View File

@@ -191,34 +191,36 @@ class KnxYamlCover(_KnxCover, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize the cover."""
self._device = XknxCover(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
group_address_position_state=config.get(
CoverSchema.CONF_POSITION_STATE_ADDRESS
),
group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
group_address_angle_state=config.get(CoverSchema.CONF_ANGLE_STATE_ADDRESS),
group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
invert_updown=config[CoverConf.INVERT_UPDOWN],
invert_position=config[CoverConf.INVERT_POSITION],
invert_angle=config[CoverConf.INVERT_ANGLE],
)
super().__init__(
knx_module=knx_module,
unique_id=(
f"{self._device.updown.group_address}_"
f"{self._device.position_target.group_address}"
device=XknxCover(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
group_address_position_state=config.get(
CoverSchema.CONF_POSITION_STATE_ADDRESS
),
group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
group_address_angle_state=config.get(
CoverSchema.CONF_ANGLE_STATE_ADDRESS
),
group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
invert_updown=config[CoverConf.INVERT_UPDOWN],
invert_position=config[CoverConf.INVERT_POSITION],
invert_angle=config[CoverConf.INVERT_ANGLE],
),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self.init_base()
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.updown.group_address}_"
f"{self._device.position_target.group_address}"
)
if custom_device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = custom_device_class

View File

@@ -105,21 +105,20 @@ class KnxYamlDate(_KNXDate, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX date."""
self._device = XknxDateDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxDateDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDate(_KNXDate, KnxUiEntity):

View File

@@ -110,21 +110,20 @@ class KnxYamlDateTime(_KNXDateTime, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX datetime."""
self._device = XknxDateTimeDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxDateTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDateTime(_KNXDateTime, KnxUiEntity):

View File

@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any
from xknx.devices import Device as XknxDevice
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, EntityCategory
from homeassistant.const import CONF_ENTITY_CATEGORY, EntityCategory
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import EntityPlatform
@@ -52,11 +52,14 @@ class _KnxEntityBase(Entity):
"""Representation of a KNX entity."""
_attr_should_poll = False
_attr_unique_id: str
_knx_module: KNXModule
_device: XknxDevice
@property
def name(self) -> str:
"""Return the name of the KNX device."""
return self._device.name
@property
def available(self) -> bool:
"""Return True if entity is available."""
@@ -97,23 +100,16 @@ class _KnxEntityBase(Entity):
class KnxYamlEntity(_KnxEntityBase):
"""Representation of a KNX entity configured from YAML."""
def __init__(
self,
knx_module: KNXModule,
unique_id: str,
name: str | None,
entity_category: EntityCategory | None,
) -> None:
def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None:
"""Initialize the YAML entity."""
self._knx_module = knx_module
self._attr_name = name
self._attr_unique_id = unique_id
self._attr_entity_category = entity_category
self._device = device
class KnxUiEntity(_KnxEntityBase):
"""Representation of a KNX UI entity."""
_attr_unique_id: str
_attr_has_entity_name = True
def __init__(
@@ -121,8 +117,6 @@ class KnxUiEntity(_KnxEntityBase):
) -> None:
"""Initialize the UI entity."""
self._knx_module = knx_module
self._attr_name = entity_config[CONF_NAME]
self._attr_unique_id = unique_id
if entity_category := entity_config.get(CONF_ENTITY_CATEGORY):
self._attr_entity_category = EntityCategory(entity_category)

View File

@@ -152,28 +152,32 @@ class KnxYamlFan(_KnxFan, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX fan."""
max_step = config.get(FanConf.MAX_STEP)
self._device = XknxFan(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_speed=config.get(KNX_ADDRESS),
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
group_address_oscillation=config.get(FanSchema.CONF_OSCILLATION_ADDRESS),
group_address_oscillation_state=config.get(
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
),
group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
group_address_switch_state=config.get(FanSchema.CONF_SWITCH_STATE_ADDRESS),
max_step=max_step,
sync_state=config.get(CONF_SYNC_STATE, True),
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.speed.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxFan(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_speed=config.get(KNX_ADDRESS),
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
group_address_oscillation=config.get(
FanSchema.CONF_OSCILLATION_ADDRESS
),
group_address_oscillation_state=config.get(
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
),
group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
group_address_switch_state=config.get(
FanSchema.CONF_SWITCH_STATE_ADDRESS
),
max_step=max_step,
sync_state=config.get(CONF_SYNC_STATE, True),
),
)
# FanSpeedMode.STEP if max_step is set
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.speed.group_address)
class KnxUiFan(_KnxFan, KnxUiEntity):

View File

@@ -121,7 +121,7 @@ def _create_yaml_light(xknx: XKNX, config: ConfigType) -> XknxLight:
return XknxLight(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
group_address_switch=config.get(KNX_ADDRESS),
group_address_switch_state=config.get(LightSchema.CONF_STATE_ADDRESS),
group_address_brightness=config.get(LightSchema.CONF_BRIGHTNESS_ADDRESS),
@@ -558,16 +558,15 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX light."""
self._device = _create_yaml_light(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=self._device_unique_id(),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_yaml_light(knx_module.xknx, config),
)
self._attr_color_mode = next(iter(self.supported_color_modes))
self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN]
self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = self._device_unique_id()
def _device_unique_id(self) -> str:
"""Return unique id for this device."""

View File

@@ -33,7 +33,7 @@ def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotific
"""Return a KNX Notification to be used within XKNX."""
return XknxNotification(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
value_type=config[CONF_TYPE],
)
@@ -46,13 +46,12 @@ class KNXNotify(KnxYamlEntity, NotifyEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX notification."""
self._device = _create_notification_instance(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_notification_instance(knx_module.xknx, config),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a notification to knx bus."""

View File

@@ -44,7 +44,7 @@ def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue:
"""Return a KNX NumericValue to be used within XKNX."""
return NumericValue(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
@@ -59,12 +59,9 @@ class KNXNumber(KnxYamlEntity, RestoreNumber):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX number."""
self._device = _create_numeric_value(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.sensor_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_numeric_value(knx_module.xknx, config),
)
self._attr_native_max_value = config.get(
NumberSchema.CONF_MAX,
@@ -79,6 +76,8 @@ class KNXNumber(KnxYamlEntity, RestoreNumber):
NumberSchema.CONF_STEP,
self._device.sensor_value.dpt_class.resolution,
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._device.sensor_value.value = max(0, self._attr_native_min_value)

View File

@@ -83,19 +83,18 @@ class KnxYamlScene(_KnxScene, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize KNX scene."""
self._device = XknxScene(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
)
super().__init__(
knx_module=knx_module,
unique_id=(
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
device=XknxScene(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
)

View File

@@ -214,22 +214,16 @@ class KNXPlatformSchema(ABC):
}
COMMON_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
class BinarySensorSchema(KNXPlatformSchema):
"""Voluptuous schema for KNX binary sensors."""
PLATFORM = Platform.BINARY_SENSOR
DEFAULT_NAME = "KNX Binary Sensor"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
@@ -239,6 +233,7 @@ class BinarySensorSchema(KNXPlatformSchema):
),
vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_RESET_AFTER): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
)
@@ -250,6 +245,7 @@ class ButtonSchema(KNXPlatformSchema):
PLATFORM = Platform.BUTTON
CONF_VALUE = "value"
DEFAULT_NAME = "KNX Button"
payload_or_value_msg = f"Please use only one of `{CONF_PAYLOAD}` or `{CONF_VALUE}`"
length_or_type_msg = (
@@ -257,8 +253,9 @@ class ButtonSchema(KNXPlatformSchema):
)
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Exclusive(
CONF_PAYLOAD, "payload_or_value", msg=payload_or_value_msg
@@ -272,6 +269,7 @@ class ButtonSchema(KNXPlatformSchema):
vol.Exclusive(
CONF_TYPE, "length_or_type", msg=length_or_type_msg
): object,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -339,6 +337,7 @@ class ClimateSchema(KNXPlatformSchema):
CONF_SWING_HORIZONTAL_ADDRESS = "swing_horizontal_address"
CONF_SWING_HORIZONTAL_STATE_ADDRESS = "swing_horizontal_state_address"
DEFAULT_NAME = "KNX Climate"
DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010"
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
@@ -347,8 +346,9 @@ class ClimateSchema(KNXPlatformSchema):
DEFAULT_FAN_SPEED_MODE = "percent"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
ClimateConf.SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX
): vol.All(int, vol.Range(min=0, max=32)),
@@ -448,10 +448,12 @@ class CoverSchema(KNXPlatformSchema):
CONF_ANGLE_STATE_ADDRESS = "angle_state_address"
DEFAULT_TRAVEL_TIME = 25
DEFAULT_NAME = "KNX Cover"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MOVE_LONG_ADDRESS): ga_list_validator,
vol.Optional(CONF_MOVE_SHORT_ADDRESS): ga_list_validator,
vol.Optional(CONF_STOP_ADDRESS): ga_list_validator,
@@ -469,6 +471,7 @@ class CoverSchema(KNXPlatformSchema):
vol.Optional(CoverConf.INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CoverConf.INVERT_ANGLE, default=False): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -493,12 +496,16 @@ class DateSchema(KNXPlatformSchema):
PLATFORM = Platform.DATE
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Date"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -508,12 +515,16 @@ class DateTimeSchema(KNXPlatformSchema):
PLATFORM = Platform.DATETIME
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX DateTime"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -569,9 +580,12 @@ class FanSchema(KNXPlatformSchema):
CONF_SWITCH_ADDRESS = "switch_address"
CONF_SWITCH_STATE_ADDRESS = "switch_state_address"
DEFAULT_NAME = "KNX Fan"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_SWITCH_ADDRESS): ga_list_validator,
@@ -579,6 +593,7 @@ class FanSchema(KNXPlatformSchema):
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
vol.Optional(FanConf.MAX_STEP): cv.byte,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
}
),
@@ -623,6 +638,7 @@ class LightSchema(KNXPlatformSchema):
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"
DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
@@ -654,8 +670,9 @@ class LightSchema(KNXPlatformSchema):
)
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): ga_list_validator,
@@ -705,6 +722,7 @@ class LightSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -750,10 +768,14 @@ class NotifySchema(KNXPlatformSchema):
PLATFORM = Platform.NOTIFY
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Notify"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -766,10 +788,12 @@ class NumberSchema(KNXPlatformSchema):
CONF_MAX = "max"
CONF_MIN = "min"
CONF_STEP = "step"
DEFAULT_NAME = "KNX Number"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_MODE, default=NumberMode.AUTO): vol.Coerce(
NumberMode
@@ -780,6 +804,7 @@ class NumberSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_STEP): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
number_limit_sub_validator,
@@ -793,12 +818,15 @@ class SceneSchema(KNXPlatformSchema):
CONF_SCENE_NUMBER = "scene_number"
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX SCENE"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Required(SceneConf.SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -810,10 +838,12 @@ class SelectSchema(KNXPlatformSchema):
CONF_OPTION = "option"
CONF_OPTIONS = "options"
DEFAULT_NAME = "KNX Select"
ENTITY_SCHEMA = vol.All(
COMMON_ENTITY_SCHEMA.extend(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(CONF_PAYLOAD_LENGTH): vol.All(
@@ -827,6 +857,7 @@ class SelectSchema(KNXPlatformSchema):
],
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
select_options_sub_validator,
@@ -841,15 +872,18 @@ class SensorSchema(KNXPlatformSchema):
CONF_ALWAYS_CALLBACK = "always_callback"
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -862,13 +896,16 @@ class SwitchSchema(KNXPlatformSchema):
CONF_INVERT = CONF_INVERT
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Switch"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SWITCH_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -878,13 +915,17 @@ class TextSchema(KNXPlatformSchema):
PLATFORM = Platform.TEXT
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Text"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Optional(CONF_MODE, default=TextMode.TEXT): vol.Coerce(TextMode),
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -894,12 +935,16 @@ class TimeSchema(KNXPlatformSchema):
PLATFORM = Platform.TIME
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
DEFAULT_NAME = "KNX Time"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -924,21 +969,27 @@ class WeatherSchema(KNXPlatformSchema):
CONF_KNX_AIR_PRESSURE_ADDRESS = "address_air_pressure"
CONF_KNX_HUMIDITY_ADDRESS = "address_humidity"
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
}
DEFAULT_NAME = "KNX Weather Station"
ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
)
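The schema hunks above contrast two shapes: one where each KNX platform schema extends a shared COMMON_ENTITY_SCHEMA, and one where each platform schema declares its own CONF_NAME key (with a per-platform default such as "KNX Fan" or "KNX Switch") plus a CONF_ENTITY_CATEGORY key. A minimal standalone sketch of the second shape, using simplified placeholder constants and plain validators instead of the integration's cv.string, ga_list_validator and ENTITY_CATEGORIES_SCHEMA:

import voluptuous as vol

# Placeholder constants; the real schema pulls these from homeassistant.const
# and the KNX integration's own const module.
CONF_NAME = "name"
CONF_ENTITY_CATEGORY = "entity_category"
KNX_ADDRESS = "address"
DEFAULT_NAME = "KNX Switch"

ENTITY_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
        vol.Required(KNX_ADDRESS): str,
        vol.Optional(CONF_ENTITY_CATEGORY): vol.In(["config", "diagnostic"]),
    }
)

# The default is filled in whenever no name is configured:
print(ENTITY_SCHEMA({"address": "1/2/3"})[CONF_NAME])  # -> "KNX Switch"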

View File

@@ -49,7 +49,7 @@ def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue:
"""Return a KNX RawValue to be used within XKNX."""
return RawValue(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
@@ -65,12 +65,9 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX select."""
self._device = _create_raw_value(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_raw_value(knx_module.xknx, config),
)
self._option_payloads: dict[str, int] = {
option[SelectSchema.CONF_OPTION]: option[CONF_PAYLOAD]
@@ -78,6 +75,8 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):
}
self._attr_options = list(self._option_payloads)
self._attr_current_option = None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_added_to_hass(self) -> None:
"""Restore last state."""

View File

@@ -200,19 +200,16 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = XknxSensor(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.sensor_value.group_address_state),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxSensor(
knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
),
)
if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
@@ -222,6 +219,8 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
)
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}

View File

@@ -107,21 +107,20 @@ class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX switch."""
self._device = XknxSwitch(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.switch.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxSwitch(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_unique_id = str(self._device.switch.group_address)
class KnxUiSwitch(_KnxSwitch, KnxUiEntity):

View File

@@ -112,21 +112,20 @@ class KnxYamlText(_KnxText, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX text."""
self._device = XknxNotification(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxNotification(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
),
)
self._attr_mode = config[CONF_MODE]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiText(_KnxText, KnxUiEntity):

View File

@@ -105,21 +105,20 @@ class KnxYamlTime(_KNXTime, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
self._device = XknxTimeDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=XknxTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiTime(_KNXTime, KnxUiEntity):

View File

@@ -43,7 +43,7 @@ def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather:
"""Return a KNX weather device to be used within XKNX."""
return XknxWeather(
xknx,
name=config.get(CONF_NAME, ""),
name=config[CONF_NAME],
sync_state=config[WeatherSchema.CONF_SYNC_STATE],
group_address_temperature=config[WeatherSchema.CONF_KNX_TEMPERATURE_ADDRESS],
group_address_brightness_south=config.get(
@@ -85,13 +85,12 @@ class KNXWeather(KnxYamlEntity, WeatherEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = _create_weather(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
unique_id=str(self._device._temperature.group_address_state), # noqa: SLF001
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
device=_create_weather(knx_module.xknx, config),
)
self._attr_unique_id = str(self._device._temperature.group_address_state) # noqa: SLF001
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
@property
def native_temperature(self) -> float | None:

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.7.2"]
"requirements": ["librehardwaremonitor-api==1.6.0"]
}

View File

@@ -154,6 +154,7 @@ SUPPORT_FAN_MODE_DEVICES: set[tuple[int, int]] = {
(0x1209, 0x8027),
(0x1209, 0x8028),
(0x1209, 0x8029),
(0x131A, 0x1000),
}
SystemModeEnum = clusters.Thermostat.Enums.SystemModeEnum

View File

@@ -19,12 +19,7 @@ from homeassistant.helpers.typing import ConfigType
from .api import AsyncConfigEntryAuth
from .const import DOMAIN
from .coordinator import (
MieleAuxDataUpdateCoordinator,
MieleConfigEntry,
MieleDataUpdateCoordinator,
MieleRuntimeData,
)
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
from .services import async_setup_services
PLATFORMS: list[Platform] = [
@@ -80,23 +75,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: MieleConfigEntry) -> boo
) from err
# Setup MieleAPI and coordinator for data fetch
_api = MieleAPI(auth)
_coordinator = MieleDataUpdateCoordinator(hass, entry, _api)
await _coordinator.async_config_entry_first_refresh()
_aux_coordinator = MieleAuxDataUpdateCoordinator(hass, entry, _api)
await _aux_coordinator.async_config_entry_first_refresh()
entry.runtime_data = MieleRuntimeData(_api, _coordinator, _aux_coordinator)
api = MieleAPI(auth)
coordinator = MieleDataUpdateCoordinator(hass, entry, api)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
entry.async_create_background_task(
hass,
entry.runtime_data.api.listen_events(
data_callback=_coordinator.callback_update_data,
actions_callback=_coordinator.callback_update_actions,
coordinator.api.listen_events(
data_callback=coordinator.callback_update_data,
actions_callback=coordinator.callback_update_actions,
),
"pymiele event listener",
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -116,5 +107,5 @@ async def async_remove_config_entry_device(
identifier
for identifier in device_entry.identifiers
if identifier[0] == DOMAIN
and identifier[1] in config_entry.runtime_data.coordinator.data.devices
and identifier[1] in config_entry.runtime_data.data.devices
)
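The Miele hunks in this block contrast two ways of exposing runtime data: one wraps the API, the main coordinator, and an auxiliary coordinator in a MieleRuntimeData dataclass, the other stores the single MieleDataUpdateCoordinator directly in entry.runtime_data, so platform setup reads config_entry.runtime_data instead of config_entry.runtime_data.coordinator. A standalone sketch of the direct-storage pattern with stub types in place of Home Assistant's ConfigEntry:

from dataclasses import dataclass
from typing import Generic, TypeVar

_T = TypeVar("_T")


@dataclass
class StubConfigEntry(Generic[_T]):
    """Stand-in for ConfigEntry[_T]; only runtime_data matters here."""

    runtime_data: _T


class StubCoordinator:
    """Stand-in for a DataUpdateCoordinator holding devices and actions."""

    def __init__(self) -> None:
        self.data = {"devices": {}, "actions": {}}


# The coordinator itself becomes the typed runtime data of the entry.
entry = StubConfigEntry(runtime_data=StubCoordinator())
coordinator = entry.runtime_data  # what the platform setup functions read
print(coordinator.data["devices"])  # -> {}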

View File

@@ -264,7 +264,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the binary sensor platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -112,7 +112,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the button platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -138,7 +138,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the climate platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -9,13 +9,7 @@ from datetime import timedelta
import logging
from aiohttp import ClientResponseError
from pymiele import (
MieleAction,
MieleAPI,
MieleDevice,
MieleFillingLevel,
MieleFillingLevels,
)
from pymiele import MieleAction, MieleAPI, MieleDevice
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -26,16 +20,7 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
@dataclass
class MieleRuntimeData:
"""Runtime data for the Miele integration."""
api: MieleAPI
coordinator: MieleDataUpdateCoordinator
aux_coordinator: MieleAuxDataUpdateCoordinator
type MieleConfigEntry = ConfigEntry[MieleRuntimeData]
type MieleConfigEntry = ConfigEntry[MieleDataUpdateCoordinator]
@dataclass
@@ -46,15 +31,8 @@ class MieleCoordinatorData:
actions: dict[str, MieleAction]
@dataclass
class MieleAuxCoordinatorData:
"""Data class for storing auxiliary coordinator data."""
filling_levels: dict[str, MieleFillingLevel]
class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
"""Main coordinator for Miele data."""
"""Coordinator for Miele data."""
config_entry: MieleConfigEntry
new_device_callbacks: list[Callable[[dict[str, MieleDevice]], None]] = []
@@ -88,7 +66,6 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
}
self.devices = devices
actions = {}
for device_id in devices:
try:
actions_json = await self.api.get_actions(device_id)
@@ -122,7 +99,10 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
device_id: MieleDevice(device) for device_id, device in devices_json.items()
}
self.async_set_updated_data(
MieleCoordinatorData(devices=devices, actions=self.data.actions)
MieleCoordinatorData(
devices=devices,
actions=self.data.actions,
)
)
async def callback_update_actions(self, actions_json: dict[str, dict]) -> None:
@@ -131,34 +111,8 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
device_id: MieleAction(action) for device_id, action in actions_json.items()
}
self.async_set_updated_data(
MieleCoordinatorData(devices=self.data.devices, actions=actions)
)
class MieleAuxDataUpdateCoordinator(DataUpdateCoordinator[MieleAuxCoordinatorData]):
"""Coordinator for Miele data for slowly polled endpoints."""
config_entry: MieleConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: MieleConfigEntry,
api: MieleAPI,
) -> None:
"""Initialize the Miele data coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=timedelta(seconds=60),
)
self.api = api
async def _async_update_data(self) -> MieleAuxCoordinatorData:
"""Fetch data from the Miele API."""
filling_levels_json = await self.api.get_filling_levels()
return MieleAuxCoordinatorData(
filling_levels=MieleFillingLevels(filling_levels_json).filling_levels
MieleCoordinatorData(
devices=self.data.devices,
actions=actions,
)
)

View File

@@ -38,19 +38,13 @@ async def async_get_config_entry_diagnostics(
"devices": redact_identifiers(
{
device_id: device_data.raw
for device_id, device_data in config_entry.runtime_data.coordinator.data.devices.items()
}
),
"filling_levels": redact_identifiers(
{
device_id: filling_level_data.raw
for device_id, filling_level_data in config_entry.runtime_data.aux_coordinator.data.filling_levels.items()
for device_id, device_data in config_entry.runtime_data.data.devices.items()
}
),
"actions": redact_identifiers(
{
device_id: action_data.raw
for device_id, action_data in config_entry.runtime_data.coordinator.data.actions.items()
for device_id, action_data in config_entry.runtime_data.data.actions.items()
}
),
}
@@ -74,19 +68,13 @@ async def async_get_device_diagnostics(
"model_id": device.model_id,
}
coordinator = config_entry.runtime_data.coordinator
aux_coordinator = config_entry.runtime_data.aux_coordinator
coordinator = config_entry.runtime_data
device_id = cast(str, device.serial_number)
miele_data: dict[str, Any] = {
"devices": {
hash_identifier(device_id): coordinator.data.devices[device_id].raw
},
"filling_levels": {
hash_identifier(device_id): aux_coordinator.data.filling_levels[
device_id
].raw
},
"actions": {
hash_identifier(device_id): coordinator.data.actions[device_id].raw
},

View File

@@ -1,18 +1,16 @@
"""Entity base class for the Miele integration."""
from pymiele import MieleAction, MieleAPI, MieleDevice, MieleFillingLevel
from pymiele import MieleAction, MieleAPI, MieleDevice
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEVICE_TYPE_TAGS, DOMAIN, MANUFACTURER, MieleAppliance, StateStatus
from .coordinator import MieleAuxDataUpdateCoordinator, MieleDataUpdateCoordinator
from .coordinator import MieleDataUpdateCoordinator
class MieleBaseEntity[
_MieleCoordinatorT: MieleDataUpdateCoordinator | MieleAuxDataUpdateCoordinator
](CoordinatorEntity[_MieleCoordinatorT]):
class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
"""Base class for Miele entities."""
_attr_has_entity_name = True
@@ -24,7 +22,7 @@ class MieleBaseEntity[
def __init__(
self,
coordinator: _MieleCoordinatorT,
coordinator: MieleDataUpdateCoordinator,
device_id: str,
description: EntityDescription,
) -> None:
@@ -32,26 +30,7 @@ class MieleBaseEntity[
super().__init__(coordinator)
self._device_id = device_id
self.entity_description = description
self._attr_unique_id = MieleBaseEntity.get_unique_id(device_id, description)
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
@property
def api(self) -> MieleAPI:
"""Return the api object."""
return self.coordinator.api
class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
"""Base class for Miele entities that use the main data coordinator."""
def __init__(
self,
coordinator: MieleDataUpdateCoordinator,
device_id: str,
description: EntityDescription,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator, device_id, description)
self._attr_unique_id = MieleEntity.get_unique_id(device_id, description)
device = self.device
appliance_type = DEVICE_TYPE_TAGS.get(MieleAppliance(device.device_type))
@@ -82,6 +61,11 @@ class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
"""Return the actions object."""
return self.coordinator.data.actions[self._device_id]
@property
def api(self) -> MieleAPI:
"""Return the api object."""
return self.coordinator.api
@property
def available(self) -> bool:
"""Return the availability of the entity."""
@@ -91,12 +75,3 @@ class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
and self._device_id in self.coordinator.data.devices
and (self.device.state_status is not StateStatus.not_connected)
)
class MieleAuxEntity(MieleBaseEntity[MieleAuxDataUpdateCoordinator]):
"""Base class for Miele entities that use the auxiliary data coordinator."""
@property
def levels(self) -> MieleFillingLevel:
"""Return the filling levels object."""
return self.coordinator.data.filling_levels[self._device_id]

View File

@@ -66,7 +66,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the fan platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -71,9 +71,6 @@
"plate_step_warming": "mdi:alpha-w-circle-outline"
}
},
"power_disk_level": {
"default": "mdi:car-coolant-level"
},
"program_id": {
"default": "mdi:selection-ellipse-arrow-inside"
},
@@ -86,12 +83,6 @@
"remaining_time": {
"default": "mdi:clock-end"
},
"rinse_aid_level": {
"default": "mdi:water-opacity"
},
"salt_level": {
"default": "mdi:shaker-outline"
},
"spin_speed": {
"default": "mdi:sync"
},
@@ -104,12 +95,6 @@
"target_temperature": {
"default": "mdi:thermometer-check"
},
"twin_dos_1_level": {
"default": "mdi:car-coolant-level"
},
"twin_dos_2_level": {
"default": "mdi:car-coolant-level"
},
"water_forecast": {
"default": "mdi:water-outline"
}

View File

@@ -86,7 +86,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the light platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -71,7 +71,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the select platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
import logging
from typing import Any, Final, cast
from pymiele import MieleDevice, MieleFillingLevel, MieleTemperature
from pymiele import MieleDevice, MieleTemperature
from homeassistant.components.sensor import (
RestoreSensor,
@@ -44,12 +44,8 @@ from .const import (
StateProgramType,
StateStatus,
)
from .coordinator import (
MieleAuxDataUpdateCoordinator,
MieleConfigEntry,
MieleDataUpdateCoordinator,
)
from .entity import MieleAuxEntity, MieleEntity
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
from .entity import MieleEntity
PARALLEL_UPDATES = 0
@@ -143,13 +139,10 @@ def _convert_finish_timestamp(
@dataclass(frozen=True, kw_only=True)
class MieleSensorDescription[T: (MieleDevice, MieleFillingLevel)](
SensorEntityDescription
):
class MieleSensorDescription(SensorEntityDescription):
"""Class describing Miele sensor entities."""
value_fn: Callable[[T], StateType | datetime]
value_fn: Callable[[MieleDevice], StateType | datetime]
end_value_fn: Callable[[StateType | datetime], StateType | datetime] | None = None
extra_attributes: dict[str, Callable[[MieleDevice], StateType]] | None = None
zone: int | None = None
@@ -157,14 +150,14 @@ class MieleSensorDescription[T: (MieleDevice, MieleFillingLevel)](
@dataclass
class MieleSensorDefinition[T: (MieleDevice, MieleFillingLevel)]:
class MieleSensorDefinition:
"""Class for defining sensor entities."""
types: tuple[MieleAppliance, ...]
description: MieleSensorDescription[T]
description: MieleSensorDescription
SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
MieleSensorDefinition(
types=(
MieleAppliance.WASHING_MACHINE,
@@ -696,59 +689,6 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
),
)
POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]] = (
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_1_level",
translation_key="twin_dos_1_level",
value_fn=lambda value: value.twin_dos_container_1_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_2_level",
translation_key="twin_dos_2_level",
value_fn=lambda value: value.twin_dos_container_2_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="power_disk_level",
translation_key="power_disk_level",
value_fn=lambda value: None,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="salt_level",
translation_key="salt_level",
value_fn=lambda value: value.salt_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="rinse_aid_level",
translation_key="rinse_aid_level",
value_fn=lambda value: value.rinse_aid_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
)
async def async_setup_entry(
hass: HomeAssistant,
@@ -756,14 +696,11 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
coordinator = config_entry.runtime_data.coordinator
aux_coordinator = config_entry.runtime_data.aux_coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set() # device_id
added_entities: set[str] = set() # unique_id
def _get_entity_class(
definition: MieleSensorDefinition[MieleDevice],
) -> type[MieleSensor]:
def _get_entity_class(definition: MieleSensorDefinition) -> type[MieleSensor]:
"""Get the entity class for the sensor."""
return {
"state_status": MieleStatusSensor,
@@ -788,7 +725,7 @@ async def async_setup_entry(
)
def _is_sensor_enabled(
definition: MieleSensorDefinition[MieleDevice],
definition: MieleSensorDefinition,
device: MieleDevice,
unique_id: str,
) -> bool:
@@ -811,15 +748,6 @@ async def async_setup_entry(
return False
return True
def _enabled_aux_sensor(
definition: MieleSensorDefinition[MieleFillingLevel], level: MieleFillingLevel
) -> bool:
"""Check if aux sensors are enabled."""
return not (
definition.description.value_fn is not None
and definition.description.value_fn(level) is None
)
def _async_add_devices() -> None:
nonlocal added_devices, added_entities
entities: list = []
@@ -847,11 +775,7 @@ async def async_setup_entry(
continue
# sensor is not enabled, skip
if not _is_sensor_enabled(
definition,
device,
unique_id,
):
if not _is_sensor_enabled(definition, device, unique_id):
continue
added_entities.add(unique_id)
@@ -863,15 +787,6 @@ async def async_setup_entry(
config_entry.async_on_unload(coordinator.async_add_listener(_async_add_devices))
_async_add_devices()
async_add_entities(
MieleAuxSensor(aux_coordinator, device_id, definition.description)
for device_id in aux_coordinator.data.filling_levels
for definition in POLLED_SENSOR_TYPES
if _enabled_aux_sensor(
definition, aux_coordinator.data.filling_levels[device_id]
)
)
APPLIANCE_ICONS = {
MieleAppliance.WASHING_MACHINE: "mdi:washing-machine",
@@ -970,32 +885,6 @@ class MieleRestorableSensor(MieleSensor, RestoreSensor):
super()._handle_coordinator_update()
class MieleAuxSensor(MieleAuxEntity, SensorEntity):
"""Representation of a filling level Sensor."""
entity_description: MieleSensorDescription
def __init__(
self,
coordinator: MieleAuxDataUpdateCoordinator,
device_id: str,
description: MieleSensorDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, device_id, description)
if description.unique_id_fn is not None:
self._attr_unique_id = description.unique_id_fn(device_id, description)
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the level sensor."""
return (
self.entity_description.value_fn(self.levels)
if self.entity_description.value_fn is not None
else None
)
class MielePlateSensor(MieleSensor):
"""Representation of a Sensor."""

View File

@@ -257,9 +257,6 @@
"plate_step_warm": "Warming"
}
},
"power_disk_level": {
"name": "PowerDisk level"
},
"program_id": {
"name": "Program",
"state": {
@@ -1041,12 +1038,6 @@
"remaining_time": {
"name": "Remaining time"
},
"rinse_aid_level": {
"name": "Rinse aid level"
},
"salt_level": {
"name": "Salt level"
},
"spin_speed": {
"name": "Spin speed"
},
@@ -1089,12 +1080,6 @@
"temperature_zone_3": {
"name": "Temperature zone 3"
},
"twin_dos_1_level": {
"name": "TwinDos 1 level"
},
"twin_dos_2_level": {
"name": "TwinDos 2 level"
},
"water_consumption": {
"name": "Water consumption"
},

View File

@@ -117,7 +117,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the switch platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -128,7 +128,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the vacuum platform."""
coordinator = config_entry.runtime_data.coordinator
coordinator = config_entry.runtime_data
async_add_entities(
MieleVacuum(coordinator, device_id, definition.description)

View File

@@ -69,7 +69,7 @@ class RegistrationsView(HomeAssistantView):
webhook_id = secrets.token_hex()
if cloud.async_active_subscription(hass):
if cloud.async_active_subscription(hass) and cloud.async_is_connected(hass):
data[CONF_CLOUDHOOK_URL] = await async_create_cloud_hook(
hass, webhook_id, None
)

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["nextdns"],
"quality_scale": "platinum",
"requirements": ["nextdns==5.0.0"]
"requirements": ["nextdns==4.1.0"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["nibe==2.21.0"]
"requirements": ["nibe==2.20.0"]
}

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.15.9"]
"requirements": ["opower==0.16.0"]
}

View File

@@ -61,10 +61,7 @@ async def async_get_device_diagnostics(
data["execution_history"] = [
repr(execution)
for execution in await client.get_execution_history()
if any(
command.device_url.split("#", 1)[0] == device_url.split("#", 1)[0]
for command in execution.commands
)
if any(command.device_url == device_url for command in execution.commands)
]
return data

View File

@@ -75,15 +75,6 @@
},
{
"macaddress": "84E657*"
},
{
"hostname": "ps5-*"
},
{
"hostname": "ps4-*"
},
{
"hostname": "ps3"
}
],
"documentation": "https://www.home-assistant.io/integrations/playstation_network",

View File

@@ -114,72 +114,32 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)
await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
if not user_input:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
)
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfigure to change the device host/IP for an existing entry."""
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="reconfigure",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display identical to other steps
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)
# Ensure new serial number matches the existing entry unique_id (serial number)
if serial_number != self._get_reconfigure_entry().unique_id:
return self.async_abort(reason="wrong_device")
# Update the existing config entry with the new host and schedule reload
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(), data_updates={CONF_HOST: host}
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should") or "",
}
if api_versions
else None,
)
return self.async_show_form(
step_id="reconfigure",
# Pre-fill with current host from the entry being reconfigured
data_schema=self.add_suggested_values_to_schema(
SCHEMA_DEVICE, self._get_reconfigure_entry().data
),
await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
)

View File

@@ -1,34 +0,0 @@
"""Diagnostics support for Pooldose."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from . import PooldoseConfigEntry
TO_REDACT = {
"IP",
"MAC",
"WIFI_SSID",
"AP_SSID",
"SERIAL_NUMBER",
"DEVICE_ID",
"OWNERID",
"NAME",
"GROUPNAME",
}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: PooldoseConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
return {
"device_info": async_redact_data(coordinator.device_info, TO_REDACT),
"data": coordinator.data,
}

View File

@@ -41,7 +41,7 @@ rules:
# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: done
discovery: done
docs-data-update: done
@@ -53,20 +53,20 @@ rules:
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: This integration does not support dynamic device discovery, as each config entry represents a single PoolDose device with all available entities.
comment: This integration does not support dynamic devices, as it is designed for a single PoolDose device.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: done
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: This integration does not have any identified cases where repair issues would be needed.
comment: This integration does not provide repair issues, as it is designed for a single PoolDose device with a fixed configuration.
stale-devices:
status: exempt
comment: This integration manages a single device per config entry, so stale device removal is not applicable.
comment: This integration does not support stale devices, as it is designed for a single PoolDose device with a fixed configuration.
# Platinum
async-dependency: done

View File

@@ -4,9 +4,7 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_device_info": "Unable to retrieve device information",
"no_serial_number": "No serial number found on the device",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_device": "The provided device does not match the configured device"
"no_serial_number": "No serial number found on the device"
},
"error": {
"api_not_set": "API version not found in device response. Device firmware may not be compatible with this integration.",
@@ -22,14 +20,6 @@
"description": "A PoolDose device was found on your network at {ip} with MAC address {mac}.\n\nDo you want to add {name} to Home Assistant?",
"title": "Confirm DHCP discovered PoolDose device"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::pooldose::config::step::user::data_description::host%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

View File

@@ -164,11 +164,7 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
return super().available and self.endpoint_id in self.coordinator.data
@property
def is_on(self) -> bool | None:

View File

@@ -113,9 +113,7 @@ class PortainerButton(PortainerContainerEntity, ButtonEntity):
"""Trigger the Portainer button press service."""
try:
await self.entity_description.press_action(
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
self.coordinator.portainer, self.endpoint_id, self.device_id
)
except PortainerConnectionError as err:
raise HomeAssistantError(

View File

@@ -50,7 +50,7 @@ class PortainerContainerData:
"""Container data held by the Portainer coordinator."""
container: DockerContainer
stats: DockerContainerStats | None
stats: DockerContainerStats
stats_pre: DockerContainerStats | None
@@ -147,52 +147,47 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
docker_version = await self.portainer.docker_version(endpoint.id)
docker_info = await self.portainer.docker_info(endpoint.id)
prev_endpoint = self.data.get(endpoint.id) if self.data else None
container_map: dict[str, PortainerContainerData] = {}
# Map containers, started and stopped
for container in containers:
container_name = self._get_container_name(container.names[0])
prev_container = (
prev_endpoint.containers[container_name]
if prev_endpoint
else None
container_stats_task = [
(
container,
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
),
)
container_map[container_name] = PortainerContainerData(
container=container,
stats=None,
stats_pre=prev_container.stats if prev_container else None,
)
# Separately fetch stats for running containers
running_containers = [
container
for container in containers
if container.state == CONTAINER_STATE_RUNNING
]
if running_containers:
container_stats = dict(
zip(
(
self._get_container_name(container.names[0])
for container in running_containers
),
await asyncio.gather(
*(
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
)
for container in running_containers
)
),
strict=False,
)
)
# Now assign stats to the containers
for container_name, stats in container_stats.items():
container_map[container_name].stats = stats
container_stats_gather = await asyncio.gather(
*[task for _, task in container_stats_task]
)
for (container, _), container_stats in zip(
container_stats_task, container_stats_gather, strict=False
):
container_name = container.names[0].replace("/", " ").strip()
# Store previous stats if available. This is used to calculate deltas for CPU and network usage
# In the first call it will be None, since it has nothing to compare with
# Added a walrus pattern to check if not None on prev_container, to keep mypy happy. :)
container_map[container_name] = PortainerContainerData(
container=container,
stats=container_stats,
stats_pre=(
prev_container.stats
if self.data
and (prev_data := self.data.get(endpoint.id)) is not None
and (
prev_container := prev_data.containers.get(
container_name
)
)
is not None
else None
),
)
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
@@ -233,15 +228,11 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
# Surprise, we also handle containers here :)
current_containers = {
(endpoint.id, container_name)
(endpoint.id, container.container.id)
for endpoint in mapped_endpoints.values()
for container_name in endpoint.containers
for container in endpoint.containers.values()
}
new_containers = current_containers - self.known_containers
if new_containers:
_LOGGER.debug("New containers found: %s", new_containers)
self.known_containers.update(new_containers)
def _get_container_name(self, container_name: str) -> str:
"""Sanitize to get a proper container name."""
return container_name.replace("/", " ").strip()
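Both versions of the Portainer refresh interleaved above lean on the same asyncio shape: queue one container_stats call per container, run them concurrently with asyncio.gather, and pair the results back with the containers they were requested for. A self-contained sketch of that shape; fetch_stats is a stand-in coroutine, not the pyportainer API:

import asyncio


async def fetch_stats(container_id: str) -> dict:
    """Stand-in for the per-container stats request."""
    await asyncio.sleep(0)  # pretend this is the HTTP round trip
    return {"id": container_id, "cpu": 0.0}


async def collect(container_ids: list[str]) -> dict[str, dict]:
    # One coroutine per container, fired concurrently.
    tasks = [(cid, fetch_stats(cid)) for cid in container_ids]
    results = await asyncio.gather(*(coro for _, coro in tasks))
    # Zip each result back to the container it was requested for.
    return {cid: stats for (cid, _), stats in zip(tasks, results, strict=False)}


print(asyncio.run(collect(["/web", "/db"])))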

View File

@@ -7,9 +7,6 @@
"architecture": {
"default": "mdi:cpu-64-bit"
},
"container_state": {
"default": "mdi:state-machine"
},
"containers_count": {
"default": "mdi:database"
},

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==1.0.22"]
"requirements": ["pyportainer==1.0.19"]
}

View File

@@ -49,19 +49,10 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="image",
value_fn=lambda data: data.container.image,
),
PortainerContainerSensorEntityDescription(
key="container_state",
translation_key="container_state",
value_fn=lambda data: data.container.state,
device_class=SensorDeviceClass.ENUM,
options=["running", "exited", "paused", "restarting", "created", "dead"],
),
PortainerContainerSensorEntityDescription(
key="memory_limit",
translation_key="memory_limit",
value_fn=lambda data: (
data.stats.memory_stats.limit if data.stats is not None else 0
),
value_fn=lambda data: data.stats.memory_stats.limit,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -72,9 +63,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="memory_usage",
translation_key="memory_usage",
value_fn=lambda data: (
data.stats.memory_stats.usage if data.stats is not None else 0
),
value_fn=lambda data: data.stats.memory_stats.usage,
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -87,9 +76,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="memory_usage_percentage",
value_fn=lambda data: (
(data.stats.memory_stats.usage / data.stats.memory_stats.limit) * 100.0
if data.stats is not None
and data.stats.memory_stats.limit > 0
and data.stats.memory_stats.usage > 0
if data.stats.memory_stats.limit > 0 and data.stats.memory_stats.usage > 0
else 0.0
),
native_unit_of_measurement=PERCENTAGE,
@@ -102,8 +89,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="cpu_usage_total",
value_fn=lambda data: (
(total_delta / system_delta) * data.stats.cpu_stats.online_cpus * 100.0
if data.stats is not None
and (prev := data.stats_pre) is not None
if (prev := data.stats_pre) is not None
and (
system_delta := (
data.stats.cpu_stats.system_cpu_usage
@@ -261,6 +247,7 @@ async def async_setup_entry(
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SENSORS
if entity_description.value_fn(container) is not None
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
@@ -303,11 +290,7 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
return super().available and self.endpoint_id in self.coordinator.data
@property
def native_value(self) -> StateType:

View File

@@ -68,17 +68,6 @@
"architecture": {
"name": "Architecture"
},
"container_state": {
"name": "State",
"state": {
"created": "Created",
"dead": "Dead",
"exited": "Exited",
"paused": "Paused",
"restarting": "Restarting",
"running": "Running"
}
},
"containers_count": {
"name": "Container count"
},

View File

@@ -137,19 +137,13 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Start (turn on) the container."""
await self.entity_description.turn_on_fn(
"start",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
"start", self.coordinator.portainer, self.endpoint_id, self.device_id
)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Stop (turn off) the container."""
await self.entity_description.turn_off_fn(
"stop",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
"stop", self.coordinator.portainer, self.endpoint_id, self.device_id
)
await self.coordinator.async_request_refresh()

View File

@@ -128,8 +128,9 @@ class RingCam(RingEntity[RingDoorBell], Camera):
self._device = self._get_coordinator_data().get_video_device(
self._device.device_api_id
)
history_data = self._device.last_history
if history_data:
if history_data and self._device.has_subscription:
self._last_event = history_data[0]
# will call async_update to update the attributes and get the
# video url from the api
@@ -154,8 +155,16 @@ class RingCam(RingEntity[RingDoorBell], Camera):
self, width: int | None = None, height: int | None = None
) -> bytes | None:
"""Return a still image response from the camera."""
if self._video_url is None:
if not self._device.has_subscription:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="no_subscription",
)
return None
key = (width, height)
if not (image := self._images.get(key)) and self._video_url is not None:
if not (image := self._images.get(key)):
image = await ffmpeg.async_get_image(
self.hass,
self._video_url,

View File

@@ -151,6 +151,9 @@
"api_timeout": {
"message": "Timeout communicating with Ring API"
},
"no_subscription": {
"message": "Ring Protect subscription required for snapshots"
},
"sdp_m_line_index_required": {
"message": "Error negotiating stream for {device}"
}

View File

@@ -552,7 +552,6 @@ class RoborockB01Q7UpdateCoordinator(RoborockDataUpdateCoordinatorB01):
RoborockB01Props.CLEANING_TIME,
RoborockB01Props.REAL_CLEAN_TIME,
RoborockB01Props.HYPA,
RoborockB01Props.WIND,
]
async def _async_update_data(

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==4.1.0",
"python-roborock==4.2.1",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -391,15 +391,6 @@ Q7_B01_SENSOR_DESCRIPTIONS = [
translation_key="mop_life_time_left",
entity_category=EntityCategory.DIAGNOSTIC,
),
RoborockSensorDescriptionB01(
key="total_cleaning_time",
value_fn=lambda data: data.real_clean_time,
device_class=SensorDeviceClass.DURATION,
native_unit_of_measurement=UnitOfTime.MINUTES,
suggested_unit_of_measurement=UnitOfTime.HOURS,
translation_key="total_cleaning_time",
entity_category=EntityCategory.DIAGNOSTIC,
),
]

View File

@@ -3,7 +3,7 @@
import logging
from typing import Any
from roborock.data import RoborockStateCode, SCWindMapping, WorkStatusMapping
from roborock.data import RoborockStateCode
from roborock.exceptions import RoborockException
from roborock.roborock_typing import RoborockCommand
import voluptuous as vol
@@ -24,12 +24,8 @@ from .const import (
GET_VACUUM_CURRENT_POSITION_SERVICE_NAME,
SET_VACUUM_GOTO_POSITION_SERVICE_NAME,
)
from .coordinator import (
RoborockB01Q7UpdateCoordinator,
RoborockConfigEntry,
RoborockDataUpdateCoordinator,
)
from .entity import RoborockCoordinatedEntityB01, RoborockCoordinatedEntityV1
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
from .entity import RoborockCoordinatedEntityV1
_LOGGER = logging.getLogger(__name__)
@@ -61,20 +57,6 @@ STATE_CODE_TO_STATE = {
RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline"
}
Q7_STATE_CODE_TO_STATE = {
WorkStatusMapping.SLEEPING: VacuumActivity.IDLE,
WorkStatusMapping.WAITING_FOR_ORDERS: VacuumActivity.IDLE,
WorkStatusMapping.PAUSED: VacuumActivity.PAUSED,
WorkStatusMapping.DOCKING: VacuumActivity.RETURNING,
WorkStatusMapping.CHARGING: VacuumActivity.DOCKED,
WorkStatusMapping.SWEEP_MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.SWEEP_MOPING_2: VacuumActivity.CLEANING,
WorkStatusMapping.MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.UPDATING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_CLEANING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_AIRDRYING: VacuumActivity.DOCKED,
}
PARALLEL_UPDATES = 0
@@ -87,11 +69,6 @@ async def async_setup_entry(
async_add_entities(
RoborockVacuum(coordinator) for coordinator in config_entry.runtime_data.v1
)
async_add_entities(
RoborockQ7Vacuum(coordinator)
for coordinator in config_entry.runtime_data.b01
if isinstance(coordinator, RoborockB01Q7UpdateCoordinator)
)
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
@@ -264,149 +241,3 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
"x": robot_position.x,
"y": robot_position.y,
}
class RoborockQ7Vacuum(RoborockCoordinatedEntityB01, StateVacuumEntity):
"""General Representation of a Roborock vacuum."""
_attr_icon = "mdi:robot-vacuum"
_attr_supported_features = (
VacuumEntityFeature.PAUSE
| VacuumEntityFeature.STOP
| VacuumEntityFeature.RETURN_HOME
| VacuumEntityFeature.FAN_SPEED
| VacuumEntityFeature.SEND_COMMAND
| VacuumEntityFeature.LOCATE
| VacuumEntityFeature.STATE
| VacuumEntityFeature.START
)
_attr_translation_key = DOMAIN
_attr_name = None
coordinator: RoborockB01Q7UpdateCoordinator
def __init__(
self,
coordinator: RoborockB01Q7UpdateCoordinator,
) -> None:
"""Initialize a vacuum."""
StateVacuumEntity.__init__(self)
RoborockCoordinatedEntityB01.__init__(
self,
coordinator.duid_slug,
coordinator,
)
@property
def fan_speed_list(self) -> list[str]:
"""Get the list of available fan speeds."""
return SCWindMapping.keys()
@property
def activity(self) -> VacuumActivity | None:
"""Return the status of the vacuum cleaner."""
if self.coordinator.data.status is not None:
return Q7_STATE_CODE_TO_STATE.get(self.coordinator.data.status)
return None
@property
def fan_speed(self) -> str | None:
"""Return the fan speed of the vacuum cleaner."""
return self.coordinator.data.wind_name
async def async_start(self) -> None:
"""Start the vacuum."""
try:
await self.coordinator.api.start_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "start_clean",
},
) from err
async def async_pause(self) -> None:
"""Pause the vacuum."""
try:
await self.coordinator.api.pause_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "pause_clean",
},
) from err
async def async_stop(self, **kwargs: Any) -> None:
"""Stop the vacuum."""
try:
await self.coordinator.api.stop_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "stop_clean",
},
) from err
async def async_return_to_base(self, **kwargs: Any) -> None:
"""Send vacuum back to base."""
try:
await self.coordinator.api.return_to_dock()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "return_to_dock",
},
) from err
async def async_locate(self, **kwargs: Any) -> None:
"""Locate vacuum."""
try:
await self.coordinator.api.find_me()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "find_me",
},
) from err
async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
"""Set vacuum fan speed."""
try:
await self.coordinator.api.set_fan_speed(
SCWindMapping.from_value(fan_speed)
)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "set_fan_speed",
},
) from err
async def async_send_command(
self,
command: str,
params: dict[str, Any] | list[Any] | None = None,
**kwargs: Any,
) -> None:
"""Send a command to a vacuum cleaner."""
try:
await self.coordinator.api.send(command, params)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": command,
},
) from err

View File

@@ -442,7 +442,7 @@ class BlockSleepingClimate(
self._attr_device_info = get_entity_block_device_info(coordinator, sensor_block)
self._attr_name = None # Main device entity
self._channel = int(self._unique_id.split("_")[1])
self._channel = cast(int, self._unique_id.split("_")[1])
@property
def extra_restore_state_data(self) -> ShellyClimateExtraStoredData:
@@ -543,9 +543,8 @@ class BlockSleepingClimate(
"""Set block state (HTTP request)."""
LOGGER.debug("Setting state for entity %s, state: %s", self.name, kwargs)
try:
return await self.coordinator.device.set_thermostat_state(
self._channel,
**kwargs,
return await self.coordinator.device.http_request(
"get", f"thermostat/{self._channel}", kwargs
)
except DeviceConnectionError as err:
self.coordinator.last_update_success = False
@@ -578,7 +577,7 @@ class BlockSleepingClimate(
UnitOfTemperature.FAHRENHEIT,
)
await self.set_state_full_path(target_t_enabled=1, target_t=target_temp)
await self.set_state_full_path(target_t_enabled=1, target_t=f"{target_temp}")
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set hvac mode."""
@@ -586,7 +585,7 @@ class BlockSleepingClimate(
if isinstance(self.target_temperature, float):
self._last_target_temp = self.target_temperature
await self.set_state_full_path(
target_t_enabled=1, target_t=self._attr_min_temp
target_t_enabled=1, target_t=f"{self._attr_min_temp}"
)
if hvac_mode == HVACMode.HEAT:
await self.set_state_full_path(
@@ -600,7 +599,9 @@ class BlockSleepingClimate(
if preset_index == 0:
await self.set_state_full_path(schedule=0)
else:
await self.set_state_full_path(schedule=1, schedule_profile=preset_index)
await self.set_state_full_path(
schedule=1, schedule_profile=f"{preset_index}"
)
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""

View File

@@ -46,7 +46,7 @@ SENSOR_TYPES = [
key="lifetime_energy",
json_key="lifeTimeData",
translation_key="lifetime_energy",
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -55,6 +55,7 @@ SENSOR_TYPES = [
json_key="lastYearData",
translation_key="energy_this_year",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -63,6 +64,7 @@ SENSOR_TYPES = [
json_key="lastMonthData",
translation_key="energy_this_month",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -71,6 +73,7 @@ SENSOR_TYPES = [
json_key="lastDayData",
translation_key="energy_today",
entity_registry_enabled_default=False,
state_class=SensorStateClass.TOTAL_INCREASING,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
),
@@ -123,24 +126,32 @@ SENSOR_TYPES = [
json_key="LOAD",
translation_key="power_consumption",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="solar_power",
json_key="PV",
translation_key="solar_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="grid_power",
json_key="GRID",
translation_key="grid_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="storage_power",
json_key="STORAGE",
translation_key="storage_power",
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.POWER,
),
SolarEdgeSensorEntityDescription(
key="purchased_energy",
@@ -194,6 +205,7 @@ SENSOR_TYPES = [
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.BATTERY,
),
]
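
Note: these hunks change lifetime_energy from TOTAL to TOTAL_INCREASING and add state_class (plus device_class for the power and battery entries) so the sensors produce long-term statistics and can feed the energy dashboard. A minimal sketch of one energy description using the same constants; the plain SensorEntityDescription stands in for the integration's own SolarEdgeSensorEntityDescription.

from homeassistant.components.sensor import (
    SensorDeviceClass,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import UnitOfEnergy

# TOTAL_INCREASING marks a monotonically increasing meter (which may reset),
# letting the recorder derive consumption deltas; MEASUREMENT is used for
# instantaneous values such as the power readings above.
LIFETIME_ENERGY = SensorEntityDescription(
    key="lifetime_energy",
    translation_key="lifetime_energy",
    state_class=SensorStateClass.TOTAL_INCREASING,
    native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
    device_class=SensorDeviceClass.ENERGY,
)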

View File

@@ -7,5 +7,5 @@
"iot_class": "local_polling",
"loggers": ["solarlog_cli"],
"quality_scale": "platinum",
"requirements": ["solarlog_cli==0.6.1"]
"requirements": ["solarlog_cli==0.7.0"]
}

View File

@@ -37,7 +37,6 @@ SONOS_RADIO = "radio"
SONOS_SHARE = "share"
SONOS_OTHER_ITEM = "other items"
SONOS_AUDIO_BOOK = "audio book"
SONOS_PODCAST = "podcast"
MEDIA_TYPE_DIRECTORY = MediaClass.DIRECTORY
@@ -67,7 +66,6 @@ SONOS_TO_MEDIA_CLASSES = {
SONOS_COMPOSER: MediaClass.COMPOSER,
SONOS_GENRE: MediaClass.GENRE,
SONOS_PLAYLISTS: MediaClass.PLAYLIST,
SONOS_PODCAST: MediaClass.PODCAST,
SONOS_TRACKS: MediaClass.TRACK,
SONOS_SHARE: MediaClass.DIRECTORY,
"object.container": MediaClass.DIRECTORY,
@@ -77,7 +75,6 @@ SONOS_TO_MEDIA_CLASSES = {
"object.container.person.musicArtist": MediaClass.ARTIST,
"object.container.playlistContainer.sameArtist": MediaClass.ARTIST,
"object.container.playlistContainer": MediaClass.PLAYLIST,
"object.container.podcast": MediaClass.PODCAST,
"object.item": MediaClass.TRACK,
"object.item.audioItem.musicTrack": MediaClass.TRACK,
"object.item.audioItem.audioBroadcast": MediaClass.GENRE,
@@ -91,7 +88,6 @@ SONOS_TO_MEDIA_TYPES = {
SONOS_COMPOSER: MediaType.COMPOSER,
SONOS_GENRE: MediaType.GENRE,
SONOS_PLAYLISTS: MediaType.PLAYLIST,
SONOS_PODCAST: MediaType.PODCAST,
SONOS_TRACKS: MediaType.TRACK,
"object.container": MEDIA_TYPE_DIRECTORY,
"object.container.album.musicAlbum": MediaType.ALBUM,
@@ -100,7 +96,6 @@ SONOS_TO_MEDIA_TYPES = {
"object.container.person.musicArtist": MediaType.ARTIST,
"object.container.playlistContainer.sameArtist": MediaType.ARTIST,
"object.container.playlistContainer": MediaType.PLAYLIST,
"object.container.podcast": MediaType.PODCAST,
"object.item.audioItem.musicTrack": MediaType.TRACK,
"object.item.audioItem.audioBook": MediaType.TRACK,
}
@@ -130,7 +125,6 @@ SONOS_TYPES_MAPPING = {
"object.container.person.musicArtist": SONOS_ALBUM_ARTIST,
"object.container.playlistContainer.sameArtist": SONOS_ARTIST,
"object.container.playlistContainer": SONOS_PLAYLISTS,
"object.container.podcast": SONOS_PODCAST,
"object.item": SONOS_OTHER_ITEM,
"object.item.audioItem.musicTrack": SONOS_TRACKS,
"object.item.audioItem.audioBroadcast": SONOS_RADIO,
@@ -155,7 +149,6 @@ PLAYABLE_MEDIA_TYPES = [
MediaType.CONTRIBUTING_ARTIST,
MediaType.GENRE,
MediaType.PLAYLIST,
MediaType.PODCAST,
MediaType.TRACK,
]
@@ -212,4 +205,3 @@ BATTERY_SCAN_INTERVAL = datetime.timedelta(minutes=15)
SCAN_INTERVAL = datetime.timedelta(seconds=10)
DISCOVERY_INTERVAL = datetime.timedelta(seconds=60)
SUBSCRIPTION_TIMEOUT = 1200
LONG_SERVICE_TIMEOUT = 30.0
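
Note: the removals above strip the podcast entries from the Sonos mapping tables (media classes, media types, browse types, and the playable-media list). These tables are plain lookups from a DIDL-Lite item class to a Home Assistant media class; the trimmed copy and the lookup helper below are illustrative only, not the integration's code.

from homeassistant.components.media_player import MediaClass

# Trimmed copy for illustration; the full table is the dict edited above.
SONOS_TO_MEDIA_CLASSES: dict[str, MediaClass] = {
    "object.container.album.musicAlbum": MediaClass.ALBUM,
    "object.item.audioItem.musicTrack": MediaClass.TRACK,
}


def media_class_for(didl_class: str) -> MediaClass:
    """Look up the media class for a DIDL-Lite item class, defaulting to TRACK."""
    return SONOS_TO_MEDIA_CLASSES.get(didl_class, MediaClass.TRACK)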

View File

@@ -12,7 +12,7 @@
"quality_scale": "bronze",
"requirements": [
"defusedxml==0.7.1",
"soco==0.30.14",
"soco==0.30.13",
"sonos-websocket==0.1.3"
],
"ssdp": [

View File

@@ -54,7 +54,6 @@ from . import media_browser
from .const import (
ATTR_QUEUE_POSITION,
DOMAIN,
LONG_SERVICE_TIMEOUT,
MEDIA_TYPE_DIRECTORY,
MEDIA_TYPES_TO_SONOS,
MODELS_LINEIN_AND_TV,
@@ -77,6 +76,7 @@ if TYPE_CHECKING:
_LOGGER = logging.getLogger(__name__)
LONG_SERVICE_TIMEOUT = 30.0
UNJOIN_SERVICE_TIMEOUT = 0.1
VOLUME_INCREMENT = 2

View File

@@ -40,7 +40,6 @@ from .const import (
AVAILABILITY_TIMEOUT,
BATTERY_SCAN_INTERVAL,
DOMAIN,
LONG_SERVICE_TIMEOUT,
SCAN_INTERVAL,
SONOS_CHECK_ACTIVITY,
SONOS_CREATE_ALARM,
@@ -1068,7 +1067,7 @@ class SonosSpeaker:
"""Unjoin the player from a group."""
if self.sonos_group == [self]:
return
self.soco.unjoin(timeout=LONG_SERVICE_TIMEOUT)
self.soco.unjoin()
@staticmethod
async def unjoin_multi(
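
Note: with LONG_SERVICE_TIMEOUT removed from const.py (it now lives in media_player.py, per the previous file), unjoin no longer passes an explicit timeout and presumably falls back to SoCo's default. The guard itself is unchanged; a hedged sketch of that shape, where speaker is a hypothetical object exposing sonos_group and a SoCo instance as in the integration.

def unjoin(speaker) -> None:
    """Leave the group unless the speaker is already standalone."""
    if speaker.sonos_group == [speaker]:
        return
    speaker.soco.unjoin()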

View File

@@ -7,14 +7,9 @@ from typing import Any
from srpenergy.client import SrpEnergyClient
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_RECONFIGURE,
SOURCE_USER,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ID, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from .const import CONF_IS_TOU, DOMAIN, LOGGER
@@ -45,71 +40,52 @@ class SRPEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
VERSION = 1
@callback
def _show_form(self, errors: dict[str, Any]) -> ConfigFlowResult:
"""Show the form to the user."""
LOGGER.debug("Show Form")
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_NAME, default=self.hass.config.location_name
): str,
vol.Required(CONF_ID): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_IS_TOU, default=False): bool,
}
),
errors=errors,
)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle a flow initialized by the user."""
LOGGER.debug("Config entry")
errors: dict[str, str] = {}
if user_input:
try:
await validate_input(self.hass, user_input)
except ValueError:
# Thrown when the account id is malformed
errors["base"] = "invalid_account"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # noqa: BLE001
LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
else:
await self.async_set_unique_id(user_input[CONF_ID])
if self.source == SOURCE_USER:
self._abort_if_unique_id_configured()
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch()
if not user_input:
return self._show_form(errors)
if self.source == SOURCE_USER:
return self.async_create_entry(
title=user_input[CONF_NAME],
data=user_input,
)
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(),
data=user_input,
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
data_schema=vol.Schema(
{
vol.Required(CONF_ID): (
str
if self.source == SOURCE_USER
else self._get_reconfigure_entry().data[CONF_ID]
),
vol.Required(
CONF_NAME, default=self.hass.config.location_name
): str,
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
vol.Optional(CONF_IS_TOU, default=False): bool,
}
),
suggested_values=(
user_input or self._get_reconfigure_entry().data
if self.source == SOURCE_RECONFIGURE
else None
),
),
errors=errors,
)
try:
await validate_input(self.hass, user_input)
except ValueError:
# Thrown when the account id is malformed
errors["base"] = "invalid_account"
return self._show_form(errors)
except InvalidAuth:
errors["base"] = "invalid_auth"
return self._show_form(errors)
except Exception: # noqa: BLE001
LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
async def async_step_reconfigure(
self, user_input: dict[str, Any]
) -> ConfigFlowResult:
"""Handle reconfiguration."""
return await self.async_step_user()
await self.async_set_unique_id(user_input[CONF_ID])
self._abort_if_unique_id_configured()
return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)
class InvalidAuth(HomeAssistantError):
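
Note: this hunk restores the single-step user flow: a @callback _show_form helper, validation in async_step_user, a unique ID keyed on the account id, and no reconfigure path (the strings.json hunk below drops the matching abort reason). A minimal sketch of that shape; DOMAIN, the schema contents, and the stubbed-out validation are placeholders, not the integration's exact code.

from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ID, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback

DOMAIN = "srp_energy"  # assumed; the real value comes from the integration's const.py

DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): str,
        vol.Required(CONF_ID): str,
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
    }
)


class ExampleSRPEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
    """Sketch of the restored single-step user flow."""

    VERSION = 1

    @callback
    def _show_form(self, errors: dict[str, str]) -> ConfigFlowResult:
        """Show the user form, optionally with validation errors."""
        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA, errors=errors
        )

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Validate input, dedupe on the account id, then create the entry."""
        errors: dict[str, str] = {}
        if not user_input:
            return self._show_form(errors)
        # Real code validates credentials here (see validate_input in the hunk);
        # on failure it sets errors["base"] and returns self._show_form(errors).
        await self.async_set_unique_id(user_input[CONF_ID])
        self._abort_if_unique_id_configured()
        return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)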

View File

@@ -2,7 +2,6 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {

Some files were not shown because too many files have changed in this diff.