Compare commits

...

89 Commits

Author SHA1 Message Date
farmio
336ca369e4 clean up name config for yaml entities 2026-01-04 21:16:52 +01:00
J. Nick Koston
d2deef968a Ensure Brotli >= 1.2.0 (#160229) 2026-01-04 08:08:42 -10:00
Mick Vleeshouwer
6cae1821fb Fix execution history matching to ignore subsystem suffix in diagnostics in Overkiz (#160218) 2026-01-04 11:38:30 +01:00
Jan-Philipp Benecke
8d8046d233 Bump aiowebdav2 to 0.5.0 (#160233) 2026-01-04 11:37:41 +01:00
Samuel Xiao
d7a9a980d0 Switchbot Cloud: Fixed Robot Vacuum Cleaner S20 had two device_model name (#160230) 2026-01-04 11:36:24 +01:00
J. Nick Koston
ff8ad0c9ba Bump aioesphomeapi to 43.10.1 (#160227) 2026-01-03 17:40:15 -10:00
J. Nick Koston
27728cdca8 Bump aiohttp 3.13.3 (#160206) 2026-01-03 16:46:27 -10:00
Erwin Douna
f1eaf78923 Portainer polish ephemeral container ID (#160186) 2026-01-03 21:32:07 +01:00
Willem-Jan van Rootselaar
667b1db594 Bump python-bsblan dependency to version 3.1.6 (#160202) 2026-01-03 21:30:26 +01:00
Josef Zweck
d6cad546e1 Remove referral link from fish_audio (#160193) 2026-01-03 17:12:53 +01:00
Tom
4c8ffa2158 Bump airOS to v0.6.1 adding LiteAP AC support (#160194) 2026-01-03 13:52:08 +01:00
Lukas
933fae9ade Pooldose document exempts (#160166) 2026-01-03 08:59:07 +01:00
Erwin Douna
b6dd9db76e Portainer add state sensor (#160156) 2026-01-03 08:51:58 +01:00
Manu
11487d6856 Set integration type service in Duck DNS (#160172) 2026-01-03 08:51:18 +01:00
Manu
920e938d84 Add discovery for default hostnames to PlayStation Network (#160173) 2026-01-03 08:44:17 +01:00
Kevin Stillhammer
afc256622a raise proper service exceptions in fressnapf_tracker (#159707)
Co-authored-by: Copilot <198982749+Copilot@users.noreply.github.com>
2026-01-02 19:53:16 +01:00
Erwin Douna
bfef048a7c Bump pyportainer 1.0.22 (#160140) 2026-01-02 18:37:14 +01:00
Maikel Punie
bfc8111728 Bump velbus to silver integration scale (#160147) 2026-01-02 18:36:04 +01:00
Maikel Punie
ebd6ae7e80 Velbus mark entities unavailable when connection is terminated (#160143) 2026-01-02 17:43:33 +01:00
MarkGodwin
dd98a85300 Refactor TP-Link Omada config flow tests (#159950)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-02 17:41:04 +01:00
Brett Adams
6568a19ce6 Handle export options when enrolled to VPP in Teslemetry (#157665) 2026-01-02 16:52:03 +01:00
wollew
83c1e8d5b5 bump pyvlx version to 0.2.27 (#160139) 2026-01-02 16:49:09 +01:00
Simone Chemelli
c5a06657a3 Remove low level call for Shelly climate (#160065) 2026-01-02 16:47:39 +01:00
Maciej Bieniek
25e54990d2 Bump nextdns to version 5.0.0 (#160138) 2026-01-02 16:33:27 +01:00
Nikoheld
3b2a7ba561 bump nibe to 2.21.0 (#160135) 2026-01-02 16:06:43 +01:00
Åke Strandberg
8f8f896675 Add filling level sensors to miele (#157858) 2026-01-02 15:57:15 +01:00
Willem-Jan van Rootselaar
9539a612a6 Add time synchronization feature to BSB-Lan integration (#156600) 2026-01-02 15:54:37 +01:00
Erwin Douna
d6751eb63f Bump pyportainer 1.0.21 (#160130) 2026-01-02 15:06:46 +01:00
Pete Sage
b462038126 Use long service timeout for Sonos Unjoin (#160110) 2026-01-02 14:18:56 +01:00
cdnninja
ce06446376 Add pm1 and pm10 to vesync (#160072)
Co-authored-by: Josef Zweck <josef@zweck.dev>
2026-01-02 14:15:52 +01:00
Erik Montnemery
8de22e0134 Await writes in shopping_list action handlers (#157420) 2026-01-02 13:41:41 +01:00
mettolen
fbd08d4e42 Bump pyairobotrest to 0.2.0 (#160125) 2026-01-02 12:29:29 +01:00
Zoltán Farkasdi
32e0be4535 netatmo: test_camera webhook testing parametrize and light split (#159772) 2026-01-02 11:00:17 +01:00
Maikel Punie
0423639833 Bump velbusaio to 2026.1.1 (#160116) 2026-01-02 09:16:27 +01:00
Jan Bouwhuis
1244d8aa33 Fix reolink brightness scaling (#160106) 2026-01-01 21:56:35 +01:00
Pete Sage
38c37ab33c Improve Sonos wait to unjoin timeout (#160011) 2026-01-01 20:21:25 +01:00
Willem-Jan van Rootselaar
1636eab2e8 Add schema validation for set_hot_water_schedule service (#159990) 2026-01-01 20:16:54 +01:00
Miguel Camba
737a5811a9 Update voluptuous and voluptuous-openapi (#160073) 2026-01-01 20:07:06 +01:00
Austin Mroczek
5f2da20319 Bump total_connect_client to 2025.12.2 (#160075) 2026-01-01 20:02:56 +01:00
Michael Hansen
2aed4fb8e9 Bump intents to 2026.1.1 (#160099) 2026-01-01 19:58:37 +01:00
Lukas
2b10dc4545 Add reconfiguration flow to pooldose (#159978)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-01-01 17:20:33 +01:00
Maikel Punie
b5d22a63bb Velbus quality docs updates (#160092) 2026-01-01 17:02:30 +01:00
Maikel Punie
e8e19f47cd Velbus Exception translations (#159627)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-01-01 16:51:39 +01:00
Maikel Punie
97e6643cd7 Bump velbusaio to 2026.1.0 (#160087) 2026-01-01 16:50:28 +01:00
Ben Wolstencroft
ee4bb0eef5 Add support for health_overview API endpoint to Tractive integration (#157960)
Co-authored-by: Maciej Bieniek <bieniu@users.noreply.github.com>
2026-01-01 13:06:24 +01:00
Maikel Punie
f82bb8f0b8 Use brightness scale in velbus light (#160041) 2026-01-01 13:03:52 +01:00
cdnninja
79b368cfc3 add description to string vesync (#160003) 2025-12-31 22:20:50 +01:00
cdnninja
6da4a006f2 Add Auto Off Switch to VeSync (#160070) 2025-12-31 22:17:33 +01:00
Allen Porter
e5f3ccb38d Improve roborock test accuracy/robustness (#160021) 2025-12-31 16:32:53 +01:00
tronikos
560b91b93b Filter out duplicate voices without language code in Google Cloud (#160046) 2025-12-31 16:30:53 +01:00
Pete Sage
edd9f50562 bump soco to 0.30.14 for Sonos (#160050) 2025-12-31 16:25:55 +01:00
Paul Tarjan
a4b2e84b03 Fix Hikvision thread safety issue when calling async_write_ha_state (#160027) 2025-12-31 15:52:41 +01:00
rlippmann
9da07c2058 remove domain and service slots from Service object (#160039) 2025-12-31 13:34:02 +01:00
Simone Chemelli
8de6785182 Bump aioamazondevices to 11.0.2 (#160016)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2025-12-31 12:31:32 +01:00
Anders Melchiorsen
77f6fa8116 Bump eternalegypt to 0.0.18 (#160006) 2025-12-31 10:57:58 +01:00
Anders Melchiorsen
6b6f338e7e Fix netgear_lte unloading (#160008) 2025-12-31 10:53:24 +01:00
David Knowles
aa995fb590 Use WATER device_class for Hydrawise sensors (#160018) 2025-12-31 10:47:48 +01:00
Anders Melchiorsen
f0fee87b9e Move async_setup_services to async_setup for netgear_lte (#160007) 2025-12-31 10:43:59 +01:00
Erwin Douna
56ab3bf59b Bump pyfirefly 0.1.10 (#160028) 2025-12-31 09:04:40 +01:00
Luke Lashley
24e2720924 Don't prefer cache for Roborock device fetching (#160022) 2025-12-30 13:21:54 -08:00
Erwin Douna
bacc2f00af Bump portainer 1.0.19 (#160014) 2025-12-30 21:13:24 +01:00
Manu
6de2d6810b Convert store image URLs to https in Xbox media resolver (#160015) 2025-12-30 21:10:51 +01:00
Allen Porter
de07833d92 Update roborock binary sensor tests with snapshots (#159981) 2025-12-30 19:36:32 +01:00
Matthias Alphart
b4eff231c3 Update knx-frontend to 2025.12.30.151231 (#159999) 2025-12-30 18:49:02 +01:00
Luke Lashley
98fea46eea Add support for vacuum entity for Roborock Q7 (#159966) 2025-12-30 07:26:18 -08:00
divers33
18e8821891 Add podcast favorites support to Sonos media browser (#159961)
Co-authored-by: divers33 <divers33@users.noreply.github.com>
2025-12-30 15:14:53 +01:00
Sab44
cc2377d44d Bump librehardwaremonitor-api to version 1.7.2 (#159987) 2025-12-30 12:18:50 +01:00
doomsniper09
8370c6abfb Accept integer coordinates in has_location helper (#159835)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
Co-authored-by: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com>
2025-12-30 12:06:23 +01:00
Panda-NZ
2d1a672de5 Add ambient temperature sensor to ToGrill (#159798) 2025-12-30 09:44:23 +01:00
Ernst Klamer
75ea42a834 bump xiaomi-ble to 1.4.1 (#159954) 2025-12-30 00:12:45 +01:00
Lukas
45491e17cd Pooldose Diagnostics (#159965) 2025-12-29 23:03:13 +01:00
Stefan H.
b994f03391 Migrate traccar_server to use entry.runtime_data (#156065)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-29 22:16:01 +01:00
Kamil Breguła
473cb59013 Add translation of exceptions in met (#155765)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-29 22:12:40 +01:00
J. Nick Koston
9302926d99 Bump aioesphomeapi to 43.9.1 (#159960) 2025-12-29 11:09:37 -10:00
Branden Cash
d92516b7c9 Implement reconfigure config flow in SRP energy (#151542)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-29 21:52:25 +01:00
Luke Lashley
5b561213d3 Bump Python-Roborock to 4.1.0 (#159963) 2025-12-29 21:52:13 +01:00
Erwin Douna
0a16bd4919 Portainer fix stopped container for stats (#159964) 2025-12-29 21:51:46 +01:00
Michael
f74a6e2625 Record current Feedreader integration quality scale and set to silver (#143179)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-29 21:36:23 +01:00
Joost Lekkerkerker
ecc271409a Small cleanup in Feedreader (#159962) 2025-12-29 21:31:25 +01:00
Michael
1f63bc3231 Record current Synology DSM integration quality scale (#141245)
Co-authored-by: Joostlek <joostlek@outlook.com>
2025-12-29 21:24:18 +01:00
Joost Lekkerkerker
78adeb837e Inject session in Switchbot cloud (#159942) 2025-12-29 21:18:34 +01:00
Joost Lekkerkerker
bfacf462bf Add integration_type service to nuheat (#159845) 2025-12-29 21:12:23 +01:00
Joost Lekkerkerker
771d40dbf6 Add integration_type hub to permobil (#159872) 2025-12-29 21:12:05 +01:00
Joost Lekkerkerker
8e441242ad Add integration_type hub to pooldose (#159880) 2025-12-29 21:11:46 +01:00
Joost Lekkerkerker
b8a4237ab1 Add integration_type hub to poolsense (#159881) 2025-12-29 21:11:17 +01:00
Joost Lekkerkerker
e92af1ee76 Add integration_type device to ps4 (#159892) 2025-12-29 21:10:52 +01:00
Matthias Alphart
e561c1cebb Fix KNX translation references (#159959) 2025-12-29 20:50:53 +01:00
Franck Nijhof
d77f82f8e8 Bump version to 2026.2.0dev0 (#159956) 2025-12-29 20:38:24 +01:00
Joost Lekkerkerker
fcc3598d7f Add integration_type device to netgear (#159816) 2025-12-29 21:14:58 +02:00
213 changed files with 12801 additions and 2196 deletions

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.1"
HA_SHORT_VERSION: "2026.2"
DEFAULT_PYTHON: "3.13.11"
ALL_PYTHON_VERSIONS: "['3.13.11', '3.14.2']"
# 10.3 is the oldest supported version

View File

@@ -13,5 +13,5 @@
"iot_class": "local_polling",
"loggers": ["pyairobotrest"],
"quality_scale": "silver",
"requirements": ["pyairobotrest==0.1.0"]
"requirements": ["pyairobotrest==0.2.0"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["airos==0.6.0"]
"requirements": ["airos==0.6.1"]
}

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==10.0.0"]
"requirements": ["aioamazondevices==11.0.2"]
}

View File

@@ -2,6 +2,9 @@
"services": {
"set_hot_water_schedule": {
"service": "mdi:calendar-clock"
+},
+"sync_time": {
+"service": "mdi:timer-sync-outline"
}
}
}

View File

@@ -7,7 +7,7 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["bsblan"],
"requirements": ["python-bsblan==3.1.4"],
"requirements": ["python-bsblan==3.1.6"],
"zeroconf": [
{
"name": "bsb-lan*",

View File

@@ -7,11 +7,13 @@ import logging
from typing import TYPE_CHECKING
from bsblan import BSBLANError, DaySchedule, DHWSchedule, TimeSlot
import voluptuous as vol
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.util import dt as dt_util
from .const import DOMAIN
@@ -29,32 +31,32 @@ ATTR_FRIDAY_SLOTS = "friday_slots"
ATTR_SATURDAY_SLOTS = "saturday_slots"
ATTR_SUNDAY_SLOTS = "sunday_slots"
# Service name
# Service names
SERVICE_SET_HOT_WATER_SCHEDULE = "set_hot_water_schedule"
SERVICE_SYNC_TIME = "sync_time"
def _parse_time_value(value: time | str) -> time:
"""Parse a time value from either a time object or string.
# Schema for a single time slot
_SLOT_SCHEMA = vol.Schema(
{
vol.Required("start_time"): cv.time,
vol.Required("end_time"): cv.time,
}
)
Raises ServiceValidationError if the format is invalid.
"""
if isinstance(value, time):
return value
if isinstance(value, str):
try:
parts = value.split(":")
return time(int(parts[0]), int(parts[1]))
except (ValueError, IndexError):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_time_format",
) from None
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_time_format",
)
SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): cv.string,
vol.Optional(ATTR_MONDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_TUESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_WEDNESDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_THURSDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_FRIDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_SATURDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
vol.Optional(ATTR_SUNDAY_SLOTS): vol.All(cv.ensure_list, [_SLOT_SCHEMA]),
}
)
def _convert_time_slots_to_day_schedule(
@@ -62,8 +64,8 @@ def _convert_time_slots_to_day_schedule(
) -> DaySchedule | None:
"""Convert list of time slot dicts to a DaySchedule object.
Example: [{"start_time": "06:00", "end_time": "08:00"},
{"start_time": "17:00", "end_time": "21:00"}]
Example: [{"start_time": time(6, 0), "end_time": time(8, 0)},
{"start_time": time(17, 0), "end_time": time(21, 0)}]
becomes: DaySchedule with two TimeSlot objects
None returns None (don't modify this day).
@@ -77,31 +79,27 @@ def _convert_time_slots_to_day_schedule(
time_slots = []
for slot in slots:
start = slot.get("start_time")
end = slot.get("end_time")
start_time = slot["start_time"]
end_time = slot["end_time"]
if start and end:
start_time = _parse_time_value(start)
end_time = _parse_time_value(end)
# Validate that end time is after start time
if end_time <= start_time:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="end_time_before_start_time",
translation_placeholders={
"start_time": start_time.strftime("%H:%M"),
"end_time": end_time.strftime("%H:%M"),
},
)
time_slots.append(TimeSlot(start=start_time, end=end_time))
LOGGER.debug(
"Created time slot: %s-%s",
start_time.strftime("%H:%M"),
end_time.strftime("%H:%M"),
# Validate that end time is after start time
if end_time <= start_time:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="end_time_before_start_time",
translation_placeholders={
"start_time": start_time.strftime("%H:%M"),
"end_time": end_time.strftime("%H:%M"),
},
)
time_slots.append(TimeSlot(start=start_time, end=end_time))
LOGGER.debug(
"Created time slot: %s-%s",
start_time.strftime("%H:%M"),
end_time.strftime("%H:%M"),
)
LOGGER.debug("Created DaySchedule with %d slots", len(time_slots))
return DaySchedule(slots=time_slots)
@@ -207,6 +205,74 @@ async def set_hot_water_schedule(service_call: ServiceCall) -> None:
await entry.runtime_data.slow_coordinator.async_request_refresh()
async def async_sync_time(service_call: ServiceCall) -> None:
"""Synchronize BSB-LAN device time with Home Assistant."""
device_id: str = service_call.data[ATTR_DEVICE_ID]
# Get the device and config entry
device_registry = dr.async_get(service_call.hass)
device_entry = device_registry.async_get(device_id)
if device_entry is None:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="invalid_device_id",
translation_placeholders={"device_id": device_id},
)
# Find the config entry for this device
matching_entries: list[BSBLanConfigEntry] = [
entry
for entry in service_call.hass.config_entries.async_entries(DOMAIN)
if entry.entry_id in device_entry.config_entries
]
if not matching_entries:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="no_config_entry_for_device",
translation_placeholders={"device_id": device_entry.name or device_id},
)
entry = matching_entries[0]
# Verify the config entry is loaded
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_loaded",
translation_placeholders={"device_name": device_entry.name or device_id},
)
client = entry.runtime_data.client
try:
# Get current device time
device_time = await client.time()
current_time = dt_util.now()
current_time_str = current_time.strftime("%d.%m.%Y %H:%M:%S")
# Only sync if device time differs from HA time
if device_time.time.value != current_time_str:
await client.set_time(current_time_str)
except BSBLANError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="sync_time_failed",
translation_placeholders={
"device_name": device_entry.name or device_id,
"error": str(err),
},
) from err
SYNC_TIME_SCHEMA = vol.Schema(
{
vol.Required(ATTR_DEVICE_ID): cv.string,
}
)
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Register the BSB-Lan services."""
@@ -214,4 +280,12 @@ def async_setup_services(hass: HomeAssistant) -> None:
DOMAIN,
SERVICE_SET_HOT_WATER_SCHEDULE,
set_hot_water_schedule,
schema=SERVICE_SET_HOT_WATER_SCHEDULE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
SERVICE_SYNC_TIME,
async_sync_time,
schema=SYNC_TIME_SCHEMA,
)
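Usage sketch (not part of the commit above): with the schemas registered as shown, the two BSB-LAN services could be invoked from Python inside a running Home Assistant instance roughly as follows; the hass reference and the device ID "abc123device456" are placeholders.

# Hedged example, assumes an async context with a valid `hass` and a real
# device registry ID in place of the placeholder string.
await hass.services.async_call(
    "bsblan",
    "sync_time",
    {"device_id": "abc123device456"},
    blocking=True,
)
await hass.services.async_call(
    "bsblan",
    "set_hot_water_schedule",
    {
        "device_id": "abc123device456",
        # cv.time accepts "HH:MM" strings, so plain strings validate here.
        "monday_slots": [{"start_time": "06:00", "end_time": "08:00"}],
    },
    blocking=True,
)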

View File

@@ -1,3 +1,12 @@
+sync_time:
+fields:
+device_id:
+required: true
+example: "abc123device456"
+selector:
+device:
+integration: bsblan
set_hot_water_schedule:
fields:
device_id:

View File

@@ -79,9 +79,6 @@
"invalid_device_id": {
"message": "Invalid device ID: {device_id}"
},
"invalid_time_format": {
"message": "Invalid time format provided"
},
"no_config_entry_for_device": {
"message": "No configuration entry found for device: {device_id}"
},
@@ -108,6 +105,9 @@
},
"setup_general_error": {
"message": "An unknown error occurred while retrieving static device data"
+},
+"sync_time_failed": {
+"message": "Failed to sync time for {device_name}: {error}"
}
},
"services": {
@@ -148,6 +148,16 @@
}
},
"name": "Set hot water schedule"
+},
+"sync_time": {
+"description": "Synchronize Home Assistant time to the BSB-Lan device. Only updates if device time differs from Home Assistant time.",
+"fields": {
+"device_id": {
+"description": "The BSB-LAN device to sync time for.",
+"name": "Device"
+}
+},
+"name": "Sync time"
}
}
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2025.12.2"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.1"]
}

View File

@@ -4,5 +4,6 @@
"codeowners": ["@tr4nt0r"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/duckdns",
"integration_type": "service",
"iot_class": "cloud_polling"
}

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==43.9.0",
"aioesphomeapi==43.10.1",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

View File

@@ -9,14 +9,12 @@ from homeassistant.util.hass_dict import HassKey
from .const import DOMAIN
from .coordinator import FeedReaderConfigEntry, FeedReaderCoordinator, StoredData
CONF_URLS = "urls"
-MY_KEY: HassKey[StoredData] = HassKey(DOMAIN)
+FEEDREADER_KEY: HassKey[StoredData] = HassKey(DOMAIN)
async def async_setup_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry) -> bool:
"""Set up Feedreader from a config entry."""
-storage = hass.data.setdefault(MY_KEY, StoredData(hass))
+storage = hass.data.setdefault(FEEDREADER_KEY, StoredData(hass))
if not storage.is_initialized:
await storage.async_setup()
@@ -42,5 +40,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: FeedReaderConfigEntry)
)
# if this is the last entry, remove the storage
if len(entries) == 1:
-hass.data.pop(MY_KEY)
+hass.data.pop(FEEDREADER_KEY)
return await hass.config_entries.async_unload_platforms(entry, [Platform.EVENT])

View File

@@ -19,6 +19,9 @@ from .coordinator import FeedReaderCoordinator
LOGGER = logging.getLogger(__name__)
# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
ATTR_CONTENT = "content"
ATTR_DESCRIPTION = "description"
ATTR_LINK = "link"
@@ -42,16 +45,15 @@ class FeedReaderEvent(CoordinatorEntity[FeedReaderCoordinator], EventEntity):
_attr_event_types = [EVENT_FEEDREADER]
_attr_name = None
_attr_has_entity_name = True
_attr_translation_key = "latest_feed"
_unrecorded_attributes = frozenset(
{ATTR_CONTENT, ATTR_DESCRIPTION, ATTR_TITLE, ATTR_LINK}
)
coordinator: FeedReaderCoordinator
def __init__(self, coordinator: FeedReaderCoordinator) -> None:
"""Initialize the feedreader event."""
super().__init__(coordinator)
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_latest_feed"
self._attr_translation_key = "latest_feed"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
name=coordinator.config_entry.title,

View File

@@ -0,0 +1,94 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment: missing test for uniqueness of feed URL.
config-flow:
status: todo
comment: missing data descriptions
dependency-transparency: done
docs-actions:
status: exempt
comment: No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: No authentication support.
test-coverage:
status: done
comment: Can use freezer for skipping time instead
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: No discovery support.
discovery:
status: exempt
comment: No discovery support.
docs-data-update: done
docs-examples: done
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: done
dynamic-devices:
status: exempt
comment: Each config entry, represents one service.
entity-category: done
entity-device-class:
status: exempt
comment: Matches no available event entity class.
entity-disabled-by-default:
status: exempt
comment: Only one entity per config entry.
entity-translations: todo
exception-translations: todo
icon-translations: done
reconfiguration-flow: done
repair-issues:
status: done
comment: Only one repair-issue for yaml-import defined.
stale-devices:
status: exempt
comment: Each config entry, represents one service.
# Platinum
async-dependency:
status: todo
comment: feedparser lib is not async.
inject-websession:
status: todo
comment: feedparser lib doesn't take a session as argument.
strict-typing:
status: todo
comment: feedparser lib is not fully typed.

View File

@@ -21,12 +21,6 @@
}
}
},
"issues": {
"import_yaml_error_url_error": {
"description": "Configuring the Feedreader using YAML is being removed but there was a connection error when trying to import the YAML configuration for `{url}`.\n\nPlease verify that the URL is reachable and accessible for Home Assistant and restart Home Assistant to try again or remove the Feedreader YAML configuration from your configuration.yaml file and continue to set up the integration manually.",
"title": "The Feedreader YAML configuration import failed"
}
},
"options": {
"step": {
"init": {

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.8"]
"requirements": ["pyfirefly==0.1.10"]
}

View File

@@ -35,6 +35,6 @@ BACKEND_MODELS = ["s1", "speech-1.5", "speech-1.6"]
SORT_BY_OPTIONS = ["task_count", "score", "created_at"]
LATENCY_OPTIONS = ["normal", "balanced"]
SIGNUP_URL = "https://fish.audio/?fpr=homeassistant" # codespell:ignore fpr
SIGNUP_URL = "https://fish.audio/"
BILLING_URL = "https://fish.audio/app/billing/"
API_KEYS_URL = "https://fish.audio/app/api-keys/"

View File

@@ -2,6 +2,8 @@
from typing import TYPE_CHECKING, Any
+from fressnapftracker import FressnapfTrackerError
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ColorMode,
@@ -16,6 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .const import DOMAIN
from .entity import FressnapfTrackerEntity
+from .services import handle_fressnapf_tracker_exception
PARALLEL_UPDATES = 1
@@ -61,12 +64,18 @@ class FressnapfTrackerLight(FressnapfTrackerEntity, LightEntity):
self.raise_if_not_activatable()
brightness = kwargs.get(ATTR_BRIGHTNESS, 255)
brightness = int((brightness / 255) * 100)
-await self.coordinator.client.set_led_brightness(brightness)
+try:
+await self.coordinator.client.set_led_brightness(brightness)
+except FressnapfTrackerError as e:
+handle_fressnapf_tracker_exception(e)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
-await self.coordinator.client.set_led_brightness(0)
+try:
+await self.coordinator.client.set_led_brightness(0)
+except FressnapfTrackerError as e:
+handle_fressnapf_tracker_exception(e)
await self.coordinator.async_request_refresh()
def raise_if_not_activatable(self) -> None:

View File

@@ -26,7 +26,7 @@ rules:
unique-config-entry: done
# Silver
-action-exceptions: todo
+action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done

View File

@@ -0,0 +1,21 @@
"""Services and service helpers for fressnapf_tracker."""
from fressnapftracker import FressnapfTrackerError, FressnapfTrackerInvalidTokenError
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from .const import DOMAIN
def handle_fressnapf_tracker_exception(exception: FressnapfTrackerError):
"""Handle the different FressnapfTracker errors."""
if isinstance(exception, FressnapfTrackerInvalidTokenError):
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
) from exception
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error_message": str(exception)},
) from exception

View File

@@ -77,6 +77,9 @@
}
},
"exceptions": {
"api_error": {
"message": "An error occurred while communicating with the Fressnapf Tracker API: {error_message}"
},
"charging": {
"message": "The flashlight cannot be activated while charging."
},

View File

@@ -2,6 +2,8 @@
from typing import TYPE_CHECKING, Any
+from fressnapftracker import FressnapfTrackerError
from homeassistant.components.switch import (
SwitchDeviceClass,
SwitchEntity,
@@ -13,6 +15,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import FressnapfTrackerConfigEntry
from .entity import FressnapfTrackerEntity
+from .services import handle_fressnapf_tracker_exception
PARALLEL_UPDATES = 1
@@ -43,12 +46,18 @@ class FressnapfTrackerSwitch(FressnapfTrackerEntity, SwitchEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn on the device."""
-await self.coordinator.client.set_energy_saving(True)
+try:
+await self.coordinator.client.set_energy_saving(True)
+except FressnapfTrackerError as e:
+handle_fressnapf_tracker_exception(e)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the device."""
-await self.coordinator.client.set_energy_saving(False)
+try:
+await self.coordinator.client.set_energy_saving(False)
+except FressnapfTrackerError as e:
+handle_fressnapf_tracker_exception(e)
await self.coordinator.async_request_refresh()
@property

View File

@@ -48,6 +48,8 @@ async def async_tts_voices(
list_voices_response = await client.list_voices()
for voice in list_voices_response.voices:
language_code = voice.language_codes[0]
+if not voice.name.startswith(language_code):
+continue
if language_code not in voices:
voices[language_code] = []
voices[language_code].append(voice.name)
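For illustration, a standalone sketch of the filtering rule added above (invented sample data, not the integration's code): a voice is kept only when its name starts with the language code it reports, which drops duplicate entries published without a language prefix.

# Hedged sketch of the dedup rule; names and data are made up.
def dedupe_voices(pairs: list[tuple[str, str]]) -> dict[str, list[str]]:
    """Group (language_code, voice_name) pairs, skipping unprefixed duplicates."""
    voices: dict[str, list[str]] = {}
    for language_code, name in pairs:
        if not name.startswith(language_code):
            continue  # duplicate voice listed without its language prefix
        voices.setdefault(language_code, []).append(name)
    return voices

assert dedupe_voices([("en-US", "en-US-Neural2-A"), ("en-US", "Neural2-A")]) == {
    "en-US": ["en-US-Neural2-A"]
}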

View File

@@ -24,7 +24,7 @@ from homeassistant.const import (
CONF_SSL,
CONF_USERNAME,
)
-from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
+from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.device_registry import DeviceInfo
@@ -227,7 +227,10 @@ class HikvisionBinarySensor(BinarySensorEntity):
# Register callback with pyhik
self._camera.add_update_callback(self._update_callback, self._callback_id)
-@callback
def _update_callback(self, msg: str) -> None:
-"""Update the sensor's state when callback is triggered."""
-self.async_write_ha_state()
+"""Update the sensor's state when callback is triggered.
+This is called from pyhik's event stream thread, so we use
+schedule_update_ha_state which is thread-safe.
+"""
+self.schedule_update_ha_state()
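For context, a minimal sketch of the thread-safety rule behind this fix (illustrative only, not the integration's code): async_write_ha_state may only be called from the event loop, while schedule_update_ha_state is safe to call from a library's worker thread.

# Hedged sketch: a push-based entity whose updates arrive on a worker thread.
from homeassistant.components.binary_sensor import BinarySensorEntity


class PushedMotionSensor(BinarySensorEntity):
    """Hypothetical entity updated by a library's event-stream thread."""

    _attr_should_poll = False

    def handle_library_event(self, is_on: bool) -> None:
        # Runs outside the event loop, so use the thread-safe helper
        # instead of async_write_ha_state().
        self._attr_is_on = is_on
        self.schedule_update_ha_state()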

View File

@@ -67,21 +67,21 @@ FLOW_CONTROLLER_SENSORS: tuple[HydrawiseSensorEntityDescription, ...] = (
HydrawiseSensorEntityDescription(
key="daily_total_water_use",
translation_key="daily_total_water_use",
-device_class=SensorDeviceClass.VOLUME,
+device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_use,
),
HydrawiseSensorEntityDescription(
key="daily_active_water_use",
translation_key="daily_active_water_use",
-device_class=SensorDeviceClass.VOLUME,
+device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_active_use,
),
HydrawiseSensorEntityDescription(
key="daily_inactive_water_use",
translation_key="daily_inactive_water_use",
-device_class=SensorDeviceClass.VOLUME,
+device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: _get_water_use(sensor).total_inactive_use,
),
@@ -91,7 +91,7 @@ FLOW_ZONE_SENSORS: tuple[SensorEntityDescription, ...] = (
HydrawiseSensorEntityDescription(
key="daily_active_water_use",
translation_key="daily_active_water_use",
-device_class=SensorDeviceClass.VOLUME,
+device_class=SensorDeviceClass.WATER,
suggested_display_precision=1,
value_fn=lambda sensor: float(
_get_water_use(sensor).active_use_by_zone_id.get(sensor.zone.id, 0.0)
@@ -204,7 +204,7 @@ class HydrawiseSensor(HydrawiseEntity, SensorEntity):
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit_of_measurement of the sensor."""
-if self.entity_description.device_class != SensorDeviceClass.VOLUME:
+if self.entity_description.device_class != SensorDeviceClass.WATER:
return self.entity_description.native_unit_of_measurement
return (
UnitOfVolume.GALLONS
@@ -217,7 +217,7 @@ class HydrawiseSensor(HydrawiseEntity, SensorEntity):
"""Icon of the entity based on the value."""
if (
self.entity_description.key in FLOW_MEASUREMENT_KEYS
-and self.entity_description.device_class == SensorDeviceClass.VOLUME
+and self.entity_description.device_class == SensorDeviceClass.WATER
and round(self.state, 2) == 0.0
):
return "mdi:water-outline"

View File

@@ -114,24 +114,26 @@ class KnxYamlBinarySensor(_KnxBinarySensor, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX binary sensor."""
self._device = XknxBinarySensor(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_state=config[CONF_STATE_ADDRESS],
invert=config[CONF_INVERT],
sync_state=config[CONF_SYNC_STATE],
ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
reset_after=config.get(CONF_RESET_AFTER),
always_callback=True,
)
super().__init__(
knx_module=knx_module,
device=XknxBinarySensor(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[CONF_STATE_ADDRESS],
invert=config[CONF_INVERT],
sync_state=config[CONF_SYNC_STATE],
ignore_internal_state=config[CONF_IGNORE_INTERNAL_STATE],
context_timeout=config.get(CONF_CONTEXT_TIMEOUT),
reset_after=config.get(CONF_RESET_AFTER),
always_callback=True,
),
unique_id=str(self._device.remote_value.group_address_state),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_force_update = self._device.ignore_internal_state
self._attr_unique_id = str(self._device.remote_value.group_address_state)
class KnxUiBinarySensor(_KnxBinarySensor, KnxUiEntity):

View File

@@ -35,19 +35,18 @@ class KNXButton(KnxYamlEntity, ButtonEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX button."""
super().__init__(
knx_module=knx_module,
device=XknxRawValue(
xknx=knx_module.xknx,
name=config[CONF_NAME],
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
),
self._device = XknxRawValue(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
)
self._payload = config[CONF_PAYLOAD]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.remote_value.group_address}_{self._payload}"
super().__init__(
knx_module=knx_module,
unique_id=f"{self._device.remote_value.group_address}_{self._payload}",
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
async def async_press(self) -> None:

View File

@@ -119,11 +119,11 @@ async def async_setup_entry(
async_add_entities(entities)
def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate:
def _create_climate_yaml(xknx: XKNX, config: ConfigType) -> XknxClimate:
"""Return a KNX Climate device to be used within XKNX."""
climate_mode = XknxClimateMode(
xknx,
name=f"{config[CONF_NAME]} Mode",
name=f"{config.get(CONF_NAME, '')} Mode",
group_address_operation_mode=config.get(
ClimateSchema.CONF_OPERATION_MODE_ADDRESS
),
@@ -164,7 +164,7 @@ def _create_climate(xknx: XKNX, config: ConfigType) -> XknxClimate:
return XknxClimate(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
group_address_temperature=config[ClimateSchema.CONF_TEMPERATURE_ADDRESS],
group_address_target_temperature=config.get(
ClimateSchema.CONF_TARGET_TEMPERATURE_ADDRESS
@@ -647,9 +647,17 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX climate device."""
self._device = _create_climate_yaml(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_climate(knx_module.xknx, config),
unique_id=(
f"{self._device.temperature.group_address_state}_"
f"{self._device.target_temperature.group_address_state}_"
f"{self._device.target_temperature.group_address}_"
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
default_hvac_mode: HVACMode = config[ClimateConf.DEFAULT_CONTROLLER_MODE]
fan_max_step = config[ClimateConf.FAN_MAX_STEP]
@@ -661,14 +669,6 @@ class KnxYamlClimate(_KnxClimate, KnxYamlEntity):
fan_zero_mode=fan_zero_mode,
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.temperature.group_address_state}_"
f"{self._device.target_temperature.group_address_state}_"
f"{self._device.target_temperature.group_address}_"
f"{self._device._setpoint_shift.group_address}" # noqa: SLF001
)
class KnxUiClimate(_KnxClimate, KnxUiEntity):
"""Representation of a KNX climate device configured from the UI."""

View File

@@ -191,36 +191,34 @@ class KnxYamlCover(_KnxCover, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize the cover."""
self._device = XknxCover(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
group_address_position_state=config.get(
CoverSchema.CONF_POSITION_STATE_ADDRESS
),
group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
group_address_angle_state=config.get(CoverSchema.CONF_ANGLE_STATE_ADDRESS),
group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
invert_updown=config[CoverConf.INVERT_UPDOWN],
invert_position=config[CoverConf.INVERT_POSITION],
invert_angle=config[CoverConf.INVERT_ANGLE],
)
super().__init__(
knx_module=knx_module,
device=XknxCover(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_long=config.get(CoverSchema.CONF_MOVE_LONG_ADDRESS),
group_address_short=config.get(CoverSchema.CONF_MOVE_SHORT_ADDRESS),
group_address_stop=config.get(CoverSchema.CONF_STOP_ADDRESS),
group_address_position_state=config.get(
CoverSchema.CONF_POSITION_STATE_ADDRESS
),
group_address_angle=config.get(CoverSchema.CONF_ANGLE_ADDRESS),
group_address_angle_state=config.get(
CoverSchema.CONF_ANGLE_STATE_ADDRESS
),
group_address_position=config.get(CoverSchema.CONF_POSITION_ADDRESS),
travel_time_down=config[CoverConf.TRAVELLING_TIME_DOWN],
travel_time_up=config[CoverConf.TRAVELLING_TIME_UP],
invert_updown=config[CoverConf.INVERT_UPDOWN],
invert_position=config[CoverConf.INVERT_POSITION],
invert_angle=config[CoverConf.INVERT_ANGLE],
unique_id=(
f"{self._device.updown.group_address}_"
f"{self._device.position_target.group_address}"
),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self.init_base()
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.updown.group_address}_"
f"{self._device.position_target.group_address}"
)
if custom_device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = custom_device_class

View File

@@ -105,20 +105,21 @@ class KnxYamlDate(_KNXDate, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX date."""
self._device = XknxDateDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
device=XknxDateDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDate(_KNXDate, KnxUiEntity):

View File

@@ -110,20 +110,21 @@ class KnxYamlDateTime(_KNXDateTime, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX datetime."""
self._device = XknxDateTimeDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
device=XknxDateTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiDateTime(_KNXDateTime, KnxUiEntity):

View File

@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING, Any
from xknx.devices import Device as XknxDevice
from homeassistant.const import CONF_ENTITY_CATEGORY, EntityCategory
from homeassistant.const import CONF_ENTITY_CATEGORY, CONF_NAME, EntityCategory
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_platform import EntityPlatform
@@ -52,14 +52,11 @@ class _KnxEntityBase(Entity):
"""Representation of a KNX entity."""
_attr_should_poll = False
_attr_unique_id: str
_knx_module: KNXModule
_device: XknxDevice
@property
def name(self) -> str:
"""Return the name of the KNX device."""
return self._device.name
@property
def available(self) -> bool:
"""Return True if entity is available."""
@@ -100,16 +97,23 @@ class _KnxEntityBase(Entity):
class KnxYamlEntity(_KnxEntityBase):
"""Representation of a KNX entity configured from YAML."""
def __init__(self, knx_module: KNXModule, device: XknxDevice) -> None:
def __init__(
self,
knx_module: KNXModule,
unique_id: str,
name: str | None,
entity_category: EntityCategory | None,
) -> None:
"""Initialize the YAML entity."""
self._knx_module = knx_module
self._device = device
self._attr_name = name
self._attr_unique_id = unique_id
self._attr_entity_category = entity_category
class KnxUiEntity(_KnxEntityBase):
"""Representation of a KNX UI entity."""
_attr_unique_id: str
_attr_has_entity_name = True
def __init__(
@@ -117,6 +121,8 @@ class KnxUiEntity(_KnxEntityBase):
) -> None:
"""Initialize the UI entity."""
self._knx_module = knx_module
self._attr_name = entity_config[CONF_NAME]
self._attr_unique_id = unique_id
if entity_category := entity_config.get(CONF_ENTITY_CATEGORY):
self._attr_entity_category = EntityCategory(entity_category)

View File

@@ -152,32 +152,28 @@ class KnxYamlFan(_KnxFan, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX fan."""
max_step = config.get(FanConf.MAX_STEP)
self._device = XknxFan(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_speed=config.get(KNX_ADDRESS),
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
group_address_oscillation=config.get(FanSchema.CONF_OSCILLATION_ADDRESS),
group_address_oscillation_state=config.get(
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
),
group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
group_address_switch_state=config.get(FanSchema.CONF_SWITCH_STATE_ADDRESS),
max_step=max_step,
sync_state=config.get(CONF_SYNC_STATE, True),
)
super().__init__(
knx_module=knx_module,
device=XknxFan(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address_speed=config.get(KNX_ADDRESS),
group_address_speed_state=config.get(FanSchema.CONF_STATE_ADDRESS),
group_address_oscillation=config.get(
FanSchema.CONF_OSCILLATION_ADDRESS
),
group_address_oscillation_state=config.get(
FanSchema.CONF_OSCILLATION_STATE_ADDRESS
),
group_address_switch=config.get(FanSchema.CONF_SWITCH_ADDRESS),
group_address_switch_state=config.get(
FanSchema.CONF_SWITCH_STATE_ADDRESS
),
max_step=max_step,
sync_state=config.get(CONF_SYNC_STATE, True),
),
unique_id=str(self._device.speed.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
# FanSpeedMode.STEP if max_step is set
self._step_range: tuple[int, int] | None = (1, max_step) if max_step else None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.speed.group_address)
class KnxUiFan(_KnxFan, KnxUiEntity):

View File

@@ -121,7 +121,7 @@ def _create_yaml_light(xknx: XKNX, config: ConfigType) -> XknxLight:
return XknxLight(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
group_address_switch=config.get(KNX_ADDRESS),
group_address_switch_state=config.get(LightSchema.CONF_STATE_ADDRESS),
group_address_brightness=config.get(LightSchema.CONF_BRIGHTNESS_ADDRESS),
@@ -558,15 +558,16 @@ class KnxYamlLight(_KnxLight, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX light."""
self._device = _create_yaml_light(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_yaml_light(knx_module.xknx, config),
unique_id=self._device_unique_id(),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_color_mode = next(iter(self.supported_color_modes))
self._attr_max_color_temp_kelvin: int = config[LightSchema.CONF_MAX_KELVIN]
self._attr_min_color_temp_kelvin: int = config[LightSchema.CONF_MIN_KELVIN]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = self._device_unique_id()
def _device_unique_id(self) -> str:
"""Return unique id for this device."""

View File

@@ -13,7 +13,7 @@
"requirements": [
"xknx==3.13.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.28.215221"
"knx-frontend==2025.12.30.151231"
],
"single_config_entry": true
}

View File

@@ -33,7 +33,7 @@ def _create_notification_instance(xknx: XKNX, config: ConfigType) -> XknxNotific
"""Return a KNX Notification to be used within XKNX."""
return XknxNotification(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
value_type=config[CONF_TYPE],
)
@@ -46,12 +46,13 @@ class KNXNotify(KnxYamlEntity, NotifyEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX notification."""
self._device = _create_notification_instance(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_notification_instance(knx_module.xknx, config),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_send_message(self, message: str, title: str | None = None) -> None:
"""Send a notification to knx bus."""

View File

@@ -44,7 +44,7 @@ def _create_numeric_value(xknx: XKNX, config: ConfigType) -> NumericValue:
"""Return a KNX NumericValue to be used within XKNX."""
return NumericValue(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
@@ -59,9 +59,12 @@ class KNXNumber(KnxYamlEntity, RestoreNumber):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX number."""
self._device = _create_numeric_value(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_numeric_value(knx_module.xknx, config),
unique_id=str(self._device.sensor_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_native_max_value = config.get(
NumberSchema.CONF_MAX,
@@ -76,8 +79,6 @@ class KNXNumber(KnxYamlEntity, RestoreNumber):
NumberSchema.CONF_STEP,
self._device.sensor_value.dpt_class.resolution,
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._device.sensor_value.value = max(0, self._attr_native_min_value)

View File

@@ -83,18 +83,19 @@ class KnxYamlScene(_KnxScene, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize KNX scene."""
self._device = XknxScene(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
)
super().__init__(
knx_module=knx_module,
device=XknxScene(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
scene_number=config[SceneSchema.CONF_SCENE_NUMBER],
unique_id=(
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = (
f"{self._device.scene_value.group_address}_{self._device.scene_number}"
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)

View File

@@ -214,16 +214,22 @@ class KNXPlatformSchema(ABC):
}
COMMON_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
class BinarySensorSchema(KNXPlatformSchema):
"""Voluptuous schema for KNX binary sensors."""
PLATFORM = Platform.BINARY_SENSOR
DEFAULT_NAME = "KNX Binary Sensor"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
@@ -233,7 +239,6 @@ class BinarySensorSchema(KNXPlatformSchema):
),
vol.Optional(CONF_DEVICE_CLASS): BINARY_SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_RESET_AFTER): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
)
@@ -245,7 +250,6 @@ class ButtonSchema(KNXPlatformSchema):
PLATFORM = Platform.BUTTON
CONF_VALUE = "value"
DEFAULT_NAME = "KNX Button"
payload_or_value_msg = f"Please use only one of `{CONF_PAYLOAD}` or `{CONF_VALUE}`"
length_or_type_msg = (
@@ -253,9 +257,8 @@ class ButtonSchema(KNXPlatformSchema):
)
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Exclusive(
CONF_PAYLOAD, "payload_or_value", msg=payload_or_value_msg
@@ -269,7 +272,6 @@ class ButtonSchema(KNXPlatformSchema):
vol.Exclusive(
CONF_TYPE, "length_or_type", msg=length_or_type_msg
): object,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -337,7 +339,6 @@ class ClimateSchema(KNXPlatformSchema):
CONF_SWING_HORIZONTAL_ADDRESS = "swing_horizontal_address"
CONF_SWING_HORIZONTAL_STATE_ADDRESS = "swing_horizontal_state_address"
DEFAULT_NAME = "KNX Climate"
DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010"
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
@@ -346,9 +347,8 @@ class ClimateSchema(KNXPlatformSchema):
DEFAULT_FAN_SPEED_MODE = "percent"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
ClimateConf.SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX
): vol.All(int, vol.Range(min=0, max=32)),
@@ -448,12 +448,10 @@ class CoverSchema(KNXPlatformSchema):
CONF_ANGLE_STATE_ADDRESS = "angle_state_address"
DEFAULT_TRAVEL_TIME = 25
DEFAULT_NAME = "KNX Cover"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MOVE_LONG_ADDRESS): ga_list_validator,
vol.Optional(CONF_MOVE_SHORT_ADDRESS): ga_list_validator,
vol.Optional(CONF_STOP_ADDRESS): ga_list_validator,
@@ -471,7 +469,6 @@ class CoverSchema(KNXPlatformSchema):
vol.Optional(CoverConf.INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CoverConf.INVERT_ANGLE, default=False): cv.boolean,
vol.Optional(CONF_DEVICE_CLASS): COVER_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -496,16 +493,12 @@ class DateSchema(KNXPlatformSchema):
PLATFORM = Platform.DATE
DEFAULT_NAME = "KNX Date"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -515,16 +508,12 @@ class DateTimeSchema(KNXPlatformSchema):
PLATFORM = Platform.DATETIME
DEFAULT_NAME = "KNX DateTime"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -580,12 +569,9 @@ class FanSchema(KNXPlatformSchema):
CONF_SWITCH_ADDRESS = "switch_address"
CONF_SWITCH_STATE_ADDRESS = "switch_state_address"
DEFAULT_NAME = "KNX Fan"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_SWITCH_ADDRESS): ga_list_validator,
@@ -593,7 +579,6 @@ class FanSchema(KNXPlatformSchema):
vol.Optional(CONF_OSCILLATION_ADDRESS): ga_list_validator,
vol.Optional(CONF_OSCILLATION_STATE_ADDRESS): ga_list_validator,
vol.Optional(FanConf.MAX_STEP): cv.byte,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
}
),
@@ -638,7 +623,6 @@ class LightSchema(KNXPlatformSchema):
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"
DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
@@ -670,9 +654,8 @@ class LightSchema(KNXPlatformSchema):
)
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): ga_list_validator,
@@ -722,7 +705,6 @@ class LightSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
vol.Any(
@@ -768,14 +750,10 @@ class NotifySchema(KNXPlatformSchema):
PLATFORM = Platform.NOTIFY
DEFAULT_NAME = "KNX Notify"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Required(KNX_ADDRESS): ga_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -788,12 +766,10 @@ class NumberSchema(KNXPlatformSchema):
CONF_MAX = "max"
CONF_MIN = "min"
CONF_STEP = "step"
DEFAULT_NAME = "KNX Number"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_MODE, default=NumberMode.AUTO): vol.Coerce(
NumberMode
@@ -804,7 +780,6 @@ class NumberSchema(KNXPlatformSchema):
vol.Optional(CONF_MAX): vol.Coerce(float),
vol.Optional(CONF_MIN): vol.Coerce(float),
vol.Optional(CONF_STEP): cv.positive_float,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
number_limit_sub_validator,
@@ -818,15 +793,12 @@ class SceneSchema(KNXPlatformSchema):
CONF_SCENE_NUMBER = "scene_number"
DEFAULT_NAME = "KNX SCENE"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Required(SceneConf.SCENE_NUMBER): vol.All(
vol.Coerce(int), vol.Range(min=1, max=64)
),
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -838,12 +810,10 @@ class SelectSchema(KNXPlatformSchema):
CONF_OPTION = "option"
CONF_OPTIONS = "options"
DEFAULT_NAME = "KNX Select"
ENTITY_SCHEMA = vol.All(
vol.Schema(
COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(CONF_PAYLOAD_LENGTH): vol.All(
@@ -857,7 +827,6 @@ class SelectSchema(KNXPlatformSchema):
],
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
select_options_sub_validator,
@@ -872,18 +841,15 @@ class SensorSchema(KNXPlatformSchema):
CONF_ALWAYS_CALLBACK = "always_callback"
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -896,16 +862,13 @@ class SwitchSchema(KNXPlatformSchema):
CONF_INVERT = CONF_INVERT
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
DEFAULT_NAME = "KNX Switch"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_INVERT, default=False): cv.boolean,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SWITCH_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -915,17 +878,13 @@ class TextSchema(KNXPlatformSchema):
PLATFORM = Platform.TEXT
DEFAULT_NAME = "KNX Text"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_TYPE, default="latin_1"): string_type_validator,
vol.Optional(CONF_MODE, default=TextMode.TEXT): vol.Coerce(TextMode),
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -935,16 +894,12 @@ class TimeSchema(KNXPlatformSchema):
PLATFORM = Platform.TIME
DEFAULT_NAME = "KNX Time"
ENTITY_SCHEMA = vol.Schema(
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_RESPOND_TO_READ, default=False): cv.boolean,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(KNX_ADDRESS): ga_list_validator,
vol.Optional(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
)
@@ -969,27 +924,21 @@ class WeatherSchema(KNXPlatformSchema):
CONF_KNX_AIR_PRESSURE_ADDRESS = "address_air_pressure"
CONF_KNX_HUMIDITY_ADDRESS = "address_humidity"
DEFAULT_NAME = "KNX Weather Station"
ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
ENTITY_SCHEMA = COMMON_ENTITY_SCHEMA.extend(
{
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_BRIGHTNESS_NORTH_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_BEARING_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): ga_list_validator,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): ga_list_validator,
}
)
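
The platform schemas above now extend a shared COMMON_ENTITY_SCHEMA instead of each declaring their own name and entity-category options (and per-platform default names). The base schema itself is not part of this diff; a minimal sketch of what it presumably provides, with the constants defined locally purely for illustration:

import voluptuous as vol

from homeassistant.const import CONF_NAME, EntityCategory
from homeassistant.helpers import config_validation as cv

# Defined locally for this sketch; the real schema module shares these elsewhere.
CONF_ENTITY_CATEGORY = "entity_category"
ENTITY_CATEGORIES_SCHEMA = vol.Coerce(EntityCategory)

# Optional name (no per-platform default) plus optional entity category,
# reused by every KNX YAML platform via COMMON_ENTITY_SCHEMA.extend({...}).
COMMON_ENTITY_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
    }
)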

View File

@@ -49,7 +49,7 @@ def _create_raw_value(xknx: XKNX, config: ConfigType) -> RawValue:
"""Return a KNX RawValue to be used within XKNX."""
return RawValue(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
payload_length=config[CONF_PAYLOAD_LENGTH],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
@@ -65,9 +65,12 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX select."""
self._device = _create_raw_value(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_raw_value(knx_module.xknx, config),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._option_payloads: dict[str, int] = {
option[SelectSchema.CONF_OPTION]: option[CONF_PAYLOAD]
@@ -75,8 +78,6 @@ class KNXSelect(KnxYamlEntity, SelectEntity, RestoreEntity):
}
self._attr_options = list(self._option_payloads)
self._attr_current_option = None
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
async def async_added_to_hass(self) -> None:
"""Restore last state."""

View File

@@ -200,16 +200,19 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = XknxSensor(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
device=XknxSensor(
knx_module.xknx,
name=config[CONF_NAME],
group_address_state=config[SensorSchema.CONF_STATE_ADDRESS],
sync_state=config[CONF_SYNC_STATE],
always_callback=True,
value_type=config[CONF_TYPE],
),
unique_id=str(self._device.sensor_value.group_address_state),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
@@ -219,8 +222,6 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
)
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}

View File

@@ -154,6 +154,27 @@
}
},
"config_panel": {
"dashboard": {
"connection_flow": {
"description": "Reconfigure KNX connection or import a new KNX keyring file",
"title": "Connection settings"
},
"options_flow": {
"description": "Configure integration settings",
"title": "Integration options"
},
"project_upload": {
"description": "Import a KNX project file to help configure group addresses and datapoint types",
"title": "[%key:component::knx::config_panel::dialogs::project_upload::title%]"
}
},
"dialogs": {
"project_upload": {
"description": "Details such as group address names, datapoint types, devices and group objects are extracted from your project file. The ETS project file itself and its optional password are not stored.\n\n`.knxproj` files exported by ETS 4, 5 or 6 are supported.",
"file_upload_label": "ETS project file",
"title": "Import ETS project"
}
},
"dpt": {
"options": {
"5": "Generic 1-byte unsigned integer",
@@ -845,9 +866,9 @@
},
"mode": {
"description": "Select how the entity is displayed in Home Assistant.",
"label": "[%common::config_flow::data::mode%]",
"label": "[%key:common::config_flow::data::mode%]",
"options": {
"password": "[%common::config_flow::data::password%]",
"password": "[%key:common::config_flow::data::password%]",
"text": "[%key:component::text::entity_component::_::state_attributes::mode::state::text%]"
}
}

View File

@@ -107,20 +107,21 @@ class KnxYamlSwitch(_KnxSwitch, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of KNX switch."""
self._device = XknxSwitch(
xknx=knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
)
super().__init__(
knx_module=knx_module,
device=XknxSwitch(
xknx=knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(SwitchSchema.CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
invert=config[SwitchSchema.CONF_INVERT],
),
unique_id=str(self._device.switch.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_device_class = config.get(CONF_DEVICE_CLASS)
self._attr_unique_id = str(self._device.switch.group_address)
class KnxUiSwitch(_KnxSwitch, KnxUiEntity):

View File

@@ -112,20 +112,21 @@ class KnxYamlText(_KnxText, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX text."""
self._device = XknxNotification(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
)
super().__init__(
knx_module=knx_module,
device=XknxNotification(
knx_module.xknx,
name=config[CONF_NAME],
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=config[CONF_TYPE],
),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_mode = config[CONF_MODE]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiText(_KnxText, KnxUiEntity):

View File

@@ -105,20 +105,21 @@ class KnxYamlTime(_KNXTime, KnxYamlEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize a KNX time."""
self._device = XknxTimeDevice(
knx_module.xknx,
name=config.get(CONF_NAME, ""),
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
)
super().__init__(
knx_module=knx_module,
device=XknxTimeDevice(
knx_module.xknx,
name=config[CONF_NAME],
localtime=False,
group_address=config[KNX_ADDRESS],
group_address_state=config.get(CONF_STATE_ADDRESS),
respond_to_read=config[CONF_RESPOND_TO_READ],
sync_state=config[CONF_SYNC_STATE],
),
unique_id=str(self._device.remote_value.group_address),
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.remote_value.group_address)
class KnxUiTime(_KNXTime, KnxUiEntity):

View File

@@ -43,7 +43,7 @@ def _create_weather(xknx: XKNX, config: ConfigType) -> XknxWeather:
"""Return a KNX weather device to be used within XKNX."""
return XknxWeather(
xknx,
name=config[CONF_NAME],
name=config.get(CONF_NAME, ""),
sync_state=config[WeatherSchema.CONF_SYNC_STATE],
group_address_temperature=config[WeatherSchema.CONF_KNX_TEMPERATURE_ADDRESS],
group_address_brightness_south=config.get(
@@ -85,12 +85,13 @@ class KNXWeather(KnxYamlEntity, WeatherEntity):
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
"""Initialize of a KNX sensor."""
self._device = _create_weather(knx_module.xknx, config)
super().__init__(
knx_module=knx_module,
device=_create_weather(knx_module.xknx, config),
unique_id=str(self._device._temperature.group_address_state), # noqa: SLF001
name=config.get(CONF_NAME),
entity_category=config.get(CONF_ENTITY_CATEGORY),
)
self._attr_unique_id = str(self._device._temperature.group_address_state) # noqa: SLF001
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
@property
def native_temperature(self) -> float | None:

View File

@@ -80,8 +80,6 @@ async def register_panel(hass: HomeAssistant) -> None:
hass=hass,
frontend_url_path=DOMAIN,
webcomponent_name=knx_panel.webcomponent_name,
sidebar_title=DOMAIN.upper(),
sidebar_icon="mdi:bus-electric",
module_url=f"{URL_BASE}/{knx_panel.entrypoint_js}",
embed_iframe=True,
require_admin=True,

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["librehardwaremonitor-api==1.6.0"]
"requirements": ["librehardwaremonitor-api==1.7.2"]
}

View File

@@ -116,8 +116,12 @@ class MetDataUpdateCoordinator(DataUpdateCoordinator[MetWeatherData]):
"""Fetch data from Met."""
try:
return await self.weather.fetch_data()
except Exception as err:
raise UpdateFailed(f"Update failed: {err}") from err
except CannotConnect as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={"error": str(err)},
) from err
def track_home(self) -> None:
"""Start tracking changes to HA home setting."""

View File

@@ -19,6 +19,11 @@
}
}
},
"exceptions": {
"update_failed": {
"message": "Update of data from the web site failed: {error}"
}
},
"options": {
"step": {
"init": {

View File

@@ -19,7 +19,12 @@ from homeassistant.helpers.typing import ConfigType
from .api import AsyncConfigEntryAuth
from .const import DOMAIN
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
from .coordinator import (
MieleAuxDataUpdateCoordinator,
MieleConfigEntry,
MieleDataUpdateCoordinator,
MieleRuntimeData,
)
from .services import async_setup_services
PLATFORMS: list[Platform] = [
@@ -75,19 +80,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: MieleConfigEntry) -> boo
) from err
# Setup MieleAPI and coordinator for data fetch
api = MieleAPI(auth)
coordinator = MieleDataUpdateCoordinator(hass, entry, api)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
_api = MieleAPI(auth)
_coordinator = MieleDataUpdateCoordinator(hass, entry, _api)
await _coordinator.async_config_entry_first_refresh()
_aux_coordinator = MieleAuxDataUpdateCoordinator(hass, entry, _api)
await _aux_coordinator.async_config_entry_first_refresh()
entry.runtime_data = MieleRuntimeData(_api, _coordinator, _aux_coordinator)
entry.async_create_background_task(
hass,
coordinator.api.listen_events(
data_callback=coordinator.callback_update_data,
actions_callback=coordinator.callback_update_actions,
entry.runtime_data.api.listen_events(
data_callback=_coordinator.callback_update_data,
actions_callback=_coordinator.callback_update_actions,
),
"pymiele event listener",
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -107,5 +116,5 @@ async def async_remove_config_entry_device(
identifier
for identifier in device_entry.identifiers
if identifier[0] == DOMAIN
and identifier[1] in config_entry.runtime_data.data.devices
and identifier[1] in config_entry.runtime_data.coordinator.data.devices
)

View File

@@ -264,7 +264,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the binary sensor platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -112,7 +112,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the button platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -138,7 +138,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the climate platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -9,7 +9,13 @@ from datetime import timedelta
import logging
from aiohttp import ClientResponseError
from pymiele import MieleAction, MieleAPI, MieleDevice
from pymiele import (
MieleAction,
MieleAPI,
MieleDevice,
MieleFillingLevel,
MieleFillingLevels,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -20,7 +26,16 @@ from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
type MieleConfigEntry = ConfigEntry[MieleDataUpdateCoordinator]
@dataclass
class MieleRuntimeData:
"""Runtime data for the Miele integration."""
api: MieleAPI
coordinator: MieleDataUpdateCoordinator
aux_coordinator: MieleAuxDataUpdateCoordinator
type MieleConfigEntry = ConfigEntry[MieleRuntimeData]
@dataclass
@@ -31,8 +46,15 @@ class MieleCoordinatorData:
actions: dict[str, MieleAction]
@dataclass
class MieleAuxCoordinatorData:
"""Data class for storing auxiliary coordinator data."""
filling_levels: dict[str, MieleFillingLevel]
class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
"""Coordinator for Miele data."""
"""Main coordinator for Miele data."""
config_entry: MieleConfigEntry
new_device_callbacks: list[Callable[[dict[str, MieleDevice]], None]] = []
@@ -66,6 +88,7 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
}
self.devices = devices
actions = {}
for device_id in devices:
try:
actions_json = await self.api.get_actions(device_id)
@@ -99,10 +122,7 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
device_id: MieleDevice(device) for device_id, device in devices_json.items()
}
self.async_set_updated_data(
MieleCoordinatorData(
devices=devices,
actions=self.data.actions,
)
MieleCoordinatorData(devices=devices, actions=self.data.actions)
)
async def callback_update_actions(self, actions_json: dict[str, dict]) -> None:
@@ -111,8 +131,34 @@ class MieleDataUpdateCoordinator(DataUpdateCoordinator[MieleCoordinatorData]):
device_id: MieleAction(action) for device_id, action in actions_json.items()
}
self.async_set_updated_data(
MieleCoordinatorData(
devices=self.data.devices,
actions=actions,
)
MieleCoordinatorData(devices=self.data.devices, actions=actions)
)
class MieleAuxDataUpdateCoordinator(DataUpdateCoordinator[MieleAuxCoordinatorData]):
"""Coordinator for Miele data for slowly polled endpoints."""
config_entry: MieleConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: MieleConfigEntry,
api: MieleAPI,
) -> None:
"""Initialize the Miele data coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=timedelta(seconds=60),
)
self.api = api
async def _async_update_data(self) -> MieleAuxCoordinatorData:
"""Fetch data from the Miele API."""
filling_levels_json = await self.api.get_filling_levels()
return MieleAuxCoordinatorData(
filling_levels=MieleFillingLevels(filling_levels_json).filling_levels
)

View File

@@ -38,13 +38,19 @@ async def async_get_config_entry_diagnostics(
"devices": redact_identifiers(
{
device_id: device_data.raw
for device_id, device_data in config_entry.runtime_data.data.devices.items()
for device_id, device_data in config_entry.runtime_data.coordinator.data.devices.items()
}
),
"filling_levels": redact_identifiers(
{
device_id: filling_level_data.raw
for device_id, filling_level_data in config_entry.runtime_data.aux_coordinator.data.filling_levels.items()
}
),
"actions": redact_identifiers(
{
device_id: action_data.raw
for device_id, action_data in config_entry.runtime_data.data.actions.items()
for device_id, action_data in config_entry.runtime_data.coordinator.data.actions.items()
}
),
}
@@ -68,13 +74,19 @@ async def async_get_device_diagnostics(
"model_id": device.model_id,
}
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
aux_coordinator = config_entry.runtime_data.aux_coordinator
device_id = cast(str, device.serial_number)
miele_data: dict[str, Any] = {
"devices": {
hash_identifier(device_id): coordinator.data.devices[device_id].raw
},
"filling_levels": {
hash_identifier(device_id): aux_coordinator.data.filling_levels[
device_id
].raw
},
"actions": {
hash_identifier(device_id): coordinator.data.actions[device_id].raw
},

View File

@@ -1,16 +1,18 @@
"""Entity base class for the Miele integration."""
from pymiele import MieleAction, MieleAPI, MieleDevice
from pymiele import MieleAction, MieleAPI, MieleDevice, MieleFillingLevel
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEVICE_TYPE_TAGS, DOMAIN, MANUFACTURER, MieleAppliance, StateStatus
from .coordinator import MieleDataUpdateCoordinator
from .coordinator import MieleAuxDataUpdateCoordinator, MieleDataUpdateCoordinator
class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
class MieleBaseEntity[
_MieleCoordinatorT: MieleDataUpdateCoordinator | MieleAuxDataUpdateCoordinator
](CoordinatorEntity[_MieleCoordinatorT]):
"""Base class for Miele entities."""
_attr_has_entity_name = True
@@ -22,7 +24,7 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
def __init__(
self,
coordinator: MieleDataUpdateCoordinator,
coordinator: _MieleCoordinatorT,
device_id: str,
description: EntityDescription,
) -> None:
@@ -30,7 +32,26 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
super().__init__(coordinator)
self._device_id = device_id
self.entity_description = description
self._attr_unique_id = MieleEntity.get_unique_id(device_id, description)
self._attr_unique_id = MieleBaseEntity.get_unique_id(device_id, description)
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device_id)})
@property
def api(self) -> MieleAPI:
"""Return the api object."""
return self.coordinator.api
class MieleEntity(MieleBaseEntity[MieleDataUpdateCoordinator]):
"""Base class for Miele entities that use the main data coordinator."""
def __init__(
self,
coordinator: MieleDataUpdateCoordinator,
device_id: str,
description: EntityDescription,
) -> None:
"""Initialize the entity."""
super().__init__(coordinator, device_id, description)
device = self.device
appliance_type = DEVICE_TYPE_TAGS.get(MieleAppliance(device.device_type))
@@ -61,11 +82,6 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
"""Return the actions object."""
return self.coordinator.data.actions[self._device_id]
@property
def api(self) -> MieleAPI:
"""Return the api object."""
return self.coordinator.api
@property
def available(self) -> bool:
"""Return the availability of the entity."""
@@ -75,3 +91,12 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
and self._device_id in self.coordinator.data.devices
and (self.device.state_status is not StateStatus.not_connected)
)
class MieleAuxEntity(MieleBaseEntity[MieleAuxDataUpdateCoordinator]):
"""Base class for Miele entities that use the auxiliary data coordinator."""
@property
def levels(self) -> MieleFillingLevel:
"""Return the filling levels object."""
return self.coordinator.data.filling_levels[self._device_id]

View File

@@ -66,7 +66,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the fan platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -71,6 +71,9 @@
"plate_step_warming": "mdi:alpha-w-circle-outline"
}
},
"power_disk_level": {
"default": "mdi:car-coolant-level"
},
"program_id": {
"default": "mdi:selection-ellipse-arrow-inside"
},
@@ -83,6 +86,12 @@
"remaining_time": {
"default": "mdi:clock-end"
},
"rinse_aid_level": {
"default": "mdi:water-opacity"
},
"salt_level": {
"default": "mdi:shaker-outline"
},
"spin_speed": {
"default": "mdi:sync"
},
@@ -95,6 +104,12 @@
"target_temperature": {
"default": "mdi:thermometer-check"
},
"twin_dos_1_level": {
"default": "mdi:car-coolant-level"
},
"twin_dos_2_level": {
"default": "mdi:car-coolant-level"
},
"water_forecast": {
"default": "mdi:water-outline"
}

View File

@@ -86,7 +86,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the light platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -71,7 +71,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the select platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
import logging
from typing import Any, Final, cast
from pymiele import MieleDevice, MieleTemperature
from pymiele import MieleDevice, MieleFillingLevel, MieleTemperature
from homeassistant.components.sensor import (
RestoreSensor,
@@ -44,8 +44,12 @@ from .const import (
StateProgramType,
StateStatus,
)
from .coordinator import MieleConfigEntry, MieleDataUpdateCoordinator
from .entity import MieleEntity
from .coordinator import (
MieleAuxDataUpdateCoordinator,
MieleConfigEntry,
MieleDataUpdateCoordinator,
)
from .entity import MieleAuxEntity, MieleEntity
PARALLEL_UPDATES = 0
@@ -139,10 +143,13 @@ def _convert_finish_timestamp(
@dataclass(frozen=True, kw_only=True)
class MieleSensorDescription(SensorEntityDescription):
class MieleSensorDescription[T: (MieleDevice, MieleFillingLevel)](
SensorEntityDescription
):
"""Class describing Miele sensor entities."""
value_fn: Callable[[MieleDevice], StateType | datetime]
value_fn: Callable[[T], StateType | datetime]
end_value_fn: Callable[[StateType | datetime], StateType | datetime] | None = None
extra_attributes: dict[str, Callable[[MieleDevice], StateType]] | None = None
zone: int | None = None
@@ -150,14 +157,14 @@ class MieleSensorDescription(SensorEntityDescription):
@dataclass
class MieleSensorDefinition:
class MieleSensorDefinition[T: (MieleDevice, MieleFillingLevel)]:
"""Class for defining sensor entities."""
types: tuple[MieleAppliance, ...]
description: MieleSensorDescription
description: MieleSensorDescription[T]
SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
MieleSensorDefinition(
types=(
MieleAppliance.WASHING_MACHINE,
@@ -689,6 +696,59 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition, ...]] = (
),
)
POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]] = (
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_1_level",
translation_key="twin_dos_1_level",
value_fn=lambda value: value.twin_dos_container_1_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_2_level",
translation_key="twin_dos_2_level",
value_fn=lambda value: value.twin_dos_container_2_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="power_disk_level",
translation_key="power_disk_level",
value_fn=lambda value: None,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="salt_level",
translation_key="salt_level",
value_fn=lambda value: value.salt_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.DISHWASHER,),
description=MieleSensorDescription[MieleFillingLevel](
key="rinse_aid_level",
translation_key="rinse_aid_level",
value_fn=lambda value: value.rinse_aid_filling_level,
native_unit_of_measurement=PERCENTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
)
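
MieleSensorDescription and MieleSensorDefinition are parametrized with a constrained type variable so push-updated device sensors and polled filling-level sensors share one description class while value_fn stays type-checked. A tiny standalone illustration of that PEP 695 pattern (Python 3.12+, toy types, not from the integration):

from collections.abc import Callable
from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class ToyDescription[T: (int, str)]:
    """Description whose value_fn argument type is fixed per use."""

    key: str
    value_fn: Callable[[T], float | None]


int_desc = ToyDescription[int](key="count", value_fn=lambda v: float(v))
str_desc = ToyDescription[str](key="length", value_fn=lambda v: float(len(v)))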
async def async_setup_entry(
hass: HomeAssistant,
@@ -696,11 +756,14 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
aux_coordinator = config_entry.runtime_data.aux_coordinator
added_devices: set[str] = set() # device_id
added_entities: set[str] = set() # unique_id
def _get_entity_class(definition: MieleSensorDefinition) -> type[MieleSensor]:
def _get_entity_class(
definition: MieleSensorDefinition[MieleDevice],
) -> type[MieleSensor]:
"""Get the entity class for the sensor."""
return {
"state_status": MieleStatusSensor,
@@ -725,7 +788,7 @@ async def async_setup_entry(
)
def _is_sensor_enabled(
definition: MieleSensorDefinition,
definition: MieleSensorDefinition[MieleDevice],
device: MieleDevice,
unique_id: str,
) -> bool:
@@ -748,6 +811,15 @@ async def async_setup_entry(
return False
return True
def _enabled_aux_sensor(
definition: MieleSensorDefinition[MieleFillingLevel], level: MieleFillingLevel
) -> bool:
"""Check if aux sensors are enabled."""
return not (
definition.description.value_fn is not None
and definition.description.value_fn(level) is None
)
def _async_add_devices() -> None:
nonlocal added_devices, added_entities
entities: list = []
@@ -775,7 +847,11 @@ async def async_setup_entry(
continue
# sensors is not enabled, skip
if not _is_sensor_enabled(definition, device, unique_id):
if not _is_sensor_enabled(
definition,
device,
unique_id,
):
continue
added_entities.add(unique_id)
@@ -787,6 +863,15 @@ async def async_setup_entry(
config_entry.async_on_unload(coordinator.async_add_listener(_async_add_devices))
_async_add_devices()
async_add_entities(
MieleAuxSensor(aux_coordinator, device_id, definition.description)
for device_id in aux_coordinator.data.filling_levels
for definition in POLLED_SENSOR_TYPES
if _enabled_aux_sensor(
definition, aux_coordinator.data.filling_levels[device_id]
)
)
APPLIANCE_ICONS = {
MieleAppliance.WASHING_MACHINE: "mdi:washing-machine",
@@ -885,6 +970,32 @@ class MieleRestorableSensor(MieleSensor, RestoreSensor):
super()._handle_coordinator_update()
class MieleAuxSensor(MieleAuxEntity, SensorEntity):
"""Representation of a filling level Sensor."""
entity_description: MieleSensorDescription
def __init__(
self,
coordinator: MieleAuxDataUpdateCoordinator,
device_id: str,
description: MieleSensorDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator, device_id, description)
if description.unique_id_fn is not None:
self._attr_unique_id = description.unique_id_fn(device_id, description)
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the level sensor."""
return (
self.entity_description.value_fn(self.levels)
if self.entity_description.value_fn is not None
else None
)
class MielePlateSensor(MieleSensor):
"""Representation of a Sensor."""

View File

@@ -257,6 +257,9 @@
"plate_step_warm": "Warming"
}
},
"power_disk_level": {
"name": "PowerDisk level"
},
"program_id": {
"name": "Program",
"state": {
@@ -1038,6 +1041,12 @@
"remaining_time": {
"name": "Remaining time"
},
"rinse_aid_level": {
"name": "Rinse aid level"
},
"salt_level": {
"name": "Salt level"
},
"spin_speed": {
"name": "Spin speed"
},
@@ -1080,6 +1089,12 @@
"temperature_zone_3": {
"name": "Temperature zone 3"
},
"twin_dos_1_level": {
"name": "TwinDos 1 level"
},
"twin_dos_2_level": {
"name": "TwinDos 2 level"
},
"water_consumption": {
"name": "Water consumption"
},

View File

@@ -117,7 +117,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the switch platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
added_devices: set[str] = set()
def _async_add_new_devices() -> None:

View File

@@ -128,7 +128,7 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the vacuum platform."""
coordinator = config_entry.runtime_data
coordinator = config_entry.runtime_data.coordinator
async_add_entities(
MieleVacuum(coordinator, device_id, definition.description)

View File

@@ -4,6 +4,7 @@
"codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/netgear",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pynetgear"],
"requirements": ["pynetgear==0.10.10"],

View File

@@ -51,7 +51,6 @@ ALL_BINARY_SENSORS = [
PLATFORMS = [
Platform.BINARY_SENSOR,
Platform.NOTIFY,
Platform.SENSOR,
]
@@ -61,6 +60,7 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up Netgear LTE component."""
hass.data[DATA_HASS_CONFIG] = config
async_setup_services(hass)
return True
@@ -96,19 +96,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry) -
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
async_setup_services(hass)
await discovery.async_load_platform(
hass,
Platform.NOTIFY,
DOMAIN,
{CONF_NAME: entry.title, "modem": modem},
{CONF_NAME: entry.title, "modem": modem, "entry": entry},
hass.data[DATA_HASS_CONFIG],
)
await hass.config_entries.async_forward_entry_setups(
entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY]
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -118,7 +114,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: NetgearLTEConfigEntry)
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if not hass.config_entries.async_loaded_entries(DOMAIN):
hass.data.pop(DOMAIN, None)
for service_name in hass.services.async_services()[DOMAIN]:
hass.services.async_remove(DOMAIN, service_name)
return unload_ok

View File

@@ -14,7 +14,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import DEFAULT_HOST, DOMAIN, LOGGER, MANUFACTURER
from .const import DEFAULT_HOST, DOMAIN, MANUFACTURER
class NetgearLTEFlowHandler(ConfigFlow, domain=DOMAIN):
@@ -72,9 +72,6 @@ class NetgearLTEFlowHandler(ConfigFlow, domain=DOMAIN):
info = await modem.information()
except Error as ex:
raise InputValidationError("cannot_connect") from ex
except Exception as ex:
LOGGER.exception("Unexpected exception")
raise InputValidationError("unknown") from ex
await modem.logout()
return info

View File

@@ -7,5 +7,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eternalegypt"],
"requirements": ["eternalegypt==0.0.16"]
"requirements": ["eternalegypt==0.0.18"]
}

View File

@@ -38,6 +38,7 @@ class NetgearNotifyService(BaseNotificationService):
"""Initialize the service."""
self.config = config
self.modem: Modem = discovery_info["modem"]
discovery_info["entry"].async_on_unload(self.async_unregister_services)
async def async_send_message(self, message="", **kwargs):
"""Send a message to a user."""

View File

@@ -4,6 +4,7 @@ import voluptuous as vol
from homeassistant.const import CONF_HOST
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from .const import (
@@ -14,7 +15,6 @@ from .const import (
AUTOCONNECT_MODES,
DOMAIN,
FAILOVER_MODES,
LOGGER,
)
from .coordinator import NetgearLTEConfigEntry
@@ -56,8 +56,11 @@ async def _service_handler(call: ServiceCall) -> None:
break
if not entry or not (modem := entry.runtime_data.modem).token:
LOGGER.error("%s: host %s unavailable", call.service, host)
return
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="config_entry_not_found",
translation_placeholders={"service": call.service},
)
if call.service == SERVICE_DELETE_SMS:
for sms_id in call.data[ATTR_SMS_ID]:

View File

@@ -71,6 +71,11 @@
}
}
},
"exceptions": {
"config_entry_not_found": {
"message": "Failed to perform action \"{service}\". Config entry for target not found"
}
},
"services": {
"connect_lte": {
"description": "Asks the modem to establish the LTE connection.",

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["nextdns"],
"quality_scale": "platinum",
"requirements": ["nextdns==4.1.0"]
"requirements": ["nextdns==5.0.0"]
}

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nibe_heatpump",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["nibe==2.20.0"]
"requirements": ["nibe==2.21.0"]
}

View File

@@ -10,6 +10,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/nuheat",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["nuheat"],
"requirements": ["nuheat==1.0.1"]

View File

@@ -61,7 +61,10 @@ async def async_get_device_diagnostics(
data["execution_history"] = [
repr(execution)
for execution in await client.get_execution_history()
if any(command.device_url == device_url for command in execution.commands)
if any(
command.device_url.split("#", 1)[0] == device_url.split("#", 1)[0]
for command in execution.commands
)
]
return data
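
A quick illustration, with hypothetical Overkiz device URLs, of why both sides are stripped at the first "#": execution-history commands can target a subsystem of the same base device.

device_url = "io://1234-5678-9012/12345678"      # hypothetical base device URL
command_url = "io://1234-5678-9012/12345678#2"   # same device, subsystem 2

assert command_url.split("#", 1)[0] == device_url.split("#", 1)[0]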

View File

@@ -4,6 +4,7 @@
"codeowners": ["@IsakNyberg"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/permobil",
"integration_type": "device",
"iot_class": "cloud_polling",
"requirements": ["mypermobil==0.1.8"]
}

View File

@@ -75,6 +75,15 @@
},
{
"macaddress": "84E657*"
},
{
"hostname": "ps5-*"
},
{
"hostname": "ps4-*"
},
{
"hostname": "ps3"
}
],
"documentation": "https://www.home-assistant.io/integrations/playstation_network",

View File

@@ -114,32 +114,72 @@ class PooldoseConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
if not user_input:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)
await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
)
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display; pass version info when available
# or None when api_versions is None to avoid displaying version details
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should") or "",
}
if api_versions
else None,
)
await self.async_set_unique_id(serial_number, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"PoolDose {serial_number}",
data={CONF_HOST: host},
return self.async_show_form(
step_id="user",
data_schema=SCHEMA_DEVICE,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfigure to change the device host/IP for an existing entry."""
if user_input is not None:
host = user_input[CONF_HOST]
serial_number, api_versions, errors = await self._validate_host(host)
if errors:
return self.async_show_form(
step_id="reconfigure",
data_schema=SCHEMA_DEVICE,
errors=errors,
# Handle API version info for error display identical to other steps
description_placeholders={
"api_version_is": api_versions.get("api_version_is") or "",
"api_version_should": api_versions.get("api_version_should")
or "",
}
if api_versions
else None,
)
# Ensure new serial number matches the existing entry unique_id (serial number)
if serial_number != self._get_reconfigure_entry().unique_id:
return self.async_abort(reason="wrong_device")
# Update the existing config entry with the new host and schedule reload
return self.async_update_reload_and_abort(
self._get_reconfigure_entry(), data_updates={CONF_HOST: host}
)
return self.async_show_form(
step_id="reconfigure",
# Pre-fill with current host from the entry being reconfigured
data_schema=self.add_suggested_values_to_schema(
SCHEMA_DEVICE, self._get_reconfigure_entry().data
),
)

View File

@@ -0,0 +1,34 @@
"""Diagnostics support for Pooldose."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant
from . import PooldoseConfigEntry
TO_REDACT = {
"IP",
"MAC",
"WIFI_SSID",
"AP_SSID",
"SERIAL_NUMBER",
"DEVICE_ID",
"OWNERID",
"NAME",
"GROUPNAME",
}
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: PooldoseConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator = entry.runtime_data
return {
"device_info": async_redact_data(coordinator.device_info, TO_REDACT),
"data": coordinator.data,
}
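
For reference, async_redact_data keeps the structure of the payload but masks the value of any key listed in TO_REDACT (made-up payload and keys below):

from homeassistant.components.diagnostics import async_redact_data

sample = {"SERIAL_NUMBER": "ABC123", "WIFI_SSID": "pool-net", "FW_CODE": "1.2.3"}
print(async_redact_data(sample, {"SERIAL_NUMBER", "WIFI_SSID"}))
# {'SERIAL_NUMBER': '**REDACTED**', 'WIFI_SSID': '**REDACTED**', 'FW_CODE': '1.2.3'}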

View File

@@ -9,6 +9,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/pooldose",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "silver",
"requirements": ["python-pooldose==0.8.1"]

View File

@@ -41,7 +41,7 @@ rules:
# Gold
devices: done
diagnostics: todo
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: done
@@ -53,20 +53,20 @@ rules:
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: This integration does not support dynamic devices, as it is designed for a single PoolDose device.
comment: This integration does not support dynamic device discovery, as each config entry represents a single PoolDose device with all available entities.
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: This integration does not provide repair issues, as it is designed for a single PoolDose device with a fixed configuration.
comment: This integration does not have any identified cases where repair issues would be needed.
stale-devices:
status: exempt
comment: This integration does not support stale devices, as it is designed for a single PoolDose device with a fixed configuration.
comment: This integration manages a single device per config entry, so stale device removal is not applicable.
# Platinum
async-dependency: done

View File

@@ -4,7 +4,9 @@
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_device_info": "Unable to retrieve device information",
"no_serial_number": "No serial number found on the device"
"no_serial_number": "No serial number found on the device",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_device": "The provided device does not match the configured device"
},
"error": {
"api_not_set": "API version not found in device response. Device firmware may not be compatible with this integration.",
@@ -20,6 +22,14 @@
"description": "A PoolDose device was found on your network at {ip} with MAC address {mac}.\n\nDo you want to add {name} to Home Assistant?",
"title": "Confirm DHCP discovered PoolDose device"
},
"reconfigure": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "[%key:component::pooldose::config::step::user::data_description::host%]"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

View File

@@ -4,6 +4,7 @@
"codeowners": ["@haemishkyd"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/poolsense",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["poolsense"],
"requirements": ["poolsense==0.0.8"]

View File

@@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import CONTAINER_STATE_RUNNING
from .coordinator import PortainerContainerData, PortainerCoordinator
from .entity import (
PortainerContainerEntity,
@@ -41,7 +42,7 @@ CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] =
PortainerContainerBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.container.state == "running",
state_fn=lambda data: data.container.state == CONTAINER_STATE_RUNNING,
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
@@ -163,7 +164,11 @@ class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return super().available and self.endpoint_id in self.coordinator.data
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
@property
def is_on(self) -> bool | None:

View File

@@ -113,7 +113,9 @@ class PortainerButton(PortainerContainerEntity, ButtonEntity):
"""Trigger the Portainer button press service."""
try:
await self.entity_description.press_action(
self.coordinator.portainer, self.endpoint_id, self.device_id
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
)
except PortainerConnectionError as err:
raise HomeAssistantError(

View File

@@ -4,3 +4,5 @@ DOMAIN = "portainer"
DEFAULT_NAME = "Portainer"
ENDPOINT_STATUS_DOWN = 2
CONTAINER_STATE_RUNNING = "running"

View File

@@ -24,7 +24,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, ENDPOINT_STATUS_DOWN
from .const import CONTAINER_STATE_RUNNING, DOMAIN, ENDPOINT_STATUS_DOWN
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
@@ -50,7 +50,7 @@ class PortainerContainerData:
"""Container data held by the Portainer coordinator."""
container: DockerContainer
stats: DockerContainerStats
stats: DockerContainerStats | None
stats_pre: DockerContainerStats | None
@@ -147,46 +147,52 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
docker_version = await self.portainer.docker_version(endpoint.id)
docker_info = await self.portainer.docker_info(endpoint.id)
prev_endpoint = self.data.get(endpoint.id) if self.data else None
container_map: dict[str, PortainerContainerData] = {}
container_stats_task = [
(
container,
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
),
# Map containers, started and stopped
for container in containers:
container_name = self._get_container_name(container.names[0])
prev_container = (
prev_endpoint.containers[container_name]
if prev_endpoint
else None
)
for container in containers
]
container_stats_gather = await asyncio.gather(
*[task for _, task in container_stats_task],
)
for (container, _), container_stats in zip(
container_stats_task, container_stats_gather, strict=False
):
container_name = container.names[0].replace("/", " ").strip()
# Store previous stats if available. This is used to calculate deltas for CPU and network usage
# In the first call it will be None, since it has nothing to compare with
# Added a walrus pattern to check if not None on prev_container, to keep mypy happy. :)
container_map[container_name] = PortainerContainerData(
container=container,
stats=container_stats,
stats_pre=(
prev_container.stats
if self.data
and (prev_data := self.data.get(endpoint.id)) is not None
and (
prev_container := prev_data.containers.get(
container_name
)
)
is not None
else None
),
stats=None,
stats_pre=prev_container.stats if prev_container else None,
)
# Separately fetch stats for running containers
running_containers = [
container
for container in containers
if container.state == CONTAINER_STATE_RUNNING
]
if running_containers:
container_stats = dict(
zip(
(
self._get_container_name(container.names[0])
for container in running_containers
),
await asyncio.gather(
*(
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
)
for container in running_containers
)
),
strict=False,
)
)
# Now assign stats to the containers
for container_name, stats in container_stats.items():
container_map[container_name].stats = stats
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
@@ -227,11 +233,15 @@ class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorD
# Surprise, we also handle containers here :)
current_containers = {
(endpoint.id, container.container.id)
(endpoint.id, container_name)
for endpoint in mapped_endpoints.values()
for container in endpoint.containers.values()
for container_name in endpoint.containers
}
new_containers = current_containers - self.known_containers
if new_containers:
_LOGGER.debug("New containers found: %s", new_containers)
self.known_containers.update(new_containers)
def _get_container_name(self, container_name: str) -> str:
"""Sanitize to get a proper container name."""
return container_name.replace("/", " ").strip()
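
Containers are now keyed by their sanitized name rather than by container ID; the Docker API reports names with a leading slash, which the helper above strips (hypothetical container name):

def _get_container_name(container_name: str) -> str:
    """Sanitize to get a proper container name."""
    return container_name.replace("/", " ").strip()


assert _get_container_name("/grafana") == "grafana"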

View File

@@ -7,6 +7,9 @@
"architecture": {
"default": "mdi:cpu-64-bit"
},
"container_state": {
"default": "mdi:state-machine"
},
"containers_count": {
"default": "mdi:database"
},

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==1.0.17"]
"requirements": ["pyportainer==1.0.22"]
}

View File

@@ -49,10 +49,19 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="image",
value_fn=lambda data: data.container.image,
),
PortainerContainerSensorEntityDescription(
key="container_state",
translation_key="container_state",
value_fn=lambda data: data.container.state,
device_class=SensorDeviceClass.ENUM,
options=["running", "exited", "paused", "restarting", "created", "dead"],
),
PortainerContainerSensorEntityDescription(
key="memory_limit",
translation_key="memory_limit",
value_fn=lambda data: data.stats.memory_stats.limit,
value_fn=lambda data: (
data.stats.memory_stats.limit if data.stats is not None else 0
),
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -63,7 +72,9 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
PortainerContainerSensorEntityDescription(
key="memory_usage",
translation_key="memory_usage",
value_fn=lambda data: data.stats.memory_stats.usage,
value_fn=lambda data: (
data.stats.memory_stats.usage if data.stats is not None else 0
),
device_class=SensorDeviceClass.DATA_SIZE,
native_unit_of_measurement=UnitOfInformation.BYTES,
suggested_unit_of_measurement=UnitOfInformation.MEGABYTES,
@@ -76,7 +87,9 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="memory_usage_percentage",
value_fn=lambda data: (
(data.stats.memory_stats.usage / data.stats.memory_stats.limit) * 100.0
if data.stats.memory_stats.limit > 0 and data.stats.memory_stats.usage > 0
if data.stats is not None
and data.stats.memory_stats.limit > 0
and data.stats.memory_stats.usage > 0
else 0.0
),
native_unit_of_measurement=PERCENTAGE,
@@ -89,7 +102,8 @@ CONTAINER_SENSORS: tuple[PortainerContainerSensorEntityDescription, ...] = (
translation_key="cpu_usage_total",
value_fn=lambda data: (
(total_delta / system_delta) * data.stats.cpu_stats.online_cpus * 100.0
if (prev := data.stats_pre) is not None
if data.stats is not None
and (prev := data.stats_pre) is not None
and (
system_delta := (
data.stats.cpu_stats.system_cpu_usage
@@ -247,7 +261,6 @@ async def async_setup_entry(
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SENSORS
if entity_description.value_fn(container) is not None
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
@@ -290,7 +303,11 @@ class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
@property
def available(self) -> bool:
"""Return if the device is available."""
return super().available and self.endpoint_id in self.coordinator.data
return (
super().available
and self.endpoint_id in self.coordinator.data
and self.device_name in self.coordinator.data[self.endpoint_id].containers
)
@property
def native_value(self) -> StateType:
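
The cpu_usage_total sensor applies the usual Docker delta formula, which is why a previous stats sample (stats_pre) is required; worked numbers (made up):

# cpu% = (container_cpu_delta / system_cpu_delta) * online_cpus * 100
total_delta = 4_000_000       # container total_usage minus previous sample
system_delta = 80_000_000     # system_cpu_usage minus previous sample
online_cpus = 4

cpu_percent = (total_delta / system_delta) * online_cpus * 100.0
# 5% of the whole system budget across 4 CPUs -> 20.0 (% of one CPU)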

View File

@@ -68,6 +68,17 @@
"architecture": {
"name": "Architecture"
},
"container_state": {
"name": "State",
"state": {
"created": "Created",
"dead": "Dead",
"exited": "Exited",
"paused": "Paused",
"restarting": "Restarting",
"running": "Running"
}
},
"containers_count": {
"name": "Container count"
},

View File

@@ -137,13 +137,19 @@ class PortainerContainerSwitch(PortainerContainerEntity, SwitchEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Start (turn on) the container."""
await self.entity_description.turn_on_fn(
"start", self.coordinator.portainer, self.endpoint_id, self.device_id
"start",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
)
await self.coordinator.async_request_refresh()
async def async_turn_off(self, **kwargs: Any) -> None:
"""Stop (turn off) the container."""
await self.entity_description.turn_off_fn(
"stop", self.coordinator.portainer, self.endpoint_id, self.device_id
"stop",
self.coordinator.portainer,
self.endpoint_id,
self.container_data.container.id,
)
await self.coordinator.async_request_refresh()

View File

@@ -4,6 +4,7 @@
"codeowners": ["@ktnrg45"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/ps4",
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["pyps4_2ndscreen"],
"requirements": ["pyps4-2ndscreen==1.3.1"]

View File

@@ -19,6 +19,7 @@ from homeassistant.components.light import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import color as color_util
from .entity import (
ReolinkChannelCoordinatorEntity,
@@ -157,16 +158,16 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
@property
def brightness(self) -> int | None:
"""Return the brightness of this light between 0.255."""
"""Return the brightness of this light between 1.255."""
assert self.entity_description.get_brightness_fn is not None
bright_pct = self.entity_description.get_brightness_fn(
self._host.api, self._channel
)
if bright_pct is None:
if not bright_pct:
return None
return round(255 * bright_pct / 100.0)
return color_util.value_to_brightness((1, 100), bright_pct)
@property
def color_temp_kelvin(self) -> int | None:
@@ -189,7 +190,7 @@ class ReolinkLightEntity(ReolinkChannelCoordinatorEntity, LightEntity):
if (
brightness := kwargs.get(ATTR_BRIGHTNESS)
) is not None and self.entity_description.set_brightness_fn is not None:
brightness_pct = int(brightness / 255.0 * 100)
brightness_pct = round(color_util.brightness_to_value((1, 100), brightness))
await self.entity_description.set_brightness_fn(
self._host.api, self._channel, brightness_pct
)

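The brightness handling now goes through the shared color_util helpers, which scale between the camera's 1-100 percentage and Home Assistant's 1-255 brightness range. A small usage example of the two helpers (the values are illustrative):

from homeassistant.util import color as color_util

BRIGHTNESS_SCALE = (1, 100)

# Device percentage -> HA brightness (50% lands near mid-scale, ~127).
ha_brightness = color_util.value_to_brightness(BRIGHTNESS_SCALE, 50)

# HA brightness -> device percentage, rounded before sending to the camera (~50).
device_pct = round(color_util.brightness_to_value(BRIGHTNESS_SCALE, 128))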

@@ -79,6 +79,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
map_scale=MAP_SCALE,
),
mqtt_session_unauthorized_hook=lambda: entry.async_start_reauth(hass),
prefer_cache=False,
)
except RoborockInvalidCredentials as err:
raise ConfigEntryAuthFailed(


@@ -552,6 +552,7 @@ class RoborockB01Q7UpdateCoordinator(RoborockDataUpdateCoordinatorB01):
RoborockB01Props.CLEANING_TIME,
RoborockB01Props.REAL_CLEAN_TIME,
RoborockB01Props.HYPA,
RoborockB01Props.WIND,
]
async def _async_update_data(


@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==3.21.0",
"python-roborock==4.1.0",
"vacuum-map-parser-roborock==0.1.4"
]
}


@@ -3,7 +3,7 @@
import logging
from typing import Any
from roborock.data import RoborockStateCode
from roborock.data import RoborockStateCode, SCWindMapping, WorkStatusMapping
from roborock.exceptions import RoborockException
from roborock.roborock_typing import RoborockCommand
import voluptuous as vol
@@ -24,8 +24,12 @@ from .const import (
GET_VACUUM_CURRENT_POSITION_SERVICE_NAME,
SET_VACUUM_GOTO_POSITION_SERVICE_NAME,
)
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
from .entity import RoborockCoordinatedEntityV1
from .coordinator import (
RoborockB01Q7UpdateCoordinator,
RoborockConfigEntry,
RoborockDataUpdateCoordinator,
)
from .entity import RoborockCoordinatedEntityB01, RoborockCoordinatedEntityV1
_LOGGER = logging.getLogger(__name__)
@@ -57,6 +61,20 @@ STATE_CODE_TO_STATE = {
RoborockStateCode.device_offline: VacuumActivity.ERROR, # "Device offline"
}
Q7_STATE_CODE_TO_STATE = {
WorkStatusMapping.SLEEPING: VacuumActivity.IDLE,
WorkStatusMapping.WAITING_FOR_ORDERS: VacuumActivity.IDLE,
WorkStatusMapping.PAUSED: VacuumActivity.PAUSED,
WorkStatusMapping.DOCKING: VacuumActivity.RETURNING,
WorkStatusMapping.CHARGING: VacuumActivity.DOCKED,
WorkStatusMapping.SWEEP_MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.SWEEP_MOPING_2: VacuumActivity.CLEANING,
WorkStatusMapping.MOPING: VacuumActivity.CLEANING,
WorkStatusMapping.UPDATING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_CLEANING: VacuumActivity.DOCKED,
WorkStatusMapping.MOP_AIRDRYING: VacuumActivity.DOCKED,
}
PARALLEL_UPDATES = 0
@@ -69,6 +87,11 @@ async def async_setup_entry(
async_add_entities(
RoborockVacuum(coordinator) for coordinator in config_entry.runtime_data.v1
)
async_add_entities(
RoborockQ7Vacuum(coordinator)
for coordinator in config_entry.runtime_data.b01
if isinstance(coordinator, RoborockB01Q7UpdateCoordinator)
)
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
@@ -241,3 +264,149 @@ class RoborockVacuum(RoborockCoordinatedEntityV1, StateVacuumEntity):
"x": robot_position.x,
"y": robot_position.y,
}
class RoborockQ7Vacuum(RoborockCoordinatedEntityB01, StateVacuumEntity):
"""General Representation of a Roborock vacuum."""
_attr_icon = "mdi:robot-vacuum"
_attr_supported_features = (
VacuumEntityFeature.PAUSE
| VacuumEntityFeature.STOP
| VacuumEntityFeature.RETURN_HOME
| VacuumEntityFeature.FAN_SPEED
| VacuumEntityFeature.SEND_COMMAND
| VacuumEntityFeature.LOCATE
| VacuumEntityFeature.STATE
| VacuumEntityFeature.START
)
_attr_translation_key = DOMAIN
_attr_name = None
coordinator: RoborockB01Q7UpdateCoordinator
def __init__(
self,
coordinator: RoborockB01Q7UpdateCoordinator,
) -> None:
"""Initialize a vacuum."""
StateVacuumEntity.__init__(self)
RoborockCoordinatedEntityB01.__init__(
self,
coordinator.duid_slug,
coordinator,
)
@property
def fan_speed_list(self) -> list[str]:
"""Get the list of available fan speeds."""
return SCWindMapping.keys()
@property
def activity(self) -> VacuumActivity | None:
"""Return the status of the vacuum cleaner."""
if self.coordinator.data.status is not None:
return Q7_STATE_CODE_TO_STATE.get(self.coordinator.data.status)
return None
@property
def fan_speed(self) -> str | None:
"""Return the fan speed of the vacuum cleaner."""
return self.coordinator.data.wind_name
async def async_start(self) -> None:
"""Start the vacuum."""
try:
await self.coordinator.api.start_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "start_clean",
},
) from err
async def async_pause(self) -> None:
"""Pause the vacuum."""
try:
await self.coordinator.api.pause_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "pause_clean",
},
) from err
async def async_stop(self, **kwargs: Any) -> None:
"""Stop the vacuum."""
try:
await self.coordinator.api.stop_clean()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "stop_clean",
},
) from err
async def async_return_to_base(self, **kwargs: Any) -> None:
"""Send vacuum back to base."""
try:
await self.coordinator.api.return_to_dock()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "return_to_dock",
},
) from err
async def async_locate(self, **kwargs: Any) -> None:
"""Locate vacuum."""
try:
await self.coordinator.api.find_me()
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "find_me",
},
) from err
async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
"""Set vacuum fan speed."""
try:
await self.coordinator.api.set_fan_speed(
SCWindMapping.from_value(fan_speed)
)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": "set_fan_speed",
},
) from err
async def async_send_command(
self,
command: str,
params: dict[str, Any] | list[Any] | None = None,
**kwargs: Any,
) -> None:
"""Send a command to a vacuum cleaner."""
try:
await self.coordinator.api.send(command, params)
except RoborockException as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="command_failed",
translation_placeholders={
"command": command,
},
) from err
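Each Q7 command method wraps RoborockException in the same translated HomeAssistantError. A hypothetical refactor sketch (not part of this change) that factors the repeated try/except into a decorator:

from collections.abc import Awaitable, Callable
from functools import wraps
from typing import Any

from roborock.exceptions import RoborockException

from homeassistant.exceptions import HomeAssistantError

from .const import DOMAIN

def raise_command_failed(command: str) -> Callable[..., Any]:
    """Translate RoborockException into a command_failed HomeAssistantError."""

    def decorator(func: Callable[..., Awaitable[None]]) -> Callable[..., Awaitable[None]]:
        @wraps(func)
        async def wrapper(self: Any, *args: Any, **kwargs: Any) -> None:
            try:
                await func(self, *args, **kwargs)
            except RoborockException as err:
                raise HomeAssistantError(
                    translation_domain=DOMAIN,
                    translation_key="command_failed",
                    translation_placeholders={"command": command},
                ) from err

        return wrapper

    return decorator

# Usage on one of the methods above, e.g.:
# @raise_command_failed("start_clean")
# async def async_start(self) -> None:
#     await self.coordinator.api.start_clean()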

Some files were not shown because too many files have changed in this diff.